diff --git "a/perf-df-awq-1xA10.csv" "b/perf-df-awq-1xA10.csv" --- "a/perf-df-awq-1xA10.csv" +++ "b/perf-df-awq-1xA10.csv" @@ -1,38944 +1,3 @@ -config.name,config.backend.name,config.backend.version,config.backend._target_,config.backend.task,config.backend.library,config.backend.model,config.backend.processor,config.backend.device,config.backend.device_ids,config.backend.seed,config.backend.inter_op_num_threads,config.backend.intra_op_num_threads,config.backend.model_kwargs.trust_remote_code,config.backend.processor_kwargs.trust_remote_code,config.backend.hub_kwargs.trust_remote_code,config.backend.no_weights,config.backend.device_map,config.backend.torch_dtype,config.backend.eval_mode,config.backend.to_bettertransformer,config.backend.low_cpu_mem_usage,config.backend.attn_implementation,config.backend.cache_implementation,config.backend.autocast_enabled,config.backend.autocast_dtype,config.backend.torch_compile,config.backend.torch_compile_target,config.backend.quantization_scheme,config.backend.quantization_config.bits,config.backend.quantization_config.version,config.backend.deepspeed_inference,config.backend.peft_type,config.scenario.name,config.scenario._target_,config.scenario.iterations,config.scenario.duration,config.scenario.warmup_runs,config.scenario.input_shapes.batch_size,config.scenario.input_shapes.num_choices,config.scenario.input_shapes.sequence_length,config.scenario.new_tokens,config.scenario.latency,config.scenario.memory,config.scenario.energy,config.scenario.generate_kwargs.max_new_tokens,config.scenario.generate_kwargs.min_new_tokens,config.launcher.name,config.launcher._target_,config.launcher.device_isolation,config.launcher.device_isolation_action,config.launcher.numactl,config.launcher.start_method,config.environment.cpu,config.environment.cpu_count,config.environment.cpu_ram_mb,config.environment.system,config.environment.machine,config.environment.platform,config.environment.processor,config.environment.python_version,config.environment.gpu,config.environment.gpu_count,config.environment.gpu_vram_mb,config.environment.optimum_benchmark_version,config.environment.optimum_benchmark_commit,config.environment.transformers_version,config.environment.transformers_commit,config.environment.accelerate_version,config.environment.accelerate_commit,config.environment.diffusers_version,config.environment.diffusers_commit,config.environment.optimum_version,config.environment.optimum_commit,config.environment.timm_version,config.environment.timm_commit,config.environment.peft_version,config.environment.peft_commit,report.traceback,config.backend.model_type,report.load.memory.unit,report.load.memory.max_ram,report.load.memory.max_global_vram,report.load.memory.max_process_vram,report.load.memory.max_reserved,report.load.memory.max_allocated,report.load.latency.unit,report.load.latency.count,report.load.latency.total,report.load.latency.mean,report.load.latency.stdev,report.load.latency.p50,report.load.latency.p90,report.load.latency.p95,report.load.latency.p99,report.load.latency.values,report.load.throughput,report.load.energy.unit,report.load.energy.cpu,report.load.energy.ram,report.load.energy.gpu,report.load.energy.total,report.load.efficiency,report.prefill.memory.unit,report.prefill.memory.max_ram,report.prefill.memory.max_global_vram,report.prefill.memory.max_process_vram,report.prefill.memory.max_reserved,report.prefill.memory.max_allocated,report.prefill.latency.unit,report.prefill.latency.count,report.prefill.latency.total,report.prefill.latency.mean,report.prefill.
latency.stdev,report.prefill.latency.p50,report.prefill.latency.p90,report.prefill.latency.p95,report.prefill.latency.p99,report.prefill.latency.values,report.prefill.throughput.unit,report.prefill.throughput.value,report.prefill.energy.unit,report.prefill.energy.cpu,report.prefill.energy.ram,report.prefill.energy.gpu,report.prefill.energy.total,report.prefill.efficiency.unit,report.prefill.efficiency.value,report.decode.memory.unit,report.decode.memory.max_ram,report.decode.memory.max_global_vram,report.decode.memory.max_process_vram,report.decode.memory.max_reserved,report.decode.memory.max_allocated,report.decode.latency.unit,report.decode.latency.count,report.decode.latency.total,report.decode.latency.mean,report.decode.latency.stdev,report.decode.latency.p50,report.decode.latency.p90,report.decode.latency.p95,report.decode.latency.p99,report.decode.latency.values,report.decode.throughput.unit,report.decode.throughput.value,report.decode.energy.unit,report.decode.energy.cpu,report.decode.energy.ram,report.decode.energy.gpu,report.decode.energy.total,report.decode.efficiency.unit,report.decode.efficiency.value,report.per_token.memory,report.per_token.latency.unit,report.per_token.latency.count,report.per_token.latency.total,report.per_token.latency.mean,report.per_token.latency.stdev,report.per_token.latency.p50,report.per_token.latency.p90,report.per_token.latency.p95,report.per_token.latency.p99,report.per_token.latency.values,report.per_token.throughput.unit,report.per_token.throughput.value,report.per_token.energy,report.per_token.efficiency,config.backend.quantization_config.exllama_config.version,config.backend.quantization_config.exllama_config.max_input_len,config.backend.quantization_config.exllama_config.max_batch_size,config.backend.hub_kwargs.revision,config.backend.hub_kwargs.force_download,config.backend.hub_kwargs.local_files_only -4bit-awq-gemv-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,.,.,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = 
get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 373, in cached_file - raise EnvironmentError( -OSError: . does not appear to have a file named config.json. Checkout 'https://huggingface.co/./tree/None' for available files. - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemv-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,l,l,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 304, in hf_raise_for_status - response.raise_for_status() - File ""/usr/local/lib/python3.10/dist-packages/requests/models.py"", line 1024, in raise_for_status - raise HTTPError(http_error_msg, response=self) -requests.exceptions.HTTPError: 404 Client Error: Not Found for url: https://huggingface.co/l/resolve/main/config.json - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1221, in hf_hub_download - return _hf_hub_download_to_cache_dir( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1325, in _hf_hub_download_to_cache_dir - _raise_on_head_call_error(head_call_error, force_download, local_files_only) 
- File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1823, in _raise_on_head_call_error - raise head_call_error - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1722, in _get_metadata_or_catch_error - metadata = get_hf_file_metadata(url=url, proxies=proxies, timeout=etag_timeout, headers=headers) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1645, in get_hf_file_metadata - r = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 372, in _request_wrapper - response = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 396, in _request_wrapper - hf_raise_for_status(response) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status - raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-669491da-14392e250078240e4874a1fe;ce456542-069f-4140-ac2f-0547584442ec) - -Repository Not Found for url: https://huggingface.co/l/resolve/main/config.json. -Please make sure you specified the correct `repo_id` and `repo_type`. -If you are trying to access a private or gated repo, make sure you are authenticated. - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 425, in cached_file - raise EnvironmentError( -OSError: l is not a local folder and is not a valid model identifier listed on 'https://huggingface.co/models' -If this is a private repository, make sure to pass a token having permission to this repo either by logging in with `huggingface-cli login` or by passing `token=` - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 
-4bit-awq-gemv-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,Qwen/Qwen1.5-32B,Qwen/Qwen1.5-32B,cuda,0,42,,,True,,,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,,qwen2,MB,17839.316992,21365.260288,0.0,20772.29056,20328.900608,s,1,14.270123046875,14.270123046875,0.0,14.270123046875,14.270123046875,14.270123046875,14.270123046875,[14.270123046875],,kWh,8.785522863680532e-05,4.8131702797130805e-05,0.0001293223256800015,0.0002653092571139376,,MB,4436.238336,21900.034048,0.0,21246.246912,20640.73728,s,10,5.115546295166016,0.5115546295166016,0.00035586424573261113,0.5114437408447265,0.5120385986328124,0.5121834533691406,0.5122993371582031,"[0.5117526550292969, 0.5123283081054687, 0.5111006164550781, 0.5120064086914062, 0.5113591613769531, 0.5114493713378906, 0.5114381103515625, 0.5115598754882813, 0.5112826232910156, 0.5112691650390625]",tokens/s,500.43531077396295,kWh,6.049332903055498e-06,3.314601949239647e-06,3.548093116250012e-05,4.484486601479526e-05,tokens/kWh,5708568.73372172,MB,4444.581888,21927.297024,0.0,21273.509888,20640.73984,s,10,51.208193847656254,5.120819384765625,0.00976273560880766,5.123133056640626,5.130911328125,5.131098974609375,5.131249091796875,"[5.12930810546875, 5.13128662109375, 5.1063916015625, 5.1287978515625, 5.11746826171875, 5.13086962890625, 5.1303046875, 5.10876708984375, 5.109875, 5.115125]",tokens/s,12.30271862105198,kWh,6.07029192444444e-05,3.326953015721448e-05,0.00022003438436069936,0.0003140068337623583,tokens/kWh,200632.5761931623,,s,630,51.20564021301268,0.08127879398890905,0.0006255798257233262,0.08103628921508789,0.08228055114746094,0.08262005653381348,0.08336339813232423,"[0.0810250244140625, 0.08089087677001953, 0.08082022094726563, 0.08121343994140626, 0.08080793762207031, 0.08237158203125, 0.08264192199707031, 0.08259584045410157, 0.08097484588623047, 0.08121651458740234, 0.0813803482055664, 0.08108544158935548, 0.08213811492919922, 0.08099533081054687, 0.08141311645507812, 0.08081407928466797, 0.08143257904052735, 0.0810096664428711, 0.08081613159179687, 0.08216678619384765, 0.08203263854980469, 0.08095334625244141, 0.08111820983886718, 0.08112435150146484, 0.0819261474609375, 0.08179097747802734, 0.0823214111328125, 0.08095231628417969, 0.08156774139404296, 0.08124518585205077, 0.08161996459960938, 0.08175718688964843, 0.08183708953857421, 0.0816352996826172, 0.081691650390625, 0.08236646270751953, 0.08114995574951171, 0.08177356719970703, 0.08180531311035157, 0.0807014389038086, 0.08086835479736328, 0.08100249481201172, 0.080932861328125, 0.08096051025390626, 0.08259379577636719, 0.08176332855224609, 0.08237773132324219, 0.08155238342285157, 0.08214323425292969, 0.08176435089111328, 0.08094924926757813, 0.08076185607910157, 0.08081510162353515, 0.08090624237060547, 0.08067993927001953, 0.0808642578125, 0.08123091125488281, 0.08097785949707031, 0.08092057800292969, 0.08124723052978515, 0.08074649810791015, 0.08164147186279297, 0.08218726348876954, 0.08274636840820312, 0.08235622406005859, 0.08195378875732422, 0.08134349060058593, 
0.08259481811523438, 0.08228147125244141, 0.08120320129394532, 0.08179199981689453, 0.08201318359375, 0.08110591888427734, 0.08313037109375, 0.08358399963378907, 0.08137420654296874, 0.08228044891357422, 0.08230912017822266, 0.08115404510498046, 0.08225382232666016, 0.08344882965087891, 0.08095539093017579, 0.08091136169433594, 0.08071577453613281, 0.08082637023925782, 0.08077311706542968, 0.08085401916503906, 0.08084786987304687, 0.08078438568115234, 0.08189542388916016, 0.08272383880615235, 0.08283340454101562, 0.08157183837890625, 0.08082841491699219, 0.08105779266357421, 0.08164454650878906, 0.0810465316772461, 0.08073932647705079, 0.08094617462158203, 0.08269004821777344, 0.08131890869140625, 0.08220057678222656, 0.08102092742919922, 0.08141516876220703, 0.08189030456542969, 0.08115609741210937, 0.08081613159179687, 0.08090828704833984, 0.08093081665039062, 0.08077721405029296, 0.08084786987304687, 0.08124211120605469, 0.08103526306152344, 0.08085401916503906, 0.08110182189941406, 0.08071987152099609, 0.0816527328491211, 0.08091136169433594, 0.08075775909423828, 0.08089603424072266, 0.08076796722412109, 0.08071782684326172, 0.08105574035644532, 0.08058573150634765, 0.0809195556640625, 0.0809543685913086, 0.08064307403564454, 0.08085606384277344, 0.08080998229980468, 0.08076493072509766, 0.08083660888671874, 0.08113766479492188, 0.08126771545410157, 0.08075775909423828, 0.08050994873046875, 0.08117759704589844, 0.08216985321044921, 0.08088063812255859, 0.08078233337402344, 0.08055296325683593, 0.08105677032470703, 0.08089395141601563, 0.08110182189941406, 0.08110079956054687, 0.08077005004882812, 0.08106086730957031, 0.08082637023925782, 0.08099021148681641, 0.08107417297363281, 0.08105369567871094, 0.08087859344482422, 0.08077516937255859, 0.08083455657958985, 0.08088678741455078, 0.0814755859375, 0.08101376342773438, 0.08089395141601563, 0.08126976013183594, 0.08088678741455078, 0.0817448959350586, 0.08249651336669922, 0.08083558654785156, 0.08098508453369141, 0.08098508453369141, 0.08085504150390625, 0.08076390075683594, 0.08068608093261719, 0.0810096664428711, 0.08101785278320313, 0.08105267333984376, 0.08092876434326172, 0.08068710327148437, 0.08075981140136719, 0.0807710723876953, 0.08080384063720703, 0.0826398696899414, 0.08128205108642578, 0.08141004943847656, 0.0808622055053711, 0.0807874526977539, 0.08171417236328125, 0.08106803131103515, 0.08159954833984374, 0.08092768096923827, 0.08127487945556641, 0.08098611450195313, 0.08070246124267579, 0.08106393432617187, 0.08145613098144532, 0.08090009307861327, 0.08067174530029297, 0.08317644500732421, 0.08147353363037109, 0.0819609603881836, 0.0815841293334961, 0.081328125, 0.08093389129638671, 0.08077311706542968, 0.08137420654296874, 0.08092876434326172, 0.08146125030517579, 0.0808826904296875, 0.08186265563964844, 0.08184422302246094, 0.08081715393066406, 0.08107417297363281, 0.08104447937011719, 0.08097894287109375, 0.08097074890136718, 0.08143052673339844, 0.08187391662597657, 0.08181350708007812, 0.08326860809326173, 0.08107520294189453, 0.08131788635253906, 0.08252620697021484, 0.08126569366455078, 0.08121033477783203, 0.08078131103515625, 0.0809717788696289, 0.08223744201660156, 0.08169574737548828, 0.0809881591796875, 0.08068915557861328, 0.0810782699584961, 0.08094924926757813, 0.08133631896972657, 0.08116633605957031, 0.08083455657958985, 0.08081613159179687, 0.08204902648925781, 0.0817254409790039, 0.08120320129394532, 0.0808427505493164, 0.08193536376953126, 0.08158924865722657, 0.08215039825439453, 0.08297475433349609, 
0.08234595489501953, 0.08133017730712891, 0.08098918151855469, 0.08060928344726563, 0.08083763122558593, 0.08056832122802735, 0.0811325454711914, 0.08131686401367187, 0.0822845458984375, 0.0818892822265625, 0.08079564666748047, 0.08146739196777343, 0.08086630249023438, 0.08223744201660156, 0.08169062042236327, 0.08316006469726563, 0.08217088317871094, 0.08226099395751953, 0.080932861328125, 0.08097792053222656, 0.08103526306152344, 0.08176127624511718, 0.08084377288818359, 0.08079974365234376, 0.08083558654785156, 0.08088678741455078, 0.08075161743164062, 0.0808089599609375, 0.08083148956298829, 0.08094617462158203, 0.08070246124267579, 0.08236953735351563, 0.08116326141357422, 0.08105779266357421, 0.08071475219726562, 0.08094822692871094, 0.08098201751708985, 0.08106803131103515, 0.08074956512451172, 0.0808253402709961, 0.08072499084472656, 0.08080083465576172, 0.0809051513671875, 0.08074137878417968, 0.08066150665283203, 0.08075468444824219, 0.08100863647460937, 0.0818309097290039, 0.08378982543945312, 0.08116121673583984, 0.08084172821044922, 0.08081305694580078, 0.08135167694091797, 0.08078233337402344, 0.08104959869384766, 0.08075878143310547, 0.08108544158935548, 0.0814028778076172, 0.08119500732421875, 0.08129228973388672, 0.08195174407958984, 0.08116223907470703, 0.08109465789794922, 0.08279244995117188, 0.08094515228271484, 0.08093901062011719, 0.08107622528076172, 0.0809349136352539, 0.08089907073974609, 0.08085504150390625, 0.08271769714355469, 0.08127487945556641, 0.08173772430419922, 0.08115711975097656, 0.08112947082519531, 0.08151551818847656, 0.08081100463867187, 0.08205721282958985, 0.08147360229492187, 0.08134649658203125, 0.08098713684082032, 0.08106905364990234, 0.08135884857177735, 0.08129843139648438, 0.08208589172363281, 0.08076697540283204, 0.08098508453369141, 0.08169267272949218, 0.08088166046142578, 0.08079974365234376, 0.08080691528320312, 0.08089292907714844, 0.08064307403564454, 0.08070655822753907, 0.08091033935546875, 0.08075981140136719, 0.08106905364990234, 0.08088575744628906, 0.08090930938720703, 0.08120626831054688, 0.08235110473632813, 0.08094207763671875, 0.081544189453125, 0.08222515106201173, 0.08239308929443359, 0.0812042236328125, 0.08141824340820313, 0.08217292785644531, 0.08113664245605469, 0.08126361846923828, 0.08075981140136719, 0.08128205108642578, 0.08245350646972656, 0.08222310638427735, 0.08097382354736328, 0.08171724700927735, 0.08073625946044923, 0.08178892517089843, 0.08292147064208985, 0.08280985260009766, 0.08185446166992187, 0.08254361724853515, 0.08237773132324219, 0.08121036529541016, 0.08264601898193359, 0.08230092620849609, 0.08097074890136718, 0.08073318481445313, 0.0812390365600586, 0.08052019500732421, 0.08178585815429687, 0.08184627532958984, 0.08151757049560547, 0.08071270751953125, 0.08138240051269531, 0.08074547576904297, 0.082334716796875, 0.08137932586669921, 0.0817100830078125, 0.08089395141601563, 0.08115302276611328, 0.0818647689819336, 0.08188409423828125, 0.08197734069824218, 0.08236339569091797, 0.08199987030029297, 0.08106803131103515, 0.08088883209228516, 0.08074752044677734, 0.08125337219238281, 0.08122470092773437, 0.08245862579345703, 0.08104243469238281, 0.08116633605957031, 0.082123779296875, 0.08239718627929687, 0.08118886566162109, 0.08286617279052734, 0.0838440933227539, 0.0829276123046875, 0.08102706909179687, 0.08094207763671875, 0.08093695831298828, 0.08096768188476562, 0.08105574035644532, 0.08081817626953125, 0.08088985443115235, 0.08096562957763671, 0.0810250244140625, 0.08075065612792968, 
0.08062150573730469, 0.0808089599609375, 0.0808796157836914, 0.08288460540771485, 0.08096460723876953, 0.08109267425537109, 0.0810474853515625, 0.08087551879882812, 0.08085913848876954, 0.08081613159179687, 0.08096051025390626, 0.08362496185302734, 0.08186470031738281, 0.08086118316650391, 0.0807741470336914, 0.08091545867919922, 0.0808980484008789, 0.08172748565673828, 0.08331468963623047, 0.08127283477783204, 0.08076287841796875, 0.0815964126586914, 0.0811509780883789, 0.08148274993896484, 0.08221798706054688, 0.08079462432861328, 0.08135065460205078, 0.08168243408203125, 0.08092671966552735, 0.08122777557373047, 0.08076493072509766, 0.08196300506591797, 0.08124723052978515, 0.08108236694335938, 0.08114585876464844, 0.08105267333984376, 0.08114995574951171, 0.08114892578125, 0.08126258850097656, 0.08128307342529296, 0.08108748626708985, 0.08092262268066407, 0.08113561248779297, 0.08102194976806641, 0.08147968292236328, 0.0814940185546875, 0.08209101104736329, 0.08176127624511718, 0.08076799774169922, 0.0812769317626953, 0.08095027160644531, 0.08072191619873047, 0.08105471801757813, 0.08098303985595703, 0.08102604675292968, 0.08082125091552735, 0.08194252777099609, 0.0819230728149414, 0.0806983642578125, 0.08096768188476562, 0.08106700897216797, 0.08113152313232422, 0.08114380645751954, 0.08084172821044922, 0.0808089599609375, 0.08100454711914062, 0.08127999877929687, 0.08061849975585937, 0.08075468444824219, 0.08232038116455079, 0.08085196685791016, 0.0810403823852539, 0.08079462432861328, 0.08053862762451172, 0.08103731536865234, 0.08106086730957031, 0.08065843200683594, 0.08094515228271484, 0.08113970947265625, 0.08087245178222656, 0.08180121612548828, 0.08142438507080078, 0.08072704315185547, 0.08087757110595703, 0.0811325454711914, 0.08084480285644531, 0.08107417297363281, 0.08078643035888672, 0.08088371276855469, 0.08092774200439454, 0.08123699188232422, 0.08119602966308594, 0.08094207763671875, 0.08080076599121094, 0.08082943725585938, 0.08087347412109375, 0.08097996520996094, 0.08093798065185547, 0.08286310577392578, 0.08338329315185547, 0.08127180480957032, 0.08128717041015625, 0.08110182189941406, 0.08094515228271484, 0.08075161743164062, 0.08137625885009765, 0.08080384063720703, 0.08088883209228516, 0.0809144287109375, 0.08087245178222656, 0.08096051025390626, 0.0821944351196289, 0.08173670196533203, 0.08166297912597656, 0.08245350646972656, 0.08109056091308593, 0.08079257965087891, 0.08149913787841796, 0.08070246124267579, 0.08094207763671875, 0.08078950500488281, 0.08083558654785156, 0.08286105346679687, 0.08256204986572266, 0.08101785278320313, 0.08090521240234375, 0.08076799774169922, 0.08092364501953125, 0.08100454711914062, 0.08122061157226562, 0.08070655822753907, 0.0808652801513672, 0.0807874526977539, 0.08080793762207031, 0.08073318481445313, 0.08080178833007813, 0.08115814208984375, 0.08080998229980468, 0.0808304672241211, 0.08083660888671874, 0.08066252899169922, 0.08090828704833984, 0.08113561248779297, 0.0808826904296875, 0.0806789093017578, 0.08072806549072266, 0.08072191619873047, 0.08072704315185547, 0.08063999938964844, 0.08065945434570312, 0.08051712036132813, 0.08067481231689454, 0.08095231628417969, 0.08081919860839844, 0.08072191619873047, 0.0809881591796875, 0.08202137756347656, 0.08079769897460938, 0.08073216247558594, 0.08093389129638671, 0.08091238403320312, 0.08094822692871094, 0.0807936019897461, 0.0808089599609375, 0.08107724761962891, 0.08113459014892578, 0.08077311706542968, 0.08092160034179688, 0.0809195556640625, 0.08265523529052735, 
0.08215142059326172, 0.08084992218017578, 0.08109260559082031, 0.08113868713378906, 0.08088575744628906, 0.08069631958007813, 0.08135679626464844, 0.08101273345947266, 0.08100863647460937, 0.08101990509033204, 0.08122777557373047, 0.0809349136352539, 0.0824432601928711, 0.08351334381103516, 0.08275865936279297, 0.08238387298583984, 0.08088371276855469, 0.08104857635498047, 0.08075468444824219, 0.08085913848876954, 0.0808458251953125, 0.08095027160644531, 0.08079974365234376, 0.08072294616699219, 0.08080384063720703, 0.08112230682373046, 0.08082227325439453, 0.08124211120605469, 0.08080178833007813, 0.08073318481445313, 0.08095641326904297, 0.08099327850341796, 0.08089497375488282, 0.08103321838378906, 0.08088371276855469, 0.08099839782714843, 0.08077516937255859, 0.08112127685546874, 0.08087449645996093, 0.08081203460693359, 0.08115302276611328, 0.08076185607910157, 0.0808253402709961, 0.08085401916503906, 0.08081613159179687, 0.08235724639892578, 0.08290918731689453, 0.08115814208984375, 0.08178585815429687, 0.08164556884765625, 0.08152166748046875, 0.08099942779541015]",tokens/s,12.303332159879925,,,,,,,, -4bit-awq-gemv-flash_attention_2,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,openai-community/gpt2,openai-community/gpt2,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.0,,0.30.1,,,,1.19.2,,,,0.11.1,,,,,,,,,,,,,,,,,,,,,,,,,,,MB,1213.62432,1005.060096,0.0,358.612992,318.913024,s,23,0.17213225698471069,0.007484011173248292,0.0002954761638434772,0.00736678409576416,0.007821849536895752,0.007996079683303832,0.008394954490661622,"[0.008502976417541504, 0.007853087902069091, 0.007601344108581543, 0.007378464221954346, 0.007268896102905273, 0.007655648231506348, 0.007272704124450684, 0.0072911038398742675, 0.007318528175354004, 0.007696896076202392, 0.007447904109954834, 0.007239327907562256, 0.0075630397796630855, 0.00736678409576416, 0.00744265604019165, 0.0073199357986450195, 0.0072409920692443844, 0.007361631870269775, 0.008011967658996581, 0.007415616035461426, 0.007298208236694336, 0.007331200122833252, 0.007253344058990478]",tokens/s,34206.25571953657,kWh,8.567134822637681e-08,4.694379339333844e-08,1.8252431812478173e-07,3.1513945974449696e-07,tokens/kWh,812338766.4862884,MB,1213.919232,1005.060096,0.0,358.612992,328.804864,s,23,10.179620391845702,0.44259219094981317,0.01146519580496095,0.440910888671875,0.4509195068359375,0.45184952392578126,0.47989753540039065,"[0.48779537963867187, 0.448875732421875, 0.44378817749023436, 0.45189608764648437, 0.4514304504394531, 0.44095208740234376, 0.4469196472167969, 0.44514892578125, 0.4451697692871094, 0.4480313720703125, 0.43288784790039064, 0.4424725341796875, 0.43655374145507814, 0.4358661193847656, 0.43621551513671875, 0.43264309692382813, 0.43890673828125, 0.4397151184082031, 0.440910888671875, 0.43340951538085937, 0.4314309692382812, 0.4308625793457031, 
0.43773809814453124]",tokens/s,142.34322540757108,kWh,5.0166983791335025e-06,2.748945515611113e-06,8.752610144789825e-06,1.6518254039534438e-05,tokens/kWh,3813962.4108708547,,s,1448,10.335379461288454,0.007137692998127384,0.0009868185197434377,0.006937600135803222,0.007357337760925293,0.007660748958587645,0.014534717473983765,"[0.008195072174072266, 0.00812339210510254, 0.00801587200164795, 0.008057855606079101, 0.008011775970458984, 0.007906303882598878, 0.00791756820678711, 0.007833600044250488, 0.007880703926086426, 0.007682047843933106, 0.007670783996582031, 0.007756800174713135, 0.007795711994171142, 0.007764992237091065, 0.007696383953094482, 0.007667712211608887, 0.0077578239440917966, 0.00774348783493042, 0.007739391803741455, 0.007702527999877929, 0.007682047843933106, 0.007706624031066894, 0.007713791847229004, 0.007598144054412842, 0.007613376140594482, 0.007663616180419922, 0.007570432186126709, 0.007678976058959961, 0.007840767860412597, 0.00774451208114624, 0.007902207851409913, 0.007857151985168457, 0.007699456214904785, 0.007651328086853027, 0.0077608962059021, 0.007738368034362793, 0.007944191932678223, 0.007785535812377929, 0.007604159832000732, 0.007642111778259277, 0.007798783779144287, 0.00800153636932373, 0.007988224029541016, 0.007651328086853027, 0.007670783996582031, 0.007618559837341309, 0.007618559837341309, 0.0076871681213378906, 0.007786496162414551, 0.007779327869415284, 0.007712768077850342, 0.0077209601402282715, 0.007654399871826172, 0.00759500789642334, 0.007616511821746826, 0.007543807983398438, 0.007494656085968018, 0.007342080116271973, 0.0075345921516418455, 0.007566336154937744, 0.0072427520751953125, 0.007305215835571289, 0.015635456085205078, 0.007311359882354736, 0.007363584041595459, 0.007344128131866455, 0.0074403839111328125, 0.007830527782440186, 0.008690688133239746, 0.008432703971862793, 0.00839571189880371, 0.007342112064361572, 0.007289824008941651, 0.007284736156463623, 0.007127039909362793, 0.007045119762420654, 0.007054336071014404, 0.007201791763305664, 0.006994944095611572, 0.006974463939666748, 0.006927360057830811, 0.007143424034118652, 0.0069550080299377445, 0.006974463939666748, 0.00693555212020874, 0.006971392154693603, 0.0069816322326660156, 0.006960159778594971, 0.006988768100738525, 0.0069632000923156735, 0.006927360057830811, 0.00689356803894043, 0.007094272136688232, 0.006958079814910889, 0.0069621758460998535, 0.006900735855102539, 0.006949888229370117, 0.006958144187927246, 0.006945727825164795, 0.006938623905181885, 0.006916128158569336, 0.00694268798828125, 0.007147520065307617, 0.007170048236846924, 0.006980607986450196, 0.006986752033233643, 0.0069816322326660156, 0.006958079814910889, 0.007050240039825439, 0.006980607986450196, 0.006947840213775635, 0.00692633581161499, 0.006940671920776367, 0.007095295906066895, 0.006964223861694336, 0.00694374418258667, 0.006953983783721924, 0.006959104061126709, 0.006947840213775635, 0.006913023948669434, 0.0069550080299377445, 0.006953983783721924, 0.006941696166992187, 0.007145472049713135, 0.007001088142395019, 0.014993408203125, 0.00709222412109375, 0.006938623905181885, 0.006882304191589355, 0.006881279945373535, 0.006842368125915528, 0.006936575889587402, 0.006913023948669434, 0.006815743923187256, 0.006918144226074219, 0.006865920066833496, 0.006867968082427979, 0.0068577280044555666, 0.006858751773834228, 0.006863872051239014, 0.006875135898590088, 0.006853631973266602, 0.007525375843048096, 0.007489535808563232, 0.007001088142395019, 0.00703385591506958, 
0.00728166389465332, 0.007162879943847656, 0.007536640167236328, 0.007039999961853028, 0.007048192024230957, 0.007231488227844239, 0.0069283838272094726, 0.006907904148101806, 0.0068055038452148435, 0.006906879901885986, 0.0068986878395080565, 0.0068249602317810056, 0.006871039867401123, 0.006906879901885986, 0.006820864200592041, 0.006870016098022461, 0.006823935985565186, 0.006914048194885254, 0.006959136009216309, 0.0072754878997802735, 0.007408639907836914, 0.007152703762054444, 0.007389120101928711, 0.007087103843688965, 0.007016448020935059, 0.007097343921661377, 0.007047167778015137, 0.0071157760620117185, 0.007316480159759522, 0.00704204797744751, 0.006982656002044678, 0.007080959796905518, 0.007049215793609619, 0.007058432102203369, 0.00704307222366333, 0.006999040126800537, 0.006974463939666748, 0.007352320194244385, 0.007324672222137451, 0.007363584041595459, 0.007136256217956543, 0.007076863765716553, 0.014966815948486327, 0.007409696102142334, 0.007022528171539306, 0.00738099193572998, 0.00745472002029419, 0.007050240039825439, 0.007060480117797851, 0.006982656002044678, 0.007408639907836914, 0.007277567863464355, 0.007288832187652588, 0.007314432144165039, 0.007117824077606201, 0.0073471999168395995, 0.007028736114501953, 0.007010303974151612, 0.007296000003814697, 0.00727347183227539, 0.007276576042175293, 0.007039968013763428, 0.0069621758460998535, 0.0071905279159545895, 0.00707583999633789, 0.007655424118041992, 0.0072427520751953125, 0.0071157760620117185, 0.007131135940551757, 0.007264256000518799, 0.007327744007110596, 0.007291903972625732, 0.00704204797744751, 0.0073820161819458, 0.0073175039291381834, 0.007080959796905518, 0.0070256638526916505, 0.007310336112976074, 0.007068672180175781, 0.007049215793609619, 0.007359488010406494, 0.0070266880989074704, 0.007018496036529541, 0.007278592109680176, 0.007113728046417236, 0.007501823902130127, 0.0073175039291381834, 0.007334911823272705, 0.007279615879058838, 0.0070830078125, 0.007013376235961914, 0.007484416007995606, 0.007090176105499267, 0.007300096035003662, 0.007268352031707764, 0.007077888011932373, 0.00702569580078125, 0.00705020809173584, 0.0067727680206298825, 0.006994912147521973, 0.0068577280044555666, 0.00687718391418457, 0.0069621758460998535, 0.0068392958641052244, 0.006848512172698974, 0.01468825626373291, 0.007111680030822754, 0.007364607810974121, 0.007379968166351319, 0.007293951988220215, 0.007302144050598145, 0.006940671920776367, 0.007279615879058838, 0.007326720237731933, 0.006956031799316406, 0.007027711868286133, 0.007060480117797851, 0.007076863765716553, 0.007342080116271973, 0.007336959838867187, 0.007346176147460938, 0.007097343921661377, 0.007057407855987549, 0.0070522880554199216, 0.0072837119102478025, 0.007009280204772949, 0.007290880203247071, 0.006999040126800537, 0.007349247932434082, 0.007264256000518799, 0.006982656002044678, 0.007415808200836181, 0.0070553598403930665, 0.007288832187652588, 0.0072540159225463864, 0.00703385591506958, 0.007256063938140869, 0.007058432102203369, 0.007041024208068848, 0.007236608028411865, 0.007375872135162354, 0.0073062400817871095, 0.007067647933959961, 0.007071743965148926, 0.006905856132507324, 0.006862847805023193, 0.006831103801727295, 0.006933504104614257, 0.007030784130096435, 0.007114751815795899, 0.006961184024810791, 0.007378911972045899, 0.007236608028411865, 0.0070860800743103025, 0.007071743965148926, 0.007001120090484619, 0.007321568012237549, 0.0070266880989074704, 0.007316480159759522, 0.007300096035003662, 0.007020544052124023, 
0.00733900785446167, 0.00739737606048584, 0.007048192024230957, 0.007324672222137451, 0.006982656002044678, 0.0072765440940856935, 0.007237631797790528, 0.01510707187652588, 0.006985727787017822, 0.006979584217071533, 0.007490560054779053, 0.007256063938140869, 0.007046144008636474, 0.0067348480224609375, 0.006822912216186523, 0.006837247848510742, 0.006843391895294189, 0.006853631973266602, 0.006819839954376221, 0.006825984001159668, 0.006837247848510742, 0.0069283838272094726, 0.006841343879699707, 0.006808576107025147, 0.006806528091430664, 0.006895616054534912, 0.006815743923187256, 0.006829055786132812, 0.006858751773834228, 0.00704204797744751, 0.007030784130096435, 0.007068672180175781, 0.00708403205871582, 0.006973440170288086, 0.006959104061126709, 0.006964223861694336, 0.006947840213775635, 0.0069621758460998535, 0.00695091199874878, 0.006960127830505371, 0.006967296123504638, 0.006970367908477783, 0.006953983783721924, 0.006884352207183838, 0.006816768169403077, 0.006875135898590088, 0.006854656219482422, 0.006850560188293457, 0.006810624122619629, 0.006873087882995605, 0.006870016098022461, 0.007052320003509522, 0.007278560161590576, 0.007266304016113281, 0.0069847040176391605, 0.007292960166931152, 0.007252960205078125, 0.007074816226959229, 0.00704204797744751, 0.006967296123504638, 0.007014431953430176, 0.007003104209899903, 0.007286784172058106, 0.006977568149566651, 0.00709935998916626, 0.007393280029296875, 0.0073359360694885255, 0.007111680030822754, 0.0069928960800170895, 0.0073359360694885255, 0.014533632278442383, 0.006855679988861084, 0.007506944179534912, 0.007064576148986816, 0.007284736156463623, 0.0070522880554199216, 0.007080959796905518, 0.00704204797744751, 0.006953983783721924, 0.007001088142395019, 0.007280640125274658, 0.006976511955261231, 0.006967296123504638, 0.00738099193572998, 0.007073791980743408, 0.00704204797744751, 0.007324672222137451, 0.006993919849395752, 0.007066624164581299, 0.007356416225433349, 0.007020544052124023, 0.006980607986450196, 0.0068884482383728025, 0.007078911781311035, 0.007345183849334717, 0.007293920040130616, 0.007304192066192627, 0.007058432102203369, 0.007054336071014404, 0.006940671920776367, 0.007336959838867187, 0.007268352031707764, 0.007261184215545655, 0.007383039951324463, 0.007385087966918945, 0.007312384128570557, 0.007021567821502686, 0.0069959678649902345, 0.007068672180175781, 0.006961152076721191, 0.007123968124389648, 0.007250944137573242, 0.007342080116271973, 0.007113728046417236, 0.00704204797744751, 0.007173120021820068, 0.007278592109680176, 0.007329792022705078, 0.007449600219726562, 0.00695091199874878, 0.006851583957672119, 0.006880256175994873, 0.006897664070129395, 0.006848512172698974, 0.007051263809204102, 0.0069253120422363285, 0.006867968082427979, 0.006882304191589355, 0.006858751773834228, 0.006792191982269287, 0.006841343879699707, 0.006779903888702392, 0.006895616054534912, 0.01447219181060791, 0.007172095775604248, 0.0073471999168395995, 0.007576576232910156, 0.007465983867645264, 0.007352320194244385, 0.007072768211364746, 0.007324672222137451, 0.007292928218841553, 0.007303167819976806, 0.006990848064422607, 0.007078911781311035, 0.006947840213775635, 0.0070266880989074704, 0.007362559795379638, 0.0069816322326660156, 0.007054336071014404, 0.0070348801612854, 0.007058432102203369, 0.006946815967559815, 0.006883359909057617, 0.006878176212310791, 0.006874112129211426, 0.006825984001159668, 0.007016448020935059, 0.007073791980743408, 0.007029759883880615, 0.006994944095611572, 
0.007189504146575928, 0.007060480117797851, 0.007044095993041993, 0.006945792198181152, 0.007048192024230957, 0.006937600135803222, 0.006941696166992187, 0.006807551860809326, 0.006872064113616944, 0.00687820816040039, 0.006831103801727295, 0.006799359798431396, 0.0067870721817016606, 0.006863872051239014, 0.00684441614151001, 0.006880256175994873, 0.00683622407913208, 0.0068351998329162595, 0.007450623989105225, 0.00693452787399292, 0.007296000003814697, 0.0073768959045410155, 0.006998015880584717, 0.007038976192474365, 0.007259136199951172, 0.007058432102203369, 0.007111680030822754, 0.007057407855987549, 0.007038976192474365, 0.007117824077606201, 0.007286784172058106, 0.007263232231140137, 0.007035903930664063, 0.007259136199951172, 0.00704307222366333, 0.01439027214050293, 0.00682700777053833, 0.006859776020050049, 0.007128064155578613, 0.007023615837097168, 0.0073175039291381834, 0.006998015880584717, 0.007355391979217529, 0.007331840038299561, 0.00693555212020874, 0.007288832187652588, 0.007228415966033935, 0.007097343921661377, 0.006993919849395752, 0.006975488185882568, 0.006969344139099121, 0.007475200176239013, 0.007070720195770264, 0.0070225920677185055, 0.007303167819976806, 0.006873087882995605, 0.006823935985565186, 0.00674508810043335, 0.006897664070129395, 0.0068392958641052244, 0.007434239864349365, 0.006982656002044678, 0.007323647975921631, 0.0073062400817871095, 0.007073791980743408, 0.006951935768127441, 0.007050240039825439, 0.006871039867401123, 0.006867968082427979, 0.0067717118263244626, 0.0068915200233459475, 0.006905856132507324, 0.006823935985565186, 0.006816768169403077, 0.006823935985565186, 0.006849535942077637, 0.006812672138214112, 0.006834239959716797, 0.0068269438743591305, 0.006854656219482422, 0.006822912216186523, 0.006806528091430664, 0.007372799873352051, 0.0073431038856506346, 0.007008255958557129, 0.007047167778015137, 0.007320576190948487, 0.006998015880584717, 0.007415808200836181, 0.006987775802612305, 0.007264256000518799, 0.007129087924957276, 0.00744755220413208, 0.007197696208953858, 0.007391232013702393, 0.007756800174713135, 0.00724889612197876, 0.007106560230255127, 0.015258624076843261, 0.007053311824798584, 0.00704307222366333, 0.00687718391418457, 0.0068986878395080565, 0.006837247848510742, 0.006834176063537598, 0.006780928134918213, 0.006927360057830811, 0.006812672138214112, 0.006830080032348633, 0.006882304191589355, 0.006819839954376221, 0.006924287796020508, 0.006856704235076904, 0.007327744007110596, 0.0070225920677185055, 0.007003136157989502, 0.00709222412109375, 0.007301119804382325, 0.007288832187652588, 0.0069959678649902345, 0.0072724480628967286, 0.007219200134277344, 0.00728985595703125, 0.007054336071014404, 0.007180287837982178, 0.007039999961853028, 0.007304192066192627, 0.007309343814849854, 0.007380959987640381, 0.00703385591506958, 0.006970367908477783, 0.006937600135803222, 0.007117824077606201, 0.006958079814910889, 0.007385087966918945, 0.006951935768127441, 0.007331840038299561, 0.007268352031707764, 0.0072837119102478025, 0.006937600135803222, 0.007274496078491211, 0.007081984043121338, 0.007299071788787842, 0.006993919849395752, 0.007104512214660645, 0.007288832187652588, 0.0070563840866088865, 0.007156735897064209, 0.00703385591506958, 0.007282688140869141, 0.00704307222366333, 0.0074035201072692874, 0.006968319892883301, 0.007314432144165039, 0.007324672222137451, 0.007074816226959229, 0.006970367908477783, 0.008046591758728027, 0.007085055828094483, 0.0070522880554199216, 0.007054336071014404, 
0.01510912036895752, 0.0069918718338012695, 0.007070720195770264, 0.0072724480628967286, 0.006951935768127441, 0.006856704235076904, 0.006837247848510742, 0.006831103801727295, 0.006818816184997558, 0.006773759841918945, 0.006811647891998291, 0.006907904148101806, 0.0067983360290527345, 0.0068689918518066405, 0.006816768169403077, 0.006976511955261231, 0.006865920066833496, 0.006833151817321777, 0.006848512172698974, 0.006790143966674805, 0.00694374418258667, 0.006790143966674805, 0.006804480075836182, 0.006815743923187256, 0.006832159996032715, 0.006866911888122559, 0.00683622407913208, 0.006862847805023193, 0.006796288013458252, 0.006811647891998291, 0.006863872051239014, 0.006809599876403808, 0.006937600135803222, 0.006829055786132812, 0.006810624122619629, 0.0068618240356445315, 0.00677785587310791, 0.006871039867401123, 0.006795263767242431, 0.006913023948669434, 0.0068392958641052244, 0.0069212160110473635, 0.0068249602317810056, 0.006859776020050049, 0.006917119979858399, 0.006814720153808594, 0.006841343879699707, 0.006848512172698974, 0.006887423992156983, 0.006906879901885986, 0.006881279945373535, 0.006870016098022461, 0.006802432060241699, 0.0068403840065002445, 0.006761407852172852, 0.00687820816040039, 0.006825984001159668, 0.00673689603805542, 0.006892543792724609, 0.0068884482383728025, 0.006841343879699707, 0.006825984001159668, 0.006799359798431396, 0.014756863594055175, 0.007226367950439453, 0.007236608028411865, 0.00809267234802246, 0.0074700798988342285, 0.007097343921661377, 0.007128064155578613, 0.0073431038856506346, 0.007114751815795899, 0.00722431993484497, 0.007000063896179199, 0.007351295948028564, 0.007067647933959961, 0.007016448020935059, 0.00714137601852417, 0.007134208202362061, 0.007184383869171143, 0.007386112213134765, 0.007300096035003662, 0.0072837119102478025, 0.007336959838867187, 0.007326720237731933, 0.007062560081481934, 0.007136223793029785, 0.006829055786132812, 0.006880256175994873, 0.006904863834381104, 0.006792160034179687, 0.006924287796020508, 0.006826015949249267, 0.0067604160308837894, 0.006821887969970703, 0.006882304191589355, 0.006898719787597657, 0.006868959903717041, 0.006837247848510742, 0.006811647891998291, 0.006829055786132812, 0.006916096210479736, 0.006834176063537598, 0.006846464157104492, 0.006814720153808594, 0.006897664070129395, 0.006904831886291504, 0.006899712085723877, 0.006858751773834228, 0.006920191764831543, 0.006802495956420899, 0.006924223899841308, 0.006905856132507324, 0.006837247848510742, 0.006739967823028564, 0.006881311893463134, 0.006877151966094971, 0.006774784088134766, 0.006870016098022461, 0.0068689918518066405, 0.006977536201477051, 0.0067348480224609375, 0.006831103801727295, 0.007082047939300537, 0.007328735828399658, 0.007105504035949707, 0.015265791893005372, 0.007007232189178467, 0.007094272136688232, 0.007019519805908203, 0.007029759883880615, 0.007334911823272705, 0.007013376235961914, 0.007321599960327148, 0.007360511779785156, 0.006967296123504638, 0.007035903930664063, 0.007107583999633789, 0.006987775802612305, 0.007321599960327148, 0.006990848064422607, 0.007080959796905518, 0.007327744007110596, 0.006852608203887939, 0.006818816184997558, 0.006819839954376221, 0.006850560188293457, 0.006814720153808594, 0.006842368125915528, 0.006866943836212158, 0.006765567779541016, 0.006838272094726563, 0.006903808116912841, 0.006816768169403077, 0.0068351998329162595, 0.006870016098022461, 0.006903808116912841, 0.0067420158386230465, 0.00682700777053833, 0.006851583957672119, 0.00683622407913208, 
0.006849535942077637, 0.00695091199874878, 0.006930431842803955, 0.006820864200592041, 0.006825984001159668, 0.006842368125915528, 0.006903840065002441, 0.006749152183532715, 0.006801407814025879, 0.0068321280479431154, 0.006884352207183838, 0.0067983360290527345, 0.006927360057830811, 0.0068659520149230956, 0.006851552009582519, 0.006815743923187256, 0.006879231929779053, 0.006770688056945801, 0.006820864200592041, 0.0068351998329162595, 0.006905856132507324, 0.006778880119323731, 0.006809599876403808, 0.0069324798583984375, 0.006813695907592773, 0.0068884482383728025, 0.006900735855102539, 0.006858751773834228, 0.014593024253845215, 0.006931519985198974, 0.006945727825164795, 0.0068577280044555666, 0.006807551860809326, 0.00687820816040039, 0.006846464157104492, 0.0068321280479431154, 0.006821887969970703, 0.006852608203887939, 0.006850560188293457, 0.00689356803894043, 0.006864895820617676, 0.006930431842803955, 0.006833151817321777, 0.006841343879699707, 0.006866943836212158, 0.006849535942077637, 0.0072499198913574215, 0.007202816009521484, 0.006822912216186523, 0.006814720153808594, 0.006889472007751465, 0.006825984001159668, 0.006730751991271973, 0.006906879901885986, 0.00687820816040039, 0.006924287796020508, 0.006841407775878906, 0.006832064151763916, 0.006874112129211426, 0.006864895820617676, 0.006862847805023193, 0.006816768169403077, 0.006825984001159668, 0.006821887969970703, 0.006904831886291504, 0.006820864200592041, 0.006754303932189941, 0.006837247848510742, 0.006806528091430664, 0.0069632000923156735, 0.006896639823913574, 0.006838272094726563, 0.0068690237998962405, 0.006818784236907959, 0.006925343990325928, 0.006814688205718994, 0.006924287796020508, 0.006721536159515381, 0.006793216228485107, 0.006865920066833496, 0.006818880081176757, 0.0068269438743591305, 0.0068884482383728025, 0.007058432102203369, 0.008130559921264649, 0.007341055870056152, 0.007233535766601563, 0.00708403205871582, 0.00703385591506958, 0.0070256638526916505, 0.006979584217071533, 0.01458892822265625, 0.006846464157104492, 0.006864895820617676, 0.006960127830505371, 0.006848512172698974, 0.00684441614151001, 0.006829055786132812, 0.006831103801727295, 0.006892543792724609, 0.0068618240356445315, 0.006882304191589355, 0.006905856132507324, 0.006882304191589355, 0.006827040195465088, 0.006789087772369385, 0.006854656219482422, 0.0068249602317810056, 0.0068280320167541505, 0.006865920066833496, 0.006848512172698974, 0.006830080032348633, 0.00687820816040039, 0.0069253120422363285, 0.006853631973266602, 0.006829055786132812, 0.00682700777053833, 0.006895616054534912, 0.006739967823028564, 0.006937600135803222, 0.006820864200592041, 0.006876160144805908, 0.006912000179290771, 0.006804512023925781, 0.006790143966674805, 0.00677785587310791, 0.00704099178314209, 0.007131135940551757, 0.007195648193359375, 0.00703385591506958, 0.007274496078491211, 0.007260159969329834, 0.007279615879058838, 0.007324672222137451, 0.006994944095611572, 0.006920191764831543, 0.006999040126800537, 0.007361536026000977, 0.0070553598403930665, 0.007038976192474365, 0.006946815967559815, 0.007103487968444824, 0.006858751773834228, 0.006846464157104492, 0.006766592025756836, 0.0068689918518066405, 0.006909952163696289, 0.006982656002044678, 0.006907904148101806, 0.006811647891998291, 0.00682700777053833, 0.0067870721817016606, 0.006804480075836182, 0.006803455829620361, 0.014484479904174804, 0.00684441614151001, 0.0068618240356445315, 0.006823935985565186, 0.006817791938781738, 0.0067573761940002445, 0.0068076162338256835, 
0.00683513593673706, 0.00682700777053833, 0.006851583957672119, 0.0067758078575134275, 0.0068351998329162595, 0.0068392958641052244, 0.0068618240356445315, 0.006919167995452881, 0.006856704235076904, 0.006812672138214112, 0.0068055038452148435, 0.006791168212890625, 0.006724607944488525, 0.006815743923187256, 0.00690176010131836, 0.006831103801727295, 0.006850560188293457, 0.0068392958641052244, 0.006795263767242431, 0.0067338237762451176, 0.006799359798431396, 0.006945792198181152, 0.006814720153808594, 0.0068853759765625, 0.006910975933074951, 0.006845471858978272, 0.006775775909423828, 0.006874112129211426, 0.006841343879699707, 0.0068689918518066405, 0.00678604793548584, 0.006905856132507324, 0.006766592025756836, 0.006804480075836182, 0.006808576107025147, 0.006880256175994873, 0.006937600135803222, 0.006846464157104492, 0.006829055786132812, 0.006797311782836914, 0.006800384044647217, 0.0068986878395080565, 0.006799359798431396, 0.006816768169403077, 0.006882304191589355, 0.00695091199874878, 0.006711296081542969, 0.00687718391418457, 0.006760447978973389, 0.006851583957672119, 0.006994944095611572, 0.007008255958557129, 0.007278592109680176, 0.007364607810974121, 0.007021567821502686, 0.007110655784606934, 0.014486528396606446, 0.0067983360290527345, 0.0067758078575134275, 0.006808576107025147, 0.006819839954376221, 0.006845439910888672, 0.006817791938781738, 0.006862847805023193, 0.006850560188293457, 0.006914048194885254, 0.006873087882995605, 0.006830080032348633, 0.006825984001159668, 0.006924287796020508, 0.0068055038452148435, 0.006867968082427979, 0.0068986878395080565, 0.006773759841918945, 0.006887423992156983, 0.006834176063537598, 0.006807551860809326, 0.006825984001159668, 0.006724607944488525, 0.006808576107025147, 0.006781951904296875, 0.006873087882995605, 0.006897664070129395, 0.006820864200592041, 0.006953983783721924, 0.006814720153808594, 0.0068055038452148435, 0.006864895820617676, 0.007054336071014404, 0.007000063896179199, 0.006851583957672119, 0.00693452787399292, 0.006875135898590088, 0.006767615795135498, 0.006912000179290771, 0.006930431842803955, 0.007072768211364746, 0.007323647975921631, 0.007137343883514404, 0.007246784210205078, 0.007519231796264648, 0.007401472091674805, 0.007305215835571289, 0.007124991893768311, 0.007411712169647216, 0.007051263809204102, 0.00744652795791626, 0.007027711868286133, 0.007122943878173828, 0.006968319892883301, 0.007057407855987549, 0.006961152076721191, 0.007019519805908203, 0.006977536201477051, 0.007052351951599121, 0.00700819206237793, 0.006982656002044678, 0.007037951946258545, 0.007131135940551757, 0.014568448066711426, 0.00683622407913208, 0.006825984001159668, 0.00679423999786377, 0.006896639823913574, 0.006814720153808594, 0.00689356803894043, 0.006833151817321777, 0.006890495777130127, 0.0068577280044555666, 0.006875135898590088, 0.006852608203887939, 0.00677785587310791, 0.006897664070129395, 0.006802432060241699, 0.006900735855102539, 0.006795263767242431, 0.006851583957672119, 0.006871039867401123, 0.0067758078575134275, 0.0068249602317810056, 0.006874112129211426, 0.0068280320167541505, 0.006838272094726563, 0.0067123198509216305, 0.006875135898590088, 0.006866943836212158, 0.006851615905761718, 0.006802400112152099, 0.0067983360290527345, 0.006980607986450196, 0.0067338237762451176, 0.006904831886291504, 0.006790143966674805, 0.006918144226074219, 0.006833151817321777, 0.006924287796020508, 0.007085055828094483, 0.007234560012817383, 0.007293951988220215, 0.006968319892883301, 0.0069202561378479, 
0.007036863803863525, 0.007241727828979493, 0.00728166389465332, 0.007208960056304932, 0.00697657585144043, 0.00704095983505249, 0.006946815967559815, 0.00724889612197876, 0.007309311866760254, 0.007264256000518799, 0.007353343963623047, 0.006953983783721924, 0.007078911781311035, 0.00727347183227539, 0.007308288097381592, 0.007346176147460938, 0.007018496036529541, 0.007307263851165771, 0.007106560230255127, 0.007044095993041993, 0.007268352031707764, 0.0158341121673584, 0.0071833600997924804, 0.007159808158874512, 0.007171072006225586, 0.007048192024230957, 0.007072768211364746, 0.007048192024230957, 0.007138304233551026, 0.007122943878173828, 0.007095295906066895, 0.007088128089904785, 0.007096320152282715, 0.007090176105499267, 0.0070553598403930665, 0.006953983783721924, 0.007385087966918945, 0.007057407855987549, 0.0069928960800170895, 0.0069621758460998535, 0.006957056045532227, 0.0069632000923156735, 0.0069632000923156735, 0.006944767951965332, 0.006987775802612305, 0.006967296123504638, 0.0069959678649902345, 0.006947840213775635, 0.006946815967559815, 0.006946815967559815, 0.006977536201477051, 0.006964223861694336, 0.007103487968444824, 0.006945792198181152, 0.007001088142395019, 0.006977536201477051, 0.0069847040176391605, 0.006988800048828125, 0.006998015880584717, 0.006968319892883301, 0.006985727787017822, 0.006965248107910156, 0.0070266880989074704, 0.0069959678649902345, 0.006982656002044678, 0.007024640083312988, 0.007001088142395019, 0.006975488185882568, 0.006843391895294189, 0.006909952163696289, 0.006802495956420899, 0.00689247989654541, 0.006854688167572022, 0.0068873920440673825, 0.006812672138214112, 0.006874112129211426, 0.006903808116912841, 0.006825984001159668, 0.006889472007751465, 0.0068618240356445315, 0.006879231929779053, 0.006908927917480469, 0.006876160144805908, 0.006922239780426025, 0.014621696472167968, 0.006899712085723877, 0.00693555212020874, 0.006847519874572754, 0.006882271766662598, 0.00687820816040039, 0.006944767951965332, 0.006766592025756836, 0.006940671920776367, 0.006897664070129395, 0.006865920066833496, 0.006841407775878906, 0.006919104099273682, 0.00689356803894043, 0.006847487926483154, 0.006870016098022461, 0.00690176010131836, 0.006906879901885986, 0.006912000179290771, 0.006965248107910156, 0.006863872051239014, 0.006830080032348633, 0.006730751991271973, 0.00690176010131836, 0.006927360057830811, 0.006810624122619629, 0.007012351989746094, 0.00689356803894043, 0.00691919994354248, 0.006855648040771484, 0.006883327960968018, 0.0069253120422363285, 0.006875135898590088, 0.006892543792724609, 0.006822912216186523, 0.006840320110321045, 0.006876160144805908, 0.006833151817321777, 0.006819839954376221, 0.006812672138214112, 0.006814720153808594, 0.006903808116912841, 0.006895616054534912, 0.0068321280479431154, 0.006881279945373535, 0.006841343879699707, 0.006731776237487793, 0.006875135898590088, 0.0069918718338012695, 0.006924287796020508, 0.0068351998329162595, 0.006939648151397705, 0.00687718391418457, 0.006896639823913574, 0.006813695907592773, 0.006858751773834228, 0.006916096210479736, 0.0068884482383728025, 0.0069027838706970214, 0.006879231929779053, 0.00689356803894043, 0.006807551860809326, 0.006790143966674805, 0.0144650239944458, 0.006765567779541016, 0.006876160144805908, 0.00690176010131836, 0.006822912216186523, 0.006920191764831543, 0.006781951904296875, 0.006908927917480469, 0.006758399963378906, 0.0068884482383728025, 0.006837247848510742, 0.006825984001159668, 0.006895616054534912, 0.006811647891998291, 
0.006904831886291504, 0.006757440090179444, 0.006893504142761231, 0.006813695907592773, 0.006830080032348633, 0.006797311782836914, 0.0068853759765625, 0.00674508810043335, 0.00678604793548584, 0.006873087882995605, 0.006900735855102539, 0.006816768169403077, 0.006854656219482422, 0.00682700777053833, 0.006744063854217529, 0.006923264026641846, 0.006816768169403077, 0.006825984001159668, 0.006808576107025147, 0.006788095951080322, 0.006864895820617676, 0.0067983360290527345, 0.007007232189178467, 0.006815743923187256, 0.006808576107025147, 0.006830080032348633, 0.006899712085723877, 0.006894591808319092, 0.006729728221893311, 0.006913023948669434, 0.006912000179290771, 0.007111680030822754, 0.006854656219482422, 0.006816768169403077, 0.006829055786132812, 0.006802432060241699, 0.0068986878395080565, 0.006889472007751465, 0.0068392958641052244, 0.0067358717918396, 0.006773759841918945, 0.006892543792724609, 0.0067686400413513184, 0.006929408073425293, 0.006807551860809326, 0.006790143966674805, 0.006814720153808594, 0.006875135898590088, 0.006815743923187256, 0.014535679817199706, 0.006904863834381104, 0.006907872200012207, 0.006808576107025147, 0.006803455829620361, 0.006793216228485107, 0.00690176010131836, 0.006812672138214112, 0.006782976150512696, 0.006817791938781738, 0.006718463897705078, 0.006838304042816162, 0.006800352096557617, 0.006853631973266602, 0.006856704235076904, 0.006875135898590088, 0.006808576107025147, 0.006815743923187256, 0.006871039867401123, 0.006882304191589355, 0.0067123198509216305, 0.00693555212020874, 0.0069928960800170895, 0.0067983360290527345, 0.006738944053649902, 0.006795263767242431, 0.006780928134918213, 0.0068915200233459475, 0.006890495777130127, 0.006781951904296875, 0.006867968082427979, 0.00683622407913208, 0.006870016098022461, 0.0068055038452148435, 0.006714367866516113, 0.006806528091430664, 0.006781951904296875, 0.006781951904296875, 0.006825984001159668, 0.006850560188293457, 0.006818816184997558, 0.006812672138214112, 0.006882368087768554, 0.006911935806274414, 0.006807551860809326, 0.00689356803894043, 0.006797311782836914, 0.006874176025390625, 0.00681056022644043, 0.006914048194885254, 0.006818816184997558, 0.006825984001159668, 0.00682700777053833, 0.006793216228485107, 0.006847487926483154, 0.0068351998329162595, 0.0068055038452148435, 0.006850560188293457, 0.006801407814025879, 0.006872064113616944, 0.0068986878395080565, 0.0068689918518066405, 0.006813695907592773, 0.014460927963256836, 0.006829055786132812, 0.006843391895294189, 0.00684441614151001, 0.006938623905181885, 0.006749184131622315, 0.0068392958641052244, 0.00688640022277832, 0.006897664070129395, 0.00684441614151001, 0.006819839954376221, 0.0068280320167541505, 0.006834176063537598, 0.006892543792724609, 0.00687718391418457, 0.006845439910888672, 0.0069632000923156735, 0.006806528091430664, 0.0068915200233459475, 0.0069283838272094726, 0.00688640022277832, 0.006917119979858399, 0.006831103801727295, 0.006825984001159668, 0.0069918718338012695, 0.007312384128570557, 0.007332863807678222, 0.007145503997802735, 0.0069437122344970705, 0.00729702377319336, 0.00725708818435669, 0.006986752033233643, 0.007379968166351319, 0.0073359360694885255, 0.007027711868286133, 0.007037951946258545, 0.007255040168762207, 0.007329792022705078, 0.006730751991271973, 0.007303167819976806, 0.007400447845458984, 0.006849535942077637, 0.006818816184997558, 0.0067983360290527345, 0.006873087882995605, 0.006714367866516113, 0.006894591808319092, 0.0069027838706970214, 0.006818816184997558, 
0.006830080032348633, 0.006841343879699707, 0.006900735855102539, 0.0067348480224609375, 0.006866943836212158, 0.006940671920776367, 0.00672870397567749, 0.006923264026641846, 0.0068055038452148435, 0.006945856094360351, 0.00685152006149292, 0.006808576107025147, 0.006906879901885986, 0.0068280320167541505]",tokens/s,140.10129046771218,,,,,,,, -4bit-awq-gemv-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,Qwen/Qwen1.5-4B,Qwen/Qwen1.5-4B,cuda,0,42,,,True,,,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run - report = scenario.run(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run - self.run_text_generation_memory_tracking(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 205, in run_text_generation_memory_tracking - _ = backend.generate(self.inputs, self.config.generate_kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context - return func(*args, **kwargs) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 454, in generate - return self.pretrained_model.generate(**inputs, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context - return func(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate - result = self._sample( - File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample - outputs = self(**model_inputs, return_dict=True) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 1104, in forward - outputs = self.model( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl - return forward_call(*args, 
**kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 915, in forward - layer_outputs = decoder_layer( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 655, in forward - hidden_states, self_attn_weights, present_key_value = self.self_attn( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 406, in forward - max(kv_seq_len, position_ids[:, -1].max().item() + 1) if position_ids is not None else kv_seq_len -RuntimeError: CUDA error: an illegal memory access was encountered -CUDA kernel errors might be asynchronously reported at some other API call, so the stacktrace below might be incorrect. -For debugging consider passing CUDA_LAUNCH_BLOCKING=1 -Compile with `TORCH_USE_CUDA_DSA` to enable device-side assertions. - - -",qwen2,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemv-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,Qwen/Qwen1.5-0.5B,Qwen/Qwen1.5-0.5B,cuda,0,42,,,True,,,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,qwen2,MB,1332.211712,1292.36992,0.0,706.740224,675.13344,s,1,7.6464423828125,7.6464423828125,0.0,7.6464423828125,7.6464423828125,7.6464423828125,7.6464423828125,[7.6464423828125],,kWh,1.0050934508336065e-05,5.482515550131421e-06,1.4705289541949718e-05,3.0238739600417203e-05,,MB,1533.145088,1642.594304,0.0,996.1472,942.733312,s,10,0.2759903697967529,0.02759903697967529,0.0007088083682366724,0.027268272399902344,0.028662793540954593,0.02895976514816284,0.029197342433929442,"[0.027191904067993163, 0.026970272064208985, 0.027021503448486327, 0.02772537612915039, 0.029256736755371094, 0.027241952896118166, 0.02729315185546875, 0.02744927978515625, 0.02859679985046387, 0.027243392944335938]",tokens/s,9275.68596645331,kWh,3.275136263292354e-07,1.794607649099927e-07,9.378977955624474e-07,1.4448721868016754e-06,tokens/kWh,177178301.54006475,MB,1568.473088,1650.982912,0.0,1004.535808,942.735872,s,10,14.831874877929687,1.4831874877929687,0.011579155970044516,1.4823635864257811,1.4922393676757812,1.5022363220214845,1.5102338854980468,"[1.4714168701171875, 1.474384521484375, 1.490017822265625, 1.5122332763671875, 1.4840687255859375, 1.4724068603515625, 1.4738143310546874, 1.480658447265625, 1.487448974609375, 
1.485425048828125]",tokens/s,42.47608648165314,kWh,1.850375899478001e-05,1.013754022315164e-05,3.029346052283296e-05,5.893475974076462e-05,tokens/kWh,1068978.6515991087,,s,630,14.829727731704716,0.023539250367785258,0.0005017649158708145,0.023388672828674316,0.02423930816650391,0.024389251613616943,0.025211187419891364,"[0.023835647583007814, 0.02334617614746094, 0.02349056053161621, 0.023164928436279295, 0.023146495819091797, 0.02313523292541504, 0.023408639907836915, 0.0233123836517334, 0.02303385543823242, 0.02330521583557129, 0.023129087448120117, 0.02310041618347168, 0.023366655349731445, 0.023349248886108398, 0.023378944396972655, 0.023383039474487305, 0.022999040603637694, 0.02301644706726074, 0.022923263549804687, 0.023184383392333984, 0.023299072265625, 0.023333887100219726, 0.02332467269897461, 0.023666688919067383, 0.02325196838378906, 0.02294272041320801, 0.023347200393676756, 0.023942144393920898, 0.024429567337036134, 0.02332467269897461, 0.023393280029296876, 0.023068672180175782, 0.02345267105102539, 0.023425024032592775, 0.02330931282043457, 0.0232857608795166, 0.023435264587402343, 0.023194623947143556, 0.02308198356628418, 0.02330828857421875, 0.02323148727416992, 0.023374847412109375, 0.023365631103515624, 0.024376319885253905, 0.02438041687011719, 0.024228864669799805, 0.023268415451049806, 0.023276479721069335, 0.023302143096923827, 0.023274496078491212, 0.023235584259033205, 0.0231014404296875, 0.02295408058166504, 0.02302249526977539, 0.023245824813842773, 0.023327743530273438, 0.023367679595947266, 0.023332895278930663, 0.02335024070739746, 0.02328780746459961, 0.02355200004577637, 0.023185407638549805, 0.023369728088378908, 0.022988800048828126, 0.0230328311920166, 0.023034879684448242, 0.023142400741577147, 0.023173120498657225, 0.022938623428344726, 0.023180288314819338, 0.02350182342529297, 0.023954431533813478, 0.02346188735961914, 0.023397375106811523, 0.02327961540222168, 0.023186431884765626, 0.023427072525024413, 0.023748607635498048, 0.024431615829467773, 0.024228864669799805, 0.02352230453491211, 0.024209407806396483, 0.023796735763549806, 0.02407219123840332, 0.023391231536865235, 0.02328473663330078, 0.023307264328002928, 0.023468032836914062, 0.02329599952697754, 0.023315456390380858, 0.02304819107055664, 0.023061504364013673, 0.022792192459106447, 0.022931455612182617, 0.022967296600341795, 0.023476224899291992, 0.02413670349121094, 0.023537664413452147, 0.023451648712158202, 0.023384063720703126, 0.022951936721801756, 0.023008256912231444, 0.022932479858398438, 0.022831104278564454, 0.022969343185424804, 0.022982656478881838, 0.022989824295043947, 0.022952959060668944, 0.023005184173583985, 0.02432204818725586, 0.02424115180969238, 0.024203264236450195, 0.02424115180969238, 0.02432307243347168, 0.02346905517578125, 0.024015871047973633, 0.02352639961242676, 0.02326425552368164, 0.02330112075805664, 0.02306559944152832, 0.023219200134277345, 0.023061504364013673, 0.02310246467590332, 0.023003135681152344, 0.023302143096923827, 0.023363584518432616, 0.023349248886108398, 0.02350284767150879, 0.02351103973388672, 0.023421951293945312, 0.023323648452758788, 0.023340032577514647, 0.02344960021972656, 0.02346188735961914, 0.023311359405517578, 0.023068672180175782, 0.022947839736938477, 0.022957056045532227, 0.023364608764648437, 0.023417856216430662, 0.02354380798339844, 0.02364723205566406, 0.023994367599487306, 0.02406809616088867, 0.023577600479125976, 0.023195648193359376, 0.02348543930053711, 0.023435264587402343, 0.023406591415405274, 
0.023444480895996093, 0.0237076473236084, 0.02431590461730957, 0.024394752502441407, 0.024206335067749024, 0.024102912902832032, 0.023355392456054686, 0.024138751983642577, 0.02494259262084961, 0.024781824111938477, 0.024068159103393556, 0.02401580810546875, 0.024371200561523438, 0.02387660789489746, 0.023187456130981447, 0.02369945526123047, 0.0234833927154541, 0.023400447845458985, 0.023573503494262696, 0.024219648361206055, 0.02369740867614746, 0.024457216262817383, 0.023995391845703123, 0.023480319976806642, 0.02348646354675293, 0.02337900733947754, 0.023460800170898438, 0.023436288833618164, 0.023627775192260742, 0.023344127655029297, 0.02348748779296875, 0.023213056564331053, 0.02415718460083008, 0.024319999694824217, 0.02365132713317871, 0.023227392196655275, 0.023616512298583983, 0.023444480895996093, 0.022898687362670898, 0.023396352767944335, 0.02349567985534668, 0.02613657569885254, 0.024928255081176756, 0.023621631622314454, 0.023739391326904297, 0.023433216094970705, 0.023561216354370116, 0.023341056823730468, 0.02310758399963379, 0.023447551727294923, 0.02360019111633301, 0.024847295761108397, 0.02390323257446289, 0.024382528305053712, 0.024177600860595703, 0.024453119277954103, 0.02438041687011719, 0.02387353515625, 0.02307276725769043, 0.023432191848754884, 0.02376192092895508, 0.024311807632446288, 0.024992767333984374, 0.024475648880004884, 0.024293376922607423, 0.02428313636779785, 0.023777280807495117, 0.023864320755004883, 0.024225791931152343, 0.024247295379638673, 0.024147968292236328, 0.023817216873168946, 0.024145919799804686, 0.02372096061706543, 0.023993343353271485, 0.023597055435180665, 0.02391961669921875, 0.023742464065551756, 0.023243776321411135, 0.02308095932006836, 0.023391231536865235, 0.023175167083740233, 0.023888896942138672, 0.024197120666503907, 0.024359935760498046, 0.024244224548339844, 0.024130559921264647, 0.02330112075805664, 0.023547903060913086, 0.024147968292236328, 0.024200191497802736, 0.02429644775390625, 0.024145919799804686, 0.024621055603027343, 0.025116672515869142, 0.024723455429077147, 0.024239103317260743, 0.024009727478027345, 0.023785472869873047, 0.023629823684692384, 0.02369638442993164, 0.02431692886352539, 0.02429132843017578, 0.02551296043395996, 0.025076736450195314, 0.02433945655822754, 0.02388582420349121, 0.02346905517578125, 0.02352742385864258, 0.02330624008178711, 0.023567359924316408, 0.02345062446594238, 0.026471424102783202, 0.0245534725189209, 0.02433228874206543, 0.024345600128173828, 0.023790592193603514, 0.02345062446594238, 0.023382015228271484, 0.023319551467895508, 0.02332569694519043, 0.02342911911010742, 0.023423999786376954, 0.02342911911010742, 0.02334617614746094, 0.023019519805908203, 0.023378944396972655, 0.023366655349731445, 0.023331840515136718, 0.023379968643188476, 0.023169023513793945, 0.023377920150756838, 0.023369728088378908, 0.023366655349731445, 0.023136255264282226, 0.023413759231567383, 0.023962623596191408, 0.02371993637084961, 0.023333887100219726, 0.023014400482177736, 0.02310348892211914, 0.023221248626708983, 0.022971391677856445, 0.023601152420043944, 0.023478271484375, 0.023878719329833983, 0.023779264450073244, 0.023521280288696288, 0.02347520065307617, 0.023419904708862304, 0.023355392456054686, 0.023380992889404296, 0.023227392196655275, 0.023183359146118163, 0.022994943618774414, 0.023212032318115236, 0.023112703323364257, 0.02327654457092285, 0.023389184951782226, 0.023326719284057617, 0.023348224639892577, 0.02329599952697754, 0.02327244758605957, 0.02326323127746582, 
0.023366655349731445, 0.023367679595947266, 0.023010303497314453, 0.022935552597045897, 0.022942752838134767, 0.02295599937438965, 0.022972415924072266, 0.02331340789794922, 0.02329292869567871, 0.023357440948486328, 0.023011327743530274, 0.02309734344482422, 0.02305023956298828, 0.022965248107910157, 0.02330931282043457, 0.023758848190307616, 0.023772159576416017, 0.02345779228210449, 0.023299072265625, 0.02333695983886719, 0.02327347183227539, 0.023378944396972655, 0.023570432662963867, 0.023427072525024413, 0.023088127136230468, 0.02332057571411133, 0.02332262420654297, 0.023096319198608398, 0.023045120239257814, 0.023339008331298827, 0.023432191848754884, 0.02347315216064453, 0.02346291160583496, 0.023363584518432616, 0.023171072006225587, 0.02311884880065918, 0.023053312301635744, 0.02384486389160156, 0.02496512031555176, 0.02470195198059082, 0.023956480026245116, 0.02352025604248047, 0.023386112213134767, 0.023357440948486328, 0.023051263809204102, 0.02303385543823242, 0.023034879684448242, 0.023194623947143556, 0.023311359405517578, 0.02327347183227539, 0.023342079162597656, 0.023442432403564452, 0.023813119888305666, 0.02332569694519043, 0.023249919891357423, 0.023351295471191406, 0.023841791152954102, 0.02369024085998535, 0.023358463287353515, 0.02371686363220215, 0.0237260799407959, 0.02328678321838379, 0.023006208419799806, 0.02330316734313965, 0.023371776580810546, 0.023421951293945312, 0.02335436820983887, 0.02332262420654297, 0.023393280029296876, 0.02326323127746582, 0.023388160705566406, 0.023315456390380858, 0.023290880203247072, 0.023165952682495116, 0.022987775802612305, 0.02314854431152344, 0.023398399353027344, 0.02305638313293457, 0.02292736053466797, 0.023233535766601563, 0.023205888748168944, 0.02328985595703125, 0.02330828857421875, 0.02330009651184082, 0.023374847412109375, 0.023344127655029297, 0.023237632751464843, 0.023010303497314453, 0.02309017562866211, 0.023378944396972655, 0.023363584518432616, 0.023267328262329103, 0.023362560272216795, 0.023257087707519532, 0.023405567169189453, 0.023323648452758788, 0.02335436820983887, 0.023397375106811523, 0.023342079162597656, 0.023219263076782227, 0.02323142433166504, 0.02332467269897461, 0.023612415313720703, 0.023847936630249023, 0.02332876777648926, 0.02328166389465332, 0.02330624008178711, 0.024012800216674804, 0.02352025604248047, 0.022996992111206056, 0.023365631103515624, 0.02347110366821289, 0.023254016876220703, 0.023335935592651368, 0.02629734420776367, 0.024963071823120117, 0.02349875259399414, 0.023427072525024413, 0.02352742385864258, 0.023405567169189453, 0.023440383911132814, 0.023408639907836915, 0.022970367431640625, 0.02313932800292969, 0.023040000915527343, 0.023021568298339845, 0.02328780746459961, 0.023810047149658203, 0.023339008331298827, 0.02306662368774414, 0.023010303497314453, 0.022946815490722656, 0.022952959060668944, 0.023010303497314453, 0.023409664154052736, 0.023883775711059572, 0.02412851142883301, 0.023385087966918947, 0.023378944396972655, 0.023397375106811523, 0.02349363136291504, 0.0234833927154541, 0.023480319976806642, 0.023347200393676756, 0.02327244758605957, 0.023835647583007814, 0.023547903060913086, 0.02428927993774414, 0.024611839294433592, 0.024465408325195313, 0.023744543075561522, 0.023635936737060548, 0.023940095901489256, 0.023618560791015625, 0.023399423599243165, 0.023351295471191406, 0.023374847412109375, 0.023365631103515624, 0.02424831962585449, 0.024237056732177735, 0.023798784255981444, 0.02309017562866211, 0.023349248886108398, 0.023348224639892577, 
0.023364608764648437, 0.023005184173583985, 0.023614463806152345, 0.02366873550415039, 0.02412748718261719, 0.02343731117248535, 0.02326425552368164, 0.023556095123291015, 0.023641088485717773, 0.023053312301635744, 0.023002111434936523, 0.022921215057373046, 0.022938623428344726, 0.023195648193359376, 0.02389401626586914, 0.023395328521728515, 0.023408639907836915, 0.02308608055114746, 0.022999040603637694, 0.02324787139892578, 0.02329395294189453, 0.02369740867614746, 0.02351923179626465, 0.022939647674560547, 0.023147584915161133, 0.02419398307800293, 0.024268800735473633, 0.02347007942199707, 0.023403520584106444, 0.023299072265625, 0.02335436820983887, 0.02290995216369629, 0.023211008071899415, 0.02365132713317871, 0.023587839126586914, 0.023377920150756838, 0.023431167602539063, 0.023020544052124024, 0.022984703063964843, 0.023356416702270507, 0.023059455871582032, 0.023342079162597656, 0.023340032577514647, 0.0235100154876709, 0.024208383560180666, 0.024139776229858398, 0.02369843292236328, 0.02327654457092285, 0.02330521583557129, 0.023340032577514647, 0.023376895904541017, 0.02329292869567871, 0.023425024032592775, 0.02390323257446289, 0.023359487533569336, 0.02374143981933594, 0.023382015228271484, 0.023349248886108398, 0.023191551208496093, 0.023808000564575195, 0.023430143356323242, 0.023405632019042968, 0.023485376358032228, 0.023326719284057617, 0.023406591415405274, 0.023194623947143556, 0.023220224380493162, 0.024227840423583984, 0.025249792098999024, 0.023814144134521483, 0.023274496078491212, 0.023019519805908203, 0.023567359924316408, 0.024011775970458983, 0.024081407546997072, 0.02368409538269043, 0.023390207290649414, 0.02345779228210449, 0.023392255783081056, 0.023963647842407225, 0.024352767944335937, 0.02578950309753418, 0.025380800247192383, 0.02434764862060547, 0.023394304275512694, 0.023798784255981444, 0.02450432014465332, 0.023771135330200196, 0.02305843162536621, 0.023163904190063478, 0.023516159057617187, 0.0242739200592041, 0.024159231185913087, 0.02345369529724121, 0.023240703582763672, 0.023404544830322265, 0.024130559921264647, 0.02432614326477051, 0.024163328170776367, 0.023560192108154295, 0.0234967041015625, 0.02325196838378906, 0.023009279251098632, 0.02307276725769043, 0.0235284481048584, 0.024172544479370117, 0.02342092704772949, 0.023014400482177736, 0.024052736282348632, 0.02427903938293457, 0.023981056213378905, 0.02455244827270508, 0.024228864669799805, 0.02391347122192383, 0.02415001678466797, 0.023259136199951173, 0.023004159927368165, 0.023624704360961913, 0.024129535675048826, 0.023426048278808592, 0.023422975540161133, 0.023384063720703126, 0.02326937675476074, 0.023414783477783203, 0.023532543182373047, 0.023415807723999024, 0.022936576843261718, 0.024813568115234375, 0.02416640090942383, 0.023823360443115234, 0.02350284767150879, 0.02349260711669922, 0.023353343963623048, 0.023446527481079102, 0.023209983825683594, 0.022966272354125978, 0.022993919372558593, 0.02368617630004883, 0.023298015594482423, 0.023366655349731445, 0.023211008071899415, 0.023385087966918947, 0.02327756881713867, 0.022979583740234375, 0.023257087707519532, 0.022992895126342772, 0.02329190444946289, 0.022832128524780275, 0.022952959060668944, 0.023326719284057617, 0.02373222351074219, 0.024591360092163086, 0.024123392105102538]",tokens/s,42.48223645084953,,,,,,,, 
-4bit-awq-gemv-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,EleutherAI/pythia-1.3b,EleutherAI/pythia-1.3b,cuda,0,42,,,True,,,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,gpt_neox,MB,1493.233664,1753.74336,0.0,1168.113664,1154.613248,s,1,8.0892802734375,8.0892802734375,0.0,8.0892802734375,8.0892802734375,8.0892802734375,8.0892802734375,[8.0892802734375],,kWh,1.4458989269447404e-05,7.907928386035761e-06,1.9359182154010668e-05,4.1726099809493834e-05,,MB,1543.053312,2015.88736,0.0,1369.440256,1323.44832,s,10,0.2879095344543457,0.028790953445434576,5.2383171530136506e-05,0.028765903472900388,0.028858428382873533,0.028884046268463134,0.028904540576934815,"[0.028909664154052734, 0.0287390079498291, 0.02876006317138672, 0.028767871856689452, 0.028772192001342775, 0.02885273551940918, 0.028832799911499025, 0.028763935089111327, 0.02875270462036133, 0.028758560180664063]",tokens/s,8891.6819126945,kWh,3.4017699038150486e-07,1.864009391192692e-07,1.6174988993618507e-06,2.144076828862625e-06,tokens/kWh,119398706.49868509,MB,1569.140736,2036.85888,0.0,1390.411776,1377.251328,s,10,11.505031249999998,1.1505031249999997,0.013774331563448977,1.149140380859375,1.168698291015625,1.1700311523437499,1.17109744140625,"[1.168402099609375, 1.171364013671875, 1.165570068359375, 1.14463037109375, 1.1530740966796875, 1.1526236572265625, 1.1456571044921875, 1.1420390625, 1.130388427734375, 1.1312823486328125]",tokens/s,54.758651785496035,kWh,1.3425956556769682e-05,7.356978924649777e-06,2.572907853344004e-05,4.6512014014859495e-05,tokens/kWh,1354488.755956106,,s,630,11.501252576828012,0.01825595647115556,0.00046943455425475274,0.018008064270019532,0.018872425842285157,0.01901849594116211,0.019947417354583746,"[0.018586624145507814, 0.018707456588745116, 0.018611200332641603, 0.01863987159729004, 0.01863577651977539, 0.018728960037231446, 0.018586624145507814, 0.01866035270690918, 0.018572288513183592, 0.018734079360961914, 0.01861324882507324, 0.018696191787719727, 0.01860915184020996, 0.018667520523071288, 0.018549760818481444, 0.018740224838256835, 0.018984960556030273, 0.01869824028015137, 0.018765823364257812, 0.018697216033935548, 0.018692096710205077, 0.0186562557220459, 0.018803712844848632, 0.01878633689880371, 0.018789344787597657, 0.01866444778442383, 0.018662399291992187, 0.018731008529663085, 0.01881292724609375, 0.018740224838256835, 0.01861427116394043, 0.01881907272338867, 0.018676736831665038, 0.018689023971557618, 0.018741247177124023, 0.018725887298583984, 0.01886720085144043, 0.018902015686035157, 0.018725887298583984, 0.01864192008972168, 0.018746368408203123, 0.018771968841552734, 0.018655231475830078, 0.01880166435241699, 0.01878118324279785, 0.018585599899291993, 0.01804800033569336, 0.018496511459350586, 0.018163711547851562, 0.018499584197998048, 0.018510847091674804, 0.0180316162109375, 0.01808076858520508, 0.018019327163696287, 0.017959936141967774, 0.01794867134094238, 0.017937408447265626, 0.018017280578613282, 0.01798041534423828, 0.017983488082885742, 0.01790771293640137, 
0.017936384201049805, 0.017916927337646483, 0.01777561569213867, 0.017916927337646483, 0.017892351150512697, 0.017910783767700195, 0.017886207580566405, 0.018811904907226562, 0.018873376846313475, 0.01954300880432129, 0.018682880401611326, 0.018516992568969725, 0.01815449523925781, 0.018025472640991212, 0.01819545555114746, 0.01865727996826172, 0.01802239990234375, 0.018525184631347655, 0.018763776779174804, 0.01861427116394043, 0.018766847610473633, 0.01865011215209961, 0.01864089584350586, 0.01866035270690918, 0.018684928894042968, 0.018491392135620118, 0.018702335357666015, 0.018461696624755858, 0.01879347229003906, 0.018790399551391602, 0.018844671249389648, 0.018557952880859374, 0.01799577522277832, 0.018321407318115233, 0.018364416122436524, 0.017955839157104494, 0.01839411163330078, 0.018733055114746093, 0.019070976257324217, 0.01900441551208496, 0.0188723201751709, 0.01881907272338867, 0.018890752792358398, 0.01900851249694824, 0.019194879531860352, 0.018033664703369142, 0.01823744010925293, 0.018531328201293946, 0.017909759521484374, 0.01818009567260742, 0.018556928634643553, 0.017947647094726564, 0.018102272033691406, 0.018753536224365236, 0.01880473518371582, 0.018701311111450195, 0.018494464874267577, 0.018720767974853517, 0.01868185615539551, 0.019392511367797852, 0.01940377616882324, 0.02048409652709961, 0.01928294372558594, 0.01899212837219238, 0.018311168670654295, 0.01773155212402344, 0.017921024322509766, 0.018577407836914063, 0.017932287216186525, 0.01819443130493164, 0.018753536224365236, 0.01868083190917969, 0.018696191787719727, 0.018696191787719727, 0.019350528717041016, 0.01884569549560547, 0.018078720092773438, 0.01803264045715332, 0.018815999984741212, 0.018008064270019532, 0.018362367630004883, 0.018935808181762694, 0.01901568031311035, 0.018924543380737305, 0.018589696884155273, 0.01900851249694824, 0.018745344161987306, 0.018281471252441405, 0.01897881507873535, 0.018316287994384766, 0.017915903091430666, 0.019183616638183593, 0.018915327072143554, 0.018694143295288086, 0.01903923225402832, 0.018888704299926756, 0.01904947280883789, 0.019524608612060547, 0.01884160041809082, 0.018795520782470702, 0.01817804718017578, 0.017932287216186525, 0.01900441551208496, 0.018283519744873047, 0.018156543731689453, 0.01836953544616699, 0.017926143646240233, 0.01842995262145996, 0.018723840713500976, 0.01869004821777344, 0.01864396858215332, 0.017944576263427735, 0.018288639068603514, 0.01904844856262207, 0.01883033561706543, 0.01805721664428711, 0.018027519226074217, 0.017976320266723633, 0.018009088516235353, 0.018092031478881835, 0.018060287475585936, 0.018062335968017578, 0.018553855895996094, 0.018877439498901367, 0.018344959259033202, 0.01817087936401367, 0.018059263229370116, 0.018147327423095702, 0.017809375762939453, 0.017959936141967774, 0.017949695587158202, 0.018110464096069336, 0.01803980827331543, 0.017901567459106444, 0.017979391098022462, 0.017903615951538086, 0.017987583160400392, 0.017944576263427735, 0.017931264877319338, 0.0184586238861084, 0.018279487609863282, 0.01793222427368164, 0.018027519226074217, 0.017994752883911135, 0.017978368759155275, 0.017922048568725587, 0.018710527420043945, 0.018876415252685547, 0.019284992218017577, 0.01883135986328125, 0.01884160041809082, 0.019174400329589843, 0.019591167449951173, 0.018880512237548826, 0.01800601577758789, 0.01797222328186035, 0.01799782371520996, 0.017934335708618163, 0.017977344512939454, 0.01803468894958496, 0.018086912155151368, 0.01798041534423828, 0.01800704002380371, 0.01794047927856445, 
0.01798246383666992, 0.017908735275268553, 0.01838902473449707, 0.019050464630126954, 0.01884160041809082, 0.018347007751464844, 0.017991680145263672, 0.018082815170288084, 0.01800499153137207, 0.017953792572021485, 0.01819647979736328, 0.018083839416503905, 0.01782579231262207, 0.017711103439331053, 0.017922048568725587, 0.017975296020507812, 0.017976320266723633, 0.017985536575317384, 0.017925119400024413, 0.017941503524780272, 0.017937408447265626, 0.01795689582824707, 0.017935327529907227, 0.017970176696777345, 0.01807257652282715, 0.018076671600341796, 0.017963008880615236, 0.017733631134033204, 0.017935359954833984, 0.017939456939697264, 0.017896448135375977, 0.01790771293640137, 0.017889280319213868, 0.017935359954833984, 0.017904640197753906, 0.017950719833374023, 0.017888256072998047, 0.018041856765747072, 0.0202926082611084, 0.02002943992614746, 0.018913280487060546, 0.017958911895751953, 0.017966079711914062, 0.020188159942626953, 0.01920614433288574, 0.018699264526367186, 0.018726911544799805, 0.018556928634643553, 0.017926143646240233, 0.017991680145263672, 0.017935359954833984, 0.017898496627807618, 0.01789030456542969, 0.020204544067382812, 0.02000383949279785, 0.019412992477416992, 0.01827123260498047, 0.017933311462402343, 0.01784934425354004, 0.017952768325805665, 0.017887231826782226, 0.017894399642944335, 0.017934335708618163, 0.017938432693481447, 0.01785036849975586, 0.017966079711914062, 0.017870847702026366, 0.017947647094726564, 0.01786777687072754, 0.017935359954833984, 0.017855487823486327, 0.017953792572021485, 0.017929216384887696, 0.017937408447265626, 0.017904640197753906, 0.017967103958129883, 0.017933311462402343, 0.018185216903686522, 0.018928640365600585, 0.01881292724609375, 0.018762752532958983, 0.018086912155151368, 0.018137088775634767, 0.018019327163696287, 0.01801420783996582, 0.017975296020507812, 0.017978368759155275, 0.018790399551391602, 0.018824192047119142, 0.018763776779174804, 0.01861427116394043, 0.018044927597045898, 0.01801523208618164, 0.018076671600341796, 0.017994752883911135, 0.017917951583862304, 0.017960960388183594, 0.01804083251953125, 0.01872489547729492, 0.018537439346313477, 0.017958911895751953, 0.01796713638305664, 0.017985504150390626, 0.017984512329101563, 0.017921024322509766, 0.017898496627807618, 0.017927167892456054, 0.01803468894958496, 0.017950719833374023, 0.018485248565673826, 0.0188221435546875, 0.018750463485717773, 0.018892799377441406, 0.018787328720092773, 0.01884671974182129, 0.01879756736755371, 0.018913280487060546, 0.01886720085144043, 0.018865152359008788, 0.018900991439819336, 0.01903411293029785, 0.019056640625, 0.018883583068847656, 0.018912256240844725, 0.01842483139038086, 0.018025472640991212, 0.01796505546569824, 0.017932287216186525, 0.017953792572021485, 0.01808076858520508, 0.01787494468688965, 0.017949695587158202, 0.01798860740661621, 0.018845727920532226, 0.018403295516967774, 0.017911808013916015, 0.017912832260131836, 0.017912832260131836, 0.017914880752563478, 0.017955839157104494, 0.01797427177429199, 0.018372608184814454, 0.018881536483764647, 0.01883750343322754, 0.01817190361022949, 0.017979391098022462, 0.018027519226074217, 0.018066432952880858, 0.017928192138671875, 0.017952768325805665, 0.018732032775878905, 0.01802444839477539, 0.017894399642944335, 0.018570240020751954, 0.018258943557739257, 0.017927167892456054, 0.017977344512939454, 0.018721792221069337, 0.018880512237548826, 0.01884774398803711, 0.01840025520324707, 0.018071584701538086, 0.018373600006103517, 
0.018421760559082033, 0.01963212776184082, 0.01903206443786621, 0.018952192306518553, 0.018733055114746093, 0.017963008880615236, 0.01788211250305176, 0.017932287216186525, 0.017941503524780272, 0.017978368759155275, 0.018018304824829103, 0.01798041534423828, 0.017945600509643556, 0.01799782371520996, 0.017929216384887696, 0.017968128204345703, 0.01799782371520996, 0.017955839157104494, 0.017944576263427735, 0.0180316162109375, 0.018082815170288084, 0.018077695846557617, 0.017942527770996093, 0.017955839157104494, 0.01789030456542969, 0.017960960388183594, 0.01789952087402344, 0.017935359954833984, 0.017937408447265626, 0.017943552017211914, 0.01817500877380371, 0.017966047286987304, 0.017932287216186525, 0.017871871948242187, 0.017938432693481447, 0.01803775978088379, 0.0184453125, 0.0192225284576416, 0.01899929618835449, 0.018172927856445312, 0.018019327163696287, 0.017975296020507812, 0.01801215934753418, 0.017931264877319338, 0.01799884796142578, 0.018025472640991212, 0.01840025520324707, 0.018455551147460936, 0.01807360076904297, 0.017872896194458008, 0.017961984634399415, 0.017934335708618163, 0.017979391098022462, 0.017846271514892577, 0.018531328201293946, 0.01864806365966797, 0.018702335357666015, 0.018766847610473633, 0.018323455810546875, 0.017869823455810546, 0.017917951583862304, 0.017920000076293945, 0.017887231826782226, 0.017924095153808595, 0.017953792572021485, 0.017920000076293945, 0.017915903091430666, 0.017939456939697264, 0.017976320266723633, 0.017931264877319338, 0.017925119400024413, 0.017900543212890627, 0.017984512329101563, 0.017895423889160156, 0.017900543212890627, 0.017880064010620117, 0.017910783767700195, 0.017966079711914062, 0.017881088256835938, 0.01803775978088379, 0.017920000076293945, 0.01796403121948242, 0.017946624755859376, 0.017894399642944335, 0.017926143646240233, 0.018001920700073244, 0.017920000076293945, 0.01780735969543457, 0.017864704132080078, 0.017959936141967774, 0.017796096801757814, 0.01801420783996582, 0.017922048568725587, 0.017933311462402343, 0.017936384201049805, 0.017944576263427735, 0.018067455291748045, 0.018085887908935547, 0.01801625633239746, 0.01799580764770508, 0.01790665626525879, 0.017966079711914062, 0.01844428825378418, 0.0209039363861084, 0.019809280395507813, 0.018911231994628908, 0.019006464004516603, 0.018282495498657226, 0.018156543731689453, 0.018111488342285157, 0.018083839416503905, 0.01797324752807617, 0.017954816818237306, 0.018008064270019532, 0.01800396728515625, 0.01798041534423828, 0.01780633544921875, 0.017993728637695314, 0.017878015518188475, 0.017984512329101563, 0.017950719833374023, 0.017966079711914062, 0.017745920181274414, 0.01779199981689453, 0.017887231826782226, 0.017955839157104494, 0.017903615951538086, 0.018017280578613282, 0.01800396728515625, 0.017918975830078124, 0.017942527770996093, 0.017969152450561524, 0.01794047927856445, 0.017938432693481447, 0.01798860740661621, 0.017902591705322265, 0.017861631393432616, 0.018081792831420897, 0.017939456939697264, 0.01797222328186035, 0.017944576263427735, 0.017741823196411134, 0.018137088775634767, 0.017959936141967774, 0.01800499153137207, 0.017853439331054686, 0.01790771293640137, 0.017895423889160156, 0.017931264877319338, 0.017853439331054686, 0.018062335968017578, 0.0178155517578125, 0.01794867134094238, 0.017888256072998047, 0.017950719833374023, 0.017905664443969727, 0.017943552017211914, 0.018125823974609375, 0.01865318489074707, 0.018242559432983398, 0.017978368759155275, 0.01803878402709961, 0.01816166305541992, 0.017968128204345703, 
0.017957887649536132, 0.018019327163696287, 0.017947647094726564, 0.017733631134033204, 0.01769267272949219, 0.017696767807006835, 0.017950719833374023, 0.017934335708618163, 0.017906688690185548, 0.017937408447265626, 0.017930240631103517, 0.017731584548950196, 0.017707008361816406, 0.017727487564086913, 0.017883167266845704, 0.017537023544311522, 0.01762918472290039, 0.017671167373657228, 0.017893375396728514, 0.017872896194458008, 0.01784934425354004, 0.01786777687072754, 0.01785036849975586, 0.017730560302734375, 0.01765068817138672, 0.01782476806640625, 0.017977344512939454, 0.017800224304199218, 0.01770800018310547, 0.017862655639648437, 0.017944608688354492, 0.017919967651367188, 0.017872896194458008, 0.017758207321166994, 0.017942527770996093, 0.01799065589904785, 0.01801625633239746, 0.0179814395904541, 0.017958911895751953, 0.017949695587158202, 0.018580480575561522, 0.017910783767700195, 0.01796403121948242, 0.017969152450561524, 0.017968128204345703, 0.017925119400024413, 0.017976320266723633, 0.017945600509643556, 0.017954816818237306, 0.018183168411254884, 0.01902079963684082, 0.01866444778442383, 0.018280448913574218, 0.01803059196472168, 0.0179814395904541, 0.01798041534423828, 0.01795686340332031, 0.01803059196472168, 0.017904640197753906, 0.018067455291748045, 0.017945600509643556, 0.01797222328186035, 0.017916927337646483, 0.01798246383666992, 0.01795686340332031, 0.017905664443969727, 0.017886240005493163, 0.017939424514770506, 0.017957887649536132, 0.017947647094726564, 0.01783296012878418, 0.017931264877319338, 0.017895423889160156, 0.017893375396728514, 0.017917951583862304, 0.01784832000732422, 0.017959936141967774, 0.01784832000732422]",tokens/s,54.776642438866546,,,,,,,, -4bit-awq-gemv-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,Qwen/Qwen1.5-72B,Qwen/Qwen1.5-72B,cuda,0,42,,,True,,,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run - report = scenario.run(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run - self.run_model_loading_tracking(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking - backend.load() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load - self.load_transformers_model() - File 
""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model - self.load_transformers_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights - self.load_transformers_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained - self.pretrained_model = self.automodel_loader.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4034, in from_pretrained - dispatch_model(model, **device_map_kwargs) - File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 494, in dispatch_model - model.to(device) - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 2905, in to - return super().to(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1174, in to - return self._apply(convert) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply - module._apply(fn) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply - module._apply(fn) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply - module._apply(fn) - [Previous line repeated 2 more times] - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 854, in _apply - self._buffers[key] = fn(buf) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1160, in convert - return t.to( -torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 96.00 MiB. GPU 0 has a total capacity of 22.18 GiB of which 68.50 MiB is free. Process 82498 has 22.11 GiB memory in use. Of the allocated memory 21.86 GiB is allocated by PyTorch, and 10.74 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) - -",qwen2,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemv-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,EleutherAI/pythia-1.4b,EleutherAI/pythia-1.4b,cuda,0,42,,,True,,,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,gpt_neox,MB,1574.989824,1753.74336,0.0,1168.113664,1154.613248,s,1,8.07701123046875,8.07701123046875,0.0,8.07701123046875,8.07701123046875,8.07701123046875,8.07701123046875,[8.07701123046875],,kWh,1.5132919572907768e-05,8.278152942143011e-06,2.1276128132019512e-05,4.468720064707029e-05,,MB,1727.65184,2015.88736,0.0,1369.440256,1323.44832,s,10,0.2876616954803467,0.02876616954803467,3.8482135726794416e-05,0.028762895584106447,0.028794262504577638,0.028830219554901122,0.028858985195159913,"[0.02886617660522461, 0.02876844787597656, 0.028755903244018555, 0.028741600036621094, 0.028786272048950196, 0.028761247634887695, 0.02872598457336426, 0.02877039909362793, 0.028764543533325197, 0.028721120834350584]",tokens/s,8899.342666131583,kWh,3.4028200140884923e-07,1.8645764471016147e-07,1.6326705270805134e-06,2.159410173199524e-06,tokens/kWh,118550890.96884894,MB,1737.125888,2036.85888,0.0,1390.411776,1377.251328,s,10,11.444406860351563,1.1444406860351561,0.003757526337023889,1.143448974609375,1.1489893798828126,1.150921875,1.1524678710937502,"[1.1447156982421876, 1.1457169189453125, 1.1430673828125, 1.1528543701171876, 1.1431868896484374, 1.1388907470703125, 1.1405169677734375, 1.1436781005859376, 1.1432198486328125, 1.1485599365234376]",tokens/s,55.048724471916145,kWh,1.3636335967897336e-05,7.472339692285668e-06,2.7138380267521004e-05,4.824705592770401e-05,tokens/kWh,1305779.1566474563,,s,630,11.440643095016469,0.0181597509444706,0.00026051404529775377,0.018083839416503905,0.018431079101562498,0.018696396636962893,0.01922524147033692,"[0.01785036849975586, 0.01802444839477539, 0.01803468894958496, 0.017983488082885742, 0.018412544250488282, 0.018099199295043944, 0.01805721664428711, 0.0179814395904541, 0.018083839416503905, 0.01821696090698242, 0.018125823974609375, 0.018008064270019532, 0.01821183967590332, 0.01799884796142578, 0.018150400161743165, 0.018044927597045898, 0.018185216903686522, 0.018076671600341796, 0.018041856765747072, 0.018499584197998048, 0.01804697608947754, 0.01807257652282715, 0.01809715270996094, 0.018431999206542968, 0.01819545555114746, 0.01829478454589844, 0.018229248046875, 0.018059263229370116, 0.018127872467041017, 0.018086912155151368, 0.018113536834716795, 0.01844428825378418, 0.018981887817382814, 0.01821900749206543, 0.018096128463745118, 0.018316287994384766, 0.018153472900390624, 0.01823846435546875, 0.018152448654174806, 0.018083839416503905, 0.0184268798828125, 0.018321407318115233, 0.018153472900390624, 0.018104320526123048, 0.018113536834716795, 0.018151424407958985, 0.018114559173583983, 0.018123775482177733, 0.018145280838012694, 0.01807257652282715, 0.018185216903686522, 
0.01805619239807129, 0.018214912414550782, 0.018172927856445312, 0.018106367111206053, 0.018074623107910158, 0.018136064529418947, 0.01840230369567871, 0.018122751235961913, 0.018231296539306642, 0.018181119918823242, 0.018148351669311523, 0.01804902458190918, 0.017924095153808595, 0.017796096801757814, 0.017741823196411134, 0.018044927597045898, 0.018078720092773438, 0.01804595184326172, 0.01807155227661133, 0.01803775978088379, 0.018067455291748045, 0.018440191268920898, 0.018066432952880858, 0.018091007232666014, 0.0180633602142334, 0.01802649688720703, 0.018197504043579102, 0.01808793640136719, 0.018091007232666014, 0.018093055725097656, 0.018083839416503905, 0.01807974433898926, 0.018078720092773438, 0.018115583419799804, 0.01862451171875, 0.018481151580810547, 0.01925939178466797, 0.018926591873168946, 0.018767871856689454, 0.018637823104858398, 0.018280448913574218, 0.01813811111450195, 0.01830297660827637, 0.01807974433898926, 0.01815449523925781, 0.018761728286743166, 0.018655231475830078, 0.018086912155151368, 0.01803775978088379, 0.01804902458190918, 0.01803468894958496, 0.01804902458190918, 0.018304000854492186, 0.01821388816833496, 0.018144256591796876, 0.018137088775634767, 0.018020351409912108, 0.01800294494628906, 0.01799782371520996, 0.01838489532470703, 0.018094079971313477, 0.018035711288452147, 0.01824051284790039, 0.01820057678222656, 0.018160640716552736, 0.01801215934753418, 0.01802342414855957, 0.01798963165283203, 0.018008064270019532, 0.018050048828125, 0.018493440628051756, 0.018044927597045898, 0.01805516815185547, 0.018027519226074217, 0.0180633602142334, 0.017861631393432616, 0.018374656677246092, 0.018172927856445312, 0.01802137565612793, 0.018123775482177733, 0.018155519485473632, 0.018115583419799804, 0.018025472640991212, 0.01808793640136719, 0.0180633602142334, 0.018157567977905274, 0.017994752883911135, 0.018233343124389647, 0.01819443130493164, 0.018116607666015624, 0.018028543472290038, 0.018102272033691406, 0.018051071166992186, 0.01808793640136719, 0.018115583419799804, 0.01807974433898926, 0.018136064529418947, 0.01806540870666504, 0.018149375915527344, 0.01807155227661133, 0.017992704391479493, 0.018025472640991212, 0.018123775482177733, 0.018099199295043944, 0.018155519485473632, 0.01807155227661133, 0.018094079971313477, 0.01802649688720703, 0.01818726348876953, 0.018109439849853515, 0.018106367111206053, 0.018152448654174806, 0.01805414390563965, 0.01817087936401367, 0.018164735794067383, 0.018637823104858398, 0.018408447265625, 0.01843097686767578, 0.018565120697021483, 0.018010112762451173, 0.018086912155151368, 0.01803775978088379, 0.018282495498657226, 0.01826201629638672, 0.018364416122436524, 0.018068479537963866, 0.018067455291748045, 0.018099199295043944, 0.018018304824829103, 0.01804800033569336, 0.018397184371948243, 0.01807360076904297, 0.018061311721801757, 0.018078720092773438, 0.01824051284790039, 0.01825382423400879, 0.018001920700073244, 0.018078720092773438, 0.017836032867431642, 0.01800396728515625, 0.018084863662719726, 0.018882560729980468, 0.01904435157775879, 0.018324480056762696, 0.0188538875579834, 0.018067455291748045, 0.017979391098022462, 0.01803264045715332, 0.01801523208618164, 0.01801625633239746, 0.01807257652282715, 0.01804595184326172, 0.017985536575317384, 0.018114559173583983, 0.017976320266723633, 0.018009088516235353, 0.017941503524780272, 0.018027519226074217, 0.017958911895751953, 0.017994752883911135, 0.018282495498657226, 0.01946828842163086, 0.01903615951538086, 0.01880268859863281, 0.018710527420043945, 
0.01820364761352539, 0.018488319396972656, 0.01799782371520996, 0.018102272033691406, 0.018339839935302735, 0.018122751235961913, 0.018043903350830077, 0.01807155227661133, 0.018174976348876954, 0.019141632080078123, 0.018817024230957033, 0.01842483139038086, 0.018128896713256838, 0.018144256591796876, 0.018501632690429686, 0.018298879623413086, 0.018082815170288084, 0.018267135620117187, 0.01878835105895996, 0.0180633602142334, 0.0180316162109375, 0.01806540870666504, 0.018096128463745118, 0.018336767196655272, 0.01803878402709961, 0.018050048828125, 0.018190336227416993, 0.018151424407958985, 0.01862758445739746, 0.018173952102661133, 0.018982912063598634, 0.018938880920410156, 0.01859584045410156, 0.018131967544555663, 0.018174976348876954, 0.018119680404663087, 0.017915903091430666, 0.018106367111206053, 0.018010112762451173, 0.018156543731689453, 0.017953792572021485, 0.018053119659423827, 0.017966079711914062, 0.018017280578613282, 0.017926143646240233, 0.017986560821533205, 0.017952768325805665, 0.017999872207641602, 0.020025344848632814, 0.01964236831665039, 0.018534400939941405, 0.018152448654174806, 0.018091007232666014, 0.018156543731689453, 0.017994752883911135, 0.01807155227661133, 0.018159616470336915, 0.017975296020507812, 0.017761280059814453, 0.01785139274597168, 0.017993728637695314, 0.017986560821533205, 0.01824358367919922, 0.01824870491027832, 0.01803059196472168, 0.018036735534667968, 0.017977344512939454, 0.018137088775634767, 0.018094079971313477, 0.01809715270996094, 0.018081792831420897, 0.018532352447509767, 0.018091007232666014, 0.01807257652282715, 0.01808793640136719, 0.01803980827331543, 0.018060287475585936, 0.018070528030395508, 0.018086912155151368, 0.01800294494628906, 0.018086912155151368, 0.01805516815185547, 0.018070528030395508, 0.018058240890502928, 0.018526208877563476, 0.018356224060058594, 0.018168832778930662, 0.01808793640136719, 0.018067455291748045, 0.018066432952880858, 0.018084863662719726, 0.018143232345581056, 0.01802137565612793, 0.018092031478881835, 0.018091007232666014, 0.018116607666015624, 0.018098175048828127, 0.018110464096069336, 0.018095104217529297, 0.017862655639648437, 0.01807257652282715, 0.01802444839477539, 0.018020351409912108, 0.01801420783996582, 0.018085887908935547, 0.01801113510131836, 0.018025472640991212, 0.01803878402709961, 0.01818009567260742, 0.018070528030395508, 0.018163711547851562, 0.018082815170288084, 0.018043903350830077, 0.018036735534667968, 0.018082815170288084, 0.018078720092773438, 0.018157567977905274, 0.01801523208618164, 0.018078720092773438, 0.01800601577758789, 0.018060287475585936, 0.018069503784179687, 0.01808076858520508, 0.018050048828125, 0.018084863662719726, 0.018122751235961913, 0.018518016815185546, 0.018107391357421874, 0.018102272033691406, 0.018190336227416993, 0.018116607666015624, 0.01810534477233887, 0.01820467185974121, 0.018153472900390624, 0.018028543472290038, 0.018118656158447266, 0.01802342414855957, 0.018113536834716795, 0.018158592224121094, 0.018136064529418947, 0.018092031478881835, 0.018053119659423827, 0.018081792831420897, 0.018052095413208007, 0.018108415603637695, 0.01805721664428711, 0.01806438446044922, 0.01806540870666504, 0.018114559173583983, 0.018020351409912108, 0.018098175048828127, 0.0180633602142334, 0.018067455291748045, 0.018036735534667968, 0.018103296279907227, 0.01803878402709961, 0.01803264045715332, 0.01803980827331543, 0.01805721664428711, 0.01799884796142578, 0.01781452751159668, 0.01776742362976074, 0.017819648742675782, 0.018009088516235353, 
0.018325504302978517, 0.01836031913757324, 0.018074623107910158, 0.0180633602142334, 0.01800396728515625, 0.01804800033569336, 0.017942527770996093, 0.01805721664428711, 0.01802649688720703, 0.018035711288452147, 0.018009088516235353, 0.017994752883911135, 0.01802649688720703, 0.018050048828125, 0.018035711288452147, 0.0184453125, 0.01804595184326172, 0.017794048309326172, 0.0177838077545166, 0.018050048828125, 0.018044927597045898, 0.018070528030395508, 0.017986560821533205, 0.018134016036987305, 0.018091007232666014, 0.0180633602142334, 0.018001920700073244, 0.018502656936645507, 0.01840025520324707, 0.018060287475585936, 0.018066432952880858, 0.01801523208618164, 0.018050048828125, 0.01800499153137207, 0.01869824028015137, 0.018316287994384766, 0.018102272033691406, 0.018044927597045898, 0.01819545555114746, 0.018017280578613282, 0.01802956771850586, 0.018058240890502928, 0.01817804718017578, 0.018104320526123048, 0.018119680404663087, 0.018076671600341796, 0.0180633602142334, 0.018115583419799804, 0.01808793640136719, 0.018076671600341796, 0.01804902458190918, 0.018098175048828127, 0.01805414390563965, 0.018125823974609375, 0.018092031478881835, 0.018126848220825196, 0.018125823974609375, 0.018100223541259765, 0.01844428825378418, 0.01804595184326172, 0.01801215934753418, 0.017941503524780272, 0.018164735794067383, 0.017976320266723633, 0.018068479537963866, 0.018008064270019532, 0.018103296279907227, 0.01803468894958496, 0.018473983764648438, 0.018059263229370116, 0.017994752883911135, 0.018183168411254884, 0.018144256591796876, 0.01807360076904297, 0.017975296020507812, 0.01803264045715332, 0.018085887908935547, 0.018315263748168945, 0.018123775482177733, 0.018058240890502928, 0.018078720092773438, 0.018364416122436524, 0.018095104217529297, 0.01805721664428711, 0.018134016036987305, 0.018129919052124025, 0.018033664703369142, 0.018092031478881835, 0.018043903350830077, 0.01808076858520508, 0.018074623107910158, 0.018033664703369142, 0.01821183967590332, 0.018051071166992186, 0.018096128463745118, 0.01799782371520996, 0.01801318359375, 0.018009088516235353, 0.018892799377441406, 0.019070976257324217, 0.0191016960144043, 0.018892799377441406, 0.01806540870666504, 0.01802956771850586, 0.018100223541259765, 0.01817087936401367, 0.018070528030395508, 0.0180316162109375, 0.01799065589904785, 0.01803468894958496, 0.01814630317687988, 0.018041856765747072, 0.018122751235961913, 0.018019327163696287, 0.018167808532714845, 0.018033664703369142, 0.018123775482177733, 0.018020351409912108, 0.018086912155151368, 0.01803468894958496, 0.01808793640136719, 0.018058240890502928, 0.01808076858520508, 0.0184268798828125, 0.017912832260131836, 0.018110464096069336, 0.01806438446044922, 0.018163711547851562, 0.018010112762451173, 0.01800704002380371, 0.01798041534423828, 0.018060287475585936, 0.01802444839477539, 0.018017280578613282, 0.018494464874267577, 0.01886207962036133, 0.018396223068237304, 0.018067392349243164, 0.01817087936401367, 0.01810534477233887, 0.018028543472290038, 0.018150400161743165, 0.018101247787475586, 0.01807360076904297, 0.01800704002380371, 0.018168832778930662, 0.01799577522277832, 0.018112512588500978, 0.01801420783996582, 0.018059263229370116, 0.018084863662719726, 0.018077695846557617, 0.018084863662719726, 0.018084863662719726, 0.018145280838012694, 0.018060287475585936, 0.018249727249145507, 0.018145280838012694, 0.018149375915527344, 0.018060287475585936, 0.01806540870666504, 0.018053119659423827, 0.018158592224121094, 0.018061311721801757, 0.01809715270996094, 
0.018033664703369142, 0.018053119659423827, 0.01808896064758301, 0.0180633602142334, 0.01805619239807129, 0.018101247787475586, 0.01803775978088379, 0.018128896713256838, 0.01826918411254883, 0.018082815170288084, 0.018052095413208007, 0.01803059196472168, 0.017991680145263672, 0.01843916893005371, 0.01801215934753418, 0.017984512329101563, 0.018774015426635742, 0.018592767715454102, 0.018569215774536133, 0.018185216903686522, 0.018490367889404297, 0.018083839416503905, 0.01786675262451172, 0.01968435287475586, 0.018694143295288086, 0.01801523208618164, 0.01802137565612793, 0.01839206314086914, 0.018076671600341796, 0.018050048828125, 0.01801625633239746, 0.018324480056762696, 0.018044927597045898, 0.01982259178161621, 0.01903411293029785, 0.018482175827026368, 0.018155519485473632, 0.01817087936401367, 0.01805619239807129, 0.018082815170288084, 0.01803059196472168, 0.01804697608947754, 0.018067455291748045, 0.01801523208618164, 0.01801523208618164, 0.01803775978088379, 0.017999872207641602, 0.0180633602142334, 0.018033664703369142, 0.018081792831420897, 0.01801523208618164, 0.01807155227661133, 0.01802342414855957, 0.01801625633239746, 0.018025472640991212, 0.018107391357421874, 0.01802649688720703, 0.018052095413208007, 0.018050048828125, 0.018051071166992186, 0.0180316162109375, 0.018132991790771484, 0.018059263229370116, 0.018085887908935547, 0.018085887908935547, 0.01803059196472168, 0.018480127334594726, 0.0180316162109375, 0.018152448654174806, 0.018103296279907227, 0.01816166305541992, 0.018148351669311523, 0.01822003173828125, 0.018096128463745118, 0.018100223541259765, 0.018103296279907227, 0.018134016036987305, 0.017992704391479493, 0.018973695755004884, 0.0194334716796875, 0.019066879272460938, 0.018329599380493163, 0.01824665641784668, 0.018127872467041017, 0.01804595184326172]",tokens/s,55.06683450989103,,,,,,,, -4bit-awq-gemv-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,facebook/xglm-7.5B,facebook/xglm-7.5B,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in 
load_model_with_no_weights - self.load_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3704, in from_pretrained - config = cls._autoset_attn_implementation( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1481, in _autoset_attn_implementation - cls._check_and_enable_flash_attn_2( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1572, in _check_and_enable_flash_attn_2 - raise ValueError( -ValueError: XGLMForCausalLM does not support Flash Attention 2.0 yet. Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmptiq_oici/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemv-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,huggyllama/llama-30b,huggyllama/llama-30b,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run - report = scenario.run(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 105, in run - _ = backend.generate(self.inputs, self.config.generate_kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context - return func(*args, **kwargs) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 400, in generate - return self.pretrained_model.generate(**inputs, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context - return func(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 1914, in generate - result = self._sample( - File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2651, in 
_sample - outputs = self( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1174, in forward - outputs = self.model( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 978, in forward - layer_outputs = decoder_layer( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 718, in forward - hidden_states, self_attn_weights, present_key_value = self.self_attn( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 414, in forward - query_states = self.q_proj(hidden_states) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context - return func(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/gemv.py"", line 162, in forward - assert AWQ_INSTALLED, ( -AssertionError: AWQ kernels could not be loaded. 
Please install them from https://github.com/casper-hansen/AutoAWQ_kernels - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemv-flash_attention_2,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,openai-community/gpt2-large,openai-community/gpt2-large,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.1,,0.30.1,,,,1.19.2,,,,0.11.1,,,,,,,,,,,,,,,,,,,,,,,,,,,MB,1237.069824,2645.03296,0.0,1998.585856,1692.285952,s,10,0.19134716606140137,0.019134716606140138,0.0005608761432091296,0.018977791786193847,0.0197670597076416,0.02017293758392334,0.02049763988494873,"[0.02057881546020508, 0.0190994873046875, 0.0187042236328125, 0.018728607177734374, 0.01878009605407715, 0.018654111862182618, 0.01891535949707031, 0.019040224075317382, 0.019169376373291015, 0.019676864624023436]",tokens/s,13378.823698797409,kWh,2.1992317273112132e-07,1.2050741516561284e-07,6.735971662309277e-07,1.014027754127662e-06,tokens/kWh,252458573.2076231,MB,1237.069824,2645.03296,0.0,1998.585856,1740.085248,s,10,11.554657470703125,1.1554657470703125,0.013645969689330972,1.1534767456054689,1.1682131958007813,1.1769882507324219,1.1840082946777344,"[1.1857633056640624, 1.14386572265625, 1.1522501220703125, 1.1418011474609375, 1.154703369140625, 1.1461051025390625, 1.1383001708984375, 1.1644542236328126, 1.16626318359375, 1.161151123046875]",tokens/s,54.52346827219822,kWh,1.387793931927391e-05,7.604759108500891e-06,2.8986009894769086e-05,5.0468708322543894e-05,tokens/kWh,1248298.244475944,,s,629,11.705972766876219,0.0186104495498827,0.0023066301847454542,0.018160640716552736,0.01886699562072754,0.01908162498474121,0.03717550079345703,"[0.019216384887695313, 0.018964479446411133, 0.018840576171875, 0.01904947280883789, 0.01902387237548828, 0.019145727157592773, 0.01901568031311035, 0.018939903259277344, 0.018876415252685547, 0.018917375564575196, 0.019091455459594727, 0.019136512756347656, 0.018971647262573242, 0.018998271942138673, 0.019066879272460938, 0.018921472549438476, 0.018934783935546876, 0.019094528198242186, 0.019323904037475585, 0.019158016204833983, 0.01887027168273926, 0.01883955192565918, 0.01860095977783203, 0.019615743637084963, 0.019579904556274414, 0.019698720932006836, 0.018974687576293944, 0.018787328720092773, 0.01881292724609375, 0.01901055908203125, 0.018912256240844725, 0.018939903259277344, 0.018815999984741212, 0.01886617660522461, 0.018852863311767578, 0.018775039672851563, 0.0186746883392334, 0.018123775482177733, 0.018050048828125, 0.018131967544555663, 0.018062335968017578, 0.018153472900390624, 0.018135040283203126, 0.01808076858520508, 0.018112512588500978, 0.018164735794067383, 0.018568191528320312, 0.01880166435241699, 0.018711551666259766, 0.018769920349121092, 0.01878937530517578, 0.018946048736572265, 0.018766847610473633, 0.01879347229003906, 0.0188221435546875, 0.01882111930847168, 0.01923788833618164, 0.01900748825073242, 0.018765823364257812, 0.018397184371948243, 0.01823232078552246, 0.018239488601684572, 0.03753472137451172, 0.018603008270263673, 
0.018795520782470702, 0.018242559432983398, 0.01817087936401367, 0.018142208099365235, 0.01823232078552246, 0.018128896713256838, 0.018149375915527344, 0.018166784286499024, 0.01822719955444336, 0.018189311981201172, 0.0182108154296875, 0.018174976348876954, 0.018280448913574218, 0.01822822380065918, 0.01816166305541992, 0.018155519485473632, 0.018177024841308592, 0.01824563217163086, 0.018111520767211915, 0.018137056350708006, 0.018149375915527344, 0.018100223541259765, 0.01813811111450195, 0.01783500862121582, 0.01787494468688965, 0.017894399642944335, 0.017953792572021485, 0.017885183334350584, 0.017896448135375977, 0.017854463577270507, 0.01788313674926758, 0.017912832260131836, 0.017934335708618163, 0.018101247787475586, 0.01818726348876953, 0.018156543731689453, 0.018149375915527344, 0.018177024841308592, 0.01805619239807129, 0.01822003173828125, 0.018223104476928712, 0.018168832778930662, 0.018167808532714845, 0.018589696884155273, 0.018256895065307616, 0.018077695846557617, 0.018156543731689453, 0.018137088775634767, 0.018282495498657226, 0.018156543731689453, 0.018117631912231445, 0.01825484848022461, 0.018134016036987305, 0.018223104476928712, 0.018164735794067383, 0.018197504043579102, 0.01816985511779785, 0.018124799728393554, 0.018155519485473632, 0.018290687561035156, 0.018130943298339842, 0.037168128967285156, 0.01818009567260742, 0.018239551544189454, 0.018205631256103517, 0.018103296279907227, 0.018289663314819335, 0.018159616470336915, 0.018160640716552736, 0.01822208023071289, 0.01824563217163086, 0.018258943557739257, 0.0182794246673584, 0.01820569610595703, 0.01816268730163574, 0.01822822380065918, 0.018748416900634765, 0.01879859161376953, 0.018307071685791015, 0.018050048828125, 0.01819340705871582, 0.01815449523925781, 0.01820569610595703, 0.01810534477233887, 0.0184268798828125, 0.018166784286499024, 0.018199552536010744, 0.018164735794067383, 0.01816268730163574, 0.018464767456054687, 0.018880512237548826, 0.01871670341491699, 0.01874940872192383, 0.0186562557220459, 0.018355199813842774, 0.018729984283447267, 0.018807807922363282, 0.01882009506225586, 0.018563072204589845, 0.018144256591796876, 0.01816985511779785, 0.018226175308227538, 0.018223104476928712, 0.01814630317687988, 0.018119680404663087, 0.018130943298339842, 0.018096128463745118, 0.018131967544555663, 0.018359296798706053, 0.018117631912231445, 0.018168832778930662, 0.018172927856445312, 0.018096128463745118, 0.018293760299682618, 0.01820057678222656, 0.018135040283203126, 0.018155519485473632, 0.018283519744873047, 0.01820057678222656, 0.018164735794067383, 0.01820364761352539, 0.018145280838012694, 0.01822412872314453, 0.01813811111450195, 0.03717836761474609, 0.01821388816833496, 0.018144256591796876, 0.018122751235961913, 0.018102272033691406, 0.018163711547851562, 0.018121728897094725, 0.018184192657470705, 0.018119680404663087, 0.01819545555114746, 0.01816166305541992, 0.018160640716552736, 0.018266111373901366, 0.01821696090698242, 0.018255872726440428, 0.018323455810546875, 0.018198528289794923, 0.01815449523925781, 0.018103296279907227, 0.018280448913574218, 0.018265087127685545, 0.018174976348876954, 0.018153472900390624, 0.018174976348876954, 0.018192384719848635, 0.018147327423095702, 0.018134016036987305, 0.018181119918823242, 0.018050048828125, 0.01806438446044922, 0.018141183853149414, 0.018076671600341796, 0.01815760040283203, 0.018163679122924804, 0.018155519485473632, 0.018151424407958985, 0.01803468894958496, 0.01807360076904297, 0.018167808532714845, 0.018008064270019532, 
0.017787904739379884, 0.017830911636352538, 0.017772544860839845, 0.01799782371520996, 0.018382848739624022, 0.018126848220825196, 0.017991680145263672, 0.01817087936401367, 0.018077695846557617, 0.018139135360717772, 0.018092031478881835, 0.018158592224121094, 0.018059263229370116, 0.018089984893798827, 0.018041856765747072, 0.018061311721801757, 0.018103296279907227, 0.018142208099365235, 0.018017280578613282, 0.018050048828125, 0.018076671600341796, 0.01803468894958496, 0.017994752883911135, 0.03722649765014648, 0.018111488342285157, 0.01818623924255371, 0.017994752883911135, 0.01804800033569336, 0.01926246452331543, 0.019359743118286133, 0.01949286460876465, 0.01882316780090332, 0.01883033561706543, 0.018815999984741212, 0.019165184020996092, 0.018874368667602538, 0.01860915184020996, 0.01880166435241699, 0.018739200592041014, 0.018692096710205077, 0.018678783416748047, 0.018107391357421874, 0.018124799728393554, 0.018082815170288084, 0.018174976348876954, 0.018091007232666014, 0.018122751235961913, 0.018191360473632814, 0.018147327423095702, 0.018148351669311523, 0.018145280838012694, 0.01804287910461426, 0.018082815170288084, 0.017983488082885742, 0.01805721664428711, 0.018076671600341796, 0.018067455291748045, 0.018149375915527344, 0.018094079971313477, 0.018135040283203126, 0.01814630317687988, 0.018142208099365235, 0.018115583419799804, 0.018166784286499024, 0.018386943817138672, 0.019382272720336914, 0.018767871856689454, 0.018694143295288086, 0.01863270378112793, 0.01861427116394043, 0.018083839416503905, 0.018103296279907227, 0.018050048828125, 0.018113536834716795, 0.018058240890502928, 0.01802342414855957, 0.01777663993835449, 0.017921024322509766, 0.01782681655883789, 0.017921024322509766, 0.01846784019470215, 0.018112512588500978, 0.018089984893798827, 0.018061311721801757, 0.018089984893798827, 0.01805516815185547, 0.036992000579833983, 0.018264064788818358, 0.0181790714263916, 0.01810534477233887, 0.018128896713256838, 0.018123775482177733, 0.018157567977905274, 0.018098175048828127, 0.01805516815185547, 0.018114559173583983, 0.01809715270996094, 0.01816166305541992, 0.018075647354125975, 0.018110464096069336, 0.018122751235961913, 0.018165760040283203, 0.01822208023071289, 0.018057247161865235, 0.018113504409790038, 0.01807360076904297, 0.018062335968017578, 0.018144256591796876, 0.01813811111450195, 0.018101247787475586, 0.018135040283203126, 0.018324480056762696, 0.018190336227416993, 0.018036735534667968, 0.017916927337646483, 0.017898496627807618, 0.01784012794494629, 0.01798454475402832, 0.018193376541137694, 0.018103328704833985, 0.018072544097900392, 0.018150400161743165, 0.01800601577758789, 0.01804595184326172, 0.01846784019470215, 0.02028441619873047, 0.019355648040771483, 0.018916351318359375, 0.019009536743164062, 0.01843404769897461, 0.018092031478881835, 0.017987583160400392, 0.018111488342285157, 0.01802444839477539, 0.01803775978088379, 0.018012191772460936, 0.018046943664550782, 0.018084863662719726, 0.018102272033691406, 0.018140159606933593, 0.018076671600341796, 0.01803878402709961, 0.018139135360717772, 0.018066432952880858, 0.018126848220825196, 0.018110464096069336, 0.018114559173583983, 0.018098175048828127, 0.01814630317687988, 0.037294078826904296, 0.018145280838012694, 0.018086912155151368, 0.018075647354125975, 0.018165760040283203, 0.018132991790771484, 0.018043903350830077, 0.018018304824829103, 0.018028543472290038, 0.01806540870666504, 0.018144256591796876, 0.018074623107910158, 0.018058240890502928, 0.0180316162109375, 
0.01807974433898926, 0.018182144165039063, 0.018156543731689453, 0.017967103958129883, 0.01810534477233887, 0.018052095413208007, 0.018083839416503905, 0.017984512329101563, 0.01803878402709961, 0.018317312240600587, 0.018141183853149414, 0.01800396728515625, 0.018159616470336915, 0.018051071166992186, 0.017992704391479493, 0.01800499153137207, 0.01804287910461426, 0.01803059196472168, 0.01802649688720703, 0.017954816818237306, 0.018076671600341796, 0.018121728897094725, 0.01807257652282715, 0.018082815170288084, 0.01803059196472168, 0.01807974433898926, 0.017958911895751953, 0.01796505546569824, 0.01797532844543457, 0.018025440216064455, 0.01803878402709961, 0.01806540870666504, 0.01796505546569824, 0.018125823974609375, 0.018075647354125975, 0.018061311721801757, 0.0180316162109375, 0.018134016036987305, 0.018086912155151368, 0.018164735794067383, 0.01799679946899414, 0.01802137565612793, 0.01804083251953125, 0.017947647094726564, 0.018076671600341796, 0.018101247787475586, 0.018081792831420897, 0.018067455291748045, 0.018050079345703126, 0.037448673248291015, 0.01804595184326172, 0.01804287910461426, 0.01805721664428711, 0.018019327163696287, 0.017979391098022462, 0.01803878402709961, 0.01813811111450195, 0.018076671600341796, 0.01804287910461426, 0.018136064529418947, 0.01821286392211914, 0.01808076858520508, 0.01781862449645996, 0.017942527770996093, 0.017855487823486327, 0.018158592224121094, 0.01843507194519043, 0.018691072463989256, 0.018387968063354493, 0.018655231475830078, 0.018695167541503906, 0.018689023971557618, 0.01859891128540039, 0.018722816467285155, 0.018726911544799805, 0.018397184371948243, 0.018044927597045898, 0.018075647354125975, 0.018089984893798827, 0.01805721664428711, 0.018367488861083983, 0.01881907272338867, 0.018697216033935548, 0.01864499282836914, 0.01866649627685547, 0.01862860870361328, 0.018646015167236327, 0.01884160041809082, 0.021174272537231444, 0.019122175216674805, 0.018735103607177735, 0.01868083190917969, 0.01863167953491211, 0.018770944595336913, 0.01869004821777344, 0.018700288772583007, 0.018790399551391602, 0.018677759170532226, 0.01881395149230957, 0.01878937530517578, 0.018729984283447267, 0.018126848220825196, 0.018069503784179687, 0.018150400161743165, 0.018163711547851562, 0.018404352188110353, 0.01885593605041504, 0.018689023971557618, 0.018748416900634765, 0.018749439239501953, 0.018803712844848632, 0.018762752532958983, 0.037664768218994144, 0.01887539291381836, 0.018889728546142577, 0.018763776779174804, 0.01877507209777832, 0.0187412166595459, 0.018119680404663087, 0.01802342414855957, 0.018107391357421874, 0.018082815170288084, 0.018150400161743165, 0.018117631912231445, 0.018062335968017578, 0.018276351928710938, 0.018740224838256835, 0.01885798454284668, 0.01875660705566406, 0.018753536224365236, 0.018559999465942383, 0.01843302345275879, 0.018131967544555663, 0.01803878402709961, 0.018724863052368163, 0.018746368408203123, 0.018697216033935548, 0.018694143295288086, 0.01867263984680176, 0.018733055114746093, 0.018939903259277344, 0.019005439758300782, 0.018803712844848632, 0.018717695236206054, 0.01846886444091797, 0.018082815170288084, 0.018060287475585936, 0.01808793640136719, 0.01844428825378418, 0.018759679794311524, 0.018736127853393555, 0.0185743350982666, 0.018044927597045898, 0.01803264045715332, 0.01802444839477539, 0.01799884796142578, 0.018070528030395508, 0.01820364761352539, 0.018120704650878908, 0.01862758445739746, 0.01864192008972168, 0.018712575912475587, 0.018655231475830078, 0.018743295669555664, 
0.01866547203063965, 0.01861734390258789, 0.0187238712310791, 0.018577375411987306, 0.01866035270690918, 0.018686975479125977, 0.01861324882507324, 0.01904742431640625, 0.018710527420043945, 0.018672672271728516, 0.018756576538085937, 0.03872665786743164, 0.0188272647857666, 0.018716672897338867, 0.01861529541015625, 0.018856000900268555, 0.01869100761413574, 0.018774015426635742, 0.0186746883392334, 0.018679807662963867, 0.01840947151184082, 0.017886207580566405, 0.017796096801757814, 0.01823539161682129, 0.018374656677246092, 0.018529279708862305, 0.018464767456054687, 0.018569215774536133, 0.01862348747253418, 0.019160064697265625, 0.01885593605041504, 0.01883545684814453, 0.018778112411499022, 0.018763776779174804, 0.018713600158691408, 0.018767871856689454, 0.018744319915771485, 0.01897881507873535, 0.02040934371948242, 0.020281343460083007, 0.018932735443115235, 0.018964479446411133, 0.01883852767944336, 0.018106367111206053, 0.018100223541259765, 0.018060287475585936, 0.018084863662719726, 0.01822003173828125, 0.018050048828125, 0.018067455291748045, 0.01802444839477539, 0.018092031478881835, 0.0180633602142334, 0.018075647354125975, 0.018059263229370116, 0.01801215934753418, 0.018120704650878908, 0.018118656158447266, 0.018052095413208007, 0.018059263229370116, 0.01803264045715332, 0.01808076858520508, 0.018027519226074217, 0.018068479537963866, 0.018044927597045898, 0.018103296279907227, 0.018051071166992186, 0.018059263229370116, 0.018085887908935547, 0.018165760040283203, 0.018076671600341796, 0.018082815170288084, 0.018075647354125975, 0.018073631286621095]",tokens/s,53.73325331661872,,,,,,,, -4bit-awq-gemv-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,tiiuae/falcon-180B,tiiuae/falcon-180B,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 304, in hf_raise_for_status - response.raise_for_status() - File ""/usr/local/lib/python3.10/dist-packages/requests/models.py"", line 1024, in raise_for_status - raise HTTPError(http_error_msg, response=self) -requests.exceptions.HTTPError: 403 Client Error: Forbidden for url: https://huggingface.co/tiiuae/falcon-180B/resolve/main/config.json - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1722, in _get_metadata_or_catch_error - metadata = 
get_hf_file_metadata(url=url, proxies=proxies, timeout=etag_timeout, headers=headers) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1645, in get_hf_file_metadata - r = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 372, in _request_wrapper - response = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 396, in _request_wrapper - hf_raise_for_status(response) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 367, in hf_raise_for_status - raise HfHubHTTPError(message, response=response) from e -huggingface_hub.utils._errors.HfHubHTTPError: (Request ID: Root=1-66949949-4b302278458c510215cc2ae4;1e1a56f7-55f7-4a20-a761-437ba8cc1f59) - -403 Forbidden: Please enable access to public gated repositories in your fine-grained token settings to view this repository.. -Cannot access content at: https://huggingface.co/tiiuae/falcon-180B/resolve/main/config.json. -If you are trying to create or update content,make sure you have a token with the `write` role. - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1221, in hf_hub_download - return _hf_hub_download_to_cache_dir( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1325, in _hf_hub_download_to_cache_dir - _raise_on_head_call_error(head_call_error, force_download, local_files_only) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1826, in _raise_on_head_call_error - raise LocalEntryNotFoundError( -huggingface_hub.utils._errors.LocalEntryNotFoundError: An error happened while trying to locate the file on the Hub and we cannot find the requested files in the local cache. Please check your connection and try again or make sure your Internet connection is on. 
- -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 445, in cached_file - raise EnvironmentError( -OSError: We couldn't connect to 'https://huggingface.co' to load this file, couldn't find it in the cached files and it looks like tiiuae/falcon-180B is not the path to a directory containing a file named config.json. -Checkout your internet connection or see how to run the library in offline mode at 'https://huggingface.co/docs/transformers/installation#offline-mode'. 
- -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemv-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,Deci/DeciLM-7B,Deci/DeciLM-7B,cuda,0,42,,,True,,,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,deci,MB,4382.429184,4769.447936,0.0,4183.81824,4182.069248,s,1,9.4664892578125,9.4664892578125,0.0,9.4664892578125,9.4664892578125,9.4664892578125,9.4664892578125,[9.4664892578125],,kWh,3.185749394305806e-05,1.7443398194287977e-05,4.3412534729975194e-05,9.271342686732124e-05,,MB,1738.93632,4953.997312,0.0,4307.550208,4281.174016,s,10,1.0667434310913086,0.10667434310913086,5.854842700539137e-05,0.10665380859375,0.10676968078613282,0.10677162399291992,0.10677317855834961,"[0.10677356719970703, 0.10662000274658204, 0.10663833618164062, 0.10676924896240235, 0.10662432098388672, 0.10671190643310546, 0.10666928100585937, 0.10662067413330079, 0.10670178985595703, 0.10661430358886718]",tokens/s,2399.8272924737375,kWh,1.260066658702757e-06,6.904544042061277e-07,7.177532337765826e-06,9.128053400674712e-06,tokens/kWh,28045409.9864356,MB,1749.520384,4968.677376,0.0,4322.230272,4281.176576,s,10,17.4349365234375,1.74349365234375,0.007660187812129348,1.741386962890625,1.7549929809570313,1.7561424255371094,1.7570619812011719,"[1.7364151611328125, 1.7476153564453125, 1.74721142578125, 1.7572918701171876, 1.754737548828125, 1.7355184326171875, 1.7382025146484374, 1.7351702880859374, 1.7382806396484376, 1.7444932861328124]",tokens/s,36.134344346657144,kWh,2.1585141281366744e-05,1.1829104553353886e-05,5.900310394003309e-05,9.241734977475372e-05,tokens/kWh,681690.1821308248,,s,630,17.4323036365509,0.027670323232620486,0.0006980212484571102,0.027415040016174317,0.028317798042297365,0.028715571117401122,0.03122675651550293,"[0.02778726387023926, 0.0273305606842041, 0.02728447914123535, 0.027461631774902344, 0.027303936004638672, 0.027265024185180665, 0.0273623046875, 0.02733363151550293, 0.027394048690795897, 0.02710425567626953, 0.027253759384155272, 0.02754047966003418, 0.027051008224487305, 0.027045888900756834, 0.027206655502319335, 0.02736742401123047, 0.02730291175842285, 0.02736128044128418, 0.027020288467407227, 0.027497472763061522, 0.027448320388793947, 0.027999231338500977, 0.027535360336303712, 0.028298240661621094, 0.027596799850463868, 0.02729471969604492, 0.027297792434692384, 0.027422719955444336, 0.027248640060424805, 0.027475967407226562, 0.02716160011291504, 0.027108352661132814, 0.027693056106567384, 0.02981171226501465, 0.03019366455078125, 0.027884544372558592, 0.02735001564025879, 0.027451391220092772, 0.027281408309936524, 0.027630592346191408, 0.02738380813598633, 0.02731724739074707, 0.027427839279174804, 0.027414527893066407, 0.02758963203430176, 0.029882368087768556, 0.028633087158203126, 0.028266496658325195, 0.027372543334960937, 0.027345920562744142, 0.02740940856933594, 0.02715340805053711, 0.027304960250854493, 0.027337728500366212, 0.027395072937011718, 0.027321344375610353, 0.027313152313232423, 0.027654144287109376, 
0.027402240753173827, 0.027382783889770508, 0.02778726387023926, 0.027279359817504883, 0.02735103988647461, 0.027197439193725585, 0.027752447128295898, 0.027627519607543945, 0.02815692710876465, 0.03125043106079101, 0.02795827293395996, 0.0279685115814209, 0.02735308837890625, 0.0274616641998291, 0.027472864151000975, 0.02721075248718262, 0.027279359817504883, 0.0271646728515625, 0.0275732479095459, 0.02735820770263672, 0.0275281925201416, 0.02730700874328613, 0.027416576385498048, 0.027661312103271486, 0.027431936264038087, 0.027251712799072264, 0.027305984497070314, 0.02735001564025879, 0.027373567581176757, 0.0273756160736084, 0.027329536437988283, 0.027664384841918944, 0.028071935653686524, 0.02751283264160156, 0.027696128845214843, 0.027190271377563476, 0.027397119522094726, 0.02736844825744629, 0.027279359817504883, 0.02730803108215332, 0.027454463958740235, 0.027404287338256835, 0.0275732479095459, 0.02835148811340332, 0.027250688552856447, 0.02754764747619629, 0.030052352905273437, 0.030196735382080078, 0.027801599502563477, 0.027513856887817382, 0.028318719863891603, 0.027648000717163085, 0.02753228759765625, 0.02736947250366211, 0.027312128067016602, 0.027278335571289062, 0.027286527633666992, 0.02732646369934082, 0.027340799331665038, 0.027296768188476563, 0.027303936004638672, 0.027265024185180665, 0.027234304428100587, 0.027322368621826174, 0.027378688812255858, 0.028237823486328126, 0.031889408111572266, 0.029060096740722657, 0.02730086326599121, 0.027518976211547853, 0.027240447998046875, 0.027125759124755858, 0.027672576904296874, 0.02716160011291504, 0.028110847473144532, 0.02815180778503418, 0.027800575256347656, 0.028068864822387695, 0.027579391479492187, 0.02834943962097168, 0.03132825660705566, 0.02837606430053711, 0.028076032638549804, 0.027484159469604492, 0.02735513687133789, 0.02733158493041992, 0.027256832122802735, 0.027236352920532225, 0.027226112365722657, 0.027232255935668945, 0.027447296142578126, 0.02726092720031738, 0.02736742401123047, 0.027190271377563476, 0.027068416595458986, 0.02720256042480469, 0.027329536437988283, 0.02733363151550293, 0.02729471969604492, 0.027297792434692384, 0.027999231338500977, 0.02735001564025879, 0.027870208740234374, 0.027320320129394532, 0.027280384063720704, 0.02728447914123535, 0.02721177673339844, 0.02756608009338379, 0.027485183715820313, 0.027279359817504883, 0.027191295623779296, 0.027505664825439452, 0.02735308837890625, 0.02732441520690918, 0.027283456802368163, 0.027396095275878905, 0.027642879486083984, 0.029831167221069335, 0.030216192245483397, 0.028423168182373046, 0.028033023834228517, 0.028062719345092774, 0.02817024040222168, 0.028136447906494142, 0.028040191650390626, 0.027841535568237305, 0.02814156723022461, 0.02791935920715332, 0.028014591217041016, 0.028048383712768556, 0.0279736328125, 0.027634687423706054, 0.02833612823486328, 0.028299264907836914, 0.028095487594604493, 0.028511232376098632, 0.028103679656982423, 0.02817228889465332, 0.028034048080444338, 0.028006399154663086, 0.028099584579467773, 0.028019712448120116, 0.02812313652038574, 0.02836275291442871, 0.02814771270751953, 0.02815488052368164, 0.028275711059570312, 0.02835148811340332, 0.02871603202819824, 0.028691455841064452, 0.03120639991760254, 0.02880614471435547, 0.028252159118652344, 0.027760639190673828, 0.028827648162841796, 0.028980224609375, 0.028447744369506835, 0.02780364799499512, 0.0272936954498291, 0.02751692771911621, 0.02721177673339844, 0.027386880874633788, 0.02734694480895996, 0.02717081642150879, 0.02731110382080078, 
0.027610111236572265, 0.027257856369018556, 0.027387903213500975, 0.027406335830688477, 0.027245567321777343, 0.02776678466796875, 0.027267072677612306, 0.02713804817199707, 0.027389951705932617, 0.02729471969604492, 0.027206655502319335, 0.027297792434692384, 0.02730086326599121, 0.02735308837890625, 0.027585535049438475, 0.027265024185180665, 0.027270143508911132, 0.0272988166809082, 0.027674623489379883, 0.027255807876586914, 0.027272192001342774, 0.0273756160736084, 0.02871500778198242, 0.031148031234741212, 0.02835968017578125, 0.027235328674316408, 0.02736128044128418, 0.027494400024414063, 0.02736128044128418, 0.02840166473388672, 0.02752511978149414, 0.02712883186340332, 0.027642879486083984, 0.02735308837890625, 0.02728447914123535, 0.02798182487487793, 0.027778047561645508, 0.027867136001586915, 0.02752409553527832, 0.027158527374267577, 0.02712985610961914, 0.02735308837890625, 0.027289600372314454, 0.027378688812255858, 0.0273756160736084, 0.02730086326599121, 0.027822080612182616, 0.02735513687133789, 0.0272936954498291, 0.027230207443237304, 0.027472896575927733, 0.027493375778198242, 0.027279359817504883, 0.027265024185180665, 0.028049407958984376, 0.028317695617675782, 0.031071231842041015, 0.028452863693237306, 0.02729471969604492, 0.027405311584472656, 0.028003328323364256, 0.028265472412109374, 0.028092416763305664, 0.028033023834228517, 0.02811494445800781, 0.028302335739135744, 0.028194816589355468, 0.028015615463256836, 0.02794598388671875, 0.028112895965576173, 0.02814873504638672, 0.02832691192626953, 0.02795929527282715, 0.028424192428588867, 0.028089344024658205, 0.028100608825683594, 0.02820812797546387, 0.02833612823486328, 0.02817945671081543, 0.028169216156005858, 0.02813030433654785, 0.028188671112060547, 0.029477888107299805, 0.02877644729614258, 0.027782144546508788, 0.02709503936767578, 0.0276889591217041, 0.02735513687133789, 0.02733875274658203, 0.027622400283813478, 0.027411455154418944, 0.02733260726928711, 0.028049407958984376, 0.02759065628051758, 0.027370496749877928, 0.02734182357788086, 0.02755276870727539, 0.02737664031982422, 0.027354112625122072, 0.027247615814208984, 0.027632640838623046, 0.027687936782836913, 0.02750464057922363, 0.027502592086791993, 0.027433984756469725, 0.027389951705932617, 0.027910144805908203, 0.027460607528686523, 0.02758143997192383, 0.02731827163696289, 0.027421695709228516, 0.02737766456604004, 0.027419647216796874, 0.02736128044128418, 0.02815283203125, 0.028085248947143555, 0.027262975692749023, 0.027364351272583007, 0.027603967666625977, 0.02773401641845703, 0.027778047561645508, 0.02716876792907715, 0.02772377586364746, 0.027445247650146484, 0.02735513687133789, 0.02736128044128418, 0.028169216156005858, 0.03141427230834961, 0.0279418888092041, 0.02750771141052246, 0.027340799331665038, 0.027206655502319335, 0.02726092720031738, 0.02731827163696289, 0.02731520080566406, 0.027365375518798828, 0.02737664031982422, 0.027489280700683592, 0.027239423751831054, 0.028082176208496092, 0.02756505584716797, 0.027425792694091795, 0.027644927978515626, 0.027246591567993163, 0.027191295623779296, 0.027254783630371093, 0.027282432556152345, 0.027291648864746092, 0.027397119522094726, 0.02730291175842285, 0.027490304946899413, 0.027478015899658204, 0.027337728500366212, 0.02732441520690918, 0.02710527992248535, 0.027262975692749023, 0.02735923194885254, 0.02731622314453125, 0.02731110382080078, 0.02837708854675293, 0.03097804832458496, 0.02997760009765625, 0.02778316879272461, 0.02749849510192871, 0.0273305606842041, 
0.027200511932373047, 0.027146240234375, 0.027259904861450194, 0.027248640060424805, 0.02758963203430176, 0.02736639976501465, 0.02752102470397949, 0.02731520080566406, 0.027427839279174804, 0.027687936782836913, 0.02729267120361328, 0.02716876792907715, 0.027273216247558595, 0.027215871810913086, 0.027259904861450194, 0.027287551879882813, 0.027291648864746092, 0.027874303817749024, 0.028214271545410157, 0.027424768447875978, 0.027421695709228516, 0.027261951446533202, 0.027282432556152345, 0.027320320129394532, 0.027389951705932617, 0.027463680267333986, 0.028449792861938477, 0.027638784408569338, 0.027439104080200196, 0.027473920822143554, 0.02728550338745117, 0.027214847564697265, 0.027865087509155274, 0.031235071182250978, 0.028011520385742186, 0.027220991134643553, 0.02736025619506836, 0.02736332893371582, 0.027270143508911132, 0.027247615814208984, 0.02734694480895996, 0.027299840927124022, 0.027278335571289062, 0.027628543853759766, 0.02729267120361328, 0.02772787284851074, 0.027472896575927733, 0.027289600372314454, 0.027207679748535156, 0.027061248779296877, 0.027320320129394532, 0.027257856369018556, 0.027305984497070314, 0.027421695709228516, 0.027215871810913086, 0.027280384063720704, 0.027372543334960937, 0.02795724868774414, 0.02872217559814453, 0.02784668731689453, 0.027331552505493163, 0.027262975692749023, 0.02735206413269043, 0.027216896057128907, 0.027245567321777343, 0.02748313522338867, 0.02834124755859375, 0.03138150405883789, 0.027778047561645508, 0.02735001564025879, 0.027388927459716796, 0.02740019226074219, 0.027517951965332032, 0.027510784149169923, 0.02748313522338867, 0.029592575073242186, 0.028456960678100586, 0.027320320129394532, 0.027709440231323244, 0.027271167755126953, 0.027257856369018556, 0.027283456802368163, 0.02689740753173828, 0.02716364860534668, 0.027199487686157226, 0.02734489631652832, 0.02796134376525879, 0.027357183456420898, 0.027299840927124022, 0.027216896057128907, 0.027429887771606445, 0.02733875274658203, 0.027125759124755858, 0.026983423233032225, 0.02758143997192383, 0.02731007957458496, 0.027296768188476563, 0.027125759124755858, 0.027150335311889647, 0.027402240753173827, 0.027288576126098633, 0.027494400024414063, 0.027257856369018556, 0.027402240753173827, 0.02796441650390625, 0.028664831161499024, 0.027494400024414063, 0.027198463439941405, 0.02733875274658203, 0.02731929588317871, 0.027166719436645507, 0.027048959732055664, 0.027024383544921874, 0.0272988166809082, 0.027700223922729493, 0.027288576126098633, 0.027207679748535156, 0.027242496490478517, 0.027257856369018556, 0.028636159896850585, 0.0273305606842041, 0.027248640060424805, 0.027872255325317383, 0.0277708797454834, 0.027201536178588868, 0.027619327545166016, 0.02730086326599121, 0.02795212745666504, 0.0273756160736084, 0.027357183456420898, 0.027189247131347655, 0.02727628707885742, 0.02730086326599121, 0.02710937690734863, 0.02752921676635742, 0.026976255416870116, 0.027313152313232423, 0.0273438720703125, 0.027874303817749024, 0.028065792083740236, 0.027415552139282227, 0.02753023910522461, 0.027398143768310547, 0.027348991394042968, 0.027487232208251954, 0.02746675109863281, 0.027395072937011718, 0.027724800109863282, 0.027439104080200196, 0.0273305606842041, 0.027354112625122072, 0.027057151794433593, 0.02779955291748047, 0.027421695709228516, 0.027380735397338866, 0.029289472579956056, 0.027441152572631834, 0.027571199417114257, 0.02755072021484375, 0.027416576385498048, 0.027635711669921875, 0.027649023056030272, 0.02750771141052246, 0.027513856887817382, 
0.02752102470397949, 0.027428863525390625, 0.02753331184387207, 0.02838528060913086, 0.029557760238647462, 0.02831974411010742, 0.02769817543029785, 0.027208703994750977, 0.027303936004638672, 0.027546623229980468, 0.027444223403930663, 0.027699199676513672, 0.028700672149658202, 0.027844608306884764, 0.027380735397338866, 0.027627519607543945, 0.02818355178833008, 0.02775654411315918, 0.02737664031982422, 0.027456512451171877, 0.027495424270629884, 0.027320320129394532, 0.02731724739074707, 0.028479488372802734, 0.02810572814941406, 0.027231231689453125, 0.027227136611938478, 0.027263999938964844, 0.02752921676635742, 0.02714726448059082, 0.02735103988647461, 0.027217920303344727, 0.028035072326660155, 0.02832691192626953, 0.027273216247558595, 0.02735308837890625, 0.027570175170898437, 0.027444223403930663, 0.0272988166809082, 0.02752102470397949, 0.027432960510253908, 0.027511808395385744, 0.027457536697387694, 0.02730905532836914, 0.02734284782409668, 0.027840511322021484, 0.02860748863220215, 0.02727628707885742, 0.027479040145874024, 0.027624448776245116, 0.0272936954498291, 0.027304960250854493, 0.02772377586364746, 0.027411455154418944, 0.02733568000793457, 0.02735103988647461, 0.02747494316101074, 0.027410432815551757, 0.03166924858093262, 0.029258752822875978, 0.02789580726623535, 0.027314176559448244, 0.02740838432312012, 0.027510784149169923, 0.02754457664489746, 0.027291648864746092, 0.027638784408569338, 0.027258880615234377, 0.02811392021179199, 0.02770227241516113, 0.027651071548461914, 0.028251136779785156, 0.02791731262207031, 0.028072959899902345, 0.029033472061157226, 0.028060672760009765, 0.027275264739990233, 0.027282432556152345]",tokens/s,36.13980189508961,,,,,,,, -4bit-awq-gemv-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,facebook/opt-2.7b,facebook/opt-2.7b,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run - report = scenario.run(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 105, in run - _ = backend.generate(self.inputs, self.config.generate_kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context - return func(*args, **kwargs) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 400, in generate - 
return self.pretrained_model.generate(**inputs, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context - return func(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 1914, in generate - result = self._sample( - File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2651, in _sample - outputs = self( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 1118, in forward - outputs = self.model.decoder( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 884, in forward - layer_outputs = decoder_layer( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 525, in forward - hidden_states, self_attn_weights, present_key_value = self.self_attn( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 292, in forward - query_states = self.q_proj(hidden_states) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context - return func(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/gemv.py"", line 162, in forward - assert AWQ_INSTALLED, ( -AssertionError: AWQ kernels could not be loaded. 
Please install them from https://github.com/casper-hansen/AutoAWQ_kernels - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemv-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,huggyllama/llama-7b,huggyllama/llama-7b,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run - report = scenario.run(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 105, in run - _ = backend.generate(self.inputs, self.config.generate_kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context - return func(*args, **kwargs) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 400, in generate - return self.pretrained_model.generate(**inputs, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context - return func(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 1914, in generate - result = self._sample( - File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2651, in _sample - outputs = self( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1174, in forward - outputs = self.model( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 978, in forward - layer_outputs = decoder_layer( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File 
""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 718, in forward - hidden_states, self_attn_weights, present_key_value = self.self_attn( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 414, in forward - query_states = self.q_proj(hidden_states) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context - return func(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/gemv.py"", line 162, in forward - assert AWQ_INSTALLED, ( -AssertionError: AWQ kernels could not be loaded. Please install them from https://github.com/casper-hansen/AutoAWQ_kernels - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemv-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,facebook/opt-13b,facebook/opt-13b,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run - report = scenario.run(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 105, in run - _ = backend.generate(self.inputs, self.config.generate_kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context - return func(*args, **kwargs) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 400, in generate - return self.pretrained_model.generate(**inputs, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in 
decorate_context - return func(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 1914, in generate - result = self._sample( - File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2651, in _sample - outputs = self( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 1118, in forward - outputs = self.model.decoder( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 884, in forward - layer_outputs = decoder_layer( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 525, in forward - hidden_states, self_attn_weights, present_key_value = self.self_attn( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 292, in forward - query_states = self.q_proj(hidden_states) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context - return func(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/gemv.py"", line 162, in forward - assert AWQ_INSTALLED, ( -AssertionError: AWQ kernels could not be loaded. 
Please install them from https://github.com/casper-hansen/AutoAWQ_kernels - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemv-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,EleutherAI/pythia-70m,EleutherAI/pythia-70m,cuda,0,42,,,True,,,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,gpt_neox,MB,838.524928,745.013248,0.0,159.383552,141.760512,s,1,7.16419287109375,7.16419287109375,0.0,7.16419287109375,7.16419287109375,7.16419287109375,7.16419287109375,[7.16419287109375],,kWh,4.341445727077876e-06,2.36358633994568e-06,6.4858385220856185e-06,1.3190870589109174e-05,,MB,1382.207488,847.773696,0.0,201.326592,184.771584,s,30,0.19116688013076782,0.0063722293376922605,0.0001410431056395332,0.006389264106750488,0.006558287858963012,0.00657901771068573,0.0066738571453094486,"[0.006299007892608643, 0.006448480129241943, 0.006426720142364502, 0.006710912227630615, 0.006583136081695556, 0.00653436803817749, 0.0064559998512268065, 0.006414976119995117, 0.006248640060424805, 0.0064271998405456545, 0.006268703937530517, 0.006314047813415527, 0.006488800048828125, 0.006457376003265381, 0.006195744037628174, 0.00623635196685791, 0.006199552059173584, 0.006231135845184326, 0.006416607856750489, 0.006300064086914062, 0.006203167915344239, 0.0065565438270568845, 0.006448031902313233, 0.006573984146118164, 0.006199615955352783, 0.0062308797836303715, 0.006197760105133057, 0.006507840156555176, 0.006227680206298828, 0.006363552093505859]",tokens/s,40174.323056098896,kWh,7.629970579285956e-08,4.180862444033124e-08,1.541824015542601e-07,2.722907317874509e-07,tokens/kWh,940171552.3679029,MB,1417.682944,847.773696,0.0,201.326592,184.774144,s,30,10.110716613769531,0.33702388712565107,0.004596418219683327,0.33730699157714844,0.34238978576660156,0.3427373229980469,0.34670233642578124,"[0.34011691284179685, 0.342708251953125, 0.3390265808105469, 0.3349437255859375, 0.34235440063476563, 0.33942160034179686, 0.33736614990234376, 0.3483121337890625, 0.34219146728515626, 0.3330297241210938, 0.3374594116210938, 0.33860910034179686, 0.33731512451171874, 0.33123910522460936, 0.33007000732421876, 0.32869955444335935, 0.3360467529296875, 0.3349437255859375, 0.3366600341796875, 0.32983334350585936, 0.33673666381835937, 0.33920816040039065, 0.3427611083984375, 0.3370115051269531, 0.3369119567871094, 0.32721591186523435, 0.33379196166992187, 0.33757064819335936, 0.33729885864257814, 0.34186273193359373]",tokens/s,186.93037023963822,kWh,3.896933288003641e-06,2.1353364255270764e-06,6.069813318379667e-06,1.2102083031910386e-05,tokens/kWh,5205715.399066724,,s,1890,10.098572327137,0.005343159961448144,0.00014725797836197464,0.0053647360801696775,0.005465087890625,0.00550558729171753,0.005792757973670958,"[0.005359615802764893, 0.005288959980010987, 0.005258240222930908, 0.005248000144958496, 0.005275648117065429, 0.005231679916381836, 0.005263296127319336, 0.0052674560546875, 0.005272575855255127, 0.005242879867553711, 0.005276671886444092, 0.005243904113769531, 
0.005248000144958496, 0.0052295680046081545, 0.005296127796173096, 0.0052633600234985355, 0.005241856098175048, 0.005242976188659668, 0.005199776172637939, 0.005243904113769531, 0.005276671886444092, 0.005235712051391602, 0.005243904113769531, 0.005279744148254394, 0.0058419198989868165, 0.005482495784759522, 0.005577727794647217, 0.005493760108947754, 0.005537792205810547, 0.0055285758972167965, 0.005624896049499512, 0.00562988805770874, 0.00557260799407959, 0.005574656009674072, 0.005441535949707031, 0.005484543800354004, 0.005537792205810547, 0.005446656227111817, 0.005440512180328369, 0.005517312049865723, 0.005637152194976807, 0.005545951843261719, 0.005371903896331787, 0.005350399971008301, 0.005387263774871826, 0.005341184139251709, 0.005327871799468994, 0.005402624130249023, 0.005332992076873779, 0.005342207908630371, 0.0053647360801696775, 0.005350399971008301, 0.005432320117950439, 0.005469183921813964, 0.005459968090057373, 0.005444608211517334, 0.00551423978805542, 0.005422080039978027, 0.005442560195922852, 0.0054568958282470706, 0.005430272102355957, 0.0054876160621643065, 0.005414912223815918, 0.005391359806060791, 0.005442560195922852, 0.005442560195922852, 0.00542310380935669, 0.005431295871734619, 0.005464064121246338, 0.005582848072052002, 0.005505023956298828, 0.005469183921813964, 0.005439487934112549, 0.005500927925109863, 0.005438464164733887, 0.005419007778167725, 0.005489664077758789, 0.005434368133544922, 0.005470240116119384, 0.005453792095184326, 0.005440512180328369, 0.005508096218109131, 0.005452799797058106, 0.005440512180328369, 0.005493760108947754, 0.0053647360801696775, 0.0053534722328186036, 0.005370880126953125, 0.005331967830657959, 0.00532480001449585, 0.005362688064575195, 0.00532480001449585, 0.005344287872314453, 0.005370848178863526, 0.005336063861846924, 0.005401663780212403, 0.005442495822906494, 0.005426176071166992, 0.005450751781463623, 0.0054579200744628905, 0.005434368133544922, 0.005593088150024414, 0.005432320117950439, 0.005440512180328369, 0.005494783878326416, 0.005436416149139404, 0.005419007778167725, 0.005449728012084961, 0.005393407821655274, 0.005402624130249023, 0.005444608211517334, 0.005451776027679443, 0.005447679996490478, 0.00541593599319458, 0.005417984008789062, 0.005452799797058106, 0.005414912223815918, 0.005416959762573242, 0.005440512180328369, 0.005412864208221436, 0.005437439918518067, 0.005508096218109131, 0.005414912223815918, 0.0054241280555725096, 0.005465087890625, 0.0054282240867614745, 0.005451776027679443, 0.005465087890625, 0.005441535949707031, 0.005436416149139404, 0.005463039875030518, 0.005442560195922852, 0.005744639873504639, 0.005501952171325684, 0.00546611213684082, 0.005692480087280273, 0.005879744052886963, 0.00582041597366333, 0.0054568958282470706, 0.005410816192626953, 0.0054876160621643065, 0.005438464164733887, 0.00542310380935669, 0.005689343929290771, 0.005405695915222168, 0.005469183921813964, 0.005315584182739258, 0.005245952129364013, 0.005198847770690918, 0.005155839920043945, 0.005167103767395019, 0.0052336640357971195, 0.005281792163848877, 0.005208064079284668, 0.00516812801361084, 0.005265408039093018, 0.005244927883148193, 0.005239808082580567, 0.005250048160552978, 0.005228544235229492, 0.005220352172851562, 0.005252096176147461, 0.005245952129364013, 0.0052408318519592285, 0.005232639789581299, 0.005246975898742676, 0.005230591773986816, 0.005222400188446045, 0.005250048160552978, 0.005208064079284668, 0.005235712051391602, 0.0052633600234985355, 0.005283904075622558, 
0.005461952209472656, 0.005452799797058106, 0.005412864208221436, 0.005409791946411133, 0.005463039875030518, 0.005412864208221436, 0.005379072189331055, 0.005450751781463623, 0.005391359806060791, 0.005408768177032471, 0.005389311790466309, 0.005389311790466309, 0.005425151824951172, 0.005404672145843506, 0.005405695915222168, 0.005441535949707031, 0.005470208168029785, 0.00541593599319458, 0.005449728012084961, 0.005449728012084961, 0.005437471866607666, 0.0054650559425354, 0.005438464164733887, 0.005431295871734619, 0.005489664077758789, 0.005402719974517823, 0.005349279880523682, 0.005473279953002929, 0.00538316822052002, 0.005395455837249756, 0.005429247856140137, 0.005406720161437988, 0.00542310380935669, 0.005432320117950439, 0.0053975038528442385, 0.00542310380935669, 0.005504000186920166, 0.0053043198585510255, 0.005250048160552978, 0.005219327926635742, 0.005224448204040527, 0.005232639789581299, 0.005244927883148193, 0.005214208126068115, 0.005223423957824707, 0.005258240222930908, 0.005208064079284668, 0.005250048160552978, 0.005249023914337158, 0.005204991817474365, 0.005224448204040527, 0.00526643180847168, 0.005227519989013672, 0.005234687805175781, 0.00530841588973999, 0.005239808082580567, 0.005339200019836426, 0.005345215797424316, 0.005346303939819336, 0.0053012480735778805, 0.0052111358642578124, 0.005195775985717774, 0.005157951831817627, 0.00516703987121582, 0.0051998720169067385, 0.005188608169555664, 0.005216256141662597, 0.005279744148254394, 0.005235712051391602, 0.005231616020202637, 0.005268479824066162, 0.005255167961120606, 0.005239808082580567, 0.005223423957824707, 0.005169151782989502, 0.005215231895446777, 0.005237760066986084, 0.0052633600234985355, 0.005530623912811279, 0.005441535949707031, 0.005458943843841553, 0.005425151824951172, 0.005399551868438721, 0.005434400081634522, 0.0053821120262146, 0.0055920639038085935, 0.005449728012084961, 0.005401599884033203, 0.005389311790466309, 0.005452799797058106, 0.005425183773040771, 0.0054159040451049805, 0.005469183921813964, 0.005453824043273926, 0.005450751781463623, 0.005440512180328369, 0.005406720161437988, 0.005438464164733887, 0.005431295871734619, 0.005400576114654541, 0.0054282240867614745, 0.0053975038528442385, 0.005407743930816651, 0.005426176071166992, 0.005399551868438721, 0.005398528099060058, 0.005425151824951172, 0.005417984008789062, 0.005451776027679443, 0.005412864208221436, 0.005395455837249756, 0.005516287803649903, 0.005404672145843506, 0.005417984008789062, 0.005516287803649903, 0.005406752109527588, 0.005440480232238769, 0.005463039875030518, 0.005337088108062744, 0.00535859203338623, 0.005434368133544922, 0.005399551868438721, 0.005453824043273926, 0.005408768177032471, 0.005411839962005615, 0.005448703765869141, 0.005416959762573242, 0.005429247856140137, 0.005464064121246338, 0.005459968090057373, 0.005398528099060058, 0.005433343887329102, 0.005388288021087646, 0.005399551868438721, 0.005435455799102784, 0.005401535987854004, 0.005458943843841553, 0.005408768177032471, 0.005416959762573242, 0.005475327968597412, 0.005406720161437988, 0.005417984008789062, 0.005421055793762207, 0.005437439918518067, 0.005390336036682129, 0.0053975038528442385, 0.00537497615814209, 0.005150720119476319, 0.005148672103881836, 0.005169151782989502, 0.00515174388885498, 0.005152768135070801, 0.005192704200744629, 0.0051773438453674315, 0.00521830415725708, 0.005307424068450928, 0.005510111808776855, 0.005436416149139404, 0.005492735862731934, 0.005437439918518067, 0.0054241280555725096, 
0.00547430419921875, 0.005401599884033203, 0.005384191989898681, 0.005368832111358642, 0.0053821439743042, 0.005404672145843506, 0.005425151824951172, 0.005468160152435303, 0.005403647899627686, 0.00551632022857666, 0.005408736228942871, 0.005451776027679443, 0.005411839962005615, 0.0053534722328186036, 0.005339136123657226, 0.005310463905334473, 0.005298175811767578, 0.005348351955413819, 0.005298175811767578, 0.005310463905334473, 0.005313536167144775, 0.00536678409576416, 0.005409791946411133, 0.005440512180328369, 0.005391359806060791, 0.005410816192626953, 0.005436416149139404, 0.0054139518737792965, 0.005408703804016113, 0.005446656227111817, 0.005431295871734619, 0.005380095958709717, 0.005433343887329102, 0.005388288021087646, 0.005411839962005615, 0.005393407821655274, 0.005379072189331055, 0.005430272102355957, 0.0057794561386108395, 0.005471231937408447, 0.005434368133544922, 0.005402624130249023, 0.005396480083465576, 0.005489664077758789, 0.005472256183624268, 0.005389311790466309, 0.005440512180328369, 0.005381120204925537, 0.005321728229522705, 0.005401599884033203, 0.005342207908630371, 0.00521727991104126, 0.005244991779327393, 0.005208000183105469, 0.005202943801879883, 0.005224448204040527, 0.005387263774871826, 0.005214208126068115, 0.005215231895446777, 0.005243904113769531, 0.005212160110473632, 0.005206016063690186, 0.005235712051391602, 0.005222464084625244, 0.005202879905700683, 0.005265408039093018, 0.0052899842262268066, 0.005390336036682129, 0.005447679996490478, 0.005416959762573242, 0.005417984008789062, 0.005441535949707031, 0.0053975038528442385, 0.005410816192626953, 0.005535744190216065, 0.005390336036682129, 0.005319680213928223, 0.005332992076873779, 0.005327871799468994, 0.005319680213928223, 0.005326848030090332, 0.005282815933227539, 0.005295135974884033, 0.005331935882568359, 0.005306367874145508, 0.005306367874145508, 0.005435391902923584, 0.005407743930816651, 0.005408768177032471, 0.0054282240867614745, 0.0053309440612792965, 0.005235712051391602, 0.005259263992309571, 0.005306367874145508, 0.005237760066986084, 0.005171199798583984, 0.005387263774871826, 0.005420032024383545, 0.0055203838348388675, 0.005405695915222168, 0.005480447769165039, 0.005773312091827393, 0.005410816192626953, 0.005485568046569824, 0.005402624130249023, 0.005455872058868408, 0.0054609918594360355, 0.005435391902923584, 0.005396480083465576, 0.005443583965301513, 0.005430272102355957, 0.005420032024383545, 0.005441535949707031, 0.005389311790466309, 0.005385216236114502, 0.0054609918594360355, 0.005419007778167725, 0.005430272102355957, 0.005449728012084961, 0.00542310380935669, 0.005420032024383545, 0.005462016105651855, 0.005455872058868408, 0.005543935775756836, 0.005410816192626953, 0.005419007778167725, 0.00546611213684082, 0.005352447986602784, 0.006103040218353272, 0.006418432235717773, 0.006275072097778321, 0.0058009600639343266, 0.005508096218109131, 0.005472256183624268, 0.005451776027679443, 0.005501952171325684, 0.005448703765869141, 0.0054282240867614745, 0.007501823902130127, 0.005766143798828125, 0.005471231937408447, 0.005467167854309082, 0.005460959911346435, 0.005434400081634522, 0.005434336185455323, 0.0054579200744628905, 0.005440512180328369, 0.005446656227111817, 0.005420032024383545, 0.005429247856140137, 0.005453824043273926, 0.005582848072052002, 0.005413919925689697, 0.005468128204345703, 0.0054282240867614745, 0.005433343887329102, 0.005446656227111817, 0.005422080039978027, 0.005470208168029785, 0.005432320117950439, 0.0054282240867614745, 
0.00547430419921875, 0.00536678409576416, 0.005313536167144775, 0.005391392230987549, 0.005431263923645019, 0.0054241280555725096, 0.005449728012084961, 0.005438464164733887, 0.005422080039978027, 0.005213183879852295, 0.0052705278396606445, 0.005222400188446045, 0.0052111678123474125, 0.0052520642280578615, 0.005412864208221436, 0.005414912223815918, 0.005439487934112549, 0.005408768177032471, 0.005552127838134766, 0.005494783878326416, 0.005436416149139404, 0.005471231937408447, 0.005436416149139404, 0.00535756778717041, 0.005455872058868408, 0.005412864208221436, 0.00542310380935669, 0.005464064121246338, 0.00541593599319458, 0.0054241280555725096, 0.005453824043273926, 0.005354496002197265, 0.005430272102355957, 0.005470208168029785, 0.005479423999786377, 0.005438464164733887, 0.005438464164733887, 0.005403647899627686, 0.005470208168029785, 0.005430272102355957, 0.005440512180328369, 0.005464064121246338, 0.005416959762573242, 0.005430272102355957, 0.005475327968597412, 0.005425183773040771, 0.005573599815368652, 0.005436416149139404, 0.005454847812652588, 0.005492735862731934, 0.005437439918518067, 0.005437439918518067, 0.005455872058868408, 0.005420032024383545, 0.005429247856140137, 0.00546611213684082, 0.00541593599319458, 0.005439487934112549, 0.005732351779937744, 0.005472256183624268, 0.005465087890625, 0.0053975038528442385, 0.005405695915222168, 0.005438464164733887, 0.005444608211517334, 0.005445631980895996, 0.005401599884033203, 0.005399551868438721, 0.005426176071166992, 0.005344255924224854, 0.005332992076873779, 0.005411839962005615, 0.005422080039978027, 0.005398528099060058, 0.005390336036682129, 0.005443583965301513, 0.005393407821655274, 0.00537395191192627, 0.005421055793762207, 0.00538316822052002, 0.005499904155731201, 0.005431295871734619, 0.005398528099060058, 0.005405695915222168, 0.005393407821655274, 0.005355519771575928, 0.005244927883148193, 0.005163008213043213, 0.005148672103881836, 0.00515993595123291, 0.005228544235229492, 0.005212160110473632, 0.005224448204040527, 0.005253119945526123, 0.005213183879852295, 0.005220352172851562, 0.005223455905914306, 0.005237728118896484, 0.005322751998901368, 0.005232639789581299, 0.005231616020202637, 0.0051998720169067385, 0.00521727991104126, 0.0052336640357971195, 0.005230591773986816, 0.005227551937103271, 0.005271520137786865, 0.005242879867553711, 0.005235712051391602, 0.005260287761688232, 0.005220352172851562, 0.005252096176147461, 0.005262335777282715, 0.005252096176147461, 0.005238815784454346, 0.005221343994140625, 0.005195775985717774, 0.0052336640357971195, 0.005203968048095703, 0.005723135948181152, 0.005445631980895996, 0.005401599884033203, 0.005477375984191894, 0.0053012480735778805, 0.005179391860961914, 0.005201920032501221, 0.005193727970123291, 0.005161983966827393, 0.005239808082580567, 0.005163008213043213, 0.005169151782989502, 0.005183487892150879, 0.0051701760292053225, 0.0051701760292053225, 0.005213183879852295, 0.005221375942230225, 0.0052674560546875, 0.005262335777282715, 0.005235712051391602, 0.005248000144958496, 0.005294079780578613, 0.005471231937408447, 0.005443583965301513, 0.005405695915222168, 0.005402624130249023, 0.005432320117950439, 0.005420032024383545, 0.005437439918518067, 0.005434432029724121, 0.005417920112609863, 0.005451776027679443, 0.005352447986602784, 0.005251071929931641, 0.005249023914337158, 0.005257215976715088, 0.005232639789581299, 0.005235712051391602, 0.005261312007904053, 0.005221375942230225, 0.005384191989898681, 0.005445631980895996, 
0.00536787223815918, 0.005341119766235351, 0.005441535949707031, 0.005425151824951172, 0.005496831893920898, 0.005401599884033203, 0.005417984008789062, 0.005435391902923584, 0.005393407821655274, 0.005400576114654541, 0.005443583965301513, 0.005408768177032471, 0.00537497615814209, 0.005429247856140137, 0.005408768177032471, 0.005388288021087646, 0.005481472015380859, 0.005471231937408447, 0.005441535949707031, 0.005408768177032471, 0.005390336036682129, 0.00540780782699585, 0.0054148478507995606, 0.00535859203338623, 0.00542310380935669, 0.005285888195037842, 0.005284863948822022, 0.005195775985717774, 0.005152768135070801, 0.005161983966827393, 0.005180416107177735, 0.005260287761688232, 0.0052336640357971195, 0.005245952129364013, 0.005269504070281982, 0.005223423957824707, 0.0054568958282470706, 0.00538316822052002, 0.00542310380935669, 0.005392384052276611, 0.005420095920562744, 0.005422016143798828, 0.005402624130249023, 0.00541593599319458, 0.005405695915222168, 0.005343232154846191, 0.00537497615814209, 0.005371903896331787, 0.0053678078651428224, 0.005401599884033203, 0.005369855880737305, 0.005385216236114502, 0.005404672145843506, 0.005476352214813233, 0.005387263774871826, 0.005429247856140137, 0.005356544017791748, 0.005281792163848877, 0.005331967830657959, 0.005251071929931641, 0.005172224044799805, 0.005223487854003906, 0.005220287799835205, 0.00522649621963501, 0.005245952129364013, 0.005228544235229492, 0.005239808082580567, 0.005238783836364746, 0.005239808082580567, 0.005213183879852295, 0.005216256141662597, 0.005277696132659912, 0.005402624130249023, 0.0053821439743042, 0.0054271998405456545, 0.005388288021087646, 0.005405695915222168, 0.0054271998405456545, 0.0054098558425903324, 0.005413824081420898, 0.00538316822052002, 0.005653503894805908, 0.005462016105651855, 0.0054026880264282226, 0.005370816230773926, 0.005435391902923584, 0.0053975038528442385, 0.005435391902923584, 0.005455872058868408, 0.005403711795806885, 0.005388224124908447, 0.005441535949707031, 0.00535859203338623, 0.005375999927520752, 0.005415999889373779, 0.0053851518630981445, 0.005455872058868408, 0.005375999927520752, 0.005397535800933838, 0.005417952060699463, 0.0054282240867614745, 0.005519360065460205, 0.005242879867553711, 0.005262335777282715, 0.00521727991104126, 0.005355519771575928, 0.005242879867553711, 0.005198847770690918, 0.005252096176147461, 0.005165056228637695, 0.005137407779693603, 0.005142528057098389, 0.005130239963531494, 0.005158912181854248, 0.0053012480735778805, 0.005299200057983398, 0.005338111877441406, 0.005329919815063477, 0.005302271842956543, 0.005259263992309571, 0.005194752216339111, 0.0052008957862854, 0.005238783836364746, 0.00521727991104126, 0.0052971520423889164, 0.005232639789581299, 0.005219327926635742, 0.005492735862731934, 0.0054271998405456545, 0.005404672145843506, 0.005398528099060058, 0.005431295871734619, 0.0053975038528442385, 0.005385216236114502, 0.005422080039978027, 0.005402624130249023, 0.005387263774871826, 0.005440512180328369, 0.005338111877441406, 0.00532480001449585, 0.005402624130249023, 0.005368832111358642, 0.005545983791351319, 0.005552127838134766, 0.005434368133544922, 0.005455872058868408, 0.005405695915222168, 0.005447679996490478, 0.005469183921813964, 0.005396480083465576, 0.005389311790466309, 0.005421055793762207, 0.005389311790466309, 0.005388288021087646, 0.005413887977600097, 0.005392384052276611, 0.0054282240867614745, 0.005414912223815918, 0.005392384052276611, 0.005410816192626953, 0.005444608211517334, 
0.005385216236114502, 0.0054271998405456545, 0.005416959762573242, 0.0053606400489807126, 0.005393407821655274, 0.005452896118164062, 0.005366687774658203, 0.005385216236114502, 0.005409791946411133, 0.005402624130249023, 0.005388288021087646, 0.005451776027679443, 0.005318655967712403, 0.005213183879852295, 0.005224448204040527, 0.005188608169555664, 0.0052008957862854, 0.005234687805175781, 0.005206016063690186, 0.005202943801879883, 0.005253119945526123, 0.005220352172851562, 0.005158912181854248, 0.005150720119476319, 0.005254144191741943, 0.005215231895446777, 0.005203968048095703, 0.005242879867553711, 0.005208064079284668, 0.005242879867553711, 0.005286911964416504, 0.005232639789581299, 0.00521727991104126, 0.005243904113769531, 0.005268479824066162, 0.005230591773986816, 0.005198847770690918, 0.005245952129364013, 0.005221375942230225, 0.005243904113769531, 0.005248000144958496, 0.005223423957824707, 0.005237760066986084, 0.005285888195037842, 0.00520908784866333, 0.005188608169555664, 0.005239808082580567, 0.005222432136535645, 0.005204959869384766, 0.005201920032501221, 0.00522649621963501, 0.0051998720169067385, 0.0051968002319335935, 0.005245952129364013, 0.005206016063690186, 0.005195775985717774, 0.005246975898742676, 0.005189631938934326, 0.005198847770690918, 0.0052070398330688475, 0.005258240222930908, 0.005202976226806641, 0.0051957440376281736, 0.005261312007904053, 0.00520300817489624, 0.005212160110473632, 0.005206016063690186, 0.005212160110473632, 0.005135359764099121, 0.005079040050506592, 0.005059584140777588, 0.005154816150665284, 0.005123104095458984, 0.005127232074737548, 0.005145503997802735, 0.005214208126068115, 0.00521830415725708, 0.005389311790466309, 0.005255167961120606, 0.005239808082580567, 0.005244927883148193, 0.005255167961120606, 0.005224448204040527, 0.005228544235229492, 0.005234687805175781, 0.005227519989013672, 0.005239808082580567, 0.00526643180847168, 0.005234687805175781, 0.00520908784866333, 0.005234687805175781, 0.005239808082580567, 0.005248000144958496, 0.005248000144958496, 0.005270592212677002, 0.005235648155212403, 0.005237760066986084, 0.005251071929931641, 0.00522547197341919, 0.005250048160552978, 0.005264383792877197, 0.005232639789581299, 0.005224448204040527, 0.005258240222930908, 0.005248000144958496, 0.005252096176147461, 0.005285888195037842, 0.005261343955993652, 0.005243872165679932, 0.00524396800994873, 0.005287871837615967, 0.005243904113769531, 0.005249023914337158, 0.005285888195037842, 0.005244927883148193, 0.005186560153961181, 0.005378047943115235, 0.005231616020202637, 0.005156864166259765, 0.005258240222930908, 0.005238783836364746, 0.00522547197341919, 0.005254144191741943, 0.005237760066986084, 0.005222400188446045, 0.005251071929931641, 0.0053821439743042, 0.005231616020202637, 0.005195775985717774, 0.005206016063690186, 0.00522649621963501, 0.005203968048095703, 0.00520908784866333, 0.0052367358207702636, 0.00515993595123291, 0.005142528057098389, 0.005186560153961181, 0.005259263992309571, 0.005197824001312256, 0.0052070398330688475, 0.0052408318519592285, 0.005206016063690186, 0.005195775985717774, 0.005332992076873779, 0.0051968002319335935, 0.005192704200744629, 0.005224448204040527, 0.005219327926635742, 0.005203968048095703, 0.005224448204040527, 0.005232639789581299, 0.0052008957862854, 0.005222400188446045, 0.005258240222930908, 0.005201920032501221, 0.005215231895446777, 0.005244927883148193, 0.005187583923339844, 0.005212160110473632, 0.005213183879852295, 0.0052408318519592285, 
0.005227519989013672, 0.005202943801879883, 0.005243904113769531, 0.005154816150665284, 0.005126143932342529, 0.005248000144958496, 0.005243904113769531, 0.005224448204040527, 0.005260287761688232, 0.005219327926635742, 0.005203968048095703, 0.005222400188446045, 0.00522649621963501, 0.005208064079284668, 0.005216256141662597, 0.005234687805175781, 0.005216256141662597, 0.005198847770690918, 0.005234687805175781, 0.005228544235229492, 0.00522649621963501, 0.005216256141662597, 0.00526643180847168, 0.005188608169555664, 0.005208064079284668, 0.005228544235229492, 0.005122047901153564, 0.005135359764099121, 0.0051660799980163576, 0.005119999885559082, 0.0051404800415039064, 0.005139455795288086, 0.005131264209747314, 0.00515993595123291, 0.005193727970123291, 0.005183487892150879, 0.005221375942230225, 0.005216256141662597, 0.0052111358642578124, 0.005216256141662597, 0.005234687805175781, 0.0052070398330688475, 0.005198847770690918, 0.005231616020202637, 0.005189631938934326, 0.005208064079284668, 0.0052336640357971195, 0.005214208126068115, 0.005192704200744629, 0.005199967861175537, 0.005176224231719971, 0.0051435518264770505, 0.005228544235229492, 0.0052336640357971195, 0.005214208126068115, 0.005198847770690918, 0.005242879867553711, 0.005210112094879151, 0.005190656185150146, 0.0051968002319335935, 0.005294079780578613, 0.005220352172851562, 0.005214208126068115, 0.005241856098175048, 0.005227519989013672, 0.005960703849792481, 0.005628928184509278, 0.005478400230407715, 0.005615615844726562, 0.005421055793762207, 0.0054917120933532714, 0.005708799839019775, 0.005831679821014404, 0.005473279953002929, 0.005403647899627686, 0.005417984008789062, 0.005445631980895996, 0.005413887977600097, 0.005429247856140137, 0.005453824043273926, 0.005398528099060058, 0.005454847812652588, 0.005484543800354004, 0.005447679996490478, 0.005791744232177734, 0.005442560195922852, 0.005482495784759522, 0.005452799797058106, 0.0054271998405456545, 0.005255167961120606, 0.005238815784454346, 0.005241824150085449, 0.005186560153961181, 0.0052070398330688475, 0.005253119945526123, 0.00516096019744873, 0.0051476478576660155, 0.005239808082580567, 0.0051476478576660155, 0.005135359764099121, 0.005219327926635742, 0.0051476478576660155, 0.005119999885559082, 0.005114880084991455, 0.005134335994720459, 0.005139455795288086, 0.005119999885559082, 0.005115903854370117, 0.005158912181854248, 0.005114880084991455, 0.005119999885559082, 0.00516099214553833, 0.0051363520622253415, 0.005387263774871826, 0.005342207908630371, 0.005318655967712403, 0.0052899842262268066, 0.00537395191192627, 0.005325823783874512, 0.005288959980010987, 0.005310463905334473, 0.005299200057983398, 0.005295104026794434, 0.005287936210632324, 0.005429247856140137, 0.005392384052276611, 0.005404672145843506, 0.005386240005493164, 0.0053944320678710935, 0.00541593599319458, 0.005452799797058106, 0.005620736122131348, 0.005469183921813964, 0.005430272102355957, 0.005410816192626953, 0.005406720161437988, 0.005417984008789062, 0.005409791946411133, 0.005458943843841553, 0.005413887977600097, 0.005440512180328369, 0.0053606400489807126, 0.005540863990783691, 0.005347328186035156, 0.005239808082580567, 0.0052899842262268066, 0.005341184139251709, 0.005282815933227539, 0.005283840179443359, 0.0054988799095153805, 0.005408768177032471, 0.005385216236114502, 0.005400608062744141, 0.005395423889160156, 0.00536678409576416, 0.005432320117950439, 0.005443552017211914, 0.005382175922393799, 0.005384160041809082, 0.005421055793762207, 0.005404672145843506, 
0.005417984008789062, 0.005412864208221436, 0.005408768177032471, 0.005447679996490478, 0.005452799797058106, 0.005392384052276611, 0.005354496002197265, 0.005287936210632324, 0.005279744148254394, 0.00531763219833374, 0.0051435518264770505, 0.005135359764099121, 0.005135392189025879, 0.005153759956359864, 0.005202943801879883, 0.005213183879852295, 0.005256192207336426, 0.005201920032501221, 0.005208064079284668, 0.005255167961120606, 0.005210112094879151, 0.005216256141662597, 0.005249023914337158, 0.00522649621963501, 0.005206016063690186, 0.005206016063690186, 0.005248000144958496, 0.0051701760292053225, 0.0050503678321838375, 0.0051435518264770505, 0.005131264209747314, 0.005723135948181152, 0.0055716800689697265, 0.005321631908416748, 0.005305344104766845, 0.005395455837249756, 0.005379072189331055, 0.005386240005493164, 0.005489664077758789, 0.005379072189331055, 0.005420032024383545, 0.005429247856140137, 0.005398528099060058, 0.0054579200744628905, 0.005439487934112549, 0.0054241280555725096, 0.005448703765869141, 0.005447679996490478, 0.005446656227111817, 0.005483520030975342, 0.005437439918518067, 0.005442560195922852, 0.005463039875030518, 0.005441535949707031, 0.005315584182739258, 0.005372928142547607, 0.005311488151550293, 0.005328896045684814, 0.005227519989013672, 0.005239808082580567, 0.005146624088287354, 0.005222400188446045, 0.005182464122772217, 0.005152768135070801, 0.005128191947937012, 0.00516812801361084, 0.005223423957824707, 0.0051660799980163576, 0.00511897611618042, 0.005178368091583252, 0.005137407779693603, 0.005128191947937012, 0.005188672065734863, 0.0051311998367309574, 0.0050503678321838375, 0.005117951869964599, 0.005174272060394287, 0.005139455795288086, 0.0051435518264770505, 0.0051701760292053225, 0.005153791904449463, 0.005148672103881836, 0.00515993595123291, 0.005186560153961181, 0.005134335994720459, 0.005129216194152832, 0.005174272060394287, 0.005146624088287354, 0.005137407779693603, 0.00520908784866333, 0.005265408039093018, 0.005154816150665284, 0.005149695873260498, 0.005178368091583252, 0.005141503810882568, 0.005129216194152832, 0.005128191947937012, 0.005441535949707031, 0.005342207908630371, 0.005361663818359375, 0.00535756778717041, 0.005337088108062744, 0.005381120204925537, 0.0053350400924682614, 0.005302271842956543, 0.0053309440612792965, 0.005313536167144775, 0.005303296089172363, 0.005312511920928955, 0.0053043198585510255, 0.005944320201873779, 0.0055920639038085935, 0.0054241280555725096, 0.005351424217224121, 0.005377024173736572, 0.0052705278396606445, 0.005155839920043945, 0.005181439876556396, 0.005148672103881836, 0.005139455795288086, 0.005165056228637695, 0.006333439826965332, 0.0056555519104003905, 0.005505023956298828, 0.005768191814422607, 0.00602726411819458, 0.005517312049865723, 0.005408768177032471, 0.005454847812652588, 0.00542310380935669, 0.005411839962005615, 0.005325823783874512, 0.005245952129364013, 0.00522649621963501, 0.005265408039093018, 0.005223423957824707, 0.005212160110473632, 0.005222400188446045, 0.005227519989013672, 0.005194752216339111, 0.00522547197341919, 0.005253119945526123, 0.00521727991104126, 0.005223423957824707, 0.005254144191741943, 0.0052193918228149415, 0.005223360061645508, 0.005315584182739258, 0.0052336640357971195, 0.005232639789581299, 0.005254144191741943, 0.00521014404296875, 0.0051578559875488286, 0.005155839920043945, 0.005435391902923584, 0.005658624172210694, 0.005502975940704345, 0.005416959762573242, 0.005436416149139404, 0.005501952171325684, 0.005429247856140137, 
0.0054241280555725096, 0.005450751781463623, 0.005404672145843506, 0.0053309440612792965, 0.005256192207336426, 0.0052008957862854, 0.005139455795288086, 0.005213183879852295, 0.0052295680046081545, 0.005213183879852295, 0.005252096176147461, 0.005231616020202637, 0.005203968048095703, 0.005220352172851562, 0.005231616020202637, 0.005221375942230225, 0.005341184139251709, 0.005243904113769531, 0.005206016063690186, 0.005215231895446777, 0.005244927883148193, 0.005203968048095703, 0.005449728012084961, 0.005399551868438721, 0.005447679996490478, 0.005403647899627686, 0.005401599884033203, 0.005439487934112549, 0.005386240005493164, 0.005381120204925537, 0.005432320117950439, 0.005393407821655274, 0.005411839962005615, 0.0052930560111999515, 0.005298175811767578, 0.005431295871734619, 0.005395455837249756, 0.005379072189331055, 0.005385216236114502, 0.005311488151550293, 0.0053095040321350095, 0.005345215797424316, 0.005258240222930908, 0.005164031982421875, 0.005157887935638428, 0.00516812801361084, 0.005187583923339844, 0.005216256141662597, 0.005275648117065429, 0.005197824001312256, 0.005302271842956543, 0.0055244798660278325, 0.005331967830657959, 0.005703680038452149, 0.005847104072570801, 0.005453760147094726, 0.005343232154846191, 0.005296127796173096, 0.005350399971008301, 0.0054170241355896, 0.005294015884399414, 0.005315584182739258, 0.005326848030090332, 0.005276671886444092, 0.005321728229522705, 0.005417984008789062, 0.005371903896331787, 0.005372928142547607, 0.005398528099060058, 0.00537395191192627, 0.005377024173736572, 0.005438464164733887, 0.005413887977600097, 0.00538316822052002, 0.005404672145843506, 0.005385216236114502, 0.00539958381652832, 0.005433311939239502, 0.005411839962005615, 0.0054999361038208005, 0.0055316162109375, 0.005407743930816651, 0.005482495784759522, 0.005398528099060058, 0.005435391902923584, 0.005473279953002929, 0.005445631980895996, 0.005396480083465576, 0.005425151824951172, 0.005419007778167725, 0.005414912223815918, 0.005419007778167725, 0.005431295871734619, 0.005413887977600097, 0.005504000186920166, 0.005416959762573242, 0.005405695915222168, 0.005379199981689453, 0.005400447845458984, 0.005412864208221436, 0.005478400230407715, 0.005439487934112549, 0.005450751781463623, 0.005576704025268555, 0.005432320117950439, 0.005506048202514649, 0.005443583965301513, 0.005446656227111817, 0.0053821439743042, 0.00535859203338623, 0.005431295871734619, 0.005435391902923584, 0.005410816192626953, 0.005416959762573242, 0.0054579200744628905, 0.005416959762573242, 0.005425151824951172, 0.005445631980895996, 0.005421055793762207, 0.005440512180328369, 0.005437439918518067, 0.005425151824951172, 0.005437439918518067, 0.005419007778167725, 0.005420032024383545, 0.005477375984191894, 0.005406720161437988, 0.005412864208221436, 0.005451776027679443, 0.005435391902923584, 0.005455872058868408, 0.005598207950592041, 0.005409791946411133, 0.00551423978805542, 0.005414912223815918, 0.0054241280555725096, 0.005600255966186523, 0.005429247856140137, 0.005464064121246338, 0.005433343887329102, 0.005323775768280029, 0.00538316822052002, 0.005336063861846924, 0.005296127796173096, 0.00526643180847168, 0.005355519771575928, 0.005435391902923584, 0.005497856140136719, 0.005693439960479736, 0.0054241280555725096, 0.005464064121246338, 0.005343232154846191, 0.005321728229522705, 0.005325823783874512, 0.005319680213928223, 0.00530847978591919, 0.005301184177398681, 0.005337088108062744, 0.005326848030090332, 0.005303296089172363, 0.0053647360801696775, 
0.005419007778167725, 0.005422080039978027, 0.005484543800354004, 0.005470208168029785, 0.00546611213684082, 0.005420032024383545, 0.005410816192626953, 0.005451776027679443, 0.005425151824951172, 0.0054271998405456545, 0.005445631980895996, 0.00541593599319458, 0.005306367874145508, 0.005260320186614991, 0.0052520642280578615, 0.00568012809753418, 0.005493760108947754, 0.005432320117950439, 0.005476352214813233, 0.005459968090057373, 0.005445631980895996, 0.005385216236114502, 0.0051660799980163576, 0.005145599842071533, 0.005172224044799805, 0.005163008213043213, 0.005150720119476319, 0.005172224044799805, 0.0051660799980163576, 0.005203968048095703, 0.00521727991104126, 0.005257215976715088, 0.005594111919403076, 0.005445631980895996, 0.005464064121246338, 0.005535744190216065, 0.005432320117950439, 0.00541593599319458, 0.0053985600471496585, 0.005436384201049805, 0.00530944013595581, 0.005257215976715088, 0.0052408318519592285, 0.005210112094879151, 0.005203968048095703, 0.005215231895446777, 0.005219327926635742, 0.005203968048095703, 0.0052008957862854, 0.005232639789581299, 0.005203968048095703, 0.005125120162963868, 0.005202943801879883, 0.005137407779693603, 0.005190656185150146, 0.00521727991104126, 0.0052111358642578124, 0.005235712051391602, 0.00522649621963501, 0.005195775985717774, 0.005400576114654541, 0.005375040054321289, 0.005391295909881592, 0.005467135906219483, 0.005450751781463623, 0.005375999927520752, 0.005408768177032471, 0.005371903896331787, 0.005377024173736572, 0.005412960052490234, 0.00539024019241333, 0.00537395191192627, 0.005413887977600097, 0.005403647899627686, 0.005328896045684814, 0.005594111919403076, 0.0060928001403808595, 0.00551526403427124, 0.005438464164733887, 0.005854207992553711, 0.006157311916351318, 0.005594111919403076, 0.005434368133544922, 0.005414912223815918, 0.005492735862731934, 0.005370880126953125, 0.00522649621963501, 0.005185535907745361, 0.0051404800415039064, 0.0052111358642578124, 0.005202943801879883, 0.00516812801361084, 0.005174272060394287, 0.005303296089172363, 0.005328896045684814, 0.005375999927520752, 0.005369855880737305, 0.005426176071166992, 0.005408768177032471, 0.005264383792877197, 0.0052418880462646485, 0.005205984115600586, 0.005203968048095703, 0.005252096176147461, 0.005204991817474365, 0.005213183879852295, 0.005245952129364013, 0.005224448204040527, 0.005191679954528809, 0.005227519989013672, 0.005227519989013672, 0.005192704200744629, 0.005228544235229492, 0.005230591773986816, 0.005265408039093018, 0.0052193598747253414, 0.00522441577911377, 0.005232639789581299, 0.0052070398330688475, 0.00515993595123291, 0.005059584140777588, 0.005302271842956543, 0.005221375942230225, 0.005203968048095703, 0.005251071929931641, 0.0052070398330688475, 0.005225503921508789, 0.005256159782409668, 0.0052367358207702636, 0.005220352172851562, 0.005275648117065429, 0.005220352172851562, 0.005202943801879883, 0.0052408318519592285, 0.005232639789581299, 0.005197824001312256, 0.005222400188446045, 0.005250048160552978, 0.005214208126068115, 0.0052111358642578124, 0.005237760066986084, 0.0052705278396606445, 0.005202943801879883, 0.00515993595123291, 0.005144576072692871, 0.005129216194152832, 0.005141503810882568, 0.0052295680046081545, 0.005193727970123291, 0.005230591773986816, 0.005164031982421875, 0.005126143932342529, 0.005129280090332031, 0.005148608207702637, 0.0051476478576660155, 0.005117951869964599, 0.005180416107177735, 0.005228544235229492, 0.005167103767395019, 0.005126143932342529, 0.005137407779693603, 
0.005148672103881836, 0.005119999885559082, 0.005126143932342529, 0.00516096019744873, 0.0052111358642578124, 0.005213183879852295, 0.005237760066986084, 0.005107711791992187, 0.005119999885559082, 0.005138432025909424, 0.0051435518264770505, 0.005135359764099121, 0.005129216194152832, 0.005184512138366699, 0.0051435518264770505, 0.005139455795288086, 0.00515174388885498, 0.005193727970123291, 0.00521727991104126, 0.0052715520858764645, 0.005202943801879883, 0.005255167961120606, 0.0051968002319335935, 0.0052111358642578124, 0.005192704200744629, 0.005231616020202637, 0.005192704200744629, 0.005186560153961181, 0.005164031982421875, 0.005125120162963868, 0.005126143932342529, 0.00516812801361084, 0.005152768135070801, 0.005146624088287354, 0.005198847770690918, 0.005248000144958496, 0.005157887935638428, 0.005146624088287354, 0.005163040161132813, 0.005140448093414306, 0.005224448204040527, 0.005314559936523438, 0.005194752216339111, 0.00515993595123291, 0.00516096019744873, 0.005242879867553711, 0.005223423957824707, 0.005202943801879883, 0.005251071929931641, 0.0052152638435363765, 0.005231584072113037, 0.0052367358207702636, 0.00522547197341919, 0.0052971520423889164, 0.00517632007598877, 0.0053975038528442385, 0.005219327926635742, 0.005213183879852295, 0.005276671886444092, 0.005351424217224121, 0.00522547197341919, 0.005243904113769531, 0.006132736206054688, 0.006195199966430664, 0.005470208168029785, 0.005452799797058106, 0.005426176071166992, 0.005453824043273926, 0.005440512180328369, 0.005406720161437988, 0.005436416149139404, 0.005452799797058106, 0.005429247856140137, 0.0053678078651428224, 0.005372928142547607, 0.005325823783874512, 0.005316639900207519, 0.005561312198638916, 0.005417984008789062, 0.005386240005493164, 0.005321728229522705, 0.005310463905334473, 0.005291007995605469, 0.005338111877441406, 0.0054241280555725096, 0.005409823894500732, 0.005448671817779541, 0.005387263774871826, 0.005399551868438721, 0.00547430419921875, 0.005446656227111817, 0.005429247856140137, 0.0053944320678710935, 0.005277696132659912, 0.005130239963531494, 0.005171199798583984, 0.005132287979125977, 0.005141503810882568, 0.005153791904449463, 0.005608448028564453, 0.005387263774871826, 0.005467135906219483, 0.005440512180328369, 0.005343232154846191, 0.005496831893920898, 0.005511168003082275, 0.0053350400924682614, 0.005392384052276611, 0.00531763219833374, 0.005322751998901368, 0.005471231937408447, 0.005435391902923584, 0.005443583965301513, 0.005452799797058106, 0.005407743930816651, 0.005432320117950439, 0.0053944320678710935, 0.0054241280555725096, 0.005440512180328369, 0.005412864208221436, 0.005441535949707031, 0.005389311790466309, 0.005315584182739258, 0.005294079780578613, 0.005344255924224854, 0.005296127796173096, 0.005306367874145508, 0.00535859203338623, 0.005396512031555176, 0.005394400119781494, 0.005433343887329102, 0.005431295871734619, 0.005339136123657226, 0.005260287761688232, 0.0051998720169067385, 0.005192704200744629, 0.0052674560546875, 0.005215231895446777, 0.0052070398330688475, 0.005235712051391602, 0.005246975898742676, 0.005213183879852295, 0.005215231895446777, 0.005206016063690186, 0.005248000144958496, 0.005113952159881592, 0.005177279949188232, 0.005238751888275147, 0.005194752216339111, 0.005195775985717774, 0.005198880195617676, 0.0052285118103027346, 0.005219327926635742, 0.005221375942230225, 0.005248000144958496, 0.005203968048095703, 0.005223423957824707, 0.005241856098175048, 0.005210112094879151, 0.005212160110473632, 0.005425151824951172, 
0.00538316822052002, 0.005381120204925537, 0.005362688064575195, 0.005285888195037842, 0.005299200057983398, 0.005395455837249756, 0.005395455837249756, 0.0054271998405456545, 0.005432320117950439, 0.00541593599319458, 0.005411839962005615, 0.005439487934112549, 0.005431295871734619, 0.005422080039978027, 0.005465087890625, 0.005411839962005615, 0.005432320117950439, 0.005526527881622314, 0.005408768177032471, 0.005445631980895996, 0.005409823894500732, 0.005402592182159424, 0.005462016105651855, 0.005416959762573242, 0.005439487934112549, 0.00546611213684082, 0.005420032024383545, 0.0054568958282470706, 0.005464064121246338, 0.005400576114654541, 0.005436416149139404, 0.005329919815063477, 0.005422080039978027, 0.005464064121246338, 0.005401599884033203, 0.005403647899627686, 0.0054568958282470706, 0.005310463905334473, 0.005339136123657226, 0.005331967830657959, 0.005328896045684814, 0.005312511920928955, 0.005329919815063477, 0.0053043198585510255, 0.005399551868438721, 0.005387263774871826, 0.005465087890625, 0.005483520030975342, 0.005481472015380859, 0.005439487934112549, 0.005412864208221436, 0.005445631980895996, 0.005414912223815918, 0.005417984008789062, 0.005432320117950439, 0.005408768177032471, 0.005433343887329102, 0.005444608211517334, 0.005406720161437988, 0.005419007778167725, 0.005440512180328369, 0.005443583965301513, 0.005757952213287353, 0.005463039875030518, 0.005432320117950439, 0.005448703765869141, 0.005431295871734619, 0.005447679996490478, 0.005480447769165039, 0.005410816192626953, 0.005414912223815918, 0.005591040134429932, 0.0054241280555725096, 0.0054579200744628905, 0.005405695915222168, 0.005410816192626953, 0.0053678078651428224, 0.005404672145843506, 0.005403647899627686, 0.005575679779052735, 0.005431295871734619, 0.005421055793762207, 0.005465087890625, 0.005449728012084961, 0.005484543800354004, 0.0053309440612792965, 0.005349376201629639, 0.005453824043273926, 0.005441535949707031, 0.005402656078338623, 0.005451744079589844, 0.0054579200744628905, 0.005408768177032471, 0.005455872058868408, 0.005412864208221436, 0.005419007778167725, 0.005502975940704345, 0.005433343887329102, 0.005388288021087646, 0.005359615802764893, 0.005327871799468994, 0.005349376201629639, 0.0053647360801696775, 0.005249023914337158, 0.005165056228637695, 0.005189631938934326, 0.005204991817474365]",tokens/s,187.15516795588735,,,,,,,, -4bit-awq-gemv-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,meta-llama/Llama-2-13b-hf,meta-llama/Llama-2-13b-hf,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise 
ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run - report = scenario.run(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 105, in run - _ = backend.generate(self.inputs, self.config.generate_kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context - return func(*args, **kwargs) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 400, in generate - return self.pretrained_model.generate(**inputs, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context - return func(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 1914, in generate - result = self._sample( - File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2651, in _sample - outputs = self( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1174, in forward - outputs = self.model( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 978, in forward - layer_outputs = decoder_layer( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 718, in forward - hidden_states, self_attn_weights, present_key_value = self.self_attn( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 414, in forward - query_states = self.q_proj(hidden_states) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context - return func(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/gemv.py"", line 162, in forward - assert 
AWQ_INSTALLED, ( -AssertionError: AWQ kernels could not be loaded. Please install them from https://github.com/casper-hansen/AutoAWQ_kernels - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemv-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,x,x,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 304, in hf_raise_for_status - response.raise_for_status() - File ""/usr/local/lib/python3.10/dist-packages/requests/models.py"", line 1024, in raise_for_status - raise HTTPError(http_error_msg, response=self) -requests.exceptions.HTTPError: 404 Client Error: Not Found for url: https://huggingface.co/x/resolve/main/config.json - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1221, in hf_hub_download - return _hf_hub_download_to_cache_dir( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1325, in _hf_hub_download_to_cache_dir - _raise_on_head_call_error(head_call_error, force_download, local_files_only) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1823, in _raise_on_head_call_error - raise head_call_error - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1722, in _get_metadata_or_catch_error - metadata = get_hf_file_metadata(url=url, proxies=proxies, timeout=etag_timeout, headers=headers) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1645, in get_hf_file_metadata - r = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 372, in _request_wrapper - response = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 396, in _request_wrapper - hf_raise_for_status(response) - File 
""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status - raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-669492e4-684f91a4746cfedf34b1046c;d06f3984-dae6-4629-9c3e-de9065198821) - -Repository Not Found for url: https://huggingface.co/x/resolve/main/config.json. -Please make sure you specified the correct `repo_id` and `repo_type`. -If you are trying to access a private or gated repo, make sure you are authenticated. - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 425, in cached_file - raise EnvironmentError( -OSError: x is not a local folder and is not a valid model identifier listed on 'https://huggingface.co/models' -If this is a private repository, make sure to pass a token having permission to this repo either by logging in with `huggingface-cli login` or by passing `token=` - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemv-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,/,/,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File 
""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 373, in cached_file - raise EnvironmentError( -OSError: / does not appear to have a file named config.json. Checkout 'https://huggingface.co///tree/None' for available files. - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemv-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,togethercomputer/RedPajama-INCITE-Base-7B-v0.1,togethercomputer/RedPajama-INCITE-Base-7B-v0.1,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run - report = scenario.run(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 105, in run - _ = backend.generate(self.inputs, self.config.generate_kwargs) - File 
""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context - return func(*args, **kwargs) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 400, in generate - return self.pretrained_model.generate(**inputs, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context - return func(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 1914, in generate - result = self._sample( - File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2651, in _sample - outputs = self( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1097, in forward - outputs = self.gpt_neox( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 988, in forward - outputs = layer( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 753, in forward - attention_layer_outputs = self.attention( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 339, in forward - query, key, value, present = self._attn_projections_and_rope( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 224, in _attn_projections_and_rope - qkv = self.query_key_value(hidden_states) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context - return func(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/gemv.py"", line 162, in forward - assert AWQ_INSTALLED, ( -AssertionError: AWQ kernels could not be loaded. 
Please install them from https://github.com/casper-hansen/AutoAWQ_kernels - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemv-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,facebook/xglm-564M,facebook/xglm-564M,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - self.load_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3704, in from_pretrained - config = cls._autoset_attn_implementation( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1481, in _autoset_attn_implementation - cls._check_and_enable_flash_attn_2( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1572, in _check_and_enable_flash_attn_2 - raise ValueError( -ValueError: XGLMForCausalLM does not support Flash Attention 2.0 yet. 
Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmp9pfso7np/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemv-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,EleutherAI/gpt-neo-125m,EleutherAI/gpt-neo-125m,cuda,0,42,,,True,,,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,gpt_neo,MB,892.387328,738.721792,0.0,178.25792,176.546816,s,1,7.2647998046875,7.2647998046875,0.0,7.2647998046875,7.2647998046875,7.2647998046875,7.2647998046875,[7.2647998046875],,kWh,5.985883529163832e-06,3.264524596902485e-06,8.051950885989978e-06,1.7302359012056294e-05,,MB,1345.82272,887.619584,0.0,241.17248,221.108736,s,21,0.1893135347366333,0.009014930225553965,0.00043094855294290116,0.008855135917663574,0.009163519859313966,0.009177663803100585,0.01051406059265137,"[0.008999103546142579, 0.009125184059143067, 0.00883129596710205, 0.009106752395629882, 0.008855135917663574, 0.00915993595123291, 0.008859647750854491, 0.009177663803100585, 0.008832991600036621, 0.010848159790039063, 0.008884672164916993, 0.009163519859313966, 0.008786463737487793, 0.008887616157531739, 0.008804415702819824, 0.008833951950073242, 0.0088472318649292, 0.008809760093688964, 0.00880992031097412, 0.008848223686218262, 0.008841888427734375]",tokens/s,28397.335708082963,kWh,1.0530464876894629e-07,5.7701779843419037e-08,2.8480315911584243e-07,4.478095877282078e-07,tokens/kWh,571671547.495709,MB,1380.78208,902.299648,0.0,255.852544,221.111296,s,21,9.962652435302736,0.4744120207287016,0.006514076953166273,0.47399810791015623,0.4838817138671875,0.48536688232421876,0.4867486938476563,"[0.48709414672851564, 0.47399810791015623, 0.48536688232421876, 0.47753305053710937, 0.47936611938476564, 0.4698493957519531, 0.48066751098632815, 0.46914523315429685, 0.4690346984863281, 0.470818603515625, 0.4740699157714844, 0.4749193420410156, 0.47398431396484375, 0.4838817138671875, 0.46552774047851564, 0.46677810668945313, 0.46642910766601564, 0.4766146240234375, 0.46746005249023437, 0.4821667785644531, 0.46794699096679687]",tokens/s,132.79596057290323,kWh,5.581611985027058e-06,3.0584677508495903e-06,9.018815394884693e-06,1.765889513076134e-05,tokens/kWh,3567607.120009203,,s,1323,9.954007172584541,0.007523814945264197,0.0002723339708379757,0.007416831970214844,0.007732223987579345,0.007818956756591796,0.009264571609497065,"[0.0072724480628967286, 0.00738918399810791, 0.007366655826568603, 0.007385087966918945, 0.007450623989105225, 0.007525440216064453, 0.007730112075805664, 0.007688191890716553, 0.007692287921905518, 0.00765337610244751, 0.007689216136932373, 0.0076605439186096195, 0.007639039993286132, 0.007641088008880615, 0.007671807765960693, 0.007706624031066894, 0.007749631881713868, 0.007725056171417236, 0.007675903797149658, 0.007692287921905518, 0.007778304100036621, 0.007676928043365478, 
0.007662591934204102, 0.007669760227203369, 0.007658495903015137, 0.0075304961204528805, 0.007881728172302246, 0.007679999828338623, 0.007664639949798584, 0.009491456031799317, 0.00929792022705078, 0.007895040035247802, 0.007747583866119385, 0.0077619199752807615, 0.007739391803741455, 0.00773529577255249, 0.007725056171417236, 0.007713791847229004, 0.007758848190307617, 0.007664639949798584, 0.007662591934204102, 0.007648255825042725, 0.007649280071258545, 0.00774348783493042, 0.007704576015472412, 0.007710720062255859, 0.007697408199310303, 0.007713791847229004, 0.007664639949798584, 0.007670783996582031, 0.007770112037658691, 0.007702527999877929, 0.007753727912902832, 0.007622655868530274, 0.007602176189422607, 0.007710720062255859, 0.007726079940795898, 0.007696383953094482, 0.007674880027770996, 0.007669760227203369, 0.007666687965393066, 0.0076943359375, 0.007788544178009033, 0.007560192108154297, 0.007712768077850342, 0.007682047843933106, 0.007682047843933106, 0.007650303840637207, 0.007646207809448242, 0.0073768959045410155, 0.007375872135162354, 0.0073768959045410155, 0.0073697280883789065, 0.007394303798675537, 0.007368703842163086, 0.007329792022705078, 0.007710720062255859, 0.007774208068847656, 0.007775231838226319, 0.007726079940795898, 0.007714816093444824, 0.007671807765960693, 0.007659520149230957, 0.0076605439186096195, 0.007649280071258545, 0.00780083179473877, 0.007460864067077637, 0.007374847888946533, 0.007402495861053467, 0.007413760185241699, 0.007387135982513428, 0.007451648235321045, 0.00743936014175415, 0.007384064197540283, 0.00738099193572998, 0.007411712169647216, 0.0073994240760803225, 0.007386112213134765, 0.007434239864349365, 0.007417856216430664, 0.007395328044891358, 0.007363584041595459, 0.0073768959045410155, 0.0073768959045410155, 0.007386144161224365, 0.007428063869476319, 0.0074035201072692874, 0.007356416225433349, 0.007386112213134765, 0.00738099193572998, 0.007395328044891358, 0.007423999786376953, 0.007392255783081054, 0.0074106879234313965, 0.007379968166351319, 0.007414783954620361, 0.007425024032592774, 0.007897088050842285, 0.007996416091918946, 0.007610367774963379, 0.007700479984283447, 0.00773632001876831, 0.00774348783493042, 0.007689216136932373, 0.007685120105743408, 0.007390207767486572, 0.007264256000518799, 0.007419936180114746, 0.00740553617477417, 0.007513088226318359, 0.00743833589553833, 0.007414783954620361, 0.0073994240760803225, 0.007575551986694336, 0.007692287921905518, 0.007708672046661377, 0.007715839862823487, 0.0077209601402282715, 0.007676959991455078, 0.007684063911437988, 0.00769536018371582, 0.007678976058959961, 0.007667776107788086, 0.007661503791809082, 0.007673855781555176, 0.007684095859527588, 0.007681024074554443, 0.007636991977691651, 0.007643136024475097, 0.007678976058959961, 0.007638016223907471, 0.00760422420501709, 0.007614463806152344, 0.007622655868530274, 0.00765235185623169, 0.007688191890716553, 0.007664639949798584, 0.007767039775848389, 0.007700479984283447, 0.007827487945556641, 0.007721951961517334, 0.007692287921905518, 0.009053183555603026, 0.009359359741210938, 0.008120320320129394, 0.007714816093444824, 0.007654399871826172, 0.007640063762664795, 0.007641088008880615, 0.00764415979385376, 0.007345215797424316, 0.007654335975646973, 0.007790592193603516, 0.007673920154571533, 0.007688127994537354, 0.0076308479309082035, 0.007662591934204102, 0.007740416049957275, 0.007667712211608887, 0.007702527999877929, 0.007799808025360107, 0.007677951812744141, 0.007675903797149658, 
0.007614463806152344, 0.007656447887420655, 0.007636991977691651, 0.007663616180419922, 0.007673855781555176, 0.007648287773132324, 0.007535615921020508, 0.007661568164825439, 0.007667712211608887, 0.0078111357688903805, 0.0077229437828063964, 0.007815167903900147, 0.007692287921905518, 0.007764992237091065, 0.007755775928497314, 0.007752736091613769, 0.007756768226623535, 0.007746560096740723, 0.007425024032592774, 0.0074291200637817386, 0.00745472002029419, 0.0074065918922424315, 0.007779327869415284, 0.007792640209197998, 0.007860223770141601, 0.007725056171417236, 0.007798783779144287, 0.0077281279563903805, 0.007723008155822754, 0.0076953921318054196, 0.0076943039894104, 0.007656447887420655, 0.0076984319686889645, 0.007673855781555176, 0.007806975841522217, 0.00769536018371582, 0.007701504230499268, 0.007717887878417969, 0.0076984319686889645, 0.0076984319686889645, 0.007704576015472412, 0.007665664196014404, 0.007352320194244385, 0.007387135982513428, 0.007394303798675537, 0.007365632057189942, 0.00743936014175415, 0.007391232013702393, 0.007442431926727295, 0.0073400321006774905, 0.007415808200836181, 0.007384064197540283, 0.007386112213134765, 0.007386112213134765, 0.007405600070953369, 0.007422944068908691, 0.007328767776489258, 0.007366655826568603, 0.0073768959045410155, 0.007374847888946533, 0.007372799873352051, 0.007379968166351319, 0.007349279880523682, 0.007428063869476319, 0.007490560054779053, 0.007329792022705078, 0.007592959880828858, 0.0076574721336364745, 0.007650303840637207, 0.007269408226013184, 0.007379936218261719, 0.007417920112609863, 0.007362495899200439, 0.007374847888946533, 0.007513088226318359, 0.007811071872711181, 0.007685120105743408, 0.007701504230499268, 0.007726079940795898, 0.007715839862823487, 0.0077281279563903805, 0.0076616001129150395, 0.0073983678817749025, 0.007434239864349365, 0.007426047801971435, 0.007511040210723877, 0.00743936014175415, 0.007418879985809326, 0.007422976016998291, 0.007354368209838867, 0.007398399829864502, 0.00738918399810791, 0.007419904232025146, 0.007413760185241699, 0.007415808200836181, 0.007607295989990235, 0.007683072090148926, 0.0073400321006774905, 0.007357439994812012, 0.007386112213134765, 0.007449600219726562, 0.007468031883239746, 0.007388160228729248, 0.007416831970214844, 0.007353343963623047, 0.007394303798675537, 0.007401567935943603, 0.007400352001190186, 0.007370751857757568, 0.0073810238838195805, 0.007477215766906738, 0.007824384212493896, 0.0077281279563903805, 0.008739904403686523, 0.009556927680969239, 0.008324095726013184, 0.007730175971984863, 0.007731200218200684, 0.007813119888305664, 0.007673855781555176, 0.007703551769256592, 0.007650303840637207, 0.007619584083557129, 0.007693312168121338, 0.007651328086853027, 0.007631872177124023, 0.007751679897308349, 0.007755775928497314, 0.007702527999877929, 0.007686143875122071, 0.007611392021179199, 0.007713791847229004, 0.007560192108154297, 0.0077281279563903805, 0.007738368034362793, 0.007837696075439453, 0.007685120105743408, 0.0076984319686889645, 0.007672832012176513, 0.007426047801971435, 0.007375872135162354, 0.00739737606048584, 0.007387135982513428, 0.0074106879234313965, 0.0074301438331604, 0.007417856216430664, 0.007371776103973389, 0.0074107198715209965, 0.007439328193664551, 0.007409664154052734, 0.00739737606048584, 0.007393280029296875, 0.007823359966278077, 0.007727104187011719, 0.007355391979217529, 0.007420928001403809, 0.007417856216430664, 0.00740556812286377, 0.007404543876647949, 0.00739737606048584, 0.007623680114746094, 
0.007418879985809326, 0.007300096035003662, 0.007386112213134765, 0.00742195177078247, 0.007449600219726562, 0.007396351814270019, 0.007404543876647949, 0.007383039951324463, 0.007420928001403809, 0.007411712169647216, 0.007388160228729248, 0.007391232013702393, 0.00738918399810791, 0.007396351814270019, 0.007413760185241699, 0.007358463764190673, 0.0074035201072692874, 0.007388160228729248, 0.007393280029296875, 0.007388160228729248, 0.007360511779785156, 0.007390207767486572, 0.007395359992980957, 0.007419871807098389, 0.007394303798675537, 0.007385087966918945, 0.007379968166351319, 0.007383039951324463, 0.007422976016998291, 0.007419904232025146, 0.007388160228729248, 0.007478271961212158, 0.007508992195129394, 0.007437312126159668, 0.007266335964202881, 0.007359456062316894, 0.007392255783081054, 0.007377920150756836, 0.007419904232025146, 0.007400447845458984, 0.007395328044891358, 0.0074301438331604, 0.007423999786376953, 0.007417856216430664, 0.007379968166351319, 0.007422976016998291, 0.00742195177078247, 0.00743833589553833, 0.0074301438331604, 0.007699456214904785, 0.007756800174713135, 0.007442431926727295, 0.007418879985809326, 0.00742195177078247, 0.007445504188537597, 0.007449600219726562, 0.00742195177078247, 0.007383039951324463, 0.007417856216430664, 0.007414783954620361, 0.00740556812286377, 0.007407616138458252, 0.007418879985809326, 0.007419904232025146, 0.007457791805267334, 0.007451648235321045, 0.007419904232025146, 0.007417856216430664, 0.007315455913543701, 0.007449600219726562, 0.007414783954620361, 0.007383039951324463, 0.007456768035888672, 0.00748748779296875, 0.007460864067077637, 0.007592959880828858, 0.008148991584777832, 0.008216575622558593, 0.008041472434997558, 0.007771135807037354, 0.007740416049957275, 0.007715839862823487, 0.007732223987579345, 0.007732223987579345, 0.008047616004943848, 0.007970816135406494, 0.00782643222808838, 0.009587712287902832, 0.009406463623046875, 0.00799232006072998, 0.007769087791442871, 0.007819263935089112, 0.007699456214904785, 0.007703551769256592, 0.007686143875122071, 0.0076912641525268555, 0.007745535850524903, 0.007583744049072265, 0.007709695816040039, 0.007619584083557129, 0.007648255825042725, 0.0076984319686889645, 0.007675903797149658, 0.007702527999877929, 0.007707647800445557, 0.007395328044891358, 0.0073994240760803225, 0.007385087966918945, 0.007418879985809326, 0.007423999786376953, 0.0074106879234313965, 0.007415808200836181, 0.007401472091674805, 0.007422976016998291, 0.007465983867645264, 0.007423999786376953, 0.007409664154052734, 0.007402495861053467, 0.007419904232025146, 0.007374847888946533, 0.007392255783081054, 0.007414783954620361, 0.007417856216430664, 0.007408639907836914, 0.0073994240760803225, 0.0073768959045410155, 0.00743936014175415, 0.007493631839752197, 0.007411712169647216, 0.007387135982513428, 0.0074332160949707035, 0.007363584041595459, 0.007364607810974121, 0.0072724480628967286, 0.007388160228729248, 0.007411712169647216, 0.0073994240760803225, 0.0073768959045410155, 0.007547935962677002, 0.007480288028717041, 0.007489535808563232, 0.0074106879234313965, 0.007414783954620361, 0.007372799873352051, 0.0073820161819458, 0.007444479942321777, 0.007331840038299561, 0.007395328044891358, 0.007387135982513428, 0.007396351814270019, 0.00740556812286377, 0.007411712169647216, 0.00738099193572998, 0.007365632057189942, 0.007396351814270019, 0.007402495861053467, 0.007417856216430664, 0.0073768959045410155, 0.007413760185241699, 0.007435264110565186, 0.007304192066192627, 
0.007477248191833496, 0.007411712169647216, 0.007451680183410645, 0.007443424224853516, 0.007398399829864502, 0.007391232013702393, 0.0074106879234313965, 0.007357439994812012, 0.007451648235321045, 0.007426047801971435, 0.00743936014175415, 0.007384064197540283, 0.007392255783081054, 0.007395328044891358, 0.007458816051483155, 0.0074291200637817386, 0.0074106879234313965, 0.007411712169647216, 0.007464000225067139, 0.007632832050323487, 0.007702527999877929, 0.007673855781555176, 0.007692287921905518, 0.007383039951324463, 0.00738918399810791, 0.007423999786376953, 0.007514111995697022, 0.007418879985809326, 0.007375872135162354, 0.007393280029296875, 0.007362559795379638, 0.00740556812286377, 0.007520256042480469, 0.007456768035888672, 0.00740556812286377, 0.007387135982513428, 0.007386112213134765, 0.007371776103973389, 0.007360511779785156, 0.0073697280883789065, 0.007388160228729248, 0.007384064197540283, 0.007392255783081054, 0.007367680072784424, 0.007358463764190673, 0.007391232013702393, 0.007377920150756836, 0.007366687774658203, 0.007370719909667969, 0.007390207767486572, 0.007346176147460938, 0.007363584041595459, 0.007485439777374267, 0.007364607810974121, 0.007501823902130127, 0.00738918399810791, 0.00742406415939331, 0.007400383949279785, 0.007388160228729248, 0.007740416049957275, 0.007758848190307617, 0.007737343788146973, 0.009358336448669433, 0.007915520191192627, 0.007762944221496582, 0.007649280071258545, 0.007667712211608887, 0.007665664196014404, 0.007676928043365478, 0.0076943359375, 0.007677951812744141, 0.007649280071258545, 0.007425024032592774, 0.007377920150756836, 0.00739737606048584, 0.007393311977386474, 0.007403488159179687, 0.0074106879234313965, 0.007350272178649903, 0.007388160228729248, 0.007374847888946533, 0.0073697280883789065, 0.007385183811187744, 0.007381919860839844, 0.007337984085083008, 0.007452672004699707, 0.007402495861053467, 0.007398399829864502, 0.007419904232025146, 0.007402495861053467, 0.007412735939025879, 0.0074997758865356446, 0.007390207767486572, 0.007398399829864502, 0.0074106879234313965, 0.007427072048187256, 0.007414783954620361, 0.007386112213134765, 0.007359488010406494, 0.007393280029296875, 0.007402495861053467, 0.007392255783081054, 0.007365632057189942, 0.007387135982513428, 0.007269375801086426, 0.0073820161819458, 0.007482367992401123, 0.007431168079376221, 0.007393280029296875, 0.007383039951324463, 0.0073820161819458, 0.007344128131866455, 0.007361536026000977, 0.007384064197540283, 0.007358463764190673, 0.007418879985809326, 0.007391232013702393, 0.007377920150756836, 0.007368703842163086, 0.007364607810974121, 0.007375872135162354, 0.007388160228729248, 0.0073820161819458, 0.007395328044891358, 0.007329792022705078, 0.007320576190948487, 0.007342080116271973, 0.007428095817565918, 0.00738099193572998, 0.007386112213134765, 0.007403552055358887, 0.007377888202667236, 0.007391232013702393, 0.007377920150756836, 0.0074065918922424315, 0.007631872177124023, 0.007704576015472412, 0.007602176189422607, 0.007669760227203369, 0.007671807765960693, 0.007677951812744141, 0.007411776065826416, 0.007365568161010742, 0.007666687965393066, 0.00743833589553833, 0.0073768959045410155, 0.007386112213134765, 0.007377920150756836, 0.007383039951324463, 0.007377920150756836, 0.00740556812286377, 0.007432191848754883, 0.00738099193572998, 0.007379968166351319, 0.007409664154052734, 0.007354368209838867, 0.0074065918922424315, 0.0073768959045410155, 0.007377920150756836, 0.0073892478942871095, 0.007417791843414306, 0.007398399829864502, 
0.008716320037841797, 0.009146335601806641, 0.008052736282348634, 0.007742464065551758, 0.008163328170776368, 0.007713791847229004, 0.00765235185623169, 0.007716928005218506, 0.007662528038024903, 0.007346176147460938, 0.007320640087127686, 0.007372735977172851, 0.007370751857757568, 0.007377920150756836, 0.007416831970214844, 0.00738099193572998, 0.007304192066192627, 0.007361536026000977, 0.007367680072784424, 0.007361536026000977, 0.007450623989105225, 0.007387135982513428, 0.007428095817565918, 0.007368703842163086, 0.007507967948913574, 0.007682047843933106, 0.007579648017883301, 0.007756800174713135, 0.0076697921752929684, 0.007691232204437256, 0.0076574721336364745, 0.007872511863708496, 0.0077199358940124516, 0.007730175971984863, 0.009422847747802734, 0.009364480018615723, 0.007912447929382324, 0.007714816093444824, 0.007642111778259277, 0.007411712169647216, 0.007398399829864502, 0.007415808200836181, 0.007422976016998291, 0.007419904232025146, 0.007374847888946533, 0.0073359360694885255, 0.007342080116271973, 0.007391232013702393, 0.007387135982513428, 0.007401472091674805, 0.00738099193572998, 0.007346176147460938, 0.007362559795379638, 0.007362559795379638, 0.007371808052062988, 0.007455711841583252, 0.007352320194244385, 0.007352320194244385, 0.00734822416305542, 0.007358463764190673, 0.007415808200836181, 0.007948287963867188, 0.007681024074554443, 0.007678976058959961, 0.00765235185623169, 0.0076267518997192385, 0.007681024074554443, 0.007366655826568603, 0.007349247932434082, 0.007378943920135498, 0.007341055870056152, 0.007402495861053467, 0.007383071899414063, 0.007294943809509277, 0.007327744007110596, 0.007388160228729248, 0.007378943920135498, 0.007388160228729248, 0.007383039951324463, 0.007337984085083008, 0.007324672222137451, 0.007404543876647949, 0.007449600219726562, 0.007392255783081054, 0.007366655826568603, 0.0073697280883789065, 0.007333888053894043, 0.007359488010406494, 0.007378943920135498, 0.0072325119972229, 0.007408639907836914, 0.007418879985809326, 0.007370751857757568, 0.007394303798675537, 0.007366655826568603, 0.007353375911712647, 0.007373856067657471, 0.0073911681175231935, 0.00738918399810791, 0.007372799873352051, 0.007485439777374267, 0.007740416049957275, 0.007391232013702393, 0.007693312168121338, 0.008764415740966798, 0.007822336196899414, 0.007395328044891358, 0.007386112213134765, 0.007394303798675537, 0.007417888164520264, 0.007417823791503906, 0.007393280029296875, 0.0073768959045410155, 0.007615488052368164, 0.007449600219726562, 0.007394303798675537, 0.007388160228729248, 0.007407616138458252, 0.007361536026000977, 0.007394303798675537, 0.007419904232025146, 0.007484416007995606, 0.007408639907836914, 0.007426047801971435, 0.007451648235321045, 0.007414783954620361, 0.007352320194244385, 0.007402495861053467, 0.007401472091674805, 0.007422976016998291, 0.007409664154052734, 0.007400447845458984, 0.007816192150115966, 0.007794688224792481, 0.007670783996582031, 0.00763808012008667, 0.007755712032318115, 0.007662591934204102, 0.007702527999877929, 0.007633920192718506, 0.007669760227203369, 0.007788544178009033, 0.007763967990875244, 0.007673855781555176, 0.007701504230499268, 0.007654399871826172, 0.0076277761459350585, 0.007412735939025879, 0.007457791805267334, 0.007457791805267334, 0.007461887836456299, 0.007474271774291992, 0.007280640125274658, 0.007442431926727295, 0.007413760185241699, 0.007434239864349365, 0.007372799873352051, 0.007483391761779785, 0.007742464065551758, 0.007721983909606934, 0.00769536018371582, 
0.00785920000076294, 0.007658495903015137, 0.007667712211608887, 0.007685120105743408, 0.0076984319686889645, 0.0079267840385437, 0.0077608962059021, 0.007778304100036621, 0.00935321617126465, 0.009332736015319825, 0.007891007900238037, 0.007861184120178224, 0.007742464065551758, 0.007700511932373047, 0.0076973757743835445, 0.007686143875122071, 0.007674880027770996, 0.007673855781555176, 0.007703551769256592, 0.007684095859527588, 0.007792640209197998, 0.0076267518997192385, 0.007673855781555176, 0.007688191890716553, 0.007672832012176513, 0.007666687965393066, 0.007659520149230957, 0.007673855781555176, 0.007688191890716553, 0.007966720104217529, 0.007962624073028564, 0.0077281279563903805, 0.007726079940795898, 0.007672832012176513, 0.007700479984283447, 0.007375872135162354, 0.007395328044891358, 0.007411712169647216, 0.007394303798675537, 0.0073994240760803225, 0.0074106879234313965, 0.007875584125518798, 0.007358560085296631, 0.007310239791870117, 0.007390207767486572, 0.007565311908721924, 0.007700479984283447, 0.007711743831634522, 0.007575551986694336, 0.007448575973510742, 0.007371776103973389, 0.0074741759300231934, 0.00738918399810791, 0.007391232013702393, 0.007261184215545655, 0.007418879985809326, 0.007384064197540283, 0.007394303798675537, 0.007356416225433349, 0.007322624206542969, 0.0074629120826721195, 0.007292928218841553, 0.007296000003814697, 0.007296000003814697, 0.00743123197555542, 0.007246784210205078, 0.007295040130615235, 0.007399360179901123, 0.0074403839111328125, 0.007485439777374267, 0.007426047801971435, 0.007404543876647949, 0.007514111995697022, 0.0074065918922424315, 0.007409664154052734, 0.007331840038299561, 0.007391232013702393, 0.007388160228729248, 0.0073697280883789065, 0.007360511779785156, 0.007362624168395996, 0.007390143871307373, 0.007387135982513428, 0.007416831970214844, 0.007407616138458252, 0.007345151901245117, 0.007364607810974121, 0.00739737606048584, 0.00739737606048584, 0.0073820161819458, 0.007361536026000977, 0.007354368209838867, 0.007371776103973389, 0.007377920150756836, 0.00738099193572998, 0.007371776103973389, 0.007450623989105225, 0.0074035201072692874, 0.007384064197540283, 0.007388224124908447, 0.007414720058441162, 0.007443456172943115, 0.007428095817565918, 0.007392255783081054, 0.007404607772827148, 0.0073614721298217774, 0.007388160228729248, 0.007422976016998291, 0.007378079891204834, 0.007297887802124024, 0.007367680072784424, 0.0074291200637817386, 0.007395328044891358, 0.007396351814270019, 0.007400447845458984, 0.007377920150756836, 0.0073471999168395995, 0.0072295360565185545, 0.007400352001190186, 0.007386112213134765, 0.007349247932434082, 0.007394303798675537, 0.0074035201072692874, 0.0074035201072692874, 0.007387135982513428, 0.007379968166351319, 0.007367680072784424, 0.007513088226318359, 0.007401472091674805, 0.007383039951324463, 0.00739737606048584, 0.0073861761093139645, 0.00738809585571289, 0.007376959800720215, 0.007409599781036377, 0.007387135982513428, 0.007401472091674805, 0.007407616138458252, 0.007401472091674805, 0.007337984085083008, 0.007378943920135498, 0.007404543876647949, 0.007437344074249268, 0.007407584190368652, 0.007671807765960693, 0.007881728172302246, 0.007643136024475097, 0.007407616138458252, 0.007371776103973389, 0.007413760185241699, 0.007428160190582275, 0.007407551765441895, 0.007416831970214844, 0.007396351814270019, 0.007373824119567871, 0.007385087966918945, 0.007375904083251953, 0.007373792171478271, 0.007362559795379638, 0.007361536026000977, 0.007357439994812012, 
0.007417856216430664, 0.0074301438331604, 0.0074065918922424315, 0.007392255783081054, 0.007465983867645264, 0.007385087966918945, 0.007301119804382325, 0.007345151901245117, 0.007390207767486572, 0.007412735939025879, 0.007401472091674805, 0.0073768959045410155, 0.007355391979217529, 0.007336063861846924, 0.007290751934051514, 0.00739737606048584, 0.007363584041595459, 0.0073768959045410155, 0.007375872135162354, 0.007266304016113281, 0.007364607810974121, 0.007385087966918945, 0.007361536026000977, 0.007366720199584961, 0.007365568161010742, 0.007368703842163086, 0.0073697280883789065, 0.007312384128570557, 0.00734822416305542, 0.007367680072784424, 0.007394303798675537, 0.007391232013702393, 0.007366655826568603, 0.007385087966918945, 0.007364607810974121, 0.00739737606048584, 0.0074035201072692874, 0.007386112213134765, 0.007400447845458984, 0.007457791805267334, 0.007400447845458984, 0.007522304058074952, 0.0074414081573486324, 0.0074403839111328125, 0.007561215877532959, 0.007419904232025146, 0.0073472318649291995, 0.007345119953155518, 0.00738918399810791, 0.007392255783081054, 0.007428127765655518, 0.00740553617477417, 0.0073820161819458, 0.007419904232025146, 0.007370751857757568, 0.007395328044891358, 0.0074291200637817386, 0.0074106879234313965, 0.007457824230194092, 0.007419871807098389, 0.00740556812286377, 0.007375872135162354, 0.007390207767486572, 0.007411712169647216, 0.007395328044891358, 0.007386112213134765, 0.007410751819610596, 0.007421887874603271, 0.007370751857757568, 0.007437312126159668, 0.007415808200836181, 0.007425024032592774, 0.007401472091674805, 0.007455743789672851, 0.00738918399810791, 0.007388160228729248, 0.007522304058074952, 0.007390207767486572, 0.007390207767486572, 0.007350272178649903, 0.007290880203247071, 0.007379968166351319, 0.007223296165466309, 0.007368703842163086, 0.007407616138458252, 0.007419904232025146, 0.008146944046020508, 0.008239104270935058, 0.007781375885009765, 0.008217599868774414, 0.007967743873596191, 0.007686143875122071, 0.007649280071258545, 0.007715839862823487, 0.007648255825042725, 0.00769536018371582, 0.007699456214904785, 0.0076984319686889645, 0.008020000457763672, 0.0077833919525146485, 0.007583744049072265, 0.007379968166351319, 0.0073768959045410155, 0.007384064197540283, 0.007388160228729248, 0.007367680072784424, 0.007387135982513428, 0.007353343963623047, 0.007399456024169922, 0.007360479831695557, 0.007383039951324463, 0.007322624206542969, 0.007301119804382325, 0.0074291200637817386, 0.007398399829864502, 0.00739737606048584, 0.007386112213134765, 0.007465983867645264, 0.007414783954620361, 0.007692287921905518, 0.007896063804626464, 0.007575551986694336, 0.00740556812286377, 0.007444479942321777, 0.007443456172943115, 0.007773183822631836, 0.007771135807037354, 0.007678976058959961, 0.007400447845458984, 0.0074035201072692874, 0.007374847888946533, 0.007518208026885987, 0.00794316816329956, 0.007948287963867188, 0.007778304100036621, 0.007600128173828125, 0.007422976016998291, 0.007414783954620361, 0.007402495861053467, 0.007411776065826416, 0.007441343784332276, 0.007418879985809326, 0.007419904232025146, 0.007401472091674805, 0.0073820161819458, 0.007270400047302246, 0.00742195177078247, 0.007456768035888672, 0.007368703842163086, 0.00743833589553833, 0.007461887836456299, 0.007438399791717529, 0.007416768074035645, 0.007414783954620361, 0.007449600219726562, 0.007383039951324463, 0.007437312126159668, 0.007413760185241699, 0.0074106879234313965, 0.007461887836456299, 0.007432223796844483, 
0.007451615810394287, 0.007386112213134765, 0.007358463764190673, 0.007420928001403809, 0.0074291200637817386, 0.0074106879234313965, 0.007388160228729248, 0.007392288208007813, 0.007358431816101074, 0.0073994240760803225, 0.007700479984283447, 0.007426047801971435, 0.007422976016998291, 0.007400447845458984, 0.007414783954620361, 0.007371776103973389, 0.007371776103973389, 0.007418879985809326, 0.007374847888946533, 0.007402495861053467, 0.007400447845458984, 0.0073768959045410155, 0.007366655826568603, 0.007402495861053467, 0.007396383762359619, 0.007387104034423828, 0.00738918399810791, 0.007368768215179443, 0.007403456211090088, 0.007394303798675537, 0.007416831970214844, 0.007363584041595459, 0.00739737606048584, 0.007390207767486572, 0.007468031883239746, 0.007398399829864502, 0.007408639907836914, 0.007387135982513428, 0.007458816051483155, 0.007432191848754883, 0.007398399829864502, 0.007483424186706543, 0.007375840187072754, 0.007490560054779053, 0.007448575973510742, 0.007418879985809326, 0.007455743789672851, 0.007266304016113281, 0.007350272178649903, 0.00738099193572998, 0.007414783954620361, 0.007373824119567871, 0.0074035201072692874, 0.007412735939025879, 0.0074065918922424315, 0.0074065918922424315, 0.007409664154052734, 0.007329792022705078, 0.007401472091674805, 0.007664639949798584, 0.007696383953094482, 0.007617536067962646, 0.007634943962097168, 0.007636991977691651, 0.007625728130340576, 0.007623680114746094, 0.007505919933319092, 0.007396351814270019, 0.009329664230346679, 0.009361408233642577, 0.008043519973754883, 0.0077619199752807615, 0.007658495903015137, 0.007661568164825439, 0.007669760227203369, 0.00765337610244751, 0.007620607852935791, 0.007646207809448242, 0.007532576084136963, 0.007421919822692871, 0.007342080116271973, 0.007345151901245117, 0.007393280029296875, 0.007394303798675537, 0.0073697280883789065, 0.0074301438331604, 0.007412735939025879, 0.007401472091674805, 0.007462975978851319, 0.007418816089630127, 0.007377920150756836, 0.007833600044250488, 0.0076277761459350585, 0.009075712203979493, 0.009378815650939941, 0.008036352157592774, 0.007703551769256592, 0.007701504230499268, 0.007621632099151611, 0.007434239864349365, 0.00769536018371582, 0.007961599826812745, 0.007643136024475097, 0.00765235185623169, 0.007641088008880615, 0.007417856216430664, 0.007400447845458984, 0.007456768035888672, 0.007400447845458984, 0.0074106879234313965, 0.007266304016113281, 0.007423999786376953, 0.007265279769897461, 0.0074291200637817386, 0.007384064197540283, 0.007396480083465576, 0.007391104221343994, 0.0074148159027099605, 0.007427040100097656, 0.007453695774078369, 0.007452672004699707, 0.007417856216430664, 0.00743936014175415, 0.00742195177078247, 0.007407616138458252, 0.007394368171691894, 0.007413695812225342, 0.007464960098266602, 0.007399487972259522, 0.007414783954620361, 0.007399360179901123, 0.00739737606048584, 0.007385087966918945, 0.007366655826568603, 0.007385087966918945, 0.007388160228729248, 0.00740556812286377, 0.0074035201072692874, 0.007392255783081054, 0.007359519958496093, 0.007392223834991455, 0.007929855823516846, 0.0077209601402282715, 0.007780352115631104, 0.007667712211608887, 0.007609344005584716, 0.007606272220611572, 0.007602176189422607, 0.007400447845458984, 0.007371776103973389, 0.007390207767486572, 0.007383039951324463, 0.00742300796508789, 0.007403488159179687, 0.007413760185241699, 0.007377920150756836, 0.007385087966918945, 0.007428095817565918, 0.0074332160949707035, 0.007378943920135498, 0.007402495861053467, 
0.007362559795379638, 0.007320735931396484, 0.007286655902862549, 0.0072633600234985355, 0.007268191814422608, 0.007264256000518799, 0.007361536026000977, 0.007363584041595459, 0.007379968166351319, 0.007414783954620361, 0.007375872135162354, 0.007423999786376953]",tokens/s,132.91129663275964,,,,,,,, -4bit-awq-gemv-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,EleutherAI/gpt-j-6b,EleutherAI/gpt-j-6b,cuda,0,42,,,True,,,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,gptj,MB,3840.335872,4578.607104,0.0,3992.977408,3875.045888,s,1,9.0854033203125,9.0854033203125,0.0,9.0854033203125,9.0854033203125,9.0854033203125,9.0854033203125,[9.0854033203125],,kWh,2.7147177340270272e-05,1.4863089211095229e-05,3.71514186100208e-05,7.91616851613863e-05,,MB,2100.072448,4817.682432,0.0,4171.235328,4096.03328,s,10,1.0599249954223633,0.10599249954223633,0.0003303956056014514,0.10602869033813477,0.10647223968505859,0.10648557434082032,0.10649624206542968,"[0.10649890899658203, 0.10575011444091797, 0.1059898910522461, 0.10557164764404296, 0.10612742614746094, 0.10556201934814453, 0.10566291046142579, 0.10646927642822265, 0.10622531127929688, 0.10606748962402343]",tokens/s,2415.265241461619,kWh,1.2498488923172212e-06,6.848543282543577e-07,6.2661804515368475e-06,8.200883672108426e-06,tokens/kWh,31216148.19030631,MB,2107.027456,4922.540032,0.0,4276.092928,4202.843136,s,10,26.5477021484375,2.65477021484375,0.049196178616912174,2.67496142578125,2.70367705078125,2.70499892578125,2.70605642578125,"[2.589861328125, 2.583371826171875, 2.67386328125, 2.687188720703125, 2.65938818359375, 2.6760595703125, 2.574428466796875, 2.70632080078125, 2.693836669921875, 2.70338330078125]",tokens/s,23.730867420368412,kWh,3.1721882772821554e-05,1.7384909507633037e-05,7.513105133286288e-05,0.00012423784361331744,tokens/kWh,507091.8664371187,,s,630,26.541825050353985,0.04212988103230794,0.0010543305347371311,0.04257484817504883,0.043153510284423824,0.04338432006835938,0.043962817382812505,"[0.0415478401184082, 0.040529857635498046, 0.040768512725830076, 0.040681472778320314, 0.04036710357666016, 0.040460289001464846, 0.04003942489624023, 0.04046438217163086, 0.040612865447998046, 0.042071041107177735, 0.041836544036865236, 0.04119551849365234, 0.04048998260498047, 0.04046131134033203, 0.040271873474121096, 0.04132556915283203, 0.0436756477355957, 0.04213759994506836, 0.04182732772827148, 0.04128870391845703, 0.04354355239868164, 0.04191743850708008, 0.040476673126220705, 0.04182425689697265, 0.04188467025756836, 0.042071041107177735, 0.042700801849365234, 0.042605567932128906, 0.0419502067565918, 0.041990142822265625, 0.04185190582275391, 0.041970687866210936, 0.04085452651977539, 0.04050534439086914, 0.04048588943481445, 0.04070195388793945, 0.04050227355957031, 0.04047564697265625, 0.041025535583496094, 0.04336435317993164, 0.04215193557739258, 0.04066611099243164, 0.04064665603637695, 0.040509441375732425, 0.04054937744140625, 0.040584190368652344, 0.04042342376708984, 0.040823806762695314, 0.040581119537353515, 0.04055039978027344, 
0.04048691177368164, 0.04066918563842774, 0.04042956924438477, 0.04053504180908203, 0.04042649459838867, 0.04059033584594727, 0.040458240509033204, 0.04055039978027344, 0.04062617492675781, 0.04062617492675781, 0.040612865447998046, 0.040605728149414065, 0.04054729461669922, 0.04042649459838867, 0.04051865768432617, 0.04035379028320313, 0.04038246536254883, 0.04039680099487305, 0.04033638381958008, 0.04072959899902344, 0.04047872161865235, 0.040515583038330076, 0.04053299331665039, 0.04065280151367188, 0.040438785552978515, 0.04034560012817383, 0.04054323196411133, 0.04070707321166992, 0.04064051055908203, 0.04053811264038086, 0.04050022506713867, 0.04044800186157226, 0.04053708648681641, 0.04050947189331055, 0.04227990341186524, 0.043856895446777344, 0.04290457534790039, 0.0425164794921875, 0.04249190521240234, 0.04258816146850586, 0.042551296234130856, 0.04065280151367188, 0.04043468856811523, 0.04069375991821289, 0.043153408050537106, 0.042646526336669925, 0.04069580841064453, 0.040578048706054685, 0.040659969329833984, 0.04045926284790039, 0.040791038513183595, 0.0407459831237793, 0.04071219253540039, 0.04095897674560547, 0.040852481842041016, 0.0407347183227539, 0.040576000213623044, 0.04058828735351563, 0.040586238861083986, 0.041022464752197264, 0.04131737518310547, 0.041106433868408204, 0.04152012634277344, 0.040622081756591794, 0.04060979080200195, 0.04060160064697266, 0.04067737579345703, 0.04053299331665039, 0.04055551910400391, 0.0405667839050293, 0.04044287872314453, 0.040569854736328126, 0.04053606414794922, 0.0412303352355957, 0.04253286361694336, 0.042461185455322265, 0.04057190322875977, 0.04048281478881836, 0.04048588943481445, 0.040546302795410154, 0.04053094482421875, 0.04096307373046875, 0.04134195327758789, 0.04111872100830078, 0.04257894515991211, 0.042498046875, 0.0426956787109375, 0.042657791137695314, 0.04277248001098633, 0.042613761901855465, 0.04244070434570312, 0.04257484817504883, 0.04269055938720703, 0.04269776153564453, 0.0424560317993164, 0.04252569580078125, 0.04246835327148438, 0.043261951446533206, 0.04281856155395508, 0.042619903564453124, 0.04273459243774414, 0.04274687957763672, 0.04267827224731445, 0.04262604904174805, 0.04369203186035156, 0.042805248260498044, 0.04279500961303711, 0.04316262435913086, 0.04245913696289062, 0.042449920654296876, 0.04236492919921875, 0.04246220779418945, 0.04258508682250976, 0.04242329788208008, 0.042713153839111326, 0.04253996658325195, 0.04246015930175781, 0.04253696060180664, 0.042452991485595705, 0.04254412841796875, 0.04233011245727539, 0.04347289657592773, 0.04263935852050781, 0.04276019287109375, 0.04258611297607422, 0.04253593444824219, 0.042943489074707034, 0.042590206146240234, 0.04312985610961914, 0.04270796966552735, 0.042485759735107424, 0.04354764938354492, 0.043172863006591795, 0.04248678588867188, 0.0425984001159668, 0.042552318572998044, 0.04235059356689453, 0.0425533447265625, 0.04254412841796875, 0.04054937744140625, 0.04051968002319336, 0.040687614440917966, 0.04274995040893555, 0.04260454559326172, 0.04245811080932617, 0.04252262496948242, 0.0425082893371582, 0.04257484817504883, 0.04251443099975586, 0.04327116775512695, 0.042848255157470705, 0.042742782592773435, 0.04310528182983398, 0.04332339096069336, 0.04289945602416992, 0.04267007827758789, 0.04294451141357422, 0.044268543243408204, 0.043480064392089846, 0.04277248001098633, 0.04267007827758789, 0.04275609588623047, 0.042790912628173826, 0.04257382583618164, 0.04271923065185547, 0.043254783630371094, 0.04273664093017578, 0.04244889450073242, 
0.04253593444824219, 0.042333183288574217, 0.04256256103515625, 0.042589183807373046, 0.04260147094726562, 0.04254719924926758, 0.04256460952758789, 0.04240691375732422, 0.0425984001159668, 0.04342169570922851, 0.042684417724609375, 0.042428417205810545, 0.04261478424072265, 0.04208947372436524, 0.044867584228515625, 0.043146240234375, 0.04285235214233398, 0.042550273895263675, 0.042456062316894534, 0.04246015930175781, 0.04282470321655273, 0.04297420883178711, 0.0425533447265625, 0.04253081512451172, 0.0424192008972168, 0.042262527465820314, 0.042418174743652344, 0.04249292755126953, 0.04321279907226563, 0.04246015930175781, 0.04253702545166015, 0.04223174285888672, 0.0425082893371582, 0.04322918319702149, 0.04025753784179688, 0.040594432830810545, 0.04050950241088867, 0.04124358367919922, 0.040576000213623044, 0.04053811264038086, 0.04042342376708984, 0.04058726501464844, 0.042521598815917966, 0.04261273574829102, 0.04248166275024414, 0.042428417205810545, 0.0423680305480957, 0.042563552856445315, 0.04235059356689453, 0.042947616577148434, 0.04363670349121094, 0.042567680358886716, 0.0425799674987793, 0.0425615348815918, 0.04248371124267578, 0.0433889274597168, 0.042900478363037106, 0.04272025680541992, 0.04232089614868164, 0.04273459243774414, 0.04247347259521484, 0.04247654342651367, 0.042446849822998046, 0.042403839111328126, 0.04245811080932617, 0.04271615982055664, 0.04268544006347656, 0.04269465637207031, 0.04268236923217773, 0.04267007827758789, 0.04251955032348633, 0.04255644989013672, 0.04288406372070312, 0.0427325439453125, 0.04259942245483399, 0.04245401763916016, 0.042501121520996096, 0.04268544006347656, 0.04249292755126953, 0.04333977508544922, 0.04268646240234375, 0.042616832733154295, 0.04328755187988281, 0.04268032073974609, 0.042418174743652344, 0.04274585723876953, 0.0425533447265625, 0.042584064483642575, 0.042517505645751956, 0.04315852737426758, 0.04264243316650391, 0.04057292938232422, 0.0405401611328125, 0.040599552154541016, 0.040443904876708986, 0.04057395172119141, 0.04057395172119141, 0.040959999084472655, 0.04253081512451172, 0.04259737777709961, 0.04061695861816406, 0.041181182861328124, 0.043865089416503904, 0.04311040115356445, 0.043017215728759765, 0.042590206146240234, 0.04284928131103516, 0.04279500961303711, 0.04048998260498047, 0.040499198913574216, 0.04258303833007813, 0.042916862487792966, 0.04267007827758789, 0.0425533447265625, 0.042578975677490236, 0.04237820816040039, 0.042559486389160156, 0.04270694351196289, 0.04258201599121094, 0.042638336181640625, 0.04313190460205078, 0.04264755249023437, 0.04252671813964844, 0.04266393661499023, 0.04256563186645508, 0.042616832733154295, 0.04357948684692383, 0.04250511932373047, 0.04275302505493164, 0.04281856155395508, 0.042874881744384766, 0.04257484817504883, 0.0426321907043457, 0.042738689422607425, 0.04307046508789063, 0.042592254638671875, 0.042616832733154295, 0.042590206146240234, 0.04263628768920898, 0.04246835327148438, 0.04259942245483399, 0.042511390686035155, 0.04256150436401367, 0.04289843368530273, 0.04070502471923828, 0.04043673706054687, 0.040564735412597655, 0.043681793212890625, 0.04290662384033203, 0.042502143859863284, 0.04337868881225586, 0.0426506233215332, 0.043038719177246096, 0.04247552108764648, 0.04252467346191406, 0.042482688903808595, 0.04262400054931641, 0.04205055999755859, 0.042635265350341796, 0.04256358337402344, 0.040139774322509765, 0.04013772964477539, 0.04058009719848633, 0.040515583038330076, 0.04052377700805664, 0.04052275085449219, 0.04054732894897461, 
0.040619007110595705, 0.040594432830810545, 0.04055244827270508, 0.040668159484863284, 0.040622081756591794, 0.041224193572998044, 0.0408616943359375, 0.04063129425048828, 0.04095590209960937, 0.04053401565551758, 0.04055039978027344, 0.04052787017822266, 0.040699905395507815, 0.04072857666015625, 0.04058009719848633, 0.04055449676513672, 0.040430591583251956, 0.04056576156616211, 0.040515583038330076, 0.040529918670654294, 0.04059648132324219, 0.040542209625244144, 0.04051251220703125, 0.04050534439086914, 0.04049817657470703, 0.04059648132324219, 0.04061798477172852, 0.040531967163085936, 0.04058009719848633, 0.040528896331787106, 0.040444927215576174, 0.041388031005859374, 0.04071731185913086, 0.04056576156616211, 0.04059648132324219, 0.04052070236206055, 0.04048998260498047, 0.04267520141601563, 0.042602497100830077, 0.04263628768920898, 0.04251136016845703, 0.04049407958984375, 0.0404398078918457, 0.040551422119140625, 0.04063846588134765, 0.04058931350708008, 0.0406640625, 0.04060467147827149, 0.04051660919189453, 0.04065280151367188, 0.04055449676513672, 0.04063436889648438, 0.042355712890625, 0.04300697708129883, 0.04271615982055664, 0.04256358337402344, 0.041799678802490234, 0.04311859130859375, 0.04283494567871094, 0.04317388916015625, 0.04331008148193359, 0.0429219856262207, 0.042943489074707034, 0.04279296112060547, 0.04328140640258789, 0.04315545654296875, 0.04279193496704101, 0.04364492797851562, 0.043014144897460936, 0.04275199890136719, 0.04255846405029297, 0.042845184326171876, 0.042627071380615236, 0.04265267181396484, 0.04271513748168945, 0.042877952575683595, 0.04483379364013672, 0.043826175689697267, 0.043423744201660154, 0.04290764617919922, 0.04271615982055664, 0.04324147033691406, 0.04284723281860352, 0.04283391952514649, 0.04293632125854492, 0.04252671813964844, 0.04275302505493164, 0.04260966491699219, 0.04301311874389648, 0.04292095947265625, 0.042744831085205076, 0.042805248260498044, 0.042793983459472655, 0.04236185455322266, 0.042600448608398435, 0.042820606231689456, 0.04270796966552735, 0.042700801849365234, 0.04275404739379883, 0.04291481781005859, 0.04442214584350586, 0.04319232177734375, 0.04347391891479492, 0.04286054229736328, 0.04270489501953125, 0.04302438354492188, 0.04274892807006836, 0.043177982330322266, 0.04272742462158203, 0.0427407341003418, 0.04263740921020508, 0.04267612838745117, 0.04400332641601563, 0.04316774368286133, 0.042881023406982424, 0.04283084869384766, 0.042543102264404296, 0.04261785507202148, 0.0426506233215332, 0.04088115310668945, 0.040546302795410154, 0.040592384338378903, 0.04057702255249023, 0.04275404739379883, 0.043237377166748046, 0.04270796966552735, 0.04323535919189453, 0.04300899124145508, 0.043548671722412106, 0.043150337219238284, 0.04289235305786133, 0.04235769653320313, 0.04274380874633789, 0.04293734359741211, 0.043055103302001956, 0.04289535903930664, 0.042949630737304685, 0.04276224136352539, 0.040428543090820314, 0.043243518829345705, 0.0427407341003418, 0.04276633453369141, 0.042742782592773435, 0.04254924774169922, 0.0424376335144043, 0.04264550399780274, 0.04240486526489258, 0.04333670425415039, 0.04350156784057617, 0.042820606231689456, 0.04300595092773438, 0.04315750503540039, 0.042787841796875, 0.04287590408325195, 0.042913791656494144, 0.04263126373291016, 0.04283484649658203, 0.04266393661499023, 0.042845184326171876, 0.04277350234985351, 0.04283391952514649, 0.042313728332519535, 0.0427694091796875, 0.04267212677001953, 0.0439818229675293, 0.04389068984985352, 0.0431912956237793, 0.042793983459472655, 
0.042728446960449216, 0.04268544006347656, 0.04265574264526367, 0.04422655868530274, 0.043872318267822265, 0.04281542587280274, 0.04283084869384766, 0.04258201599121094, 0.04273766326904297, 0.042790912628173826, 0.04280217742919922, 0.04292095947265625, 0.04284415817260742, 0.04306534576416016, 0.042193920135498046, 0.04288716888427734, 0.04266393661499023, 0.042866687774658206, 0.04274687957763672, 0.04283391952514649, 0.042782718658447266, 0.043022335052490236, 0.04294144058227539, 0.04301107025146484, 0.04326604843139648, 0.04391628646850586, 0.04299980926513672, 0.042842113494873046, 0.04262911987304688, 0.042638336181640625, 0.04278476715087891, 0.04263423919677734, 0.042982398986816404, 0.043170814514160154, 0.04295884704589844, 0.04286361694335938, 0.0428328971862793, 0.0427663688659668, 0.042869728088378904, 0.04287692642211914, 0.04287180709838867, 0.04276633453369141, 0.04287385559082031, 0.04275711822509766, 0.04290150451660156, 0.04277248001098633, 0.04265574264526367, 0.043028480529785154, 0.04296908950805664, 0.04377804946899414, 0.04300185775756836, 0.04290457534790039, 0.04276326370239258, 0.04271104049682617, 0.04268134307861328, 0.04309196853637695, 0.043014144897460936, 0.043409408569335936, 0.042881023406982424, 0.042848255157470705, 0.04273561477661133, 0.04262297439575195, 0.04256361770629883, 0.04281238555908203, 0.04268339157104492, 0.04276531219482422, 0.043104255676269534, 0.04266086578369141, 0.042641407012939454, 0.042874881744384766, 0.042826751708984374, 0.04287180709838867, 0.0428851203918457, 0.043581439971923826, 0.043154430389404294, 0.043238399505615234, 0.04279808044433594]",tokens/s,23.736122094271636,,,,,,,, -4bit-awq-gemv-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,stabilityai/stablelm-2-12b,stabilityai/stablelm-2-12b,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run - report = scenario.run(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 105, in run - _ = backend.generate(self.inputs, self.config.generate_kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context - return func(*args, **kwargs) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 400, in generate - return 
self.pretrained_model.generate(**inputs, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context - return func(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 1914, in generate - result = self._sample( - File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2651, in _sample - outputs = self( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/stablelm/modeling_stablelm.py"", line 1268, in forward - outputs = self.model( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/stablelm/modeling_stablelm.py"", line 1062, in forward - layer_outputs = decoder_layer( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/stablelm/modeling_stablelm.py"", line 792, in forward - self_attn_output, self_attn_weights, present_key_value = self.self_attn( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/stablelm/modeling_stablelm.py"", line 560, in forward - query_states = self.q_proj(hidden_states) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context - return func(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/gemv.py"", line 162, in forward - assert AWQ_INSTALLED, ( -AssertionError: AWQ kernels could not be loaded. 
Please install them from https://github.com/casper-hansen/AutoAWQ_kernels - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemv-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,google/gemma-2b,google/gemma-2b,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 304, in hf_raise_for_status - response.raise_for_status() - File ""/usr/local/lib/python3.10/dist-packages/requests/models.py"", line 1024, in raise_for_status - raise HTTPError(http_error_msg, response=self) -requests.exceptions.HTTPError: 403 Client Error: Forbidden for url: https://huggingface.co/google/gemma-2b/resolve/main/config.json - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1722, in _get_metadata_or_catch_error - metadata = get_hf_file_metadata(url=url, proxies=proxies, timeout=etag_timeout, headers=headers) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1645, in get_hf_file_metadata - r = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 372, in _request_wrapper - response = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 396, in _request_wrapper - hf_raise_for_status(response) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 367, in hf_raise_for_status - raise HfHubHTTPError(message, response=response) from e -huggingface_hub.utils._errors.HfHubHTTPError: (Request ID: Root=1-66948131-3b9e312013a4f2f05b0d57cc;a5e7424f-1441-4583-a3cb-67182b0313e4) - -403 Forbidden: Please enable access to public gated repositories in your fine-grained token settings to view this repository.. -Cannot access content at: https://huggingface.co/google/gemma-2b/resolve/main/config.json. -If you are trying to create or update content,make sure you have a token with the `write` role. 
- -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1221, in hf_hub_download - return _hf_hub_download_to_cache_dir( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1325, in _hf_hub_download_to_cache_dir - _raise_on_head_call_error(head_call_error, force_download, local_files_only) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1826, in _raise_on_head_call_error - raise LocalEntryNotFoundError( -huggingface_hub.utils._errors.LocalEntryNotFoundError: An error happened while trying to locate the file on the Hub and we cannot find the requested files in the local cache. Please check your connection and try again or make sure your Internet connection is on. - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 445, in cached_file - raise EnvironmentError( -OSError: We couldn't connect to 'https://huggingface.co' to load this file, couldn't find it in the cached files and it looks like google/gemma-2b is not the path to a directory containing a file named config.json. -Checkout your internet connection or see how to run the library in offline mode at 'https://huggingface.co/docs/transformers/installation#offline-mode'. 
- -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemv-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,EleutherAI/polyglot-ko-12.8b,EleutherAI/polyglot-ko-12.8b,cuda,0,42,,,True,,,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,gpt_neox,MB,7421.70624,8015.839232,0.0,7430.209536,7414.23104,s,1,10.5738935546875,10.5738935546875,0.0,10.5738935546875,10.5738935546875,10.5738935546875,10.5738935546875,[10.5738935546875],,kWh,4.473719257431792e-05,2.4503308719547623e-05,6.386588442597319e-05,0.00013310638571983874,,MB,1778.05312,8812.756992,0.0,8166.309888,8044.111872,s,10,1.988222427368164,0.19882224273681642,5.74598727761381e-05,0.19881380462646486,0.19890319519042968,0.19890977325439455,0.19891503570556643,"[0.19891635131835939, 0.19874085998535157, 0.19879350280761718, 0.19882028198242188, 0.19880732727050782, 0.19878997802734374, 0.19884771728515624, 0.1989017333984375, 0.1987403564453125, 0.19886431884765626]",tokens/s,1287.582297011258,kWh,2.3510816124998943e-06,1.2879539809547566e-06,1.3500887707018627e-05,1.7139923300473277e-05,tokens/kWh,14935889.473492054,MB,1778.05312,8980.529152,0.0,8334.082048,8265.729024,s,10,19.20118566894531,1.920118566894531,0.02228685459603105,1.91575,1.9468973388671875,1.9605423583984374,1.9714583740234375,"[1.900060302734375, 1.9207611083984375, 1.91608837890625, 1.919744140625, 1.9021898193359374, 1.9741873779296875, 1.9438651123046875, 1.915052001953125, 1.91541162109375, 1.8938258056640624]",tokens/s,32.810473835421476,kWh,2.2517051253682303e-05,1.2340095945555837e-05,8.735422075478171e-05,0.00012221136795401985,tokens/kWh,515500.325826504,,s,630,19.198157777786257,0.030473266313946434,0.0009332049841704222,0.030071296691894532,0.03184977931976318,0.032104652786254885,0.03381917652130128,"[0.030643199920654295, 0.030176256179809572, 0.029615104675292967, 0.02980147171020508, 0.02960291290283203, 0.029675424575805662, 0.029575168609619142, 0.02970419120788574, 0.029607936859130858, 0.029838336944580077, 0.029533184051513672, 0.03037593650817871, 0.030733312606811523, 0.030493696212768553, 0.02953113555908203, 0.029585407257080077, 0.02956492805480957, 0.029678592681884764, 0.029642751693725586, 0.029705215454101562, 0.0295731201171875, 0.029615104675292967, 0.029624319076538085, 0.029792255401611328, 0.029682687759399414, 0.029799423217773437, 0.029560831069946288, 0.029699167251586913, 0.029676448822021483, 0.030847999572753908, 0.03390054321289063, 0.03139686393737793, 0.030650367736816408, 0.02976870346069336, 0.029656063079833983, 0.029716480255126954, 0.03483340835571289, 0.03242803192138672, 0.030669824600219726, 0.03079884719848633, 0.03076300811767578, 0.030851072311401367, 0.031474687576293944, 0.030983167648315428, 0.030427135467529298, 0.02972876739501953, 0.02956595230102539, 0.029784063339233398, 0.029623296737670897, 0.02978713607788086, 0.029823999404907226, 0.02981171226501465, 0.029716480255126954, 0.029628416061401368, 0.029657087326049804, 0.02956595230102539, 
0.029211648941040037, 0.029699071884155274, 0.02932428741455078, 0.029649919509887695, 0.029396991729736328, 0.030516223907470705, 0.03184025573730469, 0.03000831985473633, 0.03057459259033203, 0.030510080337524413, 0.03060531234741211, 0.030810111999511718, 0.031025152206420898, 0.030695423126220703, 0.030938112258911132, 0.03125555229187012, 0.0309749755859375, 0.030686208724975586, 0.031324159622192385, 0.030725120544433594, 0.029895679473876953, 0.030466047286987305, 0.03097292709350586, 0.029887487411499023, 0.029905920028686524, 0.02962227249145508, 0.029861888885498046, 0.029702144622802733, 0.02977177619934082, 0.029645824432373048, 0.029817855834960938, 0.02963046455383301, 0.029914112091064454, 0.02978508758544922, 0.02959667205810547, 0.03351039886474609, 0.031888383865356446, 0.030451711654663087, 0.02978201675415039, 0.029669376373291017, 0.029702144622802733, 0.029615104675292967, 0.029560831069946288, 0.029665279388427734, 0.02978099250793457, 0.029693952560424806, 0.029510656356811524, 0.029683712005615235, 0.029691904067993165, 0.030903295516967775, 0.030666751861572264, 0.030104576110839845, 0.02978099250793457, 0.029697023391723632, 0.03059916877746582, 0.030922752380371094, 0.030824480056762697, 0.03081724739074707, 0.029764608383178712, 0.029677568435668947, 0.032911361694335936, 0.03154841613769531, 0.03096985626220703, 0.030297088623046874, 0.03167027282714844, 0.031069183349609376, 0.03099443244934082, 0.030905344009399413, 0.03132928085327148, 0.03395686340332031, 0.0298024959564209, 0.030785535812377928, 0.03079987144470215, 0.030866432189941406, 0.03079884719848633, 0.030914560317993164, 0.030708736419677734, 0.030003200531005858, 0.02979430389404297, 0.029772800445556642, 0.031297536849975584, 0.0319180793762207, 0.03149004745483398, 0.031065088272094726, 0.030785535812377928, 0.030682111740112306, 0.02982809638977051, 0.029821952819824218, 0.029739007949829102, 0.02982707214355469, 0.029740032196044923, 0.030457855224609375, 0.02975436782836914, 0.02976870346069336, 0.029434879302978514, 0.029925376892089843, 0.03114188766479492, 0.034203647613525394, 0.031245311737060546, 0.031074304580688477, 0.030679040908813477, 0.03075993537902832, 0.030748672485351562, 0.030888959884643553, 0.030912511825561522, 0.03001651191711426, 0.02950553512573242, 0.03012403106689453, 0.02962227249145508, 0.029716480255126954, 0.0297574405670166, 0.030066688537597655, 0.029842432022094727, 0.029995008468627928, 0.0297891845703125, 0.02987932777404785, 0.029928415298461915, 0.029921279907226563, 0.029662208557128908, 0.030079999923706056, 0.02973695945739746, 0.029808704376220702, 0.029764543533325194, 0.029914112091064454, 0.029800447463989257, 0.029970432281494142, 0.029206527709960937, 0.030394367218017578, 0.03039334487915039, 0.030699520111083983, 0.031237119674682616, 0.03423539352416992, 0.03132620811462403, 0.029839359283447265, 0.0297256965637207, 0.030543872833251953, 0.030547967910766603, 0.030318592071533205, 0.02981068801879883, 0.029834239959716798, 0.03001753616333008, 0.02972876739501953, 0.02972159957885742, 0.029722623825073242, 0.029823999404907226, 0.029569023132324217, 0.02977177619934082, 0.029644800186157227, 0.0297891845703125, 0.029709312438964845, 0.029740032196044923, 0.030900224685668946, 0.0319498233795166, 0.03121971130371094, 0.03100979232788086, 0.030693376541137695, 0.03141734313964844, 0.029618175506591796, 0.03280691146850586, 0.03281817626953125, 0.031268863677978515, 0.030649343490600587, 0.031268863677978515, 0.030846975326538087, 
0.03060736083984375, 0.0296048641204834, 0.029677568435668947, 0.02972979164123535, 0.029734912872314452, 0.029560831069946288, 0.029844480514526366, 0.029765663146972657, 0.02979631996154785, 0.03079270362854004, 0.030926847457885744, 0.030721023559570314, 0.030765056610107422, 0.030678016662597656, 0.030329856872558594, 0.02971955108642578, 0.029775871276855468, 0.030891008377075195, 0.030834688186645507, 0.030834688186645507, 0.03082444763183594, 0.03081523132324219, 0.03084492874145508, 0.030712831497192384, 0.030852096557617188, 0.03058176040649414, 0.029707263946533204, 0.030714879989624022, 0.03361996841430664, 0.031676416397094724, 0.029682687759399414, 0.030539775848388673, 0.02962124824523926, 0.029791231155395507, 0.02957107162475586, 0.03019980812072754, 0.029838336944580077, 0.029879295349121093, 0.029714431762695313, 0.02977791976928711, 0.029649919509887695, 0.029708288192749024, 0.029671424865722655, 0.02976563262939453, 0.029619199752807617, 0.029724672317504884, 0.03038310432434082, 0.031006719589233397, 0.030690303802490236, 0.030728191375732423, 0.030449663162231445, 0.029726720809936522, 0.02962124824523926, 0.029699071884155274, 0.029624319076538085, 0.03056537628173828, 0.03396198272705078, 0.031422464370727536, 0.031124479293823244, 0.030689279556274415, 0.029633535385131835, 0.029849599838256836, 0.02969599914550781, 0.029706239700317383, 0.029549568176269532, 0.03001753616333008, 0.030040063858032227, 0.029876224517822264, 0.03079475212097168, 0.03080601692199707, 0.02958847999572754, 0.02969599914550781, 0.02942464065551758, 0.029717504501342775, 0.02952908706665039, 0.029722623825073242, 0.029489152908325194, 0.029650943756103516, 0.029575168609619142, 0.03020185661315918, 0.02993356704711914, 0.029775871276855468, 0.029443136215209963, 0.029666240692138673, 0.02944000053405762, 0.029628416061401368, 0.030685247421264647, 0.031041471481323243, 0.030675968170166015, 0.030676992416381835, 0.03237887954711914, 0.0332042236328125, 0.031036415100097657, 0.0307957763671875, 0.03076300811767578, 0.03038921546936035, 0.029871103286743163, 0.029634592056274413, 0.02962428855895996, 0.029657087326049804, 0.029705215454101562, 0.02979430389404297, 0.029237247467041014, 0.029678592681884764, 0.029638656616210936, 0.029703168869018554, 0.02973593521118164, 0.029989887237548828, 0.029724672317504884, 0.029492223739624023, 0.02977996826171875, 0.029694976806640624, 0.029713407516479492, 0.029499391555786132, 0.03179007911682129, 0.03305267333984375, 0.032210945129394535, 0.03198262405395508, 0.03254678344726562, 0.0321638412475586, 0.031665151596069335, 0.03172966384887695, 0.0318156795501709, 0.03165286445617676, 0.03190681648254395, 0.03174195289611816, 0.03184639930725098, 0.03184537506103516, 0.031866880416870115, 0.03193446350097656, 0.03167948722839355, 0.031661056518554685, 0.03206758499145508, 0.031716352462768556, 0.031546367645263675, 0.031697919845581055, 0.032249855041503905, 0.03305574417114258, 0.03327283096313476, 0.03224576187133789, 0.031833087921142575, 0.031659008026123044, 0.032091136932373046, 0.03183923149108887, 0.03213824081420898, 0.03182592010498047, 0.0318525447845459, 0.03191910362243652, 0.03193446350097656, 0.03216998291015625, 0.03248025512695313, 0.03253964614868164, 0.03199897575378418, 0.03206246566772461, 0.03201536178588867, 0.03181465530395508, 0.03219148635864258, 0.03206041717529297, 0.031735807418823245, 0.03213926315307617, 0.03201331329345703, 0.03196723175048828, 0.031849472045898435, 0.032026622772216795, 0.03189248085021973, 
0.03198361587524414, 0.03192831993103027, 0.031905792236328126, 0.031835136413574217, 0.03196211242675781, 0.03182489585876465, 0.031886335372924804, 0.03184127998352051, 0.03187302398681641, 0.03160371208190918, 0.03201638412475586, 0.032299007415771484, 0.03160678482055664, 0.029850624084472657, 0.02976870346069336, 0.029652992248535157, 0.030143487930297853, 0.0307957763671875, 0.03143680000305176, 0.03128422355651855, 0.03000934410095215, 0.029805568695068358, 0.03036057662963867, 0.030809087753295897, 0.030867456436157226, 0.03075174331665039, 0.030810111999511718, 0.029880319595336914, 0.029856767654418945, 0.02973593521118164, 0.029809663772583008, 0.029748224258422853, 0.02981990432739258, 0.029739007949829102, 0.030040063858032227, 0.030843904495239258, 0.030888959884643553, 0.03082035255432129, 0.030886911392211915, 0.029813760757446288, 0.029917184829711913, 0.02979840087890625, 0.029870080947875976, 0.029691904067993165, 0.029706239700317383, 0.02999603271484375, 0.030801919937133788, 0.029783039093017577, 0.029879295349121093, 0.029902847290039062, 0.029891584396362306, 0.030704639434814454, 0.033972225189208984, 0.03163340759277344, 0.030881792068481444, 0.030766080856323243, 0.029669376373291017, 0.029870080947875976, 0.029649919509887695, 0.03078451156616211, 0.029626367568969726, 0.030492671966552733, 0.03118796730041504, 0.02981888008117676, 0.02971446418762207, 0.02977686309814453, 0.029724672317504884, 0.029874176025390626, 0.02963763236999512, 0.030423040390014647, 0.03075993537902832, 0.030431232452392577, 0.032146430969238284, 0.031888383865356446, 0.03186483192443847, 0.030673919677734376, 0.03098726463317871, 0.030798879623413086, 0.030866399765014648, 0.03080294418334961, 0.030742528915405274, 0.030785535812377928, 0.030724096298217773, 0.031128576278686523, 0.03081216049194336, 0.030687231063842774, 0.030736383438110353, 0.030136320114135744, 0.029693952560424806, 0.029714431762695313, 0.02971548843383789, 0.02983420753479004, 0.029707263946533204, 0.029713407516479492, 0.029681663513183593, 0.029833215713500977, 0.029625343322753905, 0.029872127532958984, 0.029702144622802733, 0.029808639526367187, 0.029678592681884764, 0.029747200012207032, 0.029807647705078124, 0.029737951278686524, 0.029701120376586915, 0.029954048156738283, 0.03079987144470215, 0.03138252830505371, 0.031303680419921875, 0.031120384216308594, 0.030715904235839843, 0.03165593528747559, 0.031138816833496095, 0.030911487579345705, 0.03015475273132324, 0.030862335205078126, 0.030697471618652345, 0.031202304840087892, 0.030075904846191406, 0.031849472045898435, 0.031927295684814457, 0.031093759536743162, 0.03076710319519043, 0.03077324867248535, 0.030902271270751954, 0.030448640823364258, 0.02975129508972168, 0.029852672576904295, 0.02977484893798828, 0.029641727447509765, 0.029809663772583008, 0.029685760498046877, 0.029915136337280275, 0.029905920028686524, 0.030452735900878908, 0.0315043830871582, 0.029797376632690428, 0.030727167129516602, 0.03100160026550293, 0.030612480163574218, 0.030870527267456056, 0.030635007858276365, 0.029792255401611328, 0.029689855575561523, 0.029688831329345702, 0.029691904067993165, 0.0297891845703125, 0.02973695945739746, 0.02998374366760254, 0.02977382469177246, 0.03034316825866699, 0.030867456436157226, 0.030917631149291993, 0.030850048065185546, 0.03097599983215332, 0.029841407775878907, 0.02995712089538574, 0.030120960235595705, 0.030895103454589845, 0.030752767562866212, 0.031220735549926756, 0.030864383697509764, 0.030665727615356447, 0.02971238327026367, 
0.029901824951171874, 0.029870080947875976, 0.029903871536254883, 0.029823999404907226, 0.03037900733947754, 0.03035443115234375, 0.03098521614074707, 0.030918655395507814, 0.03100160026550293, 0.03075174331665039, 0.030112768173217775, 0.02975846481323242, 0.030143487930297853, 0.031871999740600586, 0.03115519905090332, 0.030687231063842774, 0.030081024169921877, 0.029636608123779298, 0.02981171226501465, 0.029816831588745117, 0.02963046455383301, 0.029784063339233398, 0.029723648071289063, 0.02998579216003418, 0.029566976547241212, 0.029965311050415038, 0.029716480255126954, 0.029925439834594728, 0.029757375717163085, 0.029998079299926757, 0.029723648071289063, 0.02996326446533203, 0.030279680252075194, 0.030127103805541993, 0.029688831329345702, 0.029820928573608397, 0.029668352127075196, 0.02981990432739258, 0.029688831329345702, 0.02973388862609863, 0.02972979164123535, 0.02972159957885742, 0.029691904067993165, 0.02978816032409668, 0.029642751693725586, 0.029916160583496092, 0.029650943756103516, 0.029755392074584962, 0.0297544002532959, 0.029699039459228516, 0.02969599914550781, 0.029710336685180663, 0.029717504501342775, 0.029783039093017577, 0.029651968002319336, 0.02971238327026367, 0.029625343322753905, 0.029741056442260744, 0.029755392074584962, 0.03002572822570801, 0.029715456008911133, 0.02976665687561035, 0.02969599914550781, 0.02981171226501465, 0.029689855575561523, 0.029848575592041016, 0.03098521614074707, 0.032115711212158206, 0.03115827178955078, 0.030896127700805662, 0.03093708801269531, 0.0310118408203125, 0.030523391723632814, 0.030296064376831053, 0.030943231582641603, 0.030988288879394532, 0.03093606376647949, 0.030633983612060548, 0.03074355125427246, 0.031078432083129885, 0.03083363151550293]",tokens/s,32.81564863108681,,,,,,,, -4bit-awq-gemv-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,Qwen/Qwen-72B,Qwen/Qwen-72B,cuda,0,42,,,True,,,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run - report = scenario.run(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run - self.run_model_loading_tracking(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking - backend.load() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, 
in load - self.load_transformers_model() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model - self.load_transformers_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights - self.load_transformers_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained - self.pretrained_model = self.automodel_loader.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 551, in from_pretrained - model_class = get_class_from_dynamic_module( - File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 502, in get_class_from_dynamic_module - final_module = get_cached_module_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 327, in get_cached_module_file - modules_needed = check_imports(resolved_module_file) - File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 182, in check_imports - raise ImportError( -ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. Run `pip install transformers_stream_generator` - -",qwen,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemv-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,i,i,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 304, in hf_raise_for_status - response.raise_for_status() - File ""/usr/local/lib/python3.10/dist-packages/requests/models.py"", line 1024, in raise_for_status - raise HTTPError(http_error_msg, response=self) -requests.exceptions.HTTPError: 404 Client Error: Not Found for url: https://huggingface.co/i/resolve/main/config.json - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File 
""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1221, in hf_hub_download - return _hf_hub_download_to_cache_dir( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1325, in _hf_hub_download_to_cache_dir - _raise_on_head_call_error(head_call_error, force_download, local_files_only) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1823, in _raise_on_head_call_error - raise head_call_error - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1722, in _get_metadata_or_catch_error - metadata = get_hf_file_metadata(url=url, proxies=proxies, timeout=etag_timeout, headers=headers) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1645, in get_hf_file_metadata - r = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 372, in _request_wrapper - response = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 396, in _request_wrapper - hf_raise_for_status(response) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status - raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-66949038-2a225c2360edfe3456f52d62;6532c1cd-5cdd-44e3-8da0-76861e388423) - -Repository Not Found for url: https://huggingface.co/i/resolve/main/config.json. -Please make sure you specified the correct `repo_id` and `repo_type`. -If you are trying to access a private or gated repo, make sure you are authenticated. 
- -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 425, in cached_file - raise EnvironmentError( -OSError: i is not a local folder and is not a valid model identifier listed on 'https://huggingface.co/models' -If this is a private repository, make sure to pass a token having permission to this repo either by logging in with `huggingface-cli login` or by passing `token=` - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemv-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,stabilityai/stablelm-3b-4e1t,stabilityai/stablelm-3b-4e1t,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run - report = scenario.run(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 105, in run - _ = backend.generate(self.inputs, 
self.config.generate_kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context - return func(*args, **kwargs) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 400, in generate - return self.pretrained_model.generate(**inputs, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context - return func(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 1914, in generate - result = self._sample( - File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2651, in _sample - outputs = self( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/stablelm/modeling_stablelm.py"", line 1268, in forward - outputs = self.model( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/stablelm/modeling_stablelm.py"", line 1062, in forward - layer_outputs = decoder_layer( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/stablelm/modeling_stablelm.py"", line 792, in forward - self_attn_output, self_attn_weights, present_key_value = self.self_attn( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/stablelm/modeling_stablelm.py"", line 560, in forward - query_states = self.q_proj(hidden_states) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context - return func(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/gemv.py"", line 162, in forward - assert AWQ_INSTALLED, ( -AssertionError: AWQ kernels could not be loaded. 
Please install them from https://github.com/casper-hansen/AutoAWQ_kernels - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemv-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,tiiuae/falcon-rw-1b,tiiuae/falcon-rw-1b,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - self.load_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 559, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3704, in from_pretrained - config = cls._autoset_attn_implementation( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1481, in _autoset_attn_implementation - cls._check_and_enable_flash_attn_2( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1572, in _check_and_enable_flash_attn_2 - raise ValueError( -ValueError: FalconForCausalLM does not support Flash Attention 2.0 yet. 
Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmpyyymrp2c/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemv-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,Qwen/Qwen-14B,Qwen/Qwen-14B,cuda,0,42,,,True,,,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run - report = scenario.run(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run - self.run_model_loading_tracking(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking - backend.load() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load - self.load_transformers_model() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model - self.load_transformers_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights - self.load_transformers_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained - self.pretrained_model = self.automodel_loader.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 551, in from_pretrained - model_class = get_class_from_dynamic_module( - File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 502, in get_class_from_dynamic_module - final_module = get_cached_module_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 327, in get_cached_module_file - modules_needed = check_imports(resolved_module_file) - File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 182, in check_imports - raise ImportError( -ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. 
Run `pip install transformers_stream_generator` - -",qwen,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemv-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,stabilityai/stablelm-2-1_6b,stabilityai/stablelm-2-1_6b,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run - report = scenario.run(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 105, in run - _ = backend.generate(self.inputs, self.config.generate_kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context - return func(*args, **kwargs) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 400, in generate - return self.pretrained_model.generate(**inputs, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context - return func(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 1914, in generate - result = self._sample( - File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2651, in _sample - outputs = self( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/stablelm/modeling_stablelm.py"", line 1268, in forward - outputs = self.model( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/stablelm/modeling_stablelm.py"", line 1062, in forward - layer_outputs = decoder_layer( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File 
""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/stablelm/modeling_stablelm.py"", line 792, in forward - self_attn_output, self_attn_weights, present_key_value = self.self_attn( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/stablelm/modeling_stablelm.py"", line 560, in forward - query_states = self.q_proj(hidden_states) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context - return func(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/gemv.py"", line 162, in forward - assert AWQ_INSTALLED, ( -AssertionError: AWQ kernels could not be loaded. Please install them from https://github.com/casper-hansen/AutoAWQ_kernels - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemv-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,EleutherAI/pythia-6.7b,EleutherAI/pythia-6.7b,cuda,0,42,,,True,,,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,gpt_neox,MB,4173.508608,4933.025792,0.0,4347.396096,4328.833024,s,1,9.59846875,9.59846875,0.0,9.59846875,9.59846875,9.59846875,9.59846875,[9.59846875],,kWh,3.241447864930175e-05,1.7737065507360346e-05,4.560892537597283e-05,9.576046953263493e-05,,MB,1466.003456,5492.965376,0.0,4846.518272,4748.27776,s,10,0.9927882919311524,0.09927882919311523,4.6197017472260955e-05,0.09926470565795899,0.09930990676879882,0.09935680809020996,0.09939432914733887,"[0.09940370941162109, 0.09924845123291015, 0.09924278259277344, 0.09927958679199218, 0.09929344177246094, 0.09924591827392579, 0.0992455062866211, 0.09925263977050781, 0.09927677154541016, 0.09929948425292968]",tokens/s,2578.5960821720996,kWh,1.1742504632633558e-06,6.431582458337362e-07,6.6524585672872835e-06,8.469867276384375e-06,tokens/kWh,30224794.751362562,MB,1500.581888,5576.851456,0.0,4930.404352,4877.430784,s,10,15.509869995117189,1.550986999511719,0.023642131563985484,1.5435031127929688,1.5892890625,1.5926124267578126,1.5952711181640626,"[1.588550537109375, 1.595935791015625, 1.567197265625, 1.5211236572265625, 1.530933349609375, 1.5323360595703126, 1.5470096435546874, 1.543470458984375, 1.5435357666015626, 
1.5397774658203125]",tokens/s,40.61929598367596,kWh,1.8286465694930688e-05,1.0021303558798169e-05,5.57551746921164e-05,8.406294394584523e-05,tokens/kWh,749438.4212928072,,s,630,15.507649539947524,0.024615316730075408,0.0005882532227592437,0.02443007946014404,0.02565693492889404,0.025791027736663818,0.026018949146270752,"[0.02433228874206543, 0.024474624633789063, 0.02432614326477051, 0.02452275276184082, 0.024361984252929687, 0.024061952590942383, 0.024370176315307617, 0.02455244827270508, 0.025401344299316408, 0.025432064056396485, 0.025463808059692384, 0.025693183898925782, 0.02631372833251953, 0.024993791580200195, 0.02447257614135742, 0.025671680450439452, 0.02553036880493164, 0.02555392074584961, 0.02568191909790039, 0.025662464141845705, 0.025608192443847655, 0.025790464401245116, 0.02572800064086914, 0.025882623672485353, 0.025837568283081053, 0.025168895721435547, 0.025652223587036133, 0.02595327949523926, 0.025198591232299804, 0.025244672775268553, 0.025417728424072264, 0.025204736709594725, 0.0259051513671875, 0.026255359649658205, 0.02587648010253906, 0.025799680709838867, 0.02573311996459961, 0.025846784591674804, 0.025267200469970705, 0.02589798355102539, 0.02570444869995117, 0.025766912460327147, 0.025824256896972656, 0.02428108787536621, 0.02409062385559082, 0.02407731246948242, 0.024016895294189454, 0.02404351997375488, 0.02409676742553711, 0.02407935905456543, 0.024415231704711913, 0.02446950340270996, 0.024374271392822267, 0.02592255973815918, 0.02572902488708496, 0.02550067138671875, 0.025801727294921875, 0.02577305603027344, 0.025668607711791993, 0.025907199859619142, 0.025840639114379883, 0.02444595146179199, 0.024360960006713867, 0.024750080108642578, 0.025480192184448244, 0.025799680709838867, 0.02411110305786133, 0.024773632049560547, 0.024997888565063478, 0.024419328689575196, 0.025540607452392578, 0.027240447998046875, 0.025640960693359374, 0.025523199081420898, 0.02569932746887207, 0.025364479064941405, 0.025177087783813477, 0.02569215965270996, 0.025815040588378906, 0.025656320571899413, 0.025267200469970705, 0.025169919967651368, 0.025246719360351562, 0.025814016342163085, 0.025233407974243165, 0.02570342445373535, 0.02571468734741211, 0.025791488647460937, 0.02470195198059082, 0.02428927993774414, 0.02427801513671875, 0.024442880630493165, 0.024435712814331056, 0.024204288482666016, 0.024139776229858398, 0.024639488220214844, 0.02553856086730957, 0.02573516845703125, 0.025818111419677735, 0.025774080276489256, 0.025174016952514647, 0.025418752670288085, 0.025625600814819335, 0.025169919967651368, 0.02515456008911133, 0.025686016082763673, 0.025363456726074218, 0.02550271987915039, 0.02587238311767578, 0.02533683204650879, 0.025222143173217772, 0.025742336273193358, 0.025605119705200196, 0.025232383728027344, 0.025280511856079102, 0.02517196846008301, 0.02515660858154297, 0.02527948760986328, 0.025797632217407225, 0.025789440155029295, 0.02570035171508789, 0.025812992095947264, 0.026175487518310548, 0.026016767501831056, 0.02573721694946289, 0.024066047668457033, 0.02409984016418457, 0.024026111602783205, 0.02411520004272461, 0.024016895294189454, 0.02409267234802246, 0.025709568023681642, 0.025606143951416017, 0.026019840240478515, 0.025078784942626952, 0.025788415908813478, 0.025673728942871094, 0.02566655921936035, 0.025247743606567383, 0.02528665542602539, 0.02555801582336426, 0.025169919967651368, 0.025066495895385742, 0.02509004783630371, 0.025010175704956054, 0.02490163230895996, 0.025092096328735353, 0.025126911163330077, 0.024845312118530274, 
0.02407731246948242, 0.023974912643432617, 0.023960575103759766, 0.024031232833862305, 0.023954431533813478, 0.02391961669921875, 0.02405171203613281, 0.02510950469970703, 0.025250816345214845, 0.02509619140625, 0.025011199951171875, 0.025083904266357423, 0.025190399169921874, 0.025017343521118163, 0.025150463104248046, 0.025058303833007813, 0.025165824890136718, 0.0252620792388916, 0.02505523109436035, 0.02511257553100586, 0.0252938232421875, 0.02498150444030762, 0.02533478355407715, 0.025632768630981444, 0.02573721694946289, 0.025667583465576172, 0.02513920021057129, 0.02511052894592285, 0.0243056640625, 0.024211456298828125, 0.02407935905456543, 0.024052736282348632, 0.02409779167175293, 0.023976959228515626, 0.024456192016601562, 0.024959999084472655, 0.02511769676208496, 0.024853504180908204, 0.0246691837310791, 0.024482816696166993, 0.025381887435913086, 0.024204288482666016, 0.02447974395751953, 0.02431795120239258, 0.023975936889648438, 0.023981056213378905, 0.023933952331542968, 0.023928831100463867, 0.024056831359863282, 0.0239052791595459, 0.02395136070251465, 0.023973888397216796, 0.024004608154296874, 0.023953407287597657, 0.023958528518676758, 0.023949312210083007, 0.024130559921264647, 0.02469375991821289, 0.024466432571411133, 0.02432512092590332, 0.02457907295227051, 0.02410905647277832, 0.024610815048217775, 0.024191999435424806, 0.02407423973083496, 0.024052736282348632, 0.024022016525268555, 0.02408448028564453, 0.024070144653320313, 0.024012800216674804, 0.0241080322265625, 0.02411110305786133, 0.024482816696166993, 0.024377344131469726, 0.024029184341430664, 0.024138751983642577, 0.024013824462890625, 0.024011775970458983, 0.024001535415649415, 0.02427801513671875, 0.023868415832519533, 0.024062976837158204, 0.02408857536315918, 0.024071168899536134, 0.02413465690612793, 0.02406809616088867, 0.024105983734130858, 0.0241080322265625, 0.024053760528564453, 0.024137727737426756, 0.02409574317932129, 0.023980031967163085, 0.024225791931152343, 0.023989248275756835, 0.024337408065795898, 0.02407935905456543, 0.02393087959289551, 0.023993343353271485, 0.024071168899536134, 0.02404249572753906, 0.02414182472229004, 0.024319999694824217, 0.02405887985229492, 0.024406015396118166, 0.024013824462890625, 0.02412851142883301, 0.024371200561523438, 0.024024063110351563, 0.02429849624633789, 0.02447667121887207, 0.02409779167175293, 0.024094720840454102, 0.024155136108398437, 0.02404761505126953, 0.02409984016418457, 0.02450227165222168, 0.024184831619262694, 0.02405068778991699, 0.024001535415649415, 0.02409369659423828, 0.02408345603942871, 0.02407219123840332, 0.0240762882232666, 0.024060928344726562, 0.024194047927856444, 0.02411827278137207, 0.024147968292236328, 0.024236032485961914, 0.0243558406829834, 0.02443059158325195, 0.024421375274658205, 0.02448486328125, 0.02434048080444336, 0.024057855606079103, 0.024023040771484375, 0.024160255432128908, 0.0243507194519043, 0.024137727737426756, 0.024392704010009765, 0.024375295639038085, 0.024413183212280275, 0.02490163230895996, 0.024532991409301756, 0.024475648880004884, 0.02413260841369629, 0.02407731246948242, 0.02410188865661621, 0.02406809616088867, 0.024181760787963868, 0.02412441635131836, 0.024275968551635742, 0.024186880111694335, 0.0241080322265625, 0.024105983734130858, 0.024178688049316405, 0.024461311340332033, 0.024156160354614258, 0.02414080047607422, 0.02407731246948242, 0.023967744827270508, 0.02411622428894043, 0.02424831962585449, 0.024260608673095704, 0.02590003204345703, 0.02631782341003418, 
0.025629695892333985, 0.02452377510070801, 0.024429567337036134, 0.024409088134765625, 0.024156160354614258, 0.024035327911376952, 0.02412851142883301, 0.02408448028564453, 0.024030208587646484, 0.02408448028564453, 0.024506368637084962, 0.024387584686279298, 0.024459264755249024, 0.02467737579345703, 0.024416255950927734, 0.023972864151000976, 0.0244715518951416, 0.02434867286682129, 0.024426496505737305, 0.024078336715698243, 0.02408038330078125, 0.024303615570068358, 0.025013248443603517, 0.02464460754394531, 0.02432614326477051, 0.024186880111694335, 0.024183807373046876, 0.02404249572753906, 0.024376319885253905, 0.024411136627197266, 0.024193023681640623, 0.023945215225219727, 0.024066047668457033, 0.02411724853515625, 0.024035327911376952, 0.024065023422241212, 0.024902656555175783, 0.024635391235351564, 0.02405068778991699, 0.024167423248291017, 0.024391679763793944, 0.024428543090820314, 0.02427903938293457, 0.024152063369750978, 0.02433945655822754, 0.024415231704711913, 0.024382463455200197, 0.024392704010009765, 0.024482816696166993, 0.024398847579956053, 0.024219648361206055, 0.02456166458129883, 0.024385536193847656, 0.02407219123840332, 0.024588287353515623, 0.024449024200439453, 0.024491008758544923, 0.024374271392822267, 0.02457804870605469, 0.02411827278137207, 0.02408243179321289, 0.024321023941040038, 0.024250368118286132, 0.024588287353515623, 0.025457664489746092, 0.024662015914916992, 0.024836095809936523, 0.024185855865478514, 0.024054784774780274, 0.024509439468383788, 0.024243200302124023, 0.02415001678466797, 0.024447999954223632, 0.024458240509033204, 0.02497331237792969, 0.024489984512329102, 0.024535039901733398, 0.025762815475463868, 0.02512179183959961, 0.024928255081176756, 0.024605695724487304, 0.02427187156677246, 0.02466815948486328, 0.024543231964111328, 0.024938495635986328, 0.024989696502685548, 0.024474624633789063, 0.024550399780273437, 0.024466432571411133, 0.025848831176757812, 0.025544704437255858, 0.024521728515625, 0.02409062385559082, 0.024025087356567384, 0.024595455169677736, 0.024102912902832032, 0.02484121513366699, 0.024582143783569335, 0.024416255950927734, 0.025264127731323242, 0.025333759307861328, 0.024772607803344726, 0.024664064407348633, 0.02471014404296875, 0.02426572799682617, 0.02410495948791504, 0.0241582088470459, 0.02405171203613281, 0.023949312210083007, 0.024284160614013672, 0.024031232833862305, 0.024611839294433592, 0.024392704010009765, 0.025084928512573244, 0.025151487350463866, 0.024078336715698243, 0.02432512092590332, 0.024466432571411133, 0.024399871826171874, 0.024360960006713867, 0.024421375274658205, 0.02427289581298828, 0.023992319107055664, 0.023966720581054687, 0.0241080322265625, 0.024319999694824217, 0.02434867286682129, 0.02489753532409668, 0.024794111251831053, 0.024423423767089843, 0.024351743698120116, 0.02411212730407715, 0.024447999954223632, 0.024232959747314452, 0.024459264755249024, 0.024426496505737305, 0.02443059158325195, 0.024403968811035157, 0.024611839294433592, 0.02510540771484375, 0.024996864318847657, 0.027124736785888674, 0.025821184158325194, 0.025423871994018556, 0.02465177536010742, 0.024147968292236328, 0.024002559661865236, 0.024024063110351563, 0.023979007720947267, 0.024001535415649415, 0.024026111602783205, 0.024027135848999022, 0.024439807891845702, 0.024468479156494142, 0.02449407958984375, 0.02455244827270508, 0.024417280197143554, 0.024381439208984376, 0.024447999954223632, 0.024444927215576173, 0.024420352935791017, 0.024427520751953126, 0.024387584686279298, 
0.02467532730102539, 0.024498176574707032, 0.02451456069946289, 0.024392704010009765, 0.0241080322265625, 0.02487398338317871, 0.02447667121887207, 0.024475648880004884, 0.024044544219970702, 0.02453606414794922, 0.024416255950927734, 0.024054784774780274, 0.024036352157592773, 0.02414489555358887, 0.024625152587890626, 0.024758272171020508, 0.02451046371459961, 0.024412160873413087, 0.024526847839355468, 0.024211456298828125, 0.02412031936645508, 0.024558591842651366, 0.02464460754394531, 0.024443904876708986, 0.024361984252929687, 0.024459264755249024, 0.02456166458129883, 0.02549862480163574, 0.024749055862426757, 0.02450227165222168, 0.024254463195800782, 0.024378368377685547, 0.024407039642333983, 0.023985151290893555, 0.023967744827270508, 0.02431385612487793, 0.02430259132385254, 0.024408063888549804, 0.024673280715942384, 0.024630271911621093, 0.0243189754486084, 0.0242739200592041, 0.024812543869018554, 0.024426496505737305, 0.02450841522216797, 0.02448691177368164, 0.024156160354614258, 0.02434048080444336, 0.024333311080932618, 0.024450048446655274, 0.0243240966796875, 0.024793088912963866, 0.02447974395751953, 0.02453708839416504, 0.02448691177368164, 0.02449510383605957, 0.024208383560180666, 0.024046592712402344, 0.024152063369750978, 0.02412441635131836, 0.024081407546997072, 0.024199167251586915, 0.024449024200439453, 0.024560640335083008, 0.02508595275878906, 0.024700927734375, 0.024491008758544923, 0.024444927215576173, 0.02438860893249512, 0.024327167510986326, 0.024583168029785156, 0.024414207458496092, 0.02450022315979004, 0.02445516777038574, 0.024425472259521484, 0.02449407958984375, 0.02447667121887207, 0.0244715518951416, 0.025194496154785157, 0.024698879241943358, 0.02533683204650879, 0.024222719192504884, 0.024847360610961915, 0.024468479156494142, 0.0244715518951416, 0.024587263107299806, 0.024707071304321288, 0.024216575622558592, 0.02451456069946289, 0.025671680450439452, 0.025274368286132814, 0.02507263946533203, 0.024532991409301756, 0.024635391235351564, 0.024972288131713868, 0.0249169921875, 0.024571903228759767, 0.02443878364562988, 0.02454425621032715, 0.024417280197143554, 0.024521728515625, 0.02512895965576172, 0.025181184768676756, 0.024327167510986326, 0.024167423248291017, 0.024328191757202147, 0.024963071823120117, 0.02449715232849121, 0.024385536193847656, 0.024426496505737305, 0.024456192016601562, 0.024419328689575196, 0.024267776489257813, 0.024482816696166993, 0.024408063888549804, 0.024205312728881836, 0.02518016052246094, 0.02488422393798828, 0.024381439208984376, 0.02414182472229004, 0.024833023071289064, 0.02448588752746582, 0.024461311340332033, 0.024403968811035157, 0.024451072692871095, 0.02453094482421875, 0.02429542350769043, 0.024187904357910156, 0.02453094482421875, 0.024609792709350587, 0.024630271911621093, 0.024386560440063477, 0.02407526397705078, 0.024081407546997072, 0.02413670349121094, 0.02424831962585449, 0.023986175537109376, 0.024236032485961914, 0.023988224029541014, 0.023931903839111326, 0.02430771255493164, 0.02429952049255371, 0.024415231704711913, 0.02429747200012207, 0.023979007720947267, 0.02406399917602539, 0.024135679244995118, 0.02408550453186035, 0.024392704010009765, 0.0242739200592041, 0.02410905647277832, 0.02412544059753418, 0.02444697570800781]",tokens/s,40.625112037586874,,,,,,,, 
-4bit-awq-gemv-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,01-ai/Yi-34B,01-ai/Yi-34B,cuda,0,42,,,True,,,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,llama,MB,17656.410112,20059.783168,0.0,19474.153472,19254.604288,s,1,15.49682421875,15.49682421875,0.0,15.49682421875,15.49682421875,15.49682421875,15.49682421875,[15.49682421875],,kWh,0.00010171552981528009,5.5730228534773106e-05,0.00014873567454400105,0.00030618143289405425,,MB,1764.954112,20407.9104,0.0,19761.463296,19446.51008,s,10,5.232500793457032,0.5232500793457031,0.00014407317035676061,0.52328466796875,0.5234116821289062,0.5234182739257812,0.5234235473632812,"[0.5234102172851562, 0.52320849609375, 0.5233613891601563, 0.5230205688476562, 0.5231404418945312, 0.5234248657226562, 0.5230988159179687, 0.52336083984375, 0.5231013793945313, 0.523373779296875]",tokens/s,489.2498063643194,kWh,6.18301851597222e-06,3.3880173717967387e-06,3.465104160970053e-05,4.422207749746949e-05,tokens/kWh,5788963.668987487,MB,1791.03744,20433.076224,0.0,19786.62912,19446.51264,s,10,37.639876708984374,3.7639876708984374,0.00797346511113168,3.7678734130859377,3.7719742187499996,3.7720339843749997,3.772081796875,"[3.7719609375, 3.77209375, 3.75298291015625, 3.75338232421875, 3.75456591796875, 3.75687451171875, 3.7707548828125, 3.7715146484375, 3.76720751953125, 3.768539306640625]",tokens/s,16.737568108176706,kWh,4.449747177881941e-05,2.4387725608998432e-05,0.00018814627551689902,0.0002570314729047169,tokens/kWh,245106.17041577032,,s,630,37.63717425537106,0.059741546437096976,0.0003724331024869964,0.05964902305603027,0.06024724464416504,0.060371712112426756,0.060804353446960446,"[0.060695552825927736, 0.06019583892822265, 0.05961318588256836, 0.05940326309204102, 0.059463680267333986, 0.05938278579711914, 0.059469825744628904, 0.06005759811401367, 0.05998796844482422, 0.05945753479003906, 0.05947699356079102, 0.0594595832824707, 0.060401664733886716, 0.06040883255004883, 0.059848705291748044, 0.059394046783447264, 0.06113075256347656, 0.059630592346191405, 0.05969305419921875, 0.0596940803527832, 0.060075008392333984, 0.059655166625976565, 0.05953843307495117, 0.05998796844482422, 0.06034329605102539, 0.06035148620605469, 0.05994393539428711, 0.059774974822998046, 0.059666431427001954, 0.059990016937255856, 0.059856895446777345, 0.059355136871337894, 0.059361278533935545, 0.0594411506652832, 0.05932748794555664, 0.059512832641601565, 0.059936767578125, 0.059888641357421876, 0.0595148811340332, 0.059630592346191405, 0.05971660614013672, 0.05993267059326172, 0.05972480010986328, 0.05939199829101562, 0.059428863525390625, 0.059737087249755856, 0.060224510192871096, 0.05953945541381836, 0.05939199829101562, 0.05945446395874023, 0.0596049919128418, 0.060353534698486325, 0.060647422790527344, 0.06014668655395508, 0.060175361633300783, 0.06015590286254883, 0.06031872177124024, 0.06026444625854492, 0.06074982452392578, 0.06008524703979492, 0.060144641876220706, 0.060260353088378904, 0.060254207611083986, 0.05990399932861328, 0.05954150390625, 0.05954457473754883, 
0.059960319519042966, 0.05964287948608398, 0.05942784118652344, 0.05957529449462891, 0.05994403076171875, 0.05993155288696289, 0.059696128845214844, 0.059545600891113284, 0.059829246520996096, 0.059440128326416014, 0.05952511978149414, 0.0593070068359375, 0.05986099243164063, 0.05949542236328125, 0.05942272186279297, 0.0595865592956543, 0.05972275161743164, 0.05981491088867188, 0.0595968017578125, 0.05947289657592773, 0.059423744201660154, 0.05961625671386719, 0.05982003021240234, 0.059463680267333986, 0.059906047821044923, 0.05940838241577148, 0.05981695938110351, 0.060044288635253906, 0.05986816024780273, 0.05969100952148437, 0.059993087768554686, 0.06027571105957031, 0.06037401580810547, 0.06055116653442383, 0.06026444625854492, 0.06013849639892578, 0.060284927368164064, 0.060290046691894535, 0.060368896484375, 0.06020505523681641, 0.060200958251953124, 0.060268543243408204, 0.06020710372924805, 0.06016716766357422, 0.060298240661621094, 0.06013337707519531, 0.06030745697021484, 0.060249088287353515, 0.06029312133789062, 0.060270591735839846, 0.06003507232666016, 0.05971148681640625, 0.06009241485595703, 0.05992345428466797, 0.05971865463256836, 0.059633663177490234, 0.059445247650146485, 0.059379711151123046, 0.05940531158447265, 0.06046105575561524, 0.059734016418457034, 0.05925273513793945, 0.05935103988647461, 0.05933977508544922, 0.05942476654052734, 0.05925068664550781, 0.05982617568969727, 0.05975859069824219, 0.059309055328369144, 0.059286529541015626, 0.059096065521240235, 0.05925580978393555, 0.05931622314453125, 0.05932646560668945, 0.059565055847167966, 0.05964083099365235, 0.05933465576171875, 0.05929369735717773, 0.059285663604736326, 0.05983523178100586, 0.06001561737060547, 0.05925580978393555, 0.05923942565917969, 0.059668479919433595, 0.05991424179077148, 0.05967871856689453, 0.059668479919433595, 0.059417598724365236, 0.059581439971923826, 0.05940326309204102, 0.05940633773803711, 0.059545600891113284, 0.06013951873779297, 0.05933158493041992, 0.05960806274414063, 0.059483135223388675, 0.06002790451049805, 0.059930622100830076, 0.060184574127197264, 0.06012518310546875, 0.060826625823974606, 0.05998387145996094, 0.05972172927856445, 0.059433025360107423, 0.05989471817016601, 0.05941862487792969, 0.059338752746582034, 0.05957222366333008, 0.05942476654052734, 0.05994598388671875, 0.059466751098632815, 0.059379711151123046, 0.05972480010986328, 0.05949951934814453, 0.05960294342041016, 0.059442176818847656, 0.059284481048583984, 0.059394046783447264, 0.05949030303955078, 0.05934592056274414, 0.059540481567382814, 0.05946879959106445, 0.05943091201782227, 0.059737087249755856, 0.05972275161743164, 0.059390975952148435, 0.05956710433959961, 0.060679168701171876, 0.059650047302246094, 0.059524097442626954, 0.05939507293701172, 0.05945548629760742, 0.059676673889160155, 0.059837440490722656, 0.05929983901977539, 0.059154430389404294, 0.059232257843017576, 0.05948928070068359, 0.05975142288208008, 0.05951590347290039, 0.059431934356689455, 0.059394046783447264, 0.05937868881225586, 0.05925888061523438, 0.05929062271118164, 0.05935615921020508, 0.059200511932373044, 0.05927526473999024, 0.05940633773803711, 0.05975040054321289, 0.05972377777099609, 0.059445247650146485, 0.05947289657592773, 0.059510784149169924, 0.059494400024414064, 0.059582462310791014, 0.05952102279663086, 0.061193214416503904, 0.060230655670166014, 0.0595968017578125, 0.05945446395874023, 0.059377662658691405, 0.05970943832397461, 0.05945241546630859, 0.05950361633300781, 0.05944319915771484, 
0.05973811340332031, 0.059845630645751956, 0.05961830520629883, 0.05999411010742187, 0.05989273452758789, 0.05950054550170898, 0.059426815032958984, 0.05943296051025391, 0.059338752746582034, 0.05940326309204102, 0.05942476654052734, 0.059409408569335936, 0.05948928070068359, 0.05939814376831055, 0.05955583953857422, 0.059442176818847656, 0.05948723220825195, 0.05989888000488281, 0.059940864562988284, 0.05936537551879883, 0.05972787094116211, 0.05942272186279297, 0.05934080123901367, 0.059210750579833986, 0.059374591827392575, 0.05936643218994141, 0.059575263977050784, 0.05944319915771484, 0.0593520622253418, 0.059464702606201174, 0.05952000045776367, 0.05926707077026367, 0.05923737716674805, 0.05926707077026367, 0.059367584228515624, 0.06023868942260742, 0.06005350494384765, 0.05999923324584961, 0.060052478790283206, 0.059344894409179685, 0.05937152099609375, 0.05943500900268555, 0.05975244903564453, 0.05961830520629883, 0.05958348846435547, 0.05927833557128906, 0.05972172927856445, 0.05997260665893555, 0.05947391891479492, 0.05942988967895508, 0.05971558380126953, 0.05968281555175781, 0.05944329452514648, 0.05953833770751953, 0.059894783020019535, 0.06012518310546875, 0.059834369659423826, 0.0593704948425293, 0.05954764938354492, 0.06051123046875, 0.05990092849731445, 0.059478015899658204, 0.05946265411376953, 0.059676673889160155, 0.05947289657592773, 0.05932748794555664, 0.05949542236328125, 0.05940121459960938, 0.059717632293701174, 0.05944319915771484, 0.0594411506652832, 0.05932646560668945, 0.05941145706176758, 0.05938585662841797, 0.0594442253112793, 0.05947289657592773, 0.0598579216003418, 0.05931315231323242, 0.05976678466796875, 0.059478015899658204, 0.059734016418457034, 0.06027980804443359, 0.0600893440246582, 0.059843582153320314, 0.05971660614013672, 0.05957734298706055, 0.0597022705078125, 0.059415550231933595, 0.059734016418457034, 0.05937254333496094, 0.05931827163696289, 0.05928550338745117, 0.059379711151123046, 0.05935308837890625, 0.05928755187988281, 0.05929779052734375, 0.059390975952148435, 0.05940838241577148, 0.059344894409179685, 0.05941862487792969, 0.06006988906860351, 0.05996953582763672, 0.06000844955444336, 0.0595968017578125, 0.059328510284423826, 0.05930188751220703, 0.0593172492980957, 0.05918003082275391, 0.05941350555419922, 0.059991039276123044, 0.05952000045776367, 0.06046515274047851, 0.060283905029296876, 0.059954177856445315, 0.059730945587158205, 0.05976678466796875, 0.06042521667480469, 0.060539905548095706, 0.059850753784179686, 0.05933676910400391, 0.05945747375488281, 0.059474945068359375, 0.05949030303955078, 0.05993164825439453, 0.05954457473754883, 0.059719680786132816, 0.05986099243164063, 0.05959065628051758, 0.05957120132446289, 0.05954662322998047, 0.06025625610351563, 0.06022246551513672, 0.05938380813598633, 0.059789310455322264, 0.05989990234375, 0.05934899139404297, 0.059456512451171874, 0.059390975952148435, 0.059510784149169924, 0.05932032012939453, 0.05947596740722656, 0.05943807983398437, 0.05971558380126953, 0.05953638458251953, 0.059478015899658204, 0.059319297790527345, 0.06053580856323242, 0.059410430908203124, 0.05949849700927735, 0.05929983901977539, 0.059529216766357425, 0.05938483047485352, 0.05964799880981445, 0.059338752746582034, 0.05982822418212891, 0.059772926330566405, 0.06002483367919922, 0.060098560333251956, 0.060126209259033205, 0.060706817626953125, 0.06026649475097656, 0.06010265731811523, 0.060111873626708986, 0.0602081298828125, 0.060375038146972655, 0.06031769561767578, 0.06014361572265625, 
0.06004326248168945, 0.06005145645141602, 0.060112895965576174, 0.060039169311523435, 0.060224510192871096, 0.060464126586914066, 0.06013951873779297, 0.0600893440246582, 0.059873279571533204, 0.05991219329833984, 0.06017228698730469, 0.059612159729003904, 0.05969100952148437, 0.06016716766357422, 0.05964492797851562, 0.05951900863647461, 0.05931721496582031, 0.05947289657592773, 0.06060134506225586, 0.06023372650146484, 0.0599552001953125, 0.05973811340332031, 0.06009036636352539, 0.05950156784057617, 0.059448318481445314, 0.060014591217041016, 0.05935001754760742, 0.05943296051025391, 0.059305984497070315, 0.05955276870727539, 0.059379711151123046, 0.05942988967895508, 0.060333057403564455, 0.05946572875976563, 0.059919361114501954, 0.05980876922607422, 0.05988761520385742, 0.05952511978149414, 0.059769855499267575, 0.059417598724365236, 0.05947903823852539, 0.059565055847167966, 0.06083891296386719, 0.06020198440551758, 0.060404735565185545, 0.05978316879272461, 0.05968076705932617, 0.059493377685546876, 0.05939507293701172, 0.05939814376831055, 0.05968588638305664, 0.059433982849121096, 0.05937561416625976, 0.05940636825561523, 0.060510177612304684, 0.06038016128540039, 0.05957222366333008, 0.05999923324584961, 0.05931827163696289, 0.05953126525878906, 0.06085631942749024, 0.06016921615600586, 0.05940224075317383, 0.059600894927978515, 0.05932748794555664, 0.05938995361328125, 0.05947391891479492, 0.060214271545410154, 0.06034739303588867, 0.059930622100830076, 0.06004019165039062, 0.06002687835693359, 0.05957734298706055, 0.06006272125244141, 0.05979647827148438, 0.06003200149536133, 0.059937793731689455, 0.059774974822998046, 0.05980876922607422, 0.05978214263916016, 0.06012723159790039, 0.05970636749267578, 0.05976166534423828, 0.0599552001953125, 0.06027264022827149, 0.05975244903564453, 0.05987123107910156, 0.06008524703979492, 0.06008627319335937, 0.06006988906860351, 0.060148735046386716, 0.05971148681640625, 0.05992243194580078, 0.060639232635498044, 0.06028902435302735, 0.059657215118408206, 0.05976678466796875, 0.059630592346191405, 0.05976268768310547, 0.05952511978149414, 0.060052478790283206, 0.059448318481445314, 0.059445247650146485, 0.059568126678466796, 0.060037120819091794, 0.060104705810546874, 0.06009446334838867, 0.06032076644897461, 0.060507137298583986, 0.060142593383789064, 0.060109825134277345, 0.0600893440246582, 0.06010675048828125, 0.060063743591308595, 0.060111873626708986, 0.060077056884765626, 0.060037120819091794, 0.060132350921630856, 0.060126209259033205, 0.06021324920654297, 0.06011699295043945, 0.060247039794921874, 0.060066814422607424, 0.060082176208496096, 0.06010572814941406, 0.060523521423339846, 0.06010572814941406, 0.06017228698730469, 0.0601712646484375, 0.0601794548034668, 0.05933260726928711, 0.05945446395874023, 0.059305984497070315, 0.05953638458251953, 0.05935308837890625, 0.05933363342285156, 0.05999411010742187, 0.059568126678466796, 0.05995110321044922, 0.059627521514892576, 0.05945241546630859, 0.05986304092407226, 0.05969510269165039, 0.05950054550170898, 0.059486209869384764, 0.05960806274414063, 0.05962444686889649, 0.05937561416625976, 0.05932339096069336, 0.05946060943603516, 0.05934284973144531, 0.05950054550170898, 0.05927833557128906, 0.05957427215576172, 0.05932748794555664, 0.06014771270751953, 0.059253761291503906, 0.059276287078857424, 0.06033203125, 0.05971558380126953, 0.059428863525390625, 0.05944934463500977, 0.06005657577514648, 0.05955891036987305, 0.05947391891479492, 0.05936742401123047, 0.05938380813598633, 
0.0595599365234375, 0.060260353088378904, 0.059458560943603515, 0.0597125129699707, 0.059856895446777345, 0.05956198501586914, 0.05947084808349609, 0.05939507293701172, 0.059819007873535154, 0.05948211288452149, 0.061491199493408207, 0.06051123046875, 0.0602531852722168, 0.05957632064819336, 0.059456512451171874, 0.05955788803100586, 0.05990399932861328, 0.05937152099609375, 0.05938790512084961, 0.05932646560668945, 0.0597391357421875, 0.05954969787597656, 0.05939814376831055, 0.059309055328369144, 0.05953945541381836, 0.059660289764404295, 0.05936025619506836, 0.05932032012939453, 0.0595865592956543, 0.05994393539428711, 0.06020710372924805, 0.060063743591308595, 0.05976678466796875, 0.05998796844482422, 0.059509761810302736, 0.05955891036987305, 0.05950668716430664, 0.06019686508178711, 0.06020710372924805, 0.060060672760009766, 0.05942272186279297, 0.059974655151367184, 0.05963776016235352, 0.06000332641601563, 0.061047809600830075, 0.060216320037841796, 0.05999923324584961, 0.0600709114074707, 0.05960192108154297, 0.06015590286254883, 0.0599920654296875, 0.06014771270751953, 0.05977907180786133, 0.059635711669921876, 0.059686912536621096, 0.05986918258666992, 0.06008115386962891, 0.05990707015991211, 0.05969305419921875, 0.059991039276123044, 0.059851776123046874, 0.05931827163696289, 0.059822078704833984, 0.0600002555847168]",tokens/s,16.738769912039675,,,,,,,, -4bit-awq-gemv-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,EleutherAI/gpt-neo-1.3B,EleutherAI/gpt-neo-1.3B,cuda,0,42,,,True,,,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,gpt_neo,MB,1498.005504,1531.445248,0.0,945.815552,943.480832,s,1,8.455251953125,8.455251953125,0.0,8.455251953125,8.455251953125,8.455251953125,8.455251953125,[8.455251953125],,kWh,1.872375970764324e-05,1.0245798873188622e-05,2.5614464935985737e-05,5.45840235168176e-05,,MB,1619.902464,1705.508864,0.0,1059.06176,1037.869056,s,11,0.32442886543273924,0.02949353322115812,0.0002106050051040176,0.02957241630554199,0.029643968582153322,0.02965171241760254,0.029657907485961914,"[0.028997535705566405, 0.029499935150146483, 0.029123136520385742, 0.02954857635498047, 0.029639328002929687, 0.029561920166015623, 0.02957241630554199, 0.029659456253051757, 0.029601024627685546, 0.029581567764282227, 0.029643968582153322]",tokens/s,8679.868840412457,kWh,3.4365279442021517e-07,1.8830534315373692e-07,1.5894610583895445e-06,2.1214191959634966e-06,tokens/kWh,120673933.98112959,MB,1631.195136,1812.463616,0.0,1166.016512,1080.15616,s,11,9.810508544921875,0.8918644131747159,0.011049588185804406,0.8924695434570312,0.9062291870117187,0.9063132629394531,0.9063805236816407,"[0.8834459228515625, 0.9062291870117187, 0.9063973388671875, 0.8816298828125, 0.8794376220703125, 0.8753644409179687, 0.89629736328125, 0.8841578979492187, 0.9053551635742187, 0.8924695434570312, 
0.8997241821289063]",tokens/s,70.63853997239637,kWh,1.0563356039292717e-05,5.788196877967148e-06,2.1646820156883457e-05,3.79983730741433e-05,tokens/kWh,1657965.7207184357,,s,693,9.806406651496888,0.014150658948768957,0.00037010303345497245,0.013973504066467286,0.01455493106842041,0.014668390464782714,0.015069593849182128,"[0.01586995220184326, 0.014908415794372559, 0.01455513572692871, 0.014559231758117675, 0.014407679557800293, 0.01444863986968994, 0.014437376022338867, 0.014000127792358399, 0.01388748836517334, 0.013824000358581542, 0.013837311744689941, 0.01396019172668457, 0.01386905574798584, 0.01387110424041748, 0.013771776199340821, 0.01387929630279541, 0.013798399925231934, 0.013856767654418945, 0.013804544448852539, 0.013899776458740234, 0.013864959716796875, 0.013856767654418945, 0.013758463859558106, 0.013731840133666993, 0.013853695869445801, 0.013810688018798829, 0.013847552299499511, 0.013817855834960938, 0.014163968086242675, 0.013835264205932616, 0.013833215713500976, 0.01376153564453125, 0.01415987205505371, 0.013971455574035644, 0.013845503807067871, 0.013873151779174805, 0.01387929630279541, 0.013815808296203613, 0.013835264205932616, 0.013905920028686524, 0.013810688018798829, 0.013857791900634766, 0.013881343841552735, 0.013904895782470703, 0.014211071968078613, 0.014023679733276367, 0.013940735816955567, 0.014511103630065919, 0.014641152381896973, 0.014507007598876954, 0.014486528396606446, 0.01396224021911621, 0.013872127532958984, 0.013908991813659668, 0.013897727966308594, 0.013859840393066406, 0.013883392333984374, 0.013864959716796875, 0.013796352386474609, 0.013816831588745117, 0.013864959716796875, 0.013829119682312012, 0.013891584396362304, 0.013744128227233888, 0.013922304153442382, 0.014097408294677734, 0.014534655570983887, 0.014425087928771972, 0.014507007598876954, 0.014513152122497559, 0.014502911567687989, 0.014509056091308594, 0.01451417636871338, 0.014515199661254884, 0.014368767738342286, 0.014454784393310547, 0.014401535987854003, 0.014559231758117675, 0.014443519592285157, 0.014144512176513671, 0.013789183616638183, 0.01386905574798584, 0.01387929630279541, 0.013817855834960938, 0.013824000358581542, 0.014438400268554688, 0.014485504150390625, 0.014531583786010742, 0.014454784393310547, 0.0144650239944458, 0.01389568042755127, 0.013826047897338867, 0.014436351776123046, 0.014484479904174804, 0.01439129638671875, 0.014455807685852052, 0.01438003158569336, 0.014222335815429688, 0.014203904151916504, 0.01457254409790039, 0.014457856178283691, 0.014629887580871583, 0.014510080337524415, 0.014528512001037597, 0.014379008293151856, 0.014439423561096192, 0.014412799835205077, 0.014440447807312011, 0.01447321605682373, 0.014287872314453125, 0.014441472053527832, 0.01440665626525879, 0.01444863986968994, 0.01508249568939209, 0.014683135986328125, 0.014495743751525878, 0.01447935962677002, 0.014361599922180175, 0.014432255744934081, 0.014427136421203614, 0.014870528221130372, 0.014724096298217774, 0.014495743751525878, 0.01446399974822998, 0.014444543838500976, 0.014431232452392579, 0.01376972770690918, 0.013883392333984374, 0.013783040046691895, 0.013821951866149903, 0.013780991554260253, 0.0138854398727417, 0.01451417636871338, 0.014426112174987793, 0.014486528396606446, 0.0144650239944458, 0.01457151985168457, 0.014486528396606446, 0.014533632278442383, 0.014527487754821777, 0.014238719940185546, 0.014452735900878906, 0.014305279731750489, 0.014529536247253418, 0.0144650239944458, 0.014513152122497559, 0.01426636791229248, 0.013930496215820312, 
0.01387827205657959, 0.013883392333984374, 0.013882368087768555, 0.013853695869445801, 0.013859840393066406, 0.014393343925476074, 0.014452735900878906, 0.01438003158569336, 0.014419967651367188, 0.014435327529907227, 0.014450688362121582, 0.015029248237609863, 0.014551039695739745, 0.014526464462280274, 0.01448857593536377, 0.014477312088012695, 0.014507007598876954, 0.014503935813903808, 0.01477836799621582, 0.014550016403198243, 0.014797823905944824, 0.014470144271850586, 0.014467071533203125, 0.01447321605682373, 0.014458880424499512, 0.0144650239944458, 0.014435327529907227, 0.014527487754821777, 0.014496767997741699, 0.01439129638671875, 0.01447219181060791, 0.014450688362121582, 0.014817279815673828, 0.014727168083190918, 0.014642175674438476, 0.014496767997741699, 0.014567423820495605, 0.014469120025634765, 0.014511103630065919, 0.014513152122497559, 0.014423040390014649, 0.013874176025390626, 0.013889535903930664, 0.013845503807067871, 0.013880319595336914, 0.013765631675720215, 0.013829119682312012, 0.013790207862854004, 0.013819904327392578, 0.013824000358581542, 0.013840383529663085, 0.013858816146850587, 0.013840383529663085, 0.013888511657714844, 0.013819904327392578, 0.013839360237121581, 0.013832192420959472, 0.01384447956085205, 0.013856767654418945, 0.01386086368560791, 0.013817855834960938, 0.013859840393066406, 0.01387827205657959, 0.013736960411071777, 0.013897727966308594, 0.01386086368560791, 0.013865983963012696, 0.013758463859558106, 0.014721023559570312, 0.014699520111083985, 0.0144650239944458, 0.014429183959960937, 0.014393343925476074, 0.014386176109313965, 0.014429183959960937, 0.014358528137207031, 0.014411775588989258, 0.01425715160369873, 0.013873151779174805, 0.013856767654418945, 0.01375436782836914, 0.01365401554107666, 0.013692928314208984, 0.013788160324096679, 0.01367244815826416, 0.0138854398727417, 0.01425100803375244, 0.014186495780944825, 0.014551039695739745, 0.014550016403198243, 0.013830143928527832, 0.01360588836669922, 0.01387827205657959, 0.013759488105773926, 0.01358131217956543, 0.014028800010681153, 0.01377791976928711, 0.013650943756103515, 0.013583359718322753, 0.013790207862854004, 0.01396224021911621, 0.014452735900878906, 0.014741503715515136, 0.01467903995513916, 0.013697024345397948, 0.013800448417663574, 0.013829119682312012, 0.01387929630279541, 0.013917183876037598, 0.013857791900634766, 0.013906944274902343, 0.013821951866149903, 0.013842432022094727, 0.01386086368560791, 0.013859840393066406, 0.013808639526367187, 0.013922304153442382, 0.013818880081176758, 0.013818880081176758, 0.013855744361877441, 0.01390182399749756, 0.013875200271606445, 0.013836288452148437, 0.013892607688903809, 0.01385267162322998, 0.013924351692199707, 0.013880319595336914, 0.013965312004089356, 0.013932543754577637, 0.013883392333984374, 0.01386291217803955, 0.013925375938415528, 0.013889535903930664, 0.013827072143554688, 0.013837311744689941, 0.013876223564147949, 0.013815808296203613, 0.01399500846862793, 0.013873151779174805, 0.01448755168914795, 0.013648896217346192, 0.013627391815185547, 0.013732864379882812, 0.013831168174743653, 0.013897727966308594, 0.013864959716796875, 0.013921279907226563, 0.01387110424041748, 0.013904895782470703, 0.013905952453613282, 0.013895648002624512, 0.013800448417663574, 0.013883392333984374, 0.01387827205657959, 0.013873151779174805, 0.014268416404724121, 0.01388748836517334, 0.013946880340576171, 0.013940735816955567, 0.014189567565917969, 0.014072832107543945, 0.01426534366607666, 0.014623744010925293, 
0.01479372787475586, 0.0146626558303833, 0.01459712028503418, 0.014419967651367188, 0.013716480255126954, 0.014645248413085938, 0.0147957763671875, 0.014454784393310547, 0.014468095779418945, 0.014526464462280274, 0.014445568084716797, 0.014484479904174804, 0.014425087928771972, 0.013903871536254882, 0.013828096389770507, 0.013838335990905762, 0.013832192420959472, 0.013593600273132325, 0.01366220760345459, 0.013868032455444336, 0.01376255989074707, 0.013574144363403321, 0.013606911659240722, 0.01356492805480957, 0.013584383964538574, 0.013592576026916504, 0.013617152214050294, 0.01358028793334961, 0.014000127792358399, 0.014105600357055664, 0.014243840217590332, 0.014324735641479493, 0.013723648071289063, 0.01375334358215332, 0.01389568042755127, 0.01375641632080078, 0.013644800186157227, 0.013622271537780761, 0.013925375938415528, 0.013765631675720215, 0.01380352020263672, 0.013842432022094727, 0.013865983963012696, 0.013847552299499511, 0.013845503807067871, 0.013799424171447755, 0.01375334358215332, 0.013570048332214356, 0.013574144363403321, 0.013724672317504882, 0.013816831588745117, 0.013907967567443847, 0.013608960151672364, 0.013650943756103515, 0.013839360237121581, 0.013828096389770507, 0.013857791900634766, 0.013915136337280273, 0.01376972770690918, 0.01386393642425537, 0.013890560150146485, 0.014255104064941406, 0.013802495956420899, 0.013593600273132325, 0.013656064033508301, 0.013997056007385255, 0.014008319854736329, 0.013821951866149903, 0.014230527877807618, 0.01457049560546875, 0.014511103630065919, 0.01448140811920166, 0.01454694366455078, 0.014502911567687989, 0.014420991897583007, 0.014820351600646972, 0.014742527961730957, 0.014540800094604492, 0.01452236843109131, 0.014430208206176758, 0.01459712028503418, 0.014452735900878906, 0.014144512176513671, 0.014558208465576173, 0.014512127876281738, 0.014574591636657714, 0.014425087928771972, 0.013914112091064454, 0.013855744361877441, 0.013914112091064454, 0.0138854398727417, 0.01388748836517334, 0.014381055831909179, 0.014496767997741699, 0.01459609603881836, 0.0144650239944458, 0.014419967651367188, 0.014501888275146485, 0.014419967651367188, 0.014503935813903808, 0.014386176109313965, 0.01448140811920166, 0.01448960018157959, 0.0144650239944458, 0.014459903717041016, 0.014523391723632812, 0.013979647636413574, 0.01386086368560791, 0.013815808296203613, 0.013812735557556152, 0.014044159889221192, 0.01417625617980957, 0.01427558422088623, 0.014041088104248046, 0.013973504066467286, 0.01397043228149414, 0.01387929630279541, 0.013872127532958984, 0.013902848243713378, 0.013864959716796875, 0.01400115203857422, 0.013892607688903809, 0.013897727966308594, 0.013849599838256836, 0.013882368087768555, 0.013889535903930664, 0.013914112091064454, 0.013786111831665039, 0.013941760063171387, 0.013956095695495606, 0.013676544189453126, 0.013830143928527832, 0.013857791900634766, 0.013808639526367187, 0.013916159629821777, 0.013768704414367675, 0.013900799751281738, 0.013807616233825683, 0.01386393642425537, 0.013842432022094727, 0.01384447956085205, 0.013855744361877441, 0.013848575592041015, 0.013744128227233888, 0.014147583961486816, 0.013985792160034179, 0.013998080253601074, 0.013832192420959472, 0.013824000358581542, 0.013782015800476074, 0.013802495956420899, 0.013842432022094727, 0.013820927619934082, 0.01385267162322998, 0.013816831588745117, 0.013817855834960938, 0.013842432022094727, 0.013808639526367187, 0.013797375679016113, 0.013830143928527832, 0.013810688018798829, 0.013739007949829102, 0.013814784049987794, 
0.013835264205932616, 0.013859840393066406, 0.013812735557556152, 0.013854720115661622, 0.013811712265014648, 0.013831168174743653, 0.013808639526367187, 0.013838335990905762, 0.014307328224182129, 0.014736384391784667, 0.014443519592285157, 0.014496767997741699, 0.014474240303039551, 0.014512127876281738, 0.014509056091308594, 0.014440447807312011, 0.013833215713500976, 0.01385267162322998, 0.013997056007385255, 0.013894656181335448, 0.013988863945007325, 0.014019583702087402, 0.015047679901123047, 0.01507430362701416, 0.014500864028930664, 0.014467071533203125, 0.014452735900878906, 0.014503935813903808, 0.014447615623474122, 0.014419967651367188, 0.013785087585449218, 0.01384447956085205, 0.014358528137207031, 0.014515199661254884, 0.014828543663024902, 0.014491647720336913, 0.01384447956085205, 0.014032896041870118, 0.014537728309631348, 0.014527487754821777, 0.014333951950073242, 0.014529536247253418, 0.014436351776123046, 0.01438003158569336, 0.014432255744934081, 0.014389247894287109, 0.014412799835205077, 0.014439423561096192, 0.014416895866394042, 0.014791680335998534, 0.01542246437072754, 0.01642086410522461, 0.015069184303283691, 0.014561280250549317, 0.014589952468872071, 0.014621696472167968, 0.014529536247253418, 0.014577664375305176, 0.014530559539794922, 0.014449664115905762, 0.014517248153686524, 0.014385151863098144, 0.01444863986968994, 0.014562303543090821, 0.014451711654663087, 0.014561280250549317, 0.014321663856506347, 0.014512127876281738, 0.014441472053527832, 0.014441472053527832, 0.014445568084716797, 0.014474240303039551, 0.014493696212768555, 0.014538751602172852, 0.014538751602172852, 0.013971455574035644, 0.013899776458740234, 0.014003199577331543, 0.013963264465332031, 0.013934592247009277, 0.013880319595336914, 0.013922304153442382, 0.013899776458740234, 0.01397043228149414, 0.013802495956420899, 0.014008319854736329, 0.013894656181335448, 0.013950976371765136, 0.013849599838256836, 0.013946880340576171, 0.01386291217803955, 0.014272512435913086, 0.014660608291625977, 0.013774847984313965, 0.013950976371765136, 0.013829119682312012, 0.013911040306091308, 0.01397555160522461, 0.013949952125549316, 0.013834239959716797, 0.013959168434143066, 0.014321663856506347, 0.013929471969604493, 0.013956095695495606, 0.013985792160034179, 0.013899776458740234, 0.014012415885925293, 0.013946880340576171, 0.01386393642425537, 0.013867008209228515, 0.013915136337280273, 0.013928447723388672, 0.013898752212524413, 0.013947903633117676, 0.013888511657714844, 0.013892607688903809, 0.013899776458740234, 0.01399500846862793, 0.014453760147094727, 0.014524415969848633, 0.014542847633361817, 0.014457856178283691, 0.014431232452392579, 0.014433279991149902, 0.01439846420288086, 0.014476287841796874, 0.014562303543090821, 0.014468095779418945, 0.014476287841796874, 0.014427136421203614, 0.014428159713745118, 0.014425087928771972, 0.014482432365417481, 0.014446592330932618, 0.014466048240661621, 0.014313471794128419, 0.014689279556274413, 0.015138815879821778, 0.014759936332702638, 0.01457151985168457, 0.014516223907470703, 0.01396224021911621, 0.01369600009918213, 0.01368883228302002, 0.013678591728210449, 0.013985792160034179, 0.014517248153686524, 0.014243840217590332, 0.013645824432373046, 0.01376153564453125, 0.01387724781036377, 0.013965312004089356, 0.014491647720336913, 0.014369791984558105, 0.014213120460510254, 0.013800448417663574, 0.013980671882629395, 0.013914112091064454, 0.01389363193511963, 0.013850624084472657, 0.013853695869445801, 0.013857791900634766, 
0.013628416061401367, 0.013600768089294434, 0.01369600009918213, 0.0138854398727417, 0.013843456268310546, 0.013891584396362304, 0.013640704154968262, 0.013924351692199707, 0.013858816146850587, 0.01388646411895752, 0.013841407775878906, 0.013880319595336914, 0.013914112091064454, 0.013991935729980469, 0.01419059181213379, 0.014536704063415527, 0.014515199661254884, 0.014508031845092773, 0.014500864028930664, 0.014435327529907227, 0.014520319938659668, 0.014485504150390625, 0.014486528396606446, 0.014474240303039551, 0.014783488273620606, 0.014573568344116212, 0.01448140811920166, 0.014407679557800293, 0.014432255744934081, 0.014435327529907227, 0.01439027214050293, 0.014469120025634765, 0.014451711654663087, 0.014533632278442383, 0.014676992416381835, 0.014554112434387208, 0.014608384132385254, 0.01456332778930664, 0.014459903717041016, 0.014655488014221191, 0.01448960018157959, 0.013942784309387207, 0.013868032455444336, 0.013956095695495606, 0.013964287757873535, 0.014003199577331543, 0.013942784309387207, 0.014089216232299804, 0.014504960060119629, 0.014640128135681153, 0.01600819206237793, 0.014947327613830566, 0.014635007858276367, 0.014637056350708008, 0.014684160232543946, 0.014568448066711426, 0.014516223907470703]",tokens/s,70.66808716261198,,,,,,,, -4bit-awq-gemv-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,tiiuae/falcon-7b,tiiuae/falcon-7b,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - self.load_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 559, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3704, in from_pretrained - config = cls._autoset_attn_implementation( - File 
""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1481, in _autoset_attn_implementation - cls._check_and_enable_flash_attn_2( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1572, in _check_and_enable_flash_attn_2 - raise ValueError( -ValueError: FalconForCausalLM does not support Flash Attention 2.0 yet. Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmpp9d7838n/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemv-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,huggyllama/llama-65b,huggyllama/llama-65b,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - self.load_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3904, in from_pretrained - dispatch_model(model, **device_map_kwargs) - File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 489, in dispatch_model - model.to(device) - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 2796, in to - return super().to(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1173, in to - return self._apply(convert) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 779, in _apply - module._apply(fn) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 779, in 
_apply - module._apply(fn) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 779, in _apply - module._apply(fn) - [Previous line repeated 2 more times] - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 853, in _apply - self._buffers[key] = fn(buf) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1159, in convert - return t.to( -torch.cuda.OutOfMemoryError: CUDA out of memory. Tried to allocate 86.00 MiB. GPU - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemv-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,1,1,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 304, in hf_raise_for_status - response.raise_for_status() - File ""/usr/local/lib/python3.10/dist-packages/requests/models.py"", line 1024, in raise_for_status - raise HTTPError(http_error_msg, response=self) -requests.exceptions.HTTPError: 404 Client Error: Not Found for url: https://huggingface.co/1/resolve/main/config.json - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1221, in hf_hub_download - return _hf_hub_download_to_cache_dir( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1325, in _hf_hub_download_to_cache_dir - _raise_on_head_call_error(head_call_error, force_download, local_files_only) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1823, in _raise_on_head_call_error - raise head_call_error - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1722, in _get_metadata_or_catch_error - metadata = get_hf_file_metadata(url=url, proxies=proxies, timeout=etag_timeout, headers=headers) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File 
""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1645, in get_hf_file_metadata - r = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 372, in _request_wrapper - response = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 396, in _request_wrapper - hf_raise_for_status(response) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status - raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-669495d8-6e7bc83278d01246445ae00e;c5c31fda-fd9f-42e7-9e41-3a985ee5d71a) - -Repository Not Found for url: https://huggingface.co/1/resolve/main/config.json. -Please make sure you specified the correct `repo_id` and `repo_type`. -If you are trying to access a private or gated repo, make sure you are authenticated. - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 425, in cached_file - raise EnvironmentError( -OSError: 1 is not a local folder and is not a valid model identifier listed on 'https://huggingface.co/models' -If this is a private repository, make sure to pass a token having permission to this repo either by logging in with `huggingface-cli login` or by passing `token=` - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemv-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,internlm/internlm-20b,internlm/internlm-20b,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 
7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run - report = scenario.run(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 105, in run - _ = backend.generate(self.inputs, self.config.generate_kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context - return func(*args, **kwargs) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 400, in generate - return self.pretrained_model.generate(**inputs, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context - return func(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 1914, in generate - result = self._sample( - File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2651, in _sample - outputs = self( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/root/.cache/huggingface/modules/transformers_modules/internlm/internlm-20b/80729bcf52fbc4553d965926b27304ac5e156d98/modeling_internlm.py"", line 976, in forward - outputs = self.model( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/root/.cache/huggingface/modules/transformers_modules/internlm/internlm-20b/80729bcf52fbc4553d965926b27304ac5e156d98/modeling_internlm.py"", line 866, in forward - layer_outputs = decoder_layer( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/root/.cache/huggingface/modules/transformers_modules/internlm/internlm-20b/80729bcf52fbc4553d965926b27304ac5e156d98/modeling_internlm.py"", line 583, in forward - hidden_states, self_attn_weights, present_key_value = self.self_attn( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", 
line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/root/.cache/huggingface/modules/transformers_modules/internlm/internlm-20b/80729bcf52fbc4553d965926b27304ac5e156d98/modeling_internlm.py"", line 411, in forward - query_states = self.q_proj(hidden_states).view(bsz, q_len, self.num_heads, self.head_dim).transpose(1, 2) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context - return func(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/gemv.py"", line 162, in forward - assert AWQ_INSTALLED, ( -AssertionError: AWQ kernels could not be loaded. Please install them from https://github.com/casper-hansen/AutoAWQ_kernels - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemv-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,internlm/internlm2-20b,internlm/internlm2-20b,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run - report = scenario.run(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 105, in run - _ = backend.generate(self.inputs, self.config.generate_kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context - return func(*args, **kwargs) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 400, in generate - return self.pretrained_model.generate(**inputs, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context - return func(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 1914, in generate - result = self._sample( - File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2651, in _sample - outputs = self( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return 
self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/root/.cache/huggingface/modules/transformers_modules/internlm/internlm2-20b/dc0130882132de7cb2eb1fa54ba5294b8c922076/modeling_internlm2.py"", line 1204, in forward - outputs = self.model( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/root/.cache/huggingface/modules/transformers_modules/internlm/internlm2-20b/dc0130882132de7cb2eb1fa54ba5294b8c922076/modeling_internlm2.py"", line 1004, in forward - layer_outputs = decoder_layer( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/root/.cache/huggingface/modules/transformers_modules/internlm/internlm2-20b/dc0130882132de7cb2eb1fa54ba5294b8c922076/modeling_internlm2.py"", line 738, in forward - hidden_states, self_attn_weights, present_key_value = self.attention( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/root/.cache/huggingface/modules/transformers_modules/internlm/internlm2-20b/dc0130882132de7cb2eb1fa54ba5294b8c922076/modeling_internlm2.py"", line 410, in forward - qkv_states = self.wqkv(hidden_states) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context - return func(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/gemv.py"", line 162, in forward - assert AWQ_INSTALLED, ( -AssertionError: AWQ kernels could not be loaded. 
Please install them from https://github.com/casper-hansen/AutoAWQ_kernels - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemv-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,facebook/opt-30b,facebook/opt-30b,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run - report = scenario.run(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 105, in run - _ = backend.generate(self.inputs, self.config.generate_kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context - return func(*args, **kwargs) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 400, in generate - return self.pretrained_model.generate(**inputs, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context - return func(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 1914, in generate - result = self._sample( - File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2651, in _sample - outputs = self( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 1118, in forward - outputs = self.model.decoder( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 884, in forward - layer_outputs = decoder_layer( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File 
""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 525, in forward - hidden_states, self_attn_weights, present_key_value = self.self_attn( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 292, in forward - query_states = self.q_proj(hidden_states) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context - return func(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/gemv.py"", line 162, in forward - assert AWQ_INSTALLED, ( -AssertionError: AWQ kernels could not be loaded. Please install them from https://github.com/casper-hansen/AutoAWQ_kernels - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemv-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,Qwen/Qwen1.5-MoE-A2.7B,Qwen/Qwen1.5-MoE-A2.7B,cuda,0,42,,,True,,,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run - report = scenario.run(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run - self.run_model_loading_tracking(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking - backend.load() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load - self.load_transformers_model() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model - self.load_transformers_model_with_no_weights() - File 
""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights - self.load_transformers_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained - self.pretrained_model = self.automodel_loader.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3846, in from_pretrained - hf_quantizer.preprocess_model( - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/base.py"", line 182, in preprocess_model - return self._process_model_before_weight_loading(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/quantizer_awq.py"", line 85, in _process_model_before_weight_loading - model, has_been_replaced = replace_with_awq_linear( - File ""/usr/local/lib/python3.10/dist-packages/transformers/integrations/awq.py"", line 179, in replace_with_awq_linear - _, has_been_replaced = replace_with_awq_linear( - File ""/usr/local/lib/python3.10/dist-packages/transformers/integrations/awq.py"", line 179, in replace_with_awq_linear - _, has_been_replaced = replace_with_awq_linear( - File ""/usr/local/lib/python3.10/dist-packages/transformers/integrations/awq.py"", line 179, in replace_with_awq_linear - _, has_been_replaced = replace_with_awq_linear( - [Previous line repeated 1 more time] - File ""/usr/local/lib/python3.10/dist-packages/transformers/integrations/awq.py"", line 166, in replace_with_awq_linear - model._modules[name] = target_cls( - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/gemv.py"", line 47, in __init__ - assert out_features % (32 // self.w_bit) == 0 -AssertionError - -",qwen2_moe,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemv-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,mistralai/Mixtral-8x7B-v0.1,mistralai/Mixtral-8x7B-v0.1,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File 
""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - self.load_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3904, in from_pretrained - dispatch_model(model, **device_map_kwargs) - File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 489, in dispatch_model - model.to(device) - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 2796, in to - return super().to(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1173, in to - return self._apply(convert) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 779, in _apply - module._apply(fn) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 779, in _apply - module._apply(fn) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 779, in _apply - module._apply(fn) - [Previous line repeated 2 more times] - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 853, in _apply - self._buffers[key] = fn(buf) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1159, in convert - return t.to( -torch.cuda.OutOfMemoryError: CUDA out of memory. Tried to allocate 20.00 MiB. 
GPU - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemv-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,EleutherAI/pythia-410m,EleutherAI/pythia-410m,cuda,0,42,,,True,,,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,gpt_neox,MB,1065.746432,1074.266112,0.0,488.636416,482.553856,s,1,7.51860791015625,7.51860791015625,0.0,7.51860791015625,7.51860791015625,7.51860791015625,7.51860791015625,[7.51860791015625],,kWh,8.425589512502383e-06,4.602170035234357e-06,1.1376120212014396e-05,2.4403879759751136e-05,,MB,1444.225024,1263.009792,0.0,616.562688,582.974464,s,10,0.21123420715332034,0.02112342071533203,0.0002940863049575914,0.0210197114944458,0.021300131607055663,0.0216319221496582,0.021897354583740234,"[0.02122640037536621, 0.020907455444335938, 0.021064191818237304, 0.021009855270385742, 0.020976768493652344, 0.021001535415649412, 0.02092367935180664, 0.021029567718505858, 0.021131040573120118, 0.02196371269226074]",tokens/s,12119.249218673533,kWh,2.4271796039244614e-07,1.329978377967466e-07,7.337360421561207e-07,1.1094518403453134e-06,tokens/kWh,230744580.96380353,MB,1479.184384,1277.689856,0.0,631.242752,597.192192,s,10,12.011481933593748,1.201148193359375,0.005673057261116415,1.2020083618164064,1.206297265625,1.2080716674804686,1.2094911889648436,"[1.1909664306640626, 1.19735546875, 1.192417724609375, 1.20478369140625, 1.20496875, 1.20122412109375, 1.201978271484375, 1.2098460693359374, 1.2020384521484375, 1.2059029541015625]",tokens/s,52.449814559352085,kWh,1.3702224206413716e-05,7.508426467720574e-06,2.2023365496838787e-05,4.323401617097306e-05,tokens/kWh,1457185.9285720869,,s,630,12.006772737503052,0.01905836942460802,0.00027272207118190573,0.01904435157775879,0.019249458503723144,0.019384985065460207,0.01998997488021851,"[0.019054592132568358, 0.019090431213378906, 0.01904025650024414, 0.018927616119384767, 0.019082239151000976, 0.018965503692626954, 0.018948095321655273, 0.018958335876464845, 0.018979839324951172, 0.018989055633544923, 0.019083263397216797, 0.018991104125976564, 0.0192491512298584, 0.019283967971801756, 0.0190515193939209, 0.018944000244140623, 0.019084287643432618, 0.018880512237548826, 0.018719743728637696, 0.018508800506591795, 0.018579456329345705, 0.018564096450805666, 0.018726911544799805, 0.01864192008972168, 0.018936832427978514, 0.019106815338134766, 0.01927168083190918, 0.019153919219970703, 0.01902079963684082, 0.01906790351867676, 0.019074047088623047, 0.019090431213378906, 0.01902387237548828, 0.01965772819519043, 0.019314687728881837, 0.019125247955322267, 0.019135488510131835, 0.019109888076782225, 0.01906175994873047, 0.019059743881225586, 0.019042272567749024, 0.01903615951538086, 0.018757631301879883, 0.018249727249145507, 0.018159616470336915, 0.018100223541259765, 0.018301952362060548, 0.01824563217163086, 0.01823846435546875, 0.018281471252441405, 0.01824870491027832, 0.01843404769897461, 0.018860031127929687, 0.018651136398315428, 0.01907302474975586, 0.019055648803710936, 
0.019036127090454102, 0.01909350395202637, 0.019162111282348633, 0.018940927505493164, 0.018919424057006837, 0.01903411293029785, 0.01902899169921875, 0.018865152359008788, 0.019014656066894533, 0.0191016960144043, 0.018924543380737305, 0.018949119567871094, 0.019006464004516603, 0.01903206443786621, 0.018973695755004884, 0.018920448303222655, 0.018993152618408202, 0.018958335876464845, 0.018990079879760743, 0.018984960556030273, 0.01900441551208496, 0.018944000244140623, 0.018981887817382814, 0.01899519920349121, 0.01908121681213379, 0.01900748825073242, 0.018938880920410156, 0.01923276710510254, 0.0190248966217041, 0.01901260757446289, 0.018898944854736328, 0.018988031387329102, 0.01904128074645996, 0.01901670455932617, 0.01899212837219238, 0.018945024490356444, 0.019105791091918945, 0.019042303085327148, 0.018977792739868164, 0.01902592086791992, 0.01900748825073242, 0.018807807922363282, 0.01895427131652832, 0.018963424682617188, 0.018877439498901367, 0.018958335876464845, 0.018726911544799805, 0.01883135986328125, 0.01938739204406738, 0.019325952529907226, 0.019091455459594727, 0.018988031387329102, 0.019058687210083008, 0.018957311630249024, 0.018971647262573242, 0.018934783935546876, 0.018954240798950195, 0.018970624923706055, 0.019009536743164062, 0.01900441551208496, 0.01889587211608887, 0.018919424057006837, 0.01904025650024414, 0.019134464263916014, 0.01906175994873047, 0.01900032043457031, 0.019131391525268555, 0.018935808181762694, 0.019021823883056642, 0.019001344680786132, 0.01884774398803711, 0.019009536743164062, 0.01899622344970703, 0.01921023941040039, 0.0190515193939209, 0.01901260757446289, 0.01901260757446289, 0.019149824142456053, 0.019330047607421876, 0.01922867202758789, 0.018964479446411133, 0.01903411293029785, 0.019072000503540038, 0.019203071594238282, 0.01900339126586914, 0.019288063049316406, 0.018959360122680666, 0.018997247695922852, 0.01904435157775879, 0.019078144073486326, 0.019083263397216797, 0.019070976257324217, 0.018997247695922852, 0.018902015686035157, 0.01904435157775879, 0.019072000503540038, 0.01902592086791992, 0.01901055908203125, 0.019186687469482423, 0.019182592391967773, 0.019076095581054688, 0.018997247695922852, 0.01906483268737793, 0.01901158332824707, 0.01903718376159668, 0.01924198341369629, 0.019009536743164062, 0.019386367797851564, 0.01902694320678711, 0.01905561637878418, 0.01899519920349121, 0.01900032043457031, 0.018973695755004884, 0.01866547203063965, 0.018536447525024414, 0.018578432083129884, 0.018775039672851563, 0.018700288772583007, 0.01904435157775879, 0.018712575912475587, 0.018694143295288086, 0.01866547203063965, 0.018330623626708984, 0.017903615951538086, 0.01800396728515625, 0.017937408447265626, 0.01830297660827637, 0.01902079963684082, 0.018731008529663085, 0.0187064323425293, 0.01867366409301758, 0.01885081672668457, 0.019172351837158205, 0.01904025650024414, 0.01920102310180664, 0.01924608039855957, 0.018939903259277344, 0.018703359603881836, 0.0186060791015625, 0.01906380844116211, 0.018716672897338867, 0.018734079360961914, 0.019005439758300782, 0.018984960556030273, 0.01903206443786621, 0.018938880920410156, 0.01903104019165039, 0.018973695755004884, 0.019109888076782225, 0.019100671768188478, 0.01909350395202637, 0.019087360382080077, 0.019079168319702147, 0.019058687210083008, 0.01901260757446289, 0.019133440017700197, 0.01904844856262207, 0.01926348876953125, 0.019118080139160155, 0.019132415771484376, 0.019165184020996092, 0.019087360382080077, 0.019146751403808594, 0.019148799896240236, 
0.01926144027709961, 0.019276800155639647, 0.01920921516418457, 0.019108863830566408, 0.01919795227050781, 0.01946931266784668, 0.019796991348266603, 0.019211263656616212, 0.019320831298828126, 0.01940377616882324, 0.019172351837158205, 0.019100671768188478, 0.01982464027404785, 0.019105791091918945, 0.019155967712402345, 0.019115007400512696, 0.01905766487121582, 0.01901568031311035, 0.018998271942138673, 0.01920204734802246, 0.018971647262573242, 0.019013631820678712, 0.01907711982727051, 0.019333120346069335, 0.019315711975097655, 0.019143680572509765, 0.01906585693359375, 0.018994176864624023, 0.019123199462890626, 0.019131391525268555, 0.01902694320678711, 0.019091455459594727, 0.01878326416015625, 0.01910883140563965, 0.01901568031311035, 0.01899212837219238, 0.018914304733276367, 0.019106815338134766, 0.019272703170776367, 0.01903411293029785, 0.019161088943481445, 0.018997247695922852, 0.019091455459594727, 0.019092479705810548, 0.019281919479370118, 0.01895427131652832, 0.019098592758178712, 0.01902592086791992, 0.019192832946777344, 0.019056640625, 0.01903001594543457, 0.01906073570251465, 0.01905356788635254, 0.019139583587646485, 0.019120128631591796, 0.0190248966217041, 0.019116031646728517, 0.019155967712402345, 0.01921331214904785, 0.018898944854736328, 0.019111936569213867, 0.01906790351867676, 0.01899929618835449, 0.019108863830566408, 0.019118080139160155, 0.01906175994873047, 0.019087360382080077, 0.01901158332824707, 0.019122175216674805, 0.019136512756347656, 0.019115007400512696, 0.019080192565917968, 0.01906380844116211, 0.01903718376159668, 0.01965056037902832, 0.019188735961914064, 0.01906790351867676, 0.019078144073486326, 0.019079168319702147, 0.019135488510131835, 0.019070976257324217, 0.019099647521972657, 0.019146751403808594, 0.0193832950592041, 0.019373056411743163, 0.01909760093688965, 0.019268608093261717, 0.01921023941040039, 0.01925734329223633, 0.019163135528564454, 0.01965158462524414, 0.019198976516723632, 0.019087360382080077, 0.019152896881103516, 0.01902694320678711, 0.019050495147705078, 0.01908531188964844, 0.018958368301391602, 0.018976768493652343, 0.020878303527832032, 0.01989836883544922, 0.019721216201782226, 0.0190248966217041, 0.0189040641784668, 0.018865152359008788, 0.018914304733276367, 0.018948095321655273, 0.018979839324951172, 0.018929664611816405, 0.01884364891052246, 0.018784255981445314, 0.018934783935546876, 0.018934783935546876, 0.01900339126586914, 0.018954240798950195, 0.018994176864624023, 0.018974720001220705, 0.018980863571166993, 0.018939903259277344, 0.01902284812927246, 0.018962432861328125, 0.019061824798583985, 0.019043264389038087, 0.01885593605041504, 0.01899622344970703, 0.01923788833618164, 0.01917750358581543, 0.019068895339965822, 0.018965503692626954, 0.019144704818725586, 0.018961408615112304, 0.019009536743164062, 0.019195903778076173, 0.019087360382080077, 0.019109888076782225, 0.019208192825317383, 0.019204095840454103, 0.019180543899536134, 0.019019775390625, 0.018990079879760743, 0.019390464782714844, 0.019252223968505858, 0.01901568031311035, 0.019050495147705078, 0.01899929618835449, 0.01899622344970703, 0.018975744247436522, 0.01902387237548828, 0.01868185615539551, 0.01847091293334961, 0.01882316780090332, 0.01902387237548828, 0.01903206443786621, 0.019107839584350587, 0.019005439758300782, 0.019001344680786132, 0.01899622344970703, 0.018885631561279297, 0.019569664001464843, 0.0190699520111084, 0.018997247695922852, 0.018924543380737305, 0.018897920608520507, 0.018966527938842775, 
0.019001344680786132, 0.01908121681213379, 0.019006464004516603, 0.01886617660522461, 0.019168256759643554, 0.019147775650024415, 0.0190699520111084, 0.018979839324951172, 0.019014656066894533, 0.019125247955322267, 0.019116031646728517, 0.018975744247436522, 0.01904025650024414, 0.01919692802429199, 0.01920102310180664, 0.019062784194946288, 0.018944000244140623, 0.019161088943481445, 0.019142656326293944, 0.01904332733154297, 0.01906073570251465, 0.0184770565032959, 0.01842585563659668, 0.019140607833862306, 0.018939903259277344, 0.019021823883056642, 0.01904128074645996, 0.018998271942138673, 0.01906380844116211, 0.019079168319702147, 0.01919385528564453, 0.01899212837219238, 0.01900339126586914, 0.019056640625, 0.019170303344726563, 0.019129344940185547, 0.01906281661987305, 0.01891529655456543, 0.0190382080078125, 0.019083263397216797, 0.019177471160888672, 0.01902079963684082, 0.019075071334838867, 0.019739648818969727, 0.019334144592285156, 0.019076095581054688, 0.019102720260620116, 0.019178495407104493, 0.0190515193939209, 0.019334144592285156, 0.019054592132568358, 0.01918569564819336, 0.019168224334716797, 0.019136512756347656, 0.019086336135864256, 0.019072000503540038, 0.01905561637878418, 0.018928640365600585, 0.018985984802246093, 0.01903206443786621, 0.019062784194946288, 0.018926591873168946, 0.018993152618408202, 0.018961408615112304, 0.01961369514465332, 0.019378175735473634, 0.018950143814086915, 0.019058687210083008, 0.018927616119384767, 0.019141632080078123, 0.018958335876464845, 0.018930688858032226, 0.018909183502197266, 0.018896896362304686, 0.019208192825317383, 0.01887027168273926, 0.01906380844116211, 0.018976768493652343, 0.019006464004516603, 0.019198976516723632, 0.01959321594238281, 0.01923072052001953, 0.019080192565917968, 0.01901568031311035, 0.019095552444458007, 0.01907302474975586, 0.019002368927001953, 0.01902079963684082, 0.01903411293029785, 0.01904435157775879, 0.019192832946777344, 0.01886412811279297, 0.018972671508789063, 0.019129344940185547, 0.018988031387329102, 0.018800640106201173, 0.02002739143371582, 0.019649568557739257, 0.020750303268432618, 0.019376127243041993, 0.01945907211303711, 0.01946316719055176, 0.019108863830566408, 0.01906175994873047, 0.021424127578735352, 0.020584447860717774, 0.01926246452331543, 0.01909760093688965, 0.01903923225402832, 0.018967552185058592, 0.018948095321655273, 0.01905254364013672, 0.018997247695922852, 0.019075071334838867, 0.01906892776489258, 0.01903001594543457, 0.01926246452331543, 0.019396608352661132, 0.018988031387329102, 0.019173376083374022, 0.019304479598999023, 0.019109855651855467, 0.01903923225402832, 0.018941951751708985, 0.019062784194946288, 0.018990079879760743, 0.01886617660522461, 0.018892799377441406, 0.018947071075439453, 0.018969600677490234, 0.018947071075439453, 0.01923276710510254, 0.019211263656616212, 0.019005439758300782, 0.018952192306518553, 0.01902694320678711, 0.01903104019165039, 0.019219455718994142, 0.01905971145629883, 0.019080192565917968, 0.018974720001220705, 0.019107839584350587, 0.019078144073486326, 0.01903615951538086, 0.01904435157775879, 0.019129344940185547, 0.019211263656616212, 0.01901158332824707, 0.019100671768188478, 0.019091455459594727, 0.019002368927001953, 0.01904128074645996, 0.01899519920349121, 0.019748863220214845, 0.01905971145629883, 0.01908940887451172, 0.019107839584350587, 0.01903923225402832, 0.019013631820678712, 0.019005439758300782, 0.01903206443786621, 0.019006464004516603, 0.01904947280883789, 0.019109888076782225, 
0.019054592132568358, 0.019211263656616212, 0.0189040641784668, 0.01943654441833496, 0.01922867202758789, 0.019135488510131835, 0.01900851249694824, 0.01906483268737793, 0.01902284812927246, 0.01902899169921875, 0.019066879272460938, 0.018914304733276367, 0.01903923225402832, 0.019163135528564454, 0.01908121681213379, 0.0190248966217041, 0.01906073570251465, 0.019168256759643554, 0.018983936309814452, 0.019572736740112305, 0.019137535095214844, 0.018998271942138673, 0.018989055633544923, 0.018993152618408202, 0.019155967712402345, 0.01902592086791992, 0.019080192565917968, 0.01889587211608887, 0.01900441551208496, 0.019140607833862306, 0.018993152618408202, 0.01903513526916504, 0.01905766487121582, 0.019141632080078123, 0.019070976257324217, 0.019080192565917968, 0.01899519920349121, 0.01904435157775879, 0.019066879272460938, 0.01907302474975586, 0.019156991958618166, 0.019342336654663086, 0.01903615951538086, 0.019283967971801756, 0.018984960556030273, 0.0190699520111084, 0.018998271942138673, 0.019062784194946288, 0.018955263137817382, 0.01904640007019043, 0.018951168060302736, 0.019100671768188478, 0.01901875114440918, 0.01895529556274414, 0.01926140785217285, 0.01926246452331543, 0.01906892776489258, 0.019058687210083008, 0.019167232513427734, 0.019296255111694336, 0.019086336135864256, 0.01980723190307617, 0.019200000762939453, 0.019082239151000976, 0.019005439758300782, 0.01905254364013672, 0.018954240798950195, 0.01902592086791992, 0.01904025650024414, 0.019115007400512696, 0.019404863357543944, 0.020048831939697264, 0.02030899238586426, 0.01922662353515625, 0.01901055908203125, 0.019106815338134766, 0.019084287643432618, 0.019103744506835937, 0.01903308868408203, 0.018966527938842775, 0.019095552444458007, 0.01904537582397461]",tokens/s,52.47038598741861,,,,,,,, -4bit-awq-gemv-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,databricks/dbrx-base,databricks/dbrx-base,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 304, in hf_raise_for_status - response.raise_for_status() - File ""/usr/local/lib/python3.10/dist-packages/requests/models.py"", line 1024, in raise_for_status - raise HTTPError(http_error_msg, response=self) -requests.exceptions.HTTPError: 403 Client Error: Forbidden for url: https://huggingface.co/databricks/dbrx-base/resolve/main/config.json - -The above exception was the direct cause of the following exception: - -Traceback (most recent 
call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1722, in _get_metadata_or_catch_error - metadata = get_hf_file_metadata(url=url, proxies=proxies, timeout=etag_timeout, headers=headers) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1645, in get_hf_file_metadata - r = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 372, in _request_wrapper - response = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 396, in _request_wrapper - hf_raise_for_status(response) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 367, in hf_raise_for_status - raise HfHubHTTPError(message, response=response) from e -huggingface_hub.utils._errors.HfHubHTTPError: (Request ID: Root=1-66947b5a-14378b9a30e3d0c35418818b;b3683f41-9fa5-4d56-9dfe-a678c491ca92) - -403 Forbidden: Please enable access to public gated repositories in your fine-grained token settings to view this repository.. -Cannot access content at: https://huggingface.co/databricks/dbrx-base/resolve/main/config.json. -If you are trying to create or update content,make sure you have a token with the `write` role. - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1221, in hf_hub_download - return _hf_hub_download_to_cache_dir( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1325, in _hf_hub_download_to_cache_dir - _raise_on_head_call_error(head_call_error, force_download, local_files_only) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1826, in _raise_on_head_call_error - raise LocalEntryNotFoundError( -huggingface_hub.utils._errors.LocalEntryNotFoundError: An error happened while trying to locate the file on the Hub and we cannot find the requested files in the local cache. Please check your connection and try again or make sure your Internet connection is on. 
- -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 445, in cached_file - raise EnvironmentError( -OSError: We couldn't connect to 'https://huggingface.co' to load this file, couldn't find it in the cached files and it looks like databricks/dbrx-base is not the path to a directory containing a file named config.json. -Checkout your internet connection or see how to run the library in offline mode at 'https://huggingface.co/docs/transformers/installation#offline-mode'. 
- -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemv-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,Qwen/Qwen1.5-7B,Qwen/Qwen1.5-7B,cuda,0,42,,,True,,,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,qwen2,MB,4935.118848,7434.928128,0.0,6849.298432,6445.09696,s,1,10.2448671875,10.2448671875,0.0,10.2448671875,10.2448671875,10.2448671875,10.2448671875,[10.2448671875],,kWh,4.064067539793794e-05,2.2258657021969266e-05,5.840254672206946e-05,0.00012130187914197666,,MB,2769.231872,7797.735424,0.0,7151.28832,6823.3216,s,10,1.1295041580200196,0.11295041580200196,0.00017123789355526198,0.11291983795166016,0.11306277770996094,0.1132333885192871,0.11336987716674805,"[0.11302486419677735, 0.11289711761474609, 0.11289417266845703, 0.11276012420654297, 0.11278848266601563, 0.1130025634765625, 0.11340399932861328, 0.11284710693359375, 0.11294255828857422, 0.11294316864013672]",tokens/s,2266.481253586165,kWh,1.3375498103463538e-06,7.328155665481638e-07,7.382175069280767e-06,9.452540446175285e-06,tokens/kWh,27082666.449058514,MB,2773.495808,7799.832576,0.0,7153.385472,6823.32416,s,10,20.43488452148437,2.043488452148437,0.010548896607980255,2.0419188232421877,2.054268310546875,2.060606323242187,2.0656767333984374,"[2.042926025390625, 2.0288299560546874, 2.0343135986328127, 2.038590087890625, 2.0669443359375, 2.0336641845703123, 2.0458065185546874, 2.05285986328125, 2.04091162109375, 2.050038330078125]",tokens/s,30.829633479829297,kWh,2.411820480201731e-05,1.3217564792064577e-05,6.66559952723141e-05,0.00010399176486639597,tokens/kWh,605817.20178458,,s,630,20.43294105529783,0.032433239770314044,0.0005867251722341688,0.03220121574401855,0.033316966247558595,0.033530419921875,0.03418489833831787,"[0.032285694122314454, 0.03187507247924805, 0.031971328735351565, 0.03191910362243652, 0.031611904144287106, 0.031628288269042966, 0.03196108818054199, 0.031704063415527346, 0.032148479461669925, 0.03203379058837891, 0.0318156795501709, 0.03214745712280274, 0.033263614654541016, 0.03222118377685547, 0.03206860733032227, 0.032075775146484374, 0.03152383995056152, 0.031661056518554685, 0.03207884979248047, 0.03212595367431641, 0.03206655883789063, 0.03218124771118164, 0.0335994873046875, 0.03327590560913086, 0.03232767868041992, 0.03323699188232422, 0.03311206436157227, 0.033317886352539065, 0.03170611190795898, 0.031974399566650394, 0.03215769577026367, 0.031680511474609374, 0.0320706558227539, 0.03318067169189453, 0.033448959350585936, 0.03296255874633789, 0.032643070220947264, 0.03189145660400391, 0.032145408630371096, 0.032097278594970705, 0.03266764831542969, 0.032464897155761716, 0.032061439514160156, 0.033463294982910154, 0.03324620819091797, 0.03300044631958008, 0.03317964935302734, 0.033209342956542966, 0.03325235366821289, 0.0332677116394043, 0.03276902389526367, 0.03199078369140625, 0.03252838516235351, 0.032979969024658204, 0.032010238647460935, 0.031736831665039066, 0.03399884796142578, 0.033017856597900394, 0.03222937774658203, 0.03227545547485351, 
0.03199180793762207, 0.03210444641113281, 0.032069633483886716, 0.032331775665283204, 0.03195699119567871, 0.03211673736572265, 0.03205734252929687, 0.0320706558227539, 0.03214131164550781, 0.03205734252929687, 0.03226828765869141, 0.032105472564697264, 0.031970304489135744, 0.03210444641113281, 0.03210137557983399, 0.03217407989501953, 0.032216064453125, 0.03217715072631836, 0.032054271697998044, 0.032189441680908204, 0.03368960189819336, 0.03218329620361328, 0.0320819206237793, 0.032089088439941404, 0.032330753326416016, 0.03209011077880859, 0.032110591888427735, 0.03214745712280274, 0.03207680130004883, 0.03216281509399414, 0.032143360137939454, 0.032161792755126956, 0.032287742614746096, 0.03206246566772461, 0.032156673431396485, 0.03249151992797852, 0.0332564468383789, 0.03309875106811523, 0.03314585494995117, 0.03279667282104492, 0.032140289306640625, 0.032092159271240234, 0.032123905181884765, 0.031971328735351565, 0.03210649490356445, 0.032031742095947266, 0.03201536178588867, 0.03203788757324219, 0.03215359878540039, 0.03162214469909668, 0.03232563018798828, 0.03219353485107422, 0.032263168334960936, 0.032130046844482424, 0.031692800521850584, 0.03211673736572265, 0.03196006393432617, 0.03213721466064453, 0.03203891372680664, 0.03198873519897461, 0.03215359878540039, 0.03215769577026367, 0.032075775146484374, 0.03215871810913086, 0.03214131164550781, 0.03215359878540039, 0.032312320709228515, 0.03222630310058594, 0.03202867126464844, 0.032145408630371096, 0.03219251251220703, 0.03197849655151367, 0.03211980819702148, 0.032159744262695314, 0.032026622772216795, 0.03218534469604492, 0.032363521575927735, 0.03215462493896484, 0.03263078308105469, 0.032366592407226565, 0.032110591888427735, 0.032110591888427735, 0.03199795150756836, 0.03198975944519043, 0.032039936065673826, 0.03219046401977539, 0.03198975944519043, 0.032091136932373046, 0.03203891372680664, 0.032115711212158206, 0.03179622459411621, 0.031920127868652344, 0.03168563270568848, 0.03197337532043457, 0.03197644805908203, 0.03215462493896484, 0.032123905181884765, 0.03211673736572265, 0.032075775146484374, 0.0321976318359375, 0.031735807418823245, 0.0321638412475586, 0.03205836868286133, 0.03217407989501953, 0.03201433563232422, 0.03207987213134766, 0.03202252960205078, 0.03206860733032227, 0.032053249359130856, 0.03242086410522461, 0.03204710388183594, 0.032145408630371096, 0.03225190353393555, 0.032129024505615236, 0.03229695892333984, 0.032143360137939454, 0.032020481109619144, 0.03227347183227539, 0.031929279327392576, 0.03324006271362305, 0.03419647979736328, 0.03365478515625, 0.033081344604492184, 0.03293695831298828, 0.032865280151367186, 0.032382976531982424, 0.03355340957641602, 0.033329151153564454, 0.03326566314697266, 0.03211264038085938, 0.03206246566772461, 0.032115711212158206, 0.03202867126464844, 0.03212595367431641, 0.03217203140258789, 0.03203788757324219, 0.032107521057128906, 0.03333631896972656, 0.03344793701171875, 0.03271987152099609, 0.03208396911621094, 0.03215564727783203, 0.033546241760253906, 0.03307827377319336, 0.03219660949707031, 0.032194561004638675, 0.032304126739501955, 0.032023551940917966, 0.03196518325805664, 0.03215052795410156, 0.032748542785644534, 0.03237171173095703, 0.032247806549072264, 0.03217919921875, 0.03226419067382812, 0.032307201385498044, 0.03213926315307617, 0.03225907135009766, 0.03245772933959961, 0.033073150634765625, 0.03324620819091797, 0.03322880172729492, 0.03325132751464844, 0.032215038299560544, 0.032254974365234376, 0.03232563018798828, 0.03226828765869141, 
0.032525310516357424, 0.03240038299560547, 0.032336894989013674, 0.0322426872253418, 0.03225190353393555, 0.03218227386474609, 0.032363521575927735, 0.0323133430480957, 0.0322426872253418, 0.03214233779907227, 0.0323164176940918, 0.032249855041503905, 0.0319815673828125, 0.0321525764465332, 0.03229388809204101, 0.032246784210205076, 0.03213312149047851, 0.03197747230529785, 0.03218636703491211, 0.032233470916748046, 0.03231027221679687, 0.03218431854248047, 0.032277503967285154, 0.0322949104309082, 0.0317573127746582, 0.032230400085449216, 0.03218124771118164, 0.03205222320556641, 0.033931262969970705, 0.034531326293945314, 0.03344179153442383, 0.03326464080810547, 0.03349401473999023, 0.033535999298095705, 0.03329536056518555, 0.03211468887329102, 0.03204608154296875, 0.03215769577026367, 0.03188019180297851, 0.032091136932373046, 0.032295936584472655, 0.033960960388183595, 0.033478656768798826, 0.03339775848388672, 0.033296382904052735, 0.032307201385498044, 0.032347137451171876, 0.032927745819091796, 0.032922622680664065, 0.03338956832885742, 0.03299532699584961, 0.03340800094604492, 0.033529857635498046, 0.03313971328735352, 0.03224371337890625, 0.03216486358642578, 0.03280691146850586, 0.03685478210449219, 0.03415654373168946, 0.032008190155029294, 0.03201945495605469, 0.03276287841796875, 0.033271808624267575, 0.033538047790527346, 0.03329740905761719, 0.03510067367553711, 0.032347137451171876, 0.031959039688110355, 0.032200702667236326, 0.03218739318847656, 0.03214131164550781, 0.032683006286621095, 0.03260723114013672, 0.0322344970703125, 0.032246784210205076, 0.03224166488647461, 0.032361473083496094, 0.03220172882080078, 0.03234201431274414, 0.03237068939208984, 0.0323133430480957, 0.032189441680908204, 0.032176128387451174, 0.033219585418701174, 0.03242393493652344, 0.032247806549072264, 0.0323133430480957, 0.03187711906433106, 0.03257753753662109, 0.03234815979003906, 0.03208396911621094, 0.03354111862182617, 0.03331686401367188, 0.032244735717773435, 0.031887359619140625, 0.0321341438293457, 0.03275571060180664, 0.034457599639892575, 0.03257651138305664, 0.032031742095947266, 0.03222220611572266, 0.03224576187133789, 0.0319682559967041, 0.03207680130004883, 0.03218022537231445, 0.03211161422729492, 0.03235635375976562, 0.03226521682739258, 0.032156673431396485, 0.0321607666015625, 0.03199795150756836, 0.03214745712280274, 0.03212595367431641, 0.032089088439941404, 0.03211468887329102, 0.032023551940917966, 0.03234918212890625, 0.0321003532409668, 0.03322367858886719, 0.03198054313659668, 0.031678464889526366, 0.03172454452514648, 0.033391616821289063, 0.032254974365234376, 0.03264716720581055, 0.03240345764160156, 0.03218329620361328, 0.03204095840454101, 0.03207372665405273, 0.03174195289611816, 0.0318525447845459, 0.03218534469604492, 0.032115711212158206, 0.03149926376342774, 0.03203583908081055, 0.032069633483886716, 0.03222016143798828, 0.032061439514160156, 0.03222732925415039, 0.03213926315307617, 0.03217715072631836, 0.03202560043334961, 0.03212595367431641, 0.03211673736572265, 0.032118785858154295, 0.0321638412475586, 0.03165798377990723, 0.03267379379272461, 0.03356467056274414, 0.032161792755126956, 0.032285694122314454, 0.032710655212402344, 0.03215871810913086, 0.032004096984863284, 0.03204403305053711, 0.03210854339599609, 0.032129024505615236, 0.0320552978515625, 0.03213721466064453, 0.03201126480102539, 0.0320706558227539, 0.031958015441894534, 0.032102401733398435, 0.03211775970458984, 0.032113662719726564, 0.03176345634460449, 0.032161792755126956, 
0.032118785858154295, 0.03234611129760742, 0.032054271697998044, 0.03209318542480469, 0.032023551940917966, 0.0321525764465332, 0.03218534469604492, 0.03228876876831055, 0.03276287841796875, 0.032486400604248046, 0.03260723114013672, 0.0322344970703125, 0.0319498233795166, 0.03269734573364258, 0.032263168334960936, 0.03184950447082519, 0.032927711486816405, 0.03330252838134766, 0.032368640899658206, 0.0321607666015625, 0.03170918464660644, 0.033276927947998046, 0.03315814590454102, 0.03337932968139649, 0.03337420654296875, 0.03213619232177734, 0.03296768188476563, 0.03344793701171875, 0.03228672027587891, 0.03327385711669922, 0.033391616821289063, 0.03319193649291992, 0.03213721466064453, 0.033416191101074216, 0.033271808624267575, 0.033267776489257814, 0.03212691116333008, 0.032205825805664064, 0.032249855041503905, 0.03336908721923828, 0.032097278594970705, 0.03248025512695313, 0.03283763122558594, 0.03328716659545899, 0.03244543838500977, 0.03214131164550781, 0.03216486358642578, 0.033329151153564454, 0.03322367858886719, 0.0320819206237793, 0.032059391021728514, 0.03301683044433594, 0.033160190582275394, 0.03323494338989258, 0.034028545379638675, 0.03332403182983398, 0.0322529296875, 0.0328089599609375, 0.03215155029296875, 0.032233470916748046, 0.032817150115966795, 0.03346124649047852, 0.03313663864135742, 0.03301171112060547, 0.03321036911010742, 0.03365683364868164, 0.033445888519287106, 0.03300966262817383, 0.03321651077270508, 0.03303628921508789, 0.03322982406616211, 0.03278745651245117, 0.032198654174804685, 0.032178176879882815, 0.03204095840454101, 0.03216281509399414, 0.03368243026733399, 0.033737728118896484, 0.033175552368164066, 0.032892929077148435, 0.03217510223388672, 0.03210444641113281, 0.032075775146484374, 0.03224371337890625, 0.03222528076171875, 0.03215155029296875, 0.03228364944458008, 0.032363521575927735, 0.03215359878540039, 0.03217203140258789, 0.03199692726135254, 0.03242803192138672, 0.032156673431396485, 0.032075775146484374, 0.032107521057128906, 0.032146430969238284, 0.03218636703491211, 0.032290817260742184, 0.03230310440063477, 0.03237785720825195, 0.03219353485107422, 0.03184230422973633, 0.03187302398681641, 0.03216281509399414, 0.032178176879882815, 0.03215052795410156, 0.032023551940917966, 0.032366592407226565, 0.03242496109008789, 0.03244236755371094, 0.03226931381225586, 0.032194561004638675, 0.03213516616821289, 0.031855615615844726, 0.03226931381225586, 0.03228876876831055, 0.032292865753173826, 0.03323904037475586, 0.033067008972167966, 0.03234515380859375, 0.03231123352050781, 0.03232767868041992, 0.03230310440063477, 0.03404185485839844, 0.033716224670410154, 0.032292865753173826, 0.03217407989501953, 0.032440319061279296, 0.032314369201660156, 0.03237478256225586, 0.03234201431274414, 0.03235430526733398, 0.03228979110717774, 0.032148479461669925, 0.03214950561523437, 0.03179929542541504, 0.031751167297363284, 0.03170611190795898, 0.03242905426025391, 0.03193036842346191, 0.032421886444091795, 0.032102401733398435, 0.032369663238525394, 0.0324505615234375, 0.03220377731323242, 0.03231027221679687, 0.03227545547485351, 0.032282623291015625, 0.03305472183227539, 0.03223244857788086, 0.032194561004638675, 0.03207167816162109, 0.03186380767822266, 0.032121856689453124, 0.032121856689453124, 0.03236249542236328, 0.033849342346191406, 0.03354828643798828, 0.03343974304199219, 0.03208294296264649, 0.03227443313598633, 0.03222630310058594, 0.03207987213134766, 0.03233587265014649, 0.03209830474853516, 0.03366912078857422, 0.032639999389648434, 
0.032451583862304685, 0.03179007911682129, 0.032115711212158206, 0.03221299362182617, 0.03223654556274414, 0.03208396911621094, 0.03245568084716797, 0.032672767639160154, 0.03329433441162109, 0.032048126220703126, 0.03222528076171875, 0.033396736145019534, 0.03336294555664063, 0.03328409576416016, 0.032173057556152344, 0.03462041473388672, 0.0335093765258789, 0.03342950439453125, 0.03333529663085937, 0.032292865753173826, 0.03347558212280274, 0.03325747299194336, 0.03488870239257812, 0.032102401733398435, 0.03202252960205078, 0.032123905181884765, 0.03215462493896484, 0.032189441680908204, 0.03176140785217285, 0.03170611190795898, 0.032418815612792966, 0.0321003532409668, 0.032064510345458985, 0.03214745712280274, 0.03220889663696289, 0.0318023681640625, 0.03203379058837891, 0.03240857696533203, 0.033274879455566404, 0.03219968032836914, 0.03208499145507813, 0.032271358489990236, 0.031751167297363284, 0.032189441680908204, 0.03225907135009766, 0.03222016143798828, 0.03239014434814453, 0.03234611129760742, 0.03228057479858398, 0.03189043235778809, 0.03213619232177734, 0.032606208801269534, 0.03206860733032227, 0.03225702285766602, 0.03228672027587891, 0.031784959793090824, 0.03239219284057617, 0.033675262451171875, 0.033530879974365234, 0.033600513458251956, 0.032266239166259765, 0.03284479904174804, 0.03176959991455078, 0.03213721466064453, 0.0322979850769043, 0.03222016143798828, 0.033143806457519534, 0.03201536178588867, 0.03267071914672852]",tokens/s,30.832565820800124,,,,,,,, -4bit-awq-gemv-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,Qwen/Qwen2-beta-14B,Qwen/Qwen2-beta-14B,cuda,0,42,,,True,,,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,,qwen2,MB,8212.84864,11294.736384,0.0,10701.766656,10468.923392,s,1,11.83699609375,11.83699609375,0.0,11.83699609375,11.83699609375,11.83699609375,11.83699609375,[11.83699609375],,kWh,5.9098454026389567e-05,3.237095998714004e-05,8.657534703800096e-05,0.00017804476105153056,,MB,3485.650944,11842.093056,0.0,11188.30592,10924.283904,s,10,2.082240844726562,0.20822408447265625,0.0002597534084674124,0.20814559936523436,0.20860740203857422,0.20868229446411132,0.20874220840454102,"[0.20792678833007813, 0.20859075927734375, 0.20801795959472658, 0.20875718688964845, 0.20806687927246093, 0.20809638977050782, 0.20830934143066407, 0.2083164825439453, 0.20796424865722657, 0.20819480895996093]",tokens/s,1229.4447140845402,kWh,2.462096907638846e-06,1.349032281148003e-06,1.453231486658301e-05,1.8343444055369858e-05,tokens/kWh,13955939.747588381,MB,3489.91488,11844.190208,0.0,11190.403072,10924.286464,s,10,26.558841064453127,2.6558841064453125,0.005217258972530674,2.655650634765625,2.661804248046875,2.6621263671874997,2.6623840624999997,"[2.6605263671875, 2.6614775390625, 2.653010009765625, 2.650041259765625, 2.655388427734375, 2.647057861328125, 2.661732666015625, 2.662448486328125, 2.65124560546875, 
2.655912841796875]",tokens/s,23.72091457120109,kWh,3.1500706032221524e-05,1.7263345705361382e-05,0.00010772728294101721,0.0001564913346786001,tokens/kWh,402578.20108307333,,s,630,26.55686449432375,0.042153753165593215,0.00035413413222385587,0.04202956771850586,0.042515460205078126,0.04282654666900635,0.04379562088012697,"[0.04259532928466797, 0.04281753540039063, 0.04193791961669922, 0.041984001159667966, 0.041908222198486327, 0.042428417205810545, 0.041995262145996096, 0.04205158233642578, 0.04198809432983398, 0.04199731063842774, 0.04237823867797851, 0.042076160430908206, 0.0420423698425293, 0.04196044921875, 0.04198604965209961, 0.041850879669189454, 0.04190412902832031, 0.04236288070678711, 0.04193894577026367, 0.04203212738037109, 0.04192563247680664, 0.04205363082885742, 0.041915393829345705, 0.042074111938476565, 0.04205670547485352, 0.04211609649658203, 0.041990142822265625, 0.04211609649658203, 0.041973758697509765, 0.04247347259521484, 0.04253593444824219, 0.04232499313354492, 0.042065921783447265, 0.042554367065429685, 0.041915393829345705, 0.04199628829956055, 0.04192153549194336, 0.04343500900268555, 0.04225740814208984, 0.04206387329101562, 0.04185190582275391, 0.043259902954101564, 0.042477569580078124, 0.04235059356689453, 0.041971710205078124, 0.042060798645019534, 0.041935871124267575, 0.04216831970214844, 0.042714111328125, 0.042218494415283206, 0.04192972946166992, 0.04455321502685547, 0.04287590408325195, 0.04231475067138672, 0.0420423698425293, 0.04218777465820313, 0.04199731063842774, 0.042877952575683595, 0.04253593444824219, 0.04204032135009766, 0.04186316680908203, 0.04230963134765625, 0.041799678802490234, 0.04249398422241211, 0.042217441558837894, 0.04220006561279297, 0.04306022262573242, 0.04270796966552735, 0.04211711883544922, 0.042074111938476565, 0.04205055999755859, 0.04252569580078125, 0.0420136947631836, 0.04214886474609375, 0.04208947372436524, 0.04194713592529297, 0.042692607879638675, 0.04227276611328125, 0.04215091323852539, 0.041968639373779294, 0.04243046569824219, 0.04185497665405274, 0.0421212158203125, 0.04221132659912109, 0.041987071990966796, 0.0421580810546875, 0.04187238311767578, 0.04195328140258789, 0.04190003204345703, 0.0421130256652832, 0.04296499252319336, 0.04205875015258789, 0.04218368148803711, 0.042049537658691405, 0.042039295196533204, 0.04251548767089844, 0.04191126251220703, 0.04188876724243164, 0.04244275283813476, 0.04197785568237305, 0.04231782531738281, 0.04204339218139649, 0.04199423980712891, 0.04207001495361328, 0.042024959564208986, 0.04226355361938477, 0.042175487518310545, 0.042493953704833984, 0.04193382263183594, 0.04225024032592774, 0.04299673461914062, 0.041896961212158204, 0.042006526947021484, 0.04222675323486328, 0.04258399963378906, 0.04192563247680664, 0.04248371124267578, 0.04394291305541992, 0.04263935852050781, 0.04300185775756836, 0.04215193557739258, 0.0421130256652832, 0.04220927810668945, 0.042297344207763675, 0.04196761703491211, 0.041899009704589846, 0.042077182769775394, 0.041990142822265625, 0.042024959564208986, 0.04196044921875, 0.04197580718994141, 0.041899009704589846, 0.04197580718994141, 0.042092544555664066, 0.04229939270019531, 0.041990142822265625, 0.0418416633605957, 0.04228505706787109, 0.04193689727783203, 0.04216115188598633, 0.04187750244140625, 0.04192972946166992, 0.04192563247680664, 0.0421396484375, 0.04205977630615235, 0.04186316680908203, 0.04188159942626953, 0.0419502067565918, 0.04188467025756836, 0.04211507034301758, 0.04214886474609375, 0.04201779174804687, 
0.04189593505859375, 0.04192870330810547, 0.04194406509399414, 0.04203519821166992, 0.04252569580078125, 0.04225024032592774, 0.04218368148803711, 0.041954303741455076, 0.04197683334350586, 0.04244582366943359, 0.04196966552734375, 0.04199423980712891, 0.04210176086425781, 0.04203724670410156, 0.042439678192138675, 0.04208025741577148, 0.04206489562988281, 0.042223617553710936, 0.042022911071777344, 0.04196147155761719, 0.042673152923583986, 0.044034046173095705, 0.04267007827758789, 0.04234649658203125, 0.04213452911376953, 0.04194406509399414, 0.04199116897583008, 0.042537982940673826, 0.042176513671875, 0.042016769409179686, 0.04195840072631836, 0.04199116897583008, 0.04199731063842774, 0.04203007888793946, 0.04211097717285156, 0.04196352005004883, 0.04189184188842773, 0.042270721435546874, 0.04218777465820313, 0.041970687866210936, 0.04186214447021484, 0.04196659088134766, 0.04227993774414063, 0.042016769409179686, 0.04295065689086914, 0.043150337219238284, 0.042452991485595705, 0.042157054901123044, 0.04209766387939453, 0.04216012954711914, 0.04191231918334961, 0.04195123291015625, 0.04235673522949219, 0.04191743850708008, 0.04196249771118164, 0.04193996810913086, 0.04199628829956055, 0.042076160430908206, 0.042024959564208986, 0.04193996810913086, 0.04199116897583008, 0.041981952667236325, 0.041940990447998046, 0.0420136947631836, 0.04200755310058594, 0.04197273635864258, 0.04201267242431641, 0.041940990447998046, 0.0420208625793457, 0.041896961212158204, 0.041957374572753905, 0.042466304779052735, 0.042016769409179686, 0.042028030395507815, 0.04194303894042969, 0.04244172668457031, 0.04201267242431641, 0.04195635223388672, 0.04197683334350586, 0.04201881790161133, 0.042016769409179686, 0.0420423698425293, 0.042041343688964845, 0.041987071990966796, 0.0420055046081543, 0.04170444869995117, 0.04192563247680664, 0.04195942306518555, 0.0420208625793457, 0.0418785285949707, 0.04194815826416016, 0.04194918441772461, 0.041913345336914064, 0.041837600708007815, 0.04226863861083984, 0.041915393829345705, 0.04208127975463867, 0.04192563247680664, 0.041923583984375, 0.04230553436279297, 0.0421734390258789, 0.04201267242431641, 0.043184127807617184, 0.041985023498535154, 0.04189798355102539, 0.04190924835205078, 0.0419420166015625, 0.04194713592529297, 0.04193791961669922, 0.04192563247680664, 0.04207308959960938, 0.04208230209350586, 0.04225024032592774, 0.042433536529541016, 0.04208025741577148, 0.04218368148803711, 0.04216320037841797, 0.04316774368286133, 0.044058624267578124, 0.04211199951171875, 0.04194406509399414, 0.042382335662841795, 0.042297344207763675, 0.04219084930419922, 0.042000385284423826, 0.04210790252685547, 0.04196556854248047, 0.041970687866210936, 0.04212940979003906, 0.04197478485107422, 0.04213350296020508, 0.04196454238891602, 0.04243251037597656, 0.04262911987304688, 0.042016769409179686, 0.04231270217895508, 0.042208255767822264, 0.04212223815917969, 0.042251262664794925, 0.04221644973754883, 0.042033153533935545, 0.0422737922668457, 0.042431488037109374, 0.04224512100219727, 0.04201267242431641, 0.04192051315307617, 0.04196966552734375, 0.042008575439453126, 0.04199423980712891, 0.04192972946166992, 0.041918464660644535, 0.04199116897583008, 0.04194611358642578, 0.04198297500610351, 0.04194303894042969, 0.04194815826416016, 0.04193280029296875, 0.04197478485107422, 0.04229529571533203, 0.04187955093383789, 0.041859073638916014, 0.04207820892333984, 0.04184883117675781, 0.0419420166015625, 0.04193791961669922, 0.04189286422729492, 0.0419420166015625, 
0.041919486999511715, 0.04192972946166992, 0.04194303894042969, 0.04193996810913086, 0.04183039855957031, 0.04187955093383789, 0.04189081573486328, 0.04190003204345703, 0.04194611358642578, 0.041935871124267575, 0.04224512100219727, 0.04182732772827148, 0.04240691375732422, 0.043399166107177735, 0.04204748916625976, 0.042001407623291014, 0.04191743850708008, 0.04228300857543945, 0.04191027069091797, 0.04201267242431641, 0.04190310287475586, 0.04197478485107422, 0.04192051315307617, 0.04202905654907227, 0.041952255249023435, 0.04199321746826172, 0.04194713592529297, 0.04205875015258789, 0.04191641616821289, 0.04200755310058594, 0.041995262145996096, 0.042090496063232424, 0.042016769409179686, 0.04199628829956055, 0.04188671875, 0.04193791961669922, 0.042156032562255856, 0.042087425231933595, 0.041924606323242186, 0.0420208625793457, 0.041882625579833986, 0.04233011245727539, 0.041853950500488284, 0.04199321746826172, 0.04200960159301758, 0.042014720916748044, 0.04191641616821289, 0.04198604965209961, 0.041981952667236325, 0.041924606323242186, 0.04212838363647461, 0.04214169692993164, 0.04189593505859375, 0.0421212158203125, 0.04192256164550781, 0.042202110290527346, 0.04184473419189453, 0.042159103393554685, 0.04186214447021484, 0.04213759994506836, 0.04231577682495117, 0.04195328140258789, 0.04198912048339844, 0.04213452911376953, 0.04195942306518555, 0.04198912048339844, 0.042076160430908206, 0.04197683334350586, 0.04194303894042969, 0.04193791961669922, 0.042000385284423826, 0.04302438354492188, 0.04399718475341797, 0.042369022369384765, 0.042646526336669925, 0.042136577606201174, 0.042038272857666016, 0.04194713592529297, 0.042159103393554685, 0.04283391952514649, 0.04207513427734375, 0.04205977630615235, 0.041957374572753905, 0.041875457763671874, 0.04235161590576172, 0.04196761703491211, 0.04192256164550781, 0.041842689514160154, 0.04189593505859375, 0.042515457153320314, 0.042281982421875, 0.04192668914794922, 0.0421611213684082, 0.041954303741455076, 0.04195942306518555, 0.04187136077880859, 0.04205363082885742, 0.04242739105224609, 0.04194815826416016, 0.04218675231933594, 0.04463513565063477, 0.042995712280273435, 0.04299673461914062, 0.041985023498535154, 0.042788864135742184, 0.0422031364440918, 0.042090496063232424, 0.04275814437866211, 0.04200960159301758, 0.04216115188598633, 0.041987071990966796, 0.04216115188598633, 0.04262911987304688, 0.04232601547241211, 0.042472446441650394, 0.04191027069091797, 0.04213043212890625, 0.041990142822265625, 0.04197990417480469, 0.042254337310791014, 0.0421396484375, 0.042126335144042966, 0.042413089752197264, 0.042092510223388675, 0.043291648864746096, 0.04288819122314453, 0.042590206146240234, 0.042003456115722655, 0.042008575439453126, 0.04207206344604492, 0.042049537658691405, 0.0421734390258789, 0.04227276611328125, 0.04211199951171875, 0.0420239372253418, 0.04213862228393555, 0.04201881790161133, 0.042077182769775394, 0.04229836654663086, 0.04199321746826172, 0.04227174377441406, 0.04214374542236328, 0.04184678268432617, 0.04205977630615235, 0.04314828872680664, 0.04263731384277344, 0.04196147155761719, 0.042028030395507815, 0.04208127975463867, 0.04225843048095703, 0.042431488037109374, 0.04208332824707031, 0.042077182769775394, 0.04199935913085937, 0.04187955093383789, 0.0428144645690918, 0.04217036819458008, 0.0424089584350586, 0.043319297790527345, 0.0424161262512207, 0.04204032135009766, 0.042208255767822264, 0.041973758697509765, 0.041984001159667966, 0.042234878540039066, 0.0421580810546875, 0.042205184936523435, 
0.042469375610351565, 0.04218982315063476, 0.042278911590576174, 0.042014720916748044, 0.04226662445068359, 0.04251340866088867, 0.04243251037597656, 0.04317900848388672, 0.043033599853515625, 0.042033153533935545, 0.042157054901123044, 0.042019840240478515, 0.04207001495361328, 0.04220006561279297, 0.042011646270751955, 0.04198092651367188, 0.04201779174804687, 0.042022911071777344, 0.0421396484375, 0.041957374572753905, 0.042213375091552735, 0.042071041107177735, 0.041952255249023435, 0.041954303741455076, 0.041952255249023435, 0.0424796142578125, 0.04212838363647461, 0.04202905654907227, 0.04216012954711914, 0.04196249771118164, 0.041864192962646485, 0.042021888732910156, 0.04199423980712891, 0.04201267242431641, 0.041984001159667966, 0.04192870330810547, 0.04192563247680664, 0.04206284713745117, 0.04193689727783203, 0.042575870513916016, 0.04236185455322266, 0.042347518920898435, 0.04193689727783203, 0.04198809432983398, 0.042807296752929686, 0.04212428665161133, 0.041987071990966796, 0.04190412902832031, 0.04194815826416016, 0.042011646270751955, 0.041940990447998046, 0.04206796646118164, 0.04199423980712891, 0.042036224365234375, 0.041992191314697266, 0.04210892868041992, 0.041990142822265625, 0.0419317741394043, 0.04190006256103516, 0.04191331100463867, 0.04191027069091797, 0.042024959564208986, 0.041971710205078124, 0.041952255249023435, 0.041935871124267575, 0.042270721435546874, 0.0419420166015625, 0.042102783203125, 0.042896385192871096, 0.042234878540039066, 0.041981952667236325, 0.042008575439453126, 0.04196352005004883, 0.04240793609619141, 0.04221235275268555, 0.042396671295166014, 0.04210073471069336, 0.042074111938476565, 0.042109951019287106, 0.042022911071777344, 0.041924606323242186, 0.042147838592529296, 0.0421130256652832, 0.042363903045654294, 0.042092544555664066, 0.041973758697509765, 0.042041343688964845, 0.042431488037109374, 0.042076160430908206, 0.0420239372253418, 0.04204032135009766, 0.041985023498535154, 0.04204851150512695, 0.04194713592529297, 0.04225228881835937, 0.04273356628417969, 0.04221440124511719, 0.04223590469360351, 0.04205158233642578, 0.04195942306518555, 0.04207513427734375, 0.04170444869995117, 0.04175462341308594, 0.04218777465820313, 0.042019840240478515, 0.042031105041503904, 0.042022911071777344, 0.04205875015258789, 0.04202905654907227, 0.04203212738037109, 0.04205875015258789, 0.04232191848754883, 0.04200243377685547, 0.041804798126220705, 0.04193280029296875, 0.04250009536743164, 0.042011646270751955, 0.041981952667236325, 0.041985023498535154, 0.041987071990966796, 0.04197990417480469, 0.042123264312744144, 0.04197990417480469, 0.04193075180053711, 0.042011646270751955, 0.04191027069091797, 0.04252979278564453, 0.044414016723632814, 0.042904510498046874, 0.04259328079223633, 0.0428144645690918, 0.04245401763916016, 0.041984001159667966, 0.042011646270751955, 0.04268851089477539, 0.04204748916625976, 0.04198608016967773, 0.041964511871337894, 0.042044414520263675, 0.04198604965209961, 0.042021888732910156, 0.041954303741455076, 0.04213043212890625, 0.04201267242431641]",tokens/s,23.72268006769611,,,,,,,, 
-4bit-awq-gemv-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,a,a,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 304, in hf_raise_for_status - response.raise_for_status() - File ""/usr/local/lib/python3.10/dist-packages/requests/models.py"", line 1024, in raise_for_status - raise HTTPError(http_error_msg, response=self) -requests.exceptions.HTTPError: 404 Client Error: Not Found for url: https://huggingface.co/a/resolve/main/config.json - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1221, in hf_hub_download - return _hf_hub_download_to_cache_dir( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1325, in _hf_hub_download_to_cache_dir - _raise_on_head_call_error(head_call_error, force_download, local_files_only) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1823, in _raise_on_head_call_error - raise head_call_error - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1722, in _get_metadata_or_catch_error - metadata = get_hf_file_metadata(url=url, proxies=proxies, timeout=etag_timeout, headers=headers) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1645, in get_hf_file_metadata - r = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 372, in _request_wrapper - response = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 396, in _request_wrapper - hf_raise_for_status(response) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status - raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. 
(Request ID: Root=1-66949186-2778934b2e9c92af5add50ad;2c178255-a962-4296-8157-6faa1fa7e578) - -Repository Not Found for url: https://huggingface.co/a/resolve/main/config.json. -Please make sure you specified the correct `repo_id` and `repo_type`. -If you are trying to access a private or gated repo, make sure you are authenticated. - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 425, in cached_file - raise EnvironmentError( -OSError: a is not a local folder and is not a valid model identifier listed on 'https://huggingface.co/models' -If this is a private repository, make sure to pass a token having permission to this repo either by logging in with `huggingface-cli login` or by passing `token=` - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemv-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,-,-,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - 
resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 106, in _inner_fn - validate_repo_id(arg_value) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 160, in validate_repo_id - raise HFValidationError( -huggingface_hub.errors.HFValidationError: Repo id must use alphanumeric chars or '-', '_', '.', '--' and '..' are forbidden, '-' and '.' cannot start or end the name, max length is 96: '-'. - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 466, in cached_file - raise EnvironmentError( -OSError: Incorrect path_or_model_id: '-'. Please provide either the path to a local folder or the repo_id of a model on the Hub. 
- -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemv-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,facebook/opt-350m,facebook/opt-350m,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run - report = scenario.run(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 105, in run - _ = backend.generate(self.inputs, self.config.generate_kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context - return func(*args, **kwargs) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 400, in generate - return self.pretrained_model.generate(**inputs, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context - return func(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 1914, in generate - result = self._sample( - File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2651, in _sample - outputs = self( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 1118, in forward - outputs = self.model.decoder( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 836, in forward - inputs_embeds = self.project_in(inputs_embeds) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - 
return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context - return func(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/gemv.py"", line 162, in forward - assert AWQ_INSTALLED, ( -AssertionError: AWQ kernels could not be loaded. Please install them from https://github.com/casper-hansen/AutoAWQ_kernels - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemv-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,m,m,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 304, in hf_raise_for_status - response.raise_for_status() - File ""/usr/local/lib/python3.10/dist-packages/requests/models.py"", line 1024, in raise_for_status - raise HTTPError(http_error_msg, response=self) -requests.exceptions.HTTPError: 404 Client Error: Not Found for url: https://huggingface.co/m/resolve/main/config.json - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1221, in hf_hub_download - return _hf_hub_download_to_cache_dir( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1325, in _hf_hub_download_to_cache_dir - _raise_on_head_call_error(head_call_error, force_download, local_files_only) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1823, in _raise_on_head_call_error - raise head_call_error - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1722, in _get_metadata_or_catch_error - metadata = get_hf_file_metadata(url=url, proxies=proxies, timeout=etag_timeout, headers=headers) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1645, in get_hf_file_metadata - r = _request_wrapper( - File 
""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 372, in _request_wrapper - response = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 396, in _request_wrapper - hf_raise_for_status(response) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status - raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-66948c66-58d18afd55a71057301549eb;9c2d6a95-43a8-4a7e-99a9-332a4a4e8f29) - -Repository Not Found for url: https://huggingface.co/m/resolve/main/config.json. -Please make sure you specified the correct `repo_id` and `repo_type`. -If you are trying to access a private or gated repo, make sure you are authenticated. - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 425, in cached_file - raise EnvironmentError( -OSError: m is not a local folder and is not a valid model identifier listed on 'https://huggingface.co/models' -If this is a private repository, make sure to pass a token having permission to this repo either by logging in with `huggingface-cli login` or by passing `token=` - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemv-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,stabilityai/stablelm-base-alpha-7b,stabilityai/stablelm-base-alpha-7b,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call 
last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run - report = scenario.run(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 105, in run - _ = backend.generate(self.inputs, self.config.generate_kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context - return func(*args, **kwargs) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 400, in generate - return self.pretrained_model.generate(**inputs, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context - return func(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 1914, in generate - result = self._sample( - File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2651, in _sample - outputs = self( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1097, in forward - outputs = self.gpt_neox( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 988, in forward - outputs = layer( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 753, in forward - attention_layer_outputs = self.attention( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 339, in forward - query, key, value, present = self._attn_projections_and_rope( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 224, in _attn_projections_and_rope 
- qkv = self.query_key_value(hidden_states) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context - return func(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/gemv.py"", line 162, in forward - assert AWQ_INSTALLED, ( -AssertionError: AWQ kernels could not be loaded. Please install them from https://github.com/casper-hansen/AutoAWQ_kernels - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemv-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,M,M,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 304, in hf_raise_for_status - response.raise_for_status() - File ""/usr/local/lib/python3.10/dist-packages/requests/models.py"", line 1024, in raise_for_status - raise HTTPError(http_error_msg, response=self) -requests.exceptions.HTTPError: 404 Client Error: Not Found for url: https://huggingface.co/M/resolve/main/config.json - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1221, in hf_hub_download - return _hf_hub_download_to_cache_dir( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1325, in _hf_hub_download_to_cache_dir - _raise_on_head_call_error(head_call_error, force_download, local_files_only) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1823, in _raise_on_head_call_error - raise head_call_error - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1722, in _get_metadata_or_catch_error - metadata = get_hf_file_metadata(url=url, proxies=proxies, timeout=etag_timeout, 
headers=headers) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1645, in get_hf_file_metadata - r = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 372, in _request_wrapper - response = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 396, in _request_wrapper - hf_raise_for_status(response) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status - raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-66948fdf-05786cce5e0d8cde3ad0a625;4bc435f5-be22-438a-92ab-4db3dc87c0ac) - -Repository Not Found for url: https://huggingface.co/M/resolve/main/config.json. -Please make sure you specified the correct `repo_id` and `repo_type`. -If you are trying to access a private or gated repo, make sure you are authenticated. - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 425, in cached_file - raise EnvironmentError( -OSError: M is not a local folder and is not a valid model identifier listed on 'https://huggingface.co/models' -If this is a private repository, make sure to pass a token having permission to this repo either by logging in with `huggingface-cli login` or by passing `token=` - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 
-4bit-awq-gemv-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,8,8,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 304, in hf_raise_for_status - response.raise_for_status() - File ""/usr/local/lib/python3.10/dist-packages/requests/models.py"", line 1024, in raise_for_status - raise HTTPError(http_error_msg, response=self) -requests.exceptions.HTTPError: 404 Client Error: Not Found for url: https://huggingface.co/8/resolve/main/config.json - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1221, in hf_hub_download - return _hf_hub_download_to_cache_dir( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1325, in _hf_hub_download_to_cache_dir - _raise_on_head_call_error(head_call_error, force_download, local_files_only) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1823, in _raise_on_head_call_error - raise head_call_error - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1722, in _get_metadata_or_catch_error - metadata = get_hf_file_metadata(url=url, proxies=proxies, timeout=etag_timeout, headers=headers) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1645, in get_hf_file_metadata - r = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 372, in _request_wrapper - response = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 396, in _request_wrapper - hf_raise_for_status(response) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status - raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. 
(Request ID: Root=1-6694928f-1be66f637da322ac444cb0f9;d74fbd72-54b4-41b2-b79b-99d9308e1104) - -Repository Not Found for url: https://huggingface.co/8/resolve/main/config.json. -Please make sure you specified the correct `repo_id` and `repo_type`. -If you are trying to access a private or gated repo, make sure you are authenticated. - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 425, in cached_file - raise EnvironmentError( -OSError: 8 is not a local folder and is not a valid model identifier listed on 'https://huggingface.co/models' -If this is a private repository, make sure to pass a token having permission to this repo either by logging in with `huggingface-cli login` or by passing `token=` - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemv-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,EleutherAI/gpt-neox-20b,EleutherAI/gpt-neox-20b,cuda,0,42,,,True,,,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,gpt_neox,MB,11058.270208,12432.441344,0.0,11846.811648,11814.785024,s,1,12.3084462890625,12.3084462890625,0.0,12.3084462890625,12.3084462890625,12.3084462890625,12.3084462890625,[12.3084462890625],,kWh,6.54598504847191e-05,3.58615875554297e-05,9.607868797401053e-05,0.00019740012601415934,,MB,2159.18592,13422.297088,0.0,12775.849984,12632.68864,s,10,3.382351470947266,0.3382351470947266,7.308216676766516e-05,0.33824267578125,0.3382880584716797,0.33833311614990236,0.3383691622924805,"[0.33827804565429687, 0.3381224670410156, 0.33822213745117186, 0.3382587585449219, 0.3382301025390625, 
0.338378173828125, 0.33822491455078124, 0.3382552490234375, 0.3381094970703125, 0.33827212524414063]",tokens/s,756.869894210918,kWh,3.999328336550877e-06,2.191122941344945e-06,2.235637899619854e-05,2.8546830274094364e-05,tokens/kWh,8967720.673083432,MB,2167.836672,13673.955328,0.0,13027.508224,12936.62208,s,10,23.876473388671876,2.387647338867187,0.007392715987579182,2.38534765625,2.3974734130859376,2.400580456542969,2.403066091308594,"[2.38337060546875, 2.4036875, 2.385121826171875, 2.383308349609375, 2.382802001953125, 2.390071044921875, 2.389324951171875, 2.3764306640625, 2.385573486328125, 2.396782958984375]",tokens/s,26.385806217885673,kWh,2.8326479356365877e-05,1.55246339760069e-05,0.00012281476491840342,0.00016666587825077617,tokens/kWh,378001.7881356984,,s,630,23.863257080078135,0.03787818584139385,0.0006142887732028406,0.03766988945007324,0.038571929931640624,0.03902259063720703,0.04017604549407959,"[0.04065894317626953, 0.03784601593017578, 0.03757056045532227, 0.03764326477050781, 0.037579776763916016, 0.03770982360839844, 0.03740671920776367, 0.03765657424926758, 0.038679584503173825, 0.03800060653686523, 0.0376360969543457, 0.03770880126953125, 0.0375654411315918, 0.037563392639160156, 0.03759513473510742, 0.03856486511230469, 0.03775692749023438, 0.039122943878173826, 0.03964416122436523, 0.037779457092285154, 0.03755212783813477, 0.03771187210083008, 0.03762688064575195, 0.03765555191040039, 0.03765555191040039, 0.038389759063720705, 0.03808358383178711, 0.03779174423217774, 0.0376627197265625, 0.037994495391845705, 0.03769753646850586, 0.03768217468261719, 0.037591041564941405, 0.03763097763061524, 0.03759513473510742, 0.037749759674072264, 0.03774259185791016, 0.037743614196777346, 0.0377784309387207, 0.03770265579223633, 0.037705726623535156, 0.03736166381835938, 0.03757567977905273, 0.0377446403503418, 0.039294975280761715, 0.038168575286865236, 0.037574657440185545, 0.03785113525390625, 0.037607425689697264, 0.03778252792358398, 0.03767091369628906, 0.037694465637207034, 0.03769241714477539, 0.03804569625854492, 0.0377446403503418, 0.03736064147949219, 0.0372408332824707, 0.03706367874145508, 0.03728691101074219, 0.03709235382080078, 0.03731353759765625, 0.037138431549072266, 0.03736064147949219, 0.03778355026245117, 0.037602302551269534, 0.037694465637207034, 0.03762073516845703, 0.037771263122558595, 0.037746688842773435, 0.03768115234375, 0.04017663955688477, 0.03973836898803711, 0.03828736114501953, 0.03770265579223633, 0.037647361755371096, 0.037935104370117184, 0.0373493766784668, 0.03750604629516602, 0.03796480178833008, 0.03788185501098633, 0.037664768218994144, 0.037996543884277346, 0.03809280014038086, 0.037677120208740235, 0.03742508697509766, 0.03760537719726562, 0.03764940643310547, 0.037823486328125, 0.03902259063720703, 0.03753676986694336, 0.037389312744140625, 0.037738494873046875, 0.03872665786743164, 0.037694465637207034, 0.03754905700683594, 0.03770675277709961, 0.03884236907958984, 0.03768627166748047, 0.03758697509765625, 0.044103649139404295, 0.03930316925048828, 0.038117374420166016, 0.037997566223144534, 0.03816755294799805, 0.03849216079711914, 0.037571582794189456, 0.03771289443969727, 0.037501953125, 0.03787571334838867, 0.03866828918457031, 0.037407745361328126, 0.0376627197265625, 0.03774259185791016, 0.03760332870483398, 0.03948339080810547, 0.03806515121459961, 0.037541889190673826, 0.038401023864746094, 0.038484992980957033, 0.037548030853271484, 0.03808358383178711, 0.038742015838623044, 0.03902259063720703, 0.03811123275756836, 
0.038569984436035154, 0.03846144104003906, 0.03828838348388672, 0.03966259384155273, 0.03851366424560547, 0.03767398452758789, 0.03763507080078125, 0.039060478210449216, 0.03989299011230469, 0.039373825073242184, 0.03790643310546875, 0.03832524871826172, 0.03754086303710937, 0.037548030853271484, 0.03764223861694336, 0.03758489608764649, 0.037579776763916016, 0.037628929138183595, 0.03753779220581055, 0.0386693115234375, 0.03907276916503906, 0.037748737335205076, 0.03769548797607422, 0.03744153594970703, 0.03765862274169922, 0.03766681671142578, 0.03761356735229492, 0.03805491256713867, 0.037602302551269534, 0.03762483215332031, 0.03745792007446289, 0.03760537719726562, 0.037628929138183595, 0.037648384094238284, 0.03807231903076172, 0.03767193603515625, 0.03748147201538086, 0.03821363067626953, 0.0380948486328125, 0.03770470428466797, 0.03761356735229492, 0.03768115234375, 0.03754905700683594, 0.03768524932861328, 0.03754598236083984, 0.03756032180786133, 0.03766886520385742, 0.03814604949951172, 0.03782860946655273, 0.03770982360839844, 0.03759206390380859, 0.03730124664306641, 0.037631999969482424, 0.03771289443969727, 0.03776204681396484, 0.03755929565429687, 0.03703603363037109, 0.03760537719726562, 0.037571582794189456, 0.03761971282958984, 0.037713920593261716, 0.03768627166748047, 0.03748966217041016, 0.037553150177001955, 0.03709542465209961, 0.038694911956787106, 0.038335487365722655, 0.037556224822998044, 0.03773440170288086, 0.037698558807373043, 0.037564414978027344, 0.03781222534179687, 0.03779891204833984, 0.03750707244873047, 0.037550079345703126, 0.037610496520996094, 0.037612545013427735, 0.038653953552246094, 0.0380682258605957, 0.03759001541137695, 0.03756851196289063, 0.03769651031494141, 0.037582847595214845, 0.037585918426513674, 0.037610496520996094, 0.03752345657348633, 0.037713920593261716, 0.037526527404785154, 0.03767295837402344, 0.03772825622558594, 0.03753676986694336, 0.03759718322753906, 0.03763507080078125, 0.038171646118164065, 0.03828224182128906, 0.03776102447509765, 0.03772518539428711, 0.03761356735229492, 0.03803033447265625, 0.03767295837402344, 0.03752447891235351, 0.03745382308959961, 0.0377262077331543, 0.03762278366088867, 0.03858943939208984, 0.0384901123046875, 0.03873177719116211, 0.038365184783935545, 0.037645313262939455, 0.037594112396240234, 0.037574657440185545, 0.03762790298461914, 0.03759308624267578, 0.037631999969482424, 0.03760025787353516, 0.037679134368896486, 0.03758383941650391, 0.037648384094238284, 0.037617664337158206, 0.03867647933959961, 0.0382371826171875, 0.0381102066040039, 0.03768320083618164, 0.03772825622558594, 0.03794432067871094, 0.037852161407470705, 0.03778047943115234, 0.03758796691894531, 0.03806719970703125, 0.03782656097412109, 0.03765760040283203, 0.037776382446289065, 0.037459968566894535, 0.037749759674072264, 0.03785830307006836, 0.03779174423217774, 0.03760844802856445, 0.03762790298461914, 0.03777024078369141, 0.03829555130004883, 0.03771187210083008, 0.037618686676025394, 0.037910526275634765, 0.037972991943359374, 0.0376360969543457, 0.037569534301757815, 0.037556224822998044, 0.037814273834228515, 0.03882291030883789, 0.03796480178833008, 0.03764019012451172, 0.038468608856201174, 0.0377149429321289, 0.03768217468261719, 0.03752755355834961, 0.03779891204833984, 0.037596160888671876, 0.037569534301757815, 0.03744255828857422, 0.03822393417358398, 0.037583808898925784, 0.037556224822998044, 0.0374917106628418, 0.03794636917114258, 0.037923839569091795, 0.03751935958862305, 0.03808153533935547, 
0.03799859237670898, 0.03894988632202148, 0.037749759674072264, 0.03755724716186523, 0.03758489608764649, 0.0375654411315918, 0.0375838737487793, 0.03787776184082031, 0.037776382446289065, 0.037572608947753904, 0.03774054336547852, 0.037528575897216795, 0.037689342498779296, 0.03753267288208008, 0.03762176132202148, 0.03768524932861328, 0.038340606689453126, 0.038112255096435545, 0.03765248107910156, 0.03856588745117188, 0.0380682258605957, 0.037553150177001955, 0.03758899307250976, 0.03767295837402344, 0.03809280014038086, 0.03854336166381836, 0.0377149429321289, 0.03760435104370117, 0.03784703826904297, 0.037456897735595705, 0.03764223861694336, 0.03755212783813477, 0.03748147201538086, 0.037580799102783204, 0.037615615844726565, 0.03752447891235351, 0.03784089660644531, 0.037512191772460936, 0.03766579055786133, 0.03750400161743164, 0.03768320083618164, 0.037601280212402347, 0.037449726104736326, 0.037572608947753904, 0.03768012619018555, 0.03884236907958984, 0.03819417572021484, 0.03754393768310547, 0.03795251083374023, 0.03739033508300781, 0.037648384094238284, 0.03876761627197266, 0.03926323318481445, 0.03780198287963867, 0.038973438262939454, 0.037956607818603515, 0.03825459289550781, 0.03843174362182617, 0.03775283050537109, 0.03757363128662109, 0.03762790298461914, 0.03773235321044922, 0.03745280075073242, 0.037541889190673826, 0.03759206390380859, 0.03760335922241211, 0.037574623107910154, 0.037700607299804685, 0.03769651031494141, 0.03751321411132812, 0.03758796691894531, 0.03760025787353516, 0.03868569564819336, 0.03794944000244141, 0.039067649841308595, 0.03843686294555664, 0.03769343948364258, 0.03775283050537109, 0.04044905471801758, 0.03916284942626953, 0.0376995849609375, 0.03854438400268555, 0.03748761749267578, 0.03768217468261719, 0.037582847595214845, 0.03768524932861328, 0.03807231903076172, 0.037976062774658204, 0.03803238296508789, 0.03849625778198242, 0.03847987365722656, 0.037884929656982425, 0.0374835205078125, 0.037384193420410154, 0.037612545013427735, 0.03785318374633789, 0.038972415924072266, 0.03757056045532227, 0.03756748962402344, 0.03913113784790039, 0.04035276794433594, 0.039221248626708984, 0.037868545532226565, 0.03765862274169922, 0.03762483215332031, 0.03755417633056641, 0.03751424026489258, 0.037528575897216795, 0.03776102447509765, 0.03759001541137695, 0.037689342498779296, 0.03760332870483398, 0.03767091369628906, 0.03746918487548828, 0.03765760040283203, 0.037612545013427735, 0.03861196899414063, 0.03820032119750977, 0.038160385131835936, 0.037580799102783204, 0.03760435104370117, 0.03836928176879883, 0.038141952514648435, 0.03763814544677734, 0.03762995147705078, 0.037594112396240234, 0.03760435104370117, 0.03763302230834961, 0.03753881454467774, 0.037645313262939455, 0.03768627166748047, 0.040027137756347655, 0.03853823852539062, 0.037722110748291016, 0.0377446403503418, 0.037610496520996094, 0.037645313262939455, 0.03761971282958984, 0.03742310333251953, 0.03754086303710937, 0.0376258544921875, 0.03759308624267578, 0.03763814544677734, 0.0376627197265625, 0.03776409530639648, 0.038373374938964845, 0.03760844802856445, 0.03765248107910156, 0.038965248107910154, 0.0378081283569336, 0.03725414276123047, 0.03763507080078125, 0.0375551986694336, 0.03766067123413086, 0.03768217468261719, 0.037751808166503906, 0.03769651031494141, 0.03766579055786133, 0.03753472137451172, 0.037644287109375, 0.037525505065917966, 0.03751731109619141, 0.03745382308959961, 0.03811840057373047, 0.03777740859985351, 0.038161407470703124, 0.03893657684326172, 
0.038231040954589846, 0.03753779220581055, 0.03757567977905273, 0.03734425735473633, 0.03754291152954101, 0.03749683380126953, 0.03747635269165039, 0.03860275268554687, 0.0377077751159668, 0.03772518539428711, 0.03745177459716797, 0.03767603302001953, 0.037572608947753904, 0.037618686676025394, 0.03763507080078125, 0.03766579055786133, 0.03766067123413086, 0.037531646728515625, 0.037544960021972655, 0.037564414978027344, 0.03767500686645508, 0.037610496520996094, 0.03758899307250976, 0.03765350341796875, 0.038161407470703124, 0.0376995849609375, 0.03772415924072266, 0.03759718322753906, 0.037599231719970705, 0.037648384094238284, 0.037438465118408204, 0.03765555191040039, 0.03766886520385742, 0.03762176132202148, 0.03757056045532227, 0.03753267288208008, 0.038265857696533206, 0.037814273834228515, 0.03845529556274414, 0.03760025787353516, 0.03776409530639648, 0.03765760040283203, 0.037572608947753904, 0.03746201705932617, 0.03765760040283203, 0.03761151885986328, 0.039501823425292966, 0.037920768737792966, 0.03761459350585938, 0.03783270263671875, 0.03797708892822266, 0.03781222534179687, 0.03848089599609375, 0.038163455963134765, 0.03747123336791992, 0.03779481506347656, 0.037579776763916016, 0.03864169692993164, 0.037971935272216796, 0.03740979385375977, 0.03744768142700195, 0.03784908676147461, 0.03762176132202148, 0.03776102447509765, 0.037610496520996094, 0.037705726623535156, 0.03760435104370117, 0.037477375030517575, 0.03752140808105469, 0.03864780807495117, 0.03796889495849609, 0.037820415496826174, 0.037582847595214845, 0.03771187210083008, 0.037582847595214845, 0.0376627197265625, 0.037577728271484374, 0.038188030242919925, 0.03973427200317383, 0.03893350219726562, 0.037564414978027344, 0.037596160888671876, 0.037566463470458986, 0.03767193603515625, 0.03778252792358398, 0.03810406494140625, 0.03753472137451172, 0.0374917106628418, 0.037495807647705076, 0.037566463470458986, 0.037526527404785154, 0.037479423522949216, 0.03746406555175781, 0.03765248107910156, 0.037364734649658206, 0.03867647933959961, 0.038435840606689455, 0.03816243362426758, 0.037602302551269534, 0.037582847595214845, 0.037510143280029294, 0.037456897735595705, 0.03781631851196289, 0.037754878997802735, 0.037907455444335936, 0.03766067123413086, 0.03773440170288086, 0.038042625427246096, 0.03774156951904297, 0.03780915069580078, 0.037566463470458986, 0.03755417633056641, 0.0373309440612793, 0.03773235321044922, 0.03767603302001953, 0.0383375358581543, 0.038621185302734375, 0.03755929565429687, 0.03754086303710937, 0.03758899307250976, 0.037176319122314457, 0.037563392639160156, 0.037526527404785154, 0.03761151885986328, 0.03748044967651367, 0.03753881454467774, 0.03752553558349609, 0.03749372863769531, 0.037466110229492186, 0.03759001541137695, 0.037577728271484374, 0.03752755355834961, 0.03739852905273437, 0.03781836700439453, 0.03757363128662109, 0.037736446380615234, 0.03757875061035156, 0.037602302551269534, 0.03757875061035156, 0.03863859176635742, 0.040210430145263674, 0.03912089538574219, 0.03842047882080078, 0.03808358383178711, 0.03764940643310547, 0.03752243041992188, 0.0377077751159668, 0.03755724716186523, 0.03760435104370117, 0.03764223861694336, 0.03760025787353516, 0.03854131317138672, 0.038816768646240236, 0.039177215576171875, 0.03770470428466797, 0.03774156951904297, 0.03783270263671875, 0.03768217468261719, 0.03760537719726562, 0.03759820938110352, 0.03823308944702149, 0.03831193542480469, 0.03757056045532227, 0.03767193603515625, 0.03764940643310547, 0.038100990295410156, 
0.039244800567626956, 0.03860377502441406, 0.037644287109375, 0.040174591064453126, 0.03965951919555664, 0.043253761291503906]",tokens/s,26.400419602651215,,,,,,,, -4bit-awq-gemv-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,s,s,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 304, in hf_raise_for_status - response.raise_for_status() - File ""/usr/local/lib/python3.10/dist-packages/requests/models.py"", line 1024, in raise_for_status - raise HTTPError(http_error_msg, response=self) -requests.exceptions.HTTPError: 404 Client Error: Not Found for url: https://huggingface.co/s/resolve/main/config.json - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1221, in hf_hub_download - return _hf_hub_download_to_cache_dir( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1325, in _hf_hub_download_to_cache_dir - _raise_on_head_call_error(head_call_error, force_download, local_files_only) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1823, in _raise_on_head_call_error - raise head_call_error - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1722, in _get_metadata_or_catch_error - metadata = get_hf_file_metadata(url=url, proxies=proxies, timeout=etag_timeout, headers=headers) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1645, in get_hf_file_metadata - r = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 372, in _request_wrapper - response = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 396, in _request_wrapper - hf_raise_for_status(response) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", 
line 352, in hf_raise_for_status - raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-66948d09-6e8ec95503e713c10280dff3;b6161f37-5655-4d59-b412-f838e3c3f1d4) - -Repository Not Found for url: https://huggingface.co/s/resolve/main/config.json. -Please make sure you specified the correct `repo_id` and `repo_type`. -If you are trying to access a private or gated repo, make sure you are authenticated. - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 425, in cached_file - raise EnvironmentError( -OSError: s is not a local folder and is not a valid model identifier listed on 'https://huggingface.co/models' -If this is a private repository, make sure to pass a token having permission to this repo either by logging in with `huggingface-cli login` or by passing `token=` - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemv-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,EleutherAI/pythia-12b,EleutherAI/pythia-12b,cuda,0,42,,,True,,,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA 
A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,gpt_neox,MB,6678.888448,7732.723712,0.0,7147.094016,7138.9184,s,1,10.396216796875,10.396216796875,0.0,10.396216796875,10.396216796875,10.396216796875,10.396216796875,[10.396216796875],,kWh,4.2426662023621656e-05,2.3237474299889004e-05,5.959726990001357e-05,0.00012526140622352424,,MB,1648.275456,8464.62976,0.0,7818.182656,7715.649536,s,10,1.7843802795410155,0.17843802795410155,0.00010741890820827824,0.17841027069091797,0.17852398376464845,0.17862181701660157,0.17870008361816406,"[0.1787196502685547, 0.17833401489257814, 0.17845033264160157, 0.17836883544921875, 0.17835714721679688, 0.1785022430419922, 0.17836163330078125, 0.17846588134765626, 0.1784376678466797, 0.17838287353515625]",tokens/s,1434.671762152904,kWh,2.10988854463034e-06,1.1558876103841392e-06,1.1938328003036466e-05,1.5204104158050946e-05,tokens/kWh,16837558.94716373,MB,1658.253312,8611.4304,0.0,7964.983296,7906.489344,s,10,16.965199340820313,1.6965199340820312,0.004452219387810332,1.696432861328125,1.7011896850585937,1.702692010498047,1.7038938708496094,"[1.698638427734375, 1.6978140869140625, 1.7008558349609375, 1.6950516357421874, 1.69312890625, 1.688598388671875, 1.6927579345703125, 1.6936756591796875, 1.7041943359375, 1.700484130859375]",tokens/s,37.134842175661575,kWh,2.052683439286845e-05,1.1249414626713348e-05,8.00925511771629e-05,0.00011186880019674469,tokens/kWh,563159.7003740213,,s,630,16.96282008552551,0.02692511124686589,0.0004015260553422425,0.026792959213256837,0.027576525306701658,0.027712870216369628,0.028213585052490234,"[0.027072511672973632, 0.028106752395629882, 0.028013568878173828, 0.027672576904296874, 0.026959871292114256, 0.026797056198120117, 0.02686566352844238, 0.02757427215576172, 0.027615232467651366, 0.0273305606842041, 0.026886144638061524, 0.02694758415222168, 0.026887168884277345, 0.02680729675292969, 0.027054079055786134, 0.02696499252319336, 0.026804224014282226, 0.02675916862487793, 0.02679193687438965, 0.027594751358032226, 0.026819583892822265, 0.026852352142333984, 0.02674380874633789, 0.02672332763671875, 0.026762239456176756, 0.026756095886230468, 0.026762239456176756, 0.02670796775817871, 0.026754047393798826, 0.026705919265747072, 0.02670899200439453, 0.026719232559204102, 0.026869760513305665, 0.026738687515258788, 0.027825151443481445, 0.027809791564941407, 0.027670528411865233, 0.027576320648193358, 0.026814464569091798, 0.026810367584228514, 0.02671001625061035, 0.026755071640014647, 0.02669977569580078, 0.02670182418823242, 0.026845184326171875, 0.026802175521850585, 0.026804224014282226, 0.026860544204711914, 0.026786815643310546, 0.026697727203369142, 0.026806272506713868, 0.026836992263793946, 0.026833919525146483, 0.026838016510009766, 0.026861568450927735, 0.02671001625061035, 0.02666803169250488, 0.026797056198120117, 0.026811391830444335, 0.02674278450012207, 0.026830848693847657, 0.026840063095092775, 0.02652774429321289, 0.026786815643310546, 0.026772480010986328, 0.026754047393798826, 0.02671820831298828, 0.02678169631958008, 0.026665983200073243, 0.026834943771362304, 0.0267827205657959, 0.02674176025390625, 0.026671104431152344, 0.026704896926879884, 0.026830848693847657, 0.02678169631958008, 0.026714111328125, 0.026747903823852538, 0.02671718406677246, 0.02681548881530762, 0.02673971176147461, 0.02695680046081543, 0.028220415115356445, 0.027892736434936522, 0.027614208221435548, 0.0274913272857666, 0.02673459243774414, 0.026778623580932616, 0.026779647827148437, 0.026910720825195314, 
0.0265031681060791, 0.029450239181518553, 0.027578367233276366, 0.02676736068725586, 0.02671718406677246, 0.02768076705932617, 0.0275281925201416, 0.027599872589111327, 0.026564607620239256, 0.026657791137695314, 0.02635775947570801, 0.026435583114624024, 0.02652672004699707, 0.02679910469055176, 0.0267458553314209, 0.026809343338012694, 0.02728447914123535, 0.0276889591217041, 0.027146240234375, 0.02678169631958008, 0.026817535400390623, 0.026869760513305665, 0.0267827205657959, 0.026822656631469727, 0.026797056198120117, 0.026793983459472655, 0.026680320739746095, 0.026728448867797853, 0.0267827205657959, 0.02671308708190918, 0.026728448867797853, 0.02672230339050293, 0.027259904861450194, 0.026855424880981447, 0.02687283134460449, 0.026789888381958008, 0.02773196792602539, 0.02760601615905762, 0.027631616592407225, 0.02772172737121582, 0.027639808654785155, 0.027649023056030272, 0.027621376037597657, 0.02669875144958496, 0.02651033592224121, 0.026292224884033204, 0.026784767150878908, 0.027823104858398437, 0.02755276870727539, 0.02748313522338867, 0.02674380874633789, 0.026786815643310546, 0.02693120002746582, 0.026830848693847657, 0.026789888381958008, 0.02677350425720215, 0.02672640037536621, 0.02668339157104492, 0.026792959213256837, 0.026680320739746095, 0.026764287948608398, 0.026662912368774414, 0.02671001625061035, 0.026738687515258788, 0.026570751190185548, 0.026651647567749022, 0.027011072158813477, 0.0267007999420166, 0.02671718406677246, 0.026697727203369142, 0.026720256805419923, 0.02680729675292969, 0.026860544204711914, 0.027356159210205077, 0.02719436836242676, 0.026785791397094725, 0.02700492858886719, 0.027091968536376954, 0.02674380874633789, 0.026711040496826172, 0.02668441581726074, 0.0267325439453125, 0.02668441581726074, 0.027571199417114257, 0.028643327713012694, 0.02818764877319336, 0.027429887771606445, 0.02679910469055176, 0.026779647827148437, 0.026831872940063478, 0.026780672073364258, 0.02677452850341797, 0.02675302314758301, 0.02693222427368164, 0.026858495712280273, 0.026778623580932616, 0.026826751708984374, 0.02672127914428711, 0.026849279403686522, 0.026711040496826172, 0.02676633644104004, 0.026566656112670898, 0.02666803169250488, 0.02758143997192383, 0.027650047302246093, 0.02679193687438965, 0.02689638328552246, 0.027045888900756834, 0.027637760162353517, 0.02672640037536621, 0.02699673652648926, 0.02672332763671875, 0.026861568450927735, 0.026780672073364258, 0.026792959213256837, 0.026598400115966796, 0.026446847915649413, 0.026754047393798826, 0.02755379295349121, 0.027658239364624023, 0.028033023834228517, 0.02676121520996094, 0.026834943771362304, 0.026877952575683595, 0.026820608139038086, 0.02673459243774414, 0.026793983459472655, 0.026704896926879884, 0.026771455764770507, 0.02677555274963379, 0.026784767150878908, 0.0267775993347168, 0.026816511154174806, 0.02673356819152832, 0.026816511154174806, 0.026787839889526367, 0.026747903823852538, 0.026659839630126952, 0.026364927291870118, 0.02673459243774414, 0.027810815811157227, 0.02776166343688965, 0.027644927978515626, 0.0267007999420166, 0.02693529510498047, 0.02669158363342285, 0.026812416076660156, 0.026804224014282226, 0.026862592697143556, 0.026797056198120117, 0.026874879837036132, 0.02675916862487793, 0.02696499252319336, 0.02677555274963379, 0.026801151275634767, 0.026746879577636717, 0.026792959213256837, 0.026689535140991212, 0.026802175521850585, 0.026738687515258788, 0.02676838493347168, 0.026746879577636717, 0.027633663177490234, 0.026735616683959962, 0.02681548881530762, 
0.026665983200073243, 0.02674278450012207, 0.026805248260498047, 0.027457536697387694, 0.026688512802124024, 0.026805248260498047, 0.027696128845214843, 0.02693120002746582, 0.026793983459472655, 0.02691481590270996, 0.026779647827148437, 0.026868736267089844, 0.026817535400390623, 0.02689945602416992, 0.026835968017578125, 0.026853376388549805, 0.02691993522644043, 0.027033599853515625, 0.02681548881530762, 0.026857471466064452, 0.026884096145629883, 0.02691993522644043, 0.02671820831298828, 0.026789888381958008, 0.026831872940063478, 0.026796031951904296, 0.026808319091796876, 0.026828800201416016, 0.026693632125854492, 0.02676736068725586, 0.026802175521850585, 0.02677555274963379, 0.0269117431640625, 0.026843135833740234, 0.026772480010986328, 0.026763263702392577, 0.02677350425720215, 0.026802175521850585, 0.026855424880981447, 0.026735616683959962, 0.02694758415222168, 0.026735616683959962, 0.026780672073364258, 0.027608064651489257, 0.026871807098388673, 0.026738687515258788, 0.02676531219482422, 0.026776575088500978, 0.026797056198120117, 0.026711040496826172, 0.026961919784545898, 0.026772480010986328, 0.02682368087768555, 0.02671615982055664, 0.026832895278930666, 0.026901504516601563, 0.0267775993347168, 0.026793983459472655, 0.026833919525146483, 0.027305984497070314, 0.027579391479492187, 0.02675200080871582, 0.026588159561157225, 0.02666803169250488, 0.026728448867797853, 0.02671308708190918, 0.026730495452880858, 0.026796031951904296, 0.026696704864501954, 0.026694656372070313, 0.02668441581726074, 0.026755071640014647, 0.026788864135742187, 0.027174911499023437, 0.02697216033935547, 0.026730495452880858, 0.026664960861206056, 0.0267775993347168, 0.02672127914428711, 0.026738687515258788, 0.02671513557434082, 0.026821632385253907, 0.026665983200073243, 0.026380287170410157, 0.027459583282470702, 0.02686566352844238, 0.026772480010986328, 0.02679193687438965, 0.02669977569580078, 0.026840063095092775, 0.02632089614868164, 0.02651033592224121, 0.026735616683959962, 0.026801151275634767, 0.026594303131103517, 0.02670182418823242, 0.026690559387207033, 0.02675916862487793, 0.026662912368774414, 0.02691276741027832, 0.02671718406677246, 0.026945535659790038, 0.026900480270385742, 0.02691993522644043, 0.026792959213256837, 0.02690457534790039, 0.026792959213256837, 0.026795007705688476, 0.027215871810913086, 0.026804224014282226, 0.026730495452880858, 0.027085823059082033, 0.026825727462768553, 0.026820608139038086, 0.026822656631469727, 0.026894336700439454, 0.02679091262817383, 0.026824703216552736, 0.0267325439453125, 0.026908672332763672, 0.026788864135742187, 0.026834943771362304, 0.026828800201416016, 0.02674176025390625, 0.026680320739746095, 0.02676121520996094, 0.026719232559204102, 0.02680729675292969, 0.02675712013244629, 0.026785791397094725, 0.026779647827148437, 0.02678169631958008, 0.0267827205657959, 0.02679193687438965, 0.0267325439453125, 0.026847232818603517, 0.02671718406677246, 0.026864639282226564, 0.026835968017578125, 0.02694041633605957, 0.02680729675292969, 0.028013568878173828, 0.027053056716918947, 0.02689945602416992, 0.02674380874633789, 0.026817535400390623, 0.02672947120666504, 0.02677452850341797, 0.02675712013244629, 0.026788864135742187, 0.026795007705688476, 0.026695680618286134, 0.026730495452880858, 0.02687283134460449, 0.02676121520996094, 0.02667519950866699, 0.02676633644104004, 0.026992639541625976, 0.02674176025390625, 0.026719232559204102, 0.026778623580932616, 0.02671513557434082, 0.0267458553314209, 0.02674176025390625, 
0.02679910469055176, 0.027216896057128907, 0.02697932815551758, 0.026832895278930666, 0.026780672073364258, 0.02677452850341797, 0.026824703216552736, 0.026878976821899415, 0.026375167846679686, 0.026702848434448243, 0.02675712013244629, 0.02675302314758301, 0.02925056076049805, 0.02790809631347656, 0.026792959213256837, 0.02676531219482422, 0.026689535140991212, 0.02671820831298828, 0.026746879577636717, 0.02714931106567383, 0.026705919265747072, 0.026679296493530274, 0.026674175262451173, 0.026657791137695314, 0.02676019287109375, 0.026787839889526367, 0.026728448867797853, 0.026802175521850585, 0.02674278450012207, 0.02676019287109375, 0.02670182418823242, 0.02676838493347168, 0.02668441581726074, 0.026863616943359377, 0.026705919265747072, 0.02673151969909668, 0.026705919265747072, 0.0267775993347168, 0.026634239196777345, 0.02671001625061035, 0.026779647827148437, 0.02671308708190918, 0.026763263702392577, 0.02768387222290039, 0.026712032318115236, 0.027482112884521483, 0.027631616592407225, 0.026762239456176756, 0.02674995231628418, 0.026927104949951174, 0.02687283134460449, 0.02693529510498047, 0.026822656631469727, 0.02674995231628418, 0.026728448867797853, 0.026811391830444335, 0.027266048431396486, 0.027003904342651368, 0.026820608139038086, 0.02693017578125, 0.026818559646606444, 0.02692915153503418, 0.026801151275634767, 0.027125759124755858, 0.026840063095092775, 0.026838016510009766, 0.026786815643310546, 0.026826751708984374, 0.02671820831298828, 0.026784767150878908, 0.02698956871032715, 0.026858495712280273, 0.026728448867797853, 0.026844160079956055, 0.02687283134460449, 0.026792959213256837, 0.026855424880981447, 0.02687385559082031, 0.02673766326904297, 0.026836992263793946, 0.026863616943359377, 0.02680729675292969, 0.02711039924621582, 0.027817983627319336, 0.027641855239868163, 0.02689638328552246, 0.026644479751586913, 0.02718617630004883, 0.030253055572509766, 0.027999231338500977, 0.02774732780456543, 0.027428863525390625, 0.02679193687438965, 0.026809343338012694, 0.0268984317779541, 0.026821632385253907, 0.026812416076660156, 0.026829824447631836, 0.026786815643310546, 0.029113344192504883, 0.027572223663330078, 0.026871807098388673, 0.02693222427368164, 0.026901504516601563, 0.026829824447631836, 0.02751590347290039, 0.02752921676635742, 0.026998783111572267, 0.026680320739746095, 0.02672537612915039, 0.027067392349243165, 0.026848255157470705, 0.026910720825195314, 0.027527168273925783, 0.026843135833740234, 0.02675916862487793, 0.026645503997802734, 0.02734182357788086, 0.027229183197021483, 0.026801151275634767, 0.02674380874633789, 0.026730495452880858, 0.027554815292358398, 0.027757568359375, 0.027703296661376952, 0.027274240493774415, 0.02773504066467285, 0.027609088897705077, 0.026869760513305665, 0.026425344467163086, 0.026647552490234375, 0.026662912368774414, 0.02671718406677246, 0.026625024795532228, 0.026292224884033204, 0.02629324722290039, 0.026572799682617186, 0.02668441581726074, 0.026686464309692383, 0.026681343078613282, 0.026669055938720702, 0.026639360427856446, 0.02672230339050293, 0.026649599075317384, 0.0267827205657959, 0.026639360427856446, 0.02672127914428711, 0.027314176559448244, 0.02672537612915039, 0.0267775993347168, 0.026798080444335938, 0.026944511413574217, 0.026818559646606444, 0.02672947120666504, 0.026671104431152344, 0.02672537612915039, 0.026682367324829103, 0.026687488555908204, 0.026682367324829103, 0.02676736068725586, 0.026610687255859376, 0.026727424621582032, 0.026764287948608398, 0.026714111328125, 
0.026595327377319337, 0.02672230339050293, 0.026705919265747072, 0.027051008224487305, 0.028440576553344726, 0.027991039276123047, 0.026829824447631836, 0.02678374481201172, 0.02753433609008789, 0.02738380813598633, 0.026604543685913085, 0.02676019287109375, 0.026618879318237306, 0.026798080444335938, 0.02667622375488281, 0.0267775993347168, 0.028196863174438477, 0.028067840576171874, 0.02681548881530762, 0.02692198371887207, 0.02711039924621582, 0.026910720825195314, 0.02671718406677246, 0.026787839889526367, 0.026836992263793946, 0.026916864395141602, 0.02679091262817383, 0.027596799850463868, 0.02795008087158203, 0.027720703125, 0.027586559295654296, 0.027371519088745116, 0.027610111236572265, 0.02690662384033203, 0.026977279663085937, 0.027782144546508788, 0.026878976821899415, 0.026825727462768553, 0.026822656631469727, 0.026746879577636717, 0.026681343078613282, 0.026770431518554686, 0.026776575088500978, 0.02676838493347168, 0.026808319091796876, 0.026789888381958008, 0.02671001625061035]",tokens/s,37.14005081841216,,,,,,,, -4bit-awq-gemv-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,2,2,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 304, in hf_raise_for_status - response.raise_for_status() - File ""/usr/local/lib/python3.10/dist-packages/requests/models.py"", line 1024, in raise_for_status - raise HTTPError(http_error_msg, response=self) -requests.exceptions.HTTPError: 404 Client Error: Not Found for url: https://huggingface.co/2/resolve/main/config.json - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1221, in hf_hub_download - return _hf_hub_download_to_cache_dir( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1325, in _hf_hub_download_to_cache_dir - _raise_on_head_call_error(head_call_error, force_download, local_files_only) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1823, in _raise_on_head_call_error - raise head_call_error - File 
""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1722, in _get_metadata_or_catch_error - metadata = get_hf_file_metadata(url=url, proxies=proxies, timeout=etag_timeout, headers=headers) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1645, in get_hf_file_metadata - r = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 372, in _request_wrapper - response = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 396, in _request_wrapper - hf_raise_for_status(response) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status - raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-6694939a-394731614e35532d646178ed;29d4ce01-bbb2-4bb4-87f8-d34ca42d88b0) - -Repository Not Found for url: https://huggingface.co/2/resolve/main/config.json. -Please make sure you specified the correct `repo_id` and `repo_type`. -If you are trying to access a private or gated repo, make sure you are authenticated. - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 425, in cached_file - raise EnvironmentError( -OSError: 2 is not a local folder and is not a valid model identifier listed on 'https://huggingface.co/models' -If this is a private repository, make sure to pass a token having permission to this repo either by logging in with `huggingface-cli login` or by passing `token=` - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 
-4bit-awq-gemv-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,tiiuae/falcon-40b,tiiuae/falcon-40b,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - self.load_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 559, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3704, in from_pretrained - config = cls._autoset_attn_implementation( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1481, in _autoset_attn_implementation - cls._check_and_enable_flash_attn_2( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1572, in _check_and_enable_flash_attn_2 - raise ValueError( -ValueError: FalconForCausalLM does not support Flash Attention 2.0 yet. 
Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmpuur1cgl9/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemv-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,EleutherAI/pythia-160m,EleutherAI/pythia-160m,cuda,0,42,,,True,,,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,gpt_neox,MB,914.86208,845.676544,0.0,260.046848,253.883392,s,1,7.22239794921875,7.22239794921875,0.0,7.22239794921875,7.22239794921875,7.22239794921875,7.22239794921875,[7.22239794921875],,kWh,5.564249375683833e-06,3.0334224375010496e-06,6.502227423987872e-06,1.5099899237172754e-05,,MB,1411.9936,988.28288,0.0,341.835776,312.754176,s,16,0.17934406185150145,0.01120900386571884,0.00025472812174572306,0.011279295921325684,0.011374576091766357,0.01148479199409485,0.011705111932754517,"[0.011281503677368163, 0.011302111625671386, 0.01139299201965332, 0.01124947166442871, 0.011277088165283203, 0.011287455558776855, 0.011275327682495118, 0.011356160163879395, 0.011319711685180664, 0.011273759841918946, 0.011307744026184082, 0.010807552337646485, 0.011760191917419434, 0.010847007751464844, 0.01080620765686035, 0.010799776077270508]",tokens/s,22838.782381272962,kWh,1.305177730931501e-07,7.151740140082243e-08,3.2514943139001274e-07,5.271846058839853e-07,tokens/kWh,485598397.8719147,MB,1447.739392,1015.545856,0.0,369.098752,313.412096,s,16,9.9056015625,0.61910009765625,0.008958822419412956,0.6228625183105468,0.6262242126464843,0.6264590759277344,0.6268468933105469,"[0.6261509399414062, 0.6262974853515625, 0.6246585693359376, 0.6238973388671875, 0.6258057861328125, 0.62694384765625, 0.6249612426757812, 0.62056396484375, 0.6242137451171875, 0.6218276977539062, 0.6211593017578125, 0.62074462890625, 0.6117276611328125, 0.6057608032226562, 0.6033469848632812, 0.5975415649414062]",tokens/s,101.76060420358746,kWh,7.124593738712357e-06,3.903971720388377e-06,1.1131281302370295e-05,2.2159846761471022e-05,tokens/kWh,2842979.9482881404,,s,1008,9.89891589736938,0.009820353072787088,0.00022115596392135163,0.009880576133728027,0.009983590507507325,0.01004610548019409,0.010426296129226685,"[0.00994099235534668, 0.009951231956481933, 0.009981951713562011, 0.009855999946594238, 0.009968640327453614, 0.009962495803833007, 0.009961471557617188, 0.009927680015563965, 0.009913344383239747, 0.00993791961669922, 0.010024959564208985, 0.009950207710266114, 0.010027008056640625, 0.009881600379943848, 0.009903103828430175, 0.009981951713562011, 0.00999014377593994, 0.00991641616821289, 0.009926655769348144, 0.009844736099243164, 0.009945088386535645, 0.009925632476806641, 0.010012672424316407, 0.009809920310974121, 0.00984985637664795, 0.009917440414428711, 0.009863167762756348, 0.010193920135498047, 0.009927712440490722, 0.009972703933715821, 0.009839615821838378, 0.009903103828430175, 0.009972736358642579, 
0.009971712112426758, 0.009913344383239747, 0.009861120223999023, 0.00990822410583496, 0.00993996810913086, 0.009928704261779785, 0.009987071990966797, 0.009855999946594238, 0.009921536445617676, 0.010004480361938477, 0.00999833583831787, 0.009945088386535645, 0.009997311592102051, 0.00983142375946045, 0.009973759651184083, 0.009812992095947265, 0.009943039894104003, 0.009887743949890136, 0.009878527641296387, 0.009963520050048828, 0.009942015647888184, 0.00990822410583496, 0.009945088386535645, 0.009906175613403321, 0.009880576133728027, 0.009966591835021972, 0.009905152320861817, 0.00992255973815918, 0.00993791961669922, 0.00991436767578125, 0.009877504348754883, 0.00993791961669922, 0.009874431610107422, 0.010148863792419433, 0.009972736358642579, 0.010038304328918456, 0.00997372817993164, 0.009971712112426758, 0.009977855682373048, 0.009857024192810059, 0.009839615821838378, 0.010061823844909668, 0.010067968368530274, 0.010058752059936523, 0.009895936012268066, 0.009895936012268066, 0.009921536445617676, 0.00993791961669922, 0.009975808143615723, 0.009975808143615723, 0.009921536445617676, 0.009986047744750976, 0.009875455856323241, 0.009921536445617676, 0.009880576133728027, 0.009892864227294922, 0.009864192008972168, 0.009956352233886719, 0.010066944122314453, 0.010001407623291016, 0.009912320137023926, 0.009906175613403321, 0.009951231956481933, 0.009935872077941894, 0.009971712112426758, 0.009878527641296387, 0.009884672164916992, 0.009842687606811524, 0.010042367935180664, 0.00992972755432129, 0.009905152320861817, 0.009903103828430175, 0.009865216255187988, 0.009887743949890136, 0.009900032043457031, 0.009894911766052245, 0.01002188777923584, 0.009877504348754883, 0.009910271644592286, 0.009988096237182617, 0.009920512199401856, 0.009883647918701171, 0.009874431610107422, 0.009921536445617676, 0.009885696411132813, 0.00991539192199707, 0.009923583984375, 0.009905152320861817, 0.009848832130432129, 0.009992192268371582, 0.009917440414428711, 0.009950207710266114, 0.009881600379943848, 0.009942015647888184, 0.009873408317565918, 0.009881600379943848, 0.009887743949890136, 0.00992972755432129, 0.009912320137023926, 0.009960448265075684, 0.00990822410583496, 0.009975808143615723, 0.00990719985961914, 0.009887743949890136, 0.009926655769348144, 0.009902079582214356, 0.009926655769348144, 0.009944064140319824, 0.01011404800415039, 0.00992460823059082, 0.009948160171508789, 0.009925632476806641, 0.009928704261779785, 0.009892864227294922, 0.009936896324157715, 0.009897983551025391, 0.009989119529724122, 0.00991436767578125, 0.010046463966369629, 0.009954303741455077, 0.009917440414428711, 0.009901056289672852, 0.009973759651184083, 0.009893888473510743, 0.009970687866210937, 0.009769984245300293, 0.00990719985961914, 0.009862144470214844, 0.00994611167907715, 0.00992255973815918, 0.00981503963470459, 0.009904128074645996, 0.009893888473510743, 0.009903136253356934, 0.009844703674316406, 0.00990719985961914, 0.009866239547729493, 0.009973759651184083, 0.009908255577087402, 0.010011615753173829, 0.009905152320861817, 0.010007552146911621, 0.01002905559539795, 0.010002431869506835, 0.009891839981079101, 0.009935872077941894, 0.009845760345458985, 0.009905152320861817, 0.009982975959777832, 0.009919487953186035, 0.010016768455505372, 0.009928704261779785, 0.009656319618225098, 0.009605119705200196, 0.009637887954711915, 0.00964406394958496, 0.009918463706970216, 0.009961471557617188, 0.009847807884216308, 0.009846783638000489, 0.009925632476806641, 0.009894911766052245, 
0.009911295890808105, 0.009877504348754883, 0.009891839981079101, 0.009921536445617676, 0.009869312286376953, 0.009971712112426758, 0.009932831764221191, 0.00989897632598877, 0.009863167762756348, 0.00993791961669922, 0.009902079582214356, 0.009843711853027343, 0.009894911766052245, 0.01002086353302002, 0.009979904174804688, 0.009959424018859863, 0.009837568283081055, 0.00991436767578125, 0.009902079582214356, 0.009964544296264649, 0.009884672164916992, 0.009819135665893555, 0.009880576133728027, 0.009855999946594238, 0.009840640068054199, 0.0098088960647583, 0.009981951713562011, 0.009887743949890136, 0.009879551887512206, 0.009885696411132813, 0.009911295890808105, 0.00985804843902588, 0.009900032043457031, 0.009842687606811524, 0.009935872077941894, 0.009876480102539062, 0.00999014377593994, 0.009851903915405273, 0.009848832130432129, 0.009880576133728027, 0.009886719703674317, 0.009892864227294922, 0.009892864227294922, 0.009891839981079101, 0.009885696411132813, 0.009917440414428711, 0.009846783638000489, 0.009887743949890136, 0.009827327728271485, 0.009901056289672852, 0.009884672164916992, 0.00993280029296875, 0.00991641616821289, 0.009885696411132813, 0.009846783638000489, 0.009868288040161132, 0.009980928421020508, 0.009844736099243164, 0.009909248352050782, 0.009855999946594238, 0.009925632476806641, 0.00982425594329834, 0.010124287605285644, 0.009892959594726563, 0.009929632186889649, 0.009993215560913087, 0.009917440414428711, 0.009851967811584473, 0.00992454433441162, 0.00990822410583496, 0.009902079582214356, 0.009905216217041015, 0.009954239845275879, 0.009897983551025391, 0.00991436767578125, 0.009942015647888184, 0.009926655769348144, 0.009926655769348144, 0.010034175872802734, 0.009935872077941894, 0.01042636775970459, 0.010043392181396485, 0.009971712112426758, 0.009872384071350097, 0.009925632476806641, 0.009972736358642579, 0.009880576133728027, 0.009850879669189454, 0.009911295890808105, 0.009891839981079101, 0.009860095977783203, 0.009963520050048828, 0.009843711853027343, 0.009953280448913575, 0.009903103828430175, 0.009992192268371582, 0.009801728248596191, 0.009821184158325195, 0.00991436767578125, 0.00993177604675293, 0.009890815734863282, 0.009906175613403321, 0.009854975700378419, 0.009892864227294922, 0.009877504348754883, 0.00999014377593994, 0.00989900779724121, 0.00991641616821289, 0.00982630443572998, 0.009923583984375, 0.009986047744750976, 0.009909248352050782, 0.00993177604675293, 0.009836544036865234, 0.010012672424316407, 0.009969663619995118, 0.009926655769348144, 0.00991436767578125, 0.009964544296264649, 0.009962495803833007, 0.009877504348754883, 0.009932831764221191, 0.009926624298095703, 0.009862144470214844, 0.009982975959777832, 0.009923583984375, 0.009927680015563965, 0.009897983551025391, 0.00994611167907715, 0.009882623672485352, 0.009859071731567384, 0.009879551887512206, 0.00989900779724121, 0.009902079582214356, 0.009888768196105957, 0.009938943862915038, 0.009942015647888184, 0.009741312026977538, 0.009893888473510743, 0.009985024452209473, 0.009852928161621094, 0.00994611167907715, 0.009893888473510743, 0.009945088386535645, 0.009934847831726074, 0.00985804843902588, 0.009918463706970216, 0.009926655769348144, 0.009951231956481933, 0.009992192268371582, 0.009844736099243164, 0.009901056289672852, 0.009866239547729493, 0.009887743949890136, 0.009945088386535645, 0.010002431869506835, 0.009923583984375, 0.009957375526428223, 0.010070015907287597, 0.00992972755432129, 0.009958399772644042, 0.009996288299560547, 0.009918463706970216, 
0.00992972755432129, 0.009943103790283203, 0.01010374355316162, 0.0101396484375, 0.009881600379943848, 0.01072332763671875, 0.009975808143615723, 0.009806879997253418, 0.009952223777770997, 0.009955360412597656, 0.009908191680908203, 0.009919487953186035, 0.0101396484375, 0.009920512199401856, 0.009897983551025391, 0.01002291202545166, 0.009909248352050782, 0.009966591835021972, 0.00992460823059082, 0.00999833583831787, 0.00990822410583496, 0.010066944122314453, 0.009794560432434082, 0.009860095977783203, 0.009980928421020508, 0.00994918441772461, 0.009861120223999023, 0.009938943862915038, 0.009874431610107422, 0.010045439720153808, 0.009887743949890136, 0.010011648178100586, 0.009805824279785156, 0.009887743949890136, 0.010106880187988282, 0.009985024452209473, 0.010241024017333985, 0.01002086353302002, 0.010098688125610352, 0.010425344467163086, 0.010606592178344726, 0.010074111938476562, 0.009902079582214356, 0.009886719703674317, 0.009989119529724122, 0.009933856010437012, 0.009829343795776367, 0.00992972755432129, 0.009882623672485352, 0.009863167762756348, 0.00984166431427002, 0.009957375526428223, 0.009827327728271485, 0.009881600379943848, 0.010250240325927735, 0.009954303741455077, 0.009787391662597657, 0.009887743949890136, 0.009868288040161132, 0.009917440414428711, 0.009855999946594238, 0.010052607536315919, 0.009667584419250488, 0.009656319618225098, 0.009548800468444824, 0.00951910400390625, 0.009462783813476563, 0.009697279930114745, 0.009910271644592286, 0.009861120223999023, 0.009820159912109374, 0.009915424346923827, 0.009887711524963379, 0.00991436767578125, 0.00983347225189209, 0.009883647918701171, 0.009875455856323241, 0.01001369571685791, 0.009857024192810059, 0.009880576133728027, 0.009828351974487304, 0.009878527641296387, 0.009906175613403321, 0.009853952407836914, 0.009897983551025391, 0.009810943603515625, 0.00990719985961914, 0.009885696411132813, 0.009882623672485352, 0.009856032371520997, 0.009978848457336426, 0.009828351974487304, 0.009953280448913575, 0.009867263793945312, 0.009951231956481933, 0.009867263793945312, 0.009887743949890136, 0.009867263793945312, 0.009965567588806153, 0.009912320137023926, 0.009893888473510743, 0.009886719703674317, 0.009902079582214356, 0.009878591537475586, 0.009903039932250977, 0.010147839546203614, 0.009771007537841797, 0.009876480102539062, 0.009871359825134277, 0.00993791961669922, 0.009963520050048828, 0.009708543777465821, 0.009476096153259277, 0.00960307216644287, 0.010007552146911621, 0.009885696411132813, 0.009935872077941894, 0.009870335578918457, 0.009981951713562011, 0.009857024192810059, 0.009966591835021972, 0.00982323169708252, 0.009391103744506836, 0.009498623847961426, 0.009496576309204101, 0.009527296066284179, 0.009684991836547852, 0.009868288040161132, 0.009822208404541016, 0.009906175613403321, 0.009893888473510743, 0.009836544036865234, 0.009778176307678223, 0.00981606388092041, 0.009912320137023926, 0.009827327728271485, 0.009873439788818359, 0.009818079948425294, 0.00982528018951416, 0.009805824279785156, 0.009930815696716309, 0.00993887996673584, 0.009832448005676269, 0.009809920310974121, 0.009850879669189454, 0.009878527641296387, 0.009936896324157715, 0.010584063529968261, 0.00993996810913086, 0.009860159873962403, 0.009869248390197753, 0.00989900779724121, 0.009812992095947265, 0.01013759994506836, 0.009885696411132813, 0.009840640068054199, 0.009799679756164551, 0.009863167762756348, 0.009867263793945312, 0.009812992095947265, 0.009936896324157715, 0.010136575698852538, 0.01014681625366211, 
0.009908288002014161, 0.00983135986328125, 0.009912320137023926, 0.009892864227294922, 0.009928704261779785, 0.009840640068054199, 0.009878527641296387, 0.00990719985961914, 0.009879551887512206, 0.009846783638000489, 0.009844736099243164, 0.009863167762756348, 0.009845760345458985, 0.009952256202697754, 0.009842687606811524, 0.009903103828430175, 0.009818112373352051, 0.00991641616821289, 0.00971878433227539, 0.009883647918701171, 0.009780223846435548, 0.009811967849731444, 0.009883647918701171, 0.009799679756164551, 0.00988368034362793, 0.00986723232269287, 0.009958399772644042, 0.01005568027496338, 0.009886719703674317, 0.009995264053344726, 0.00990719985961914, 0.009828351974487304, 0.00992460823059082, 0.009848832130432129, 0.009861120223999023, 0.009799679756164551, 0.009888768196105957, 0.009893888473510743, 0.009837568283081055, 0.009797632217407226, 0.009891839981079101, 0.0101396484375, 0.009918463706970216, 0.009832448005676269, 0.009804800033569335, 0.009957375526428223, 0.009955327987670898, 0.009878527641296387, 0.009713664054870605, 0.009634816169738769, 0.009729023933410644, 0.009871359825134277, 0.009938943862915038, 0.009864192008972168, 0.00991436767578125, 0.009913344383239747, 0.00991641616821289, 0.009927680015563965, 0.00993280029296875, 0.009902079582214356, 0.009966591835021972, 0.009877504348754883, 0.009868288040161132, 0.009839615821838378, 0.00990719985961914, 0.009746432304382324, 0.009798656463623047, 0.009810943603515625, 0.009779199600219727, 0.009854975700378419, 0.009705471992492675, 0.009517056465148926, 0.009464832305908203, 0.009492480278015136, 0.009523200035095216, 0.009458687782287598, 0.009923583984375, 0.009910271644592286, 0.009859071731567384, 0.009867263793945312, 0.009934847831726074, 0.009829376220703125, 0.009882623672485352, 0.009860095977783203, 0.010044416427612305, 0.009853952407836914, 0.009863167762756348, 0.009909248352050782, 0.009838591575622559, 0.009923583984375, 0.010054656028747559, 0.01013043212890625, 0.00990822410583496, 0.009866239547729493, 0.009846783638000489, 0.009896960258483887, 0.009865216255187988, 0.009970687866210937, 0.009804800033569335, 0.009910271644592286, 0.009836544036865234, 0.010126336097717285, 0.010143744468688964, 0.009935872077941894, 0.009875455856323241, 0.009888799667358398, 0.010189824104309082, 0.010116064071655273, 0.009834495544433594, 0.009863167762756348, 0.009855999946594238, 0.009829376220703125, 0.009906175613403321, 0.009760767936706542, 0.009863167762756348, 0.009830400466918946, 0.00992460823059082, 0.00971571159362793, 0.009576448440551758, 0.009482239723205567, 0.009477120399475097, 0.009510911941528321, 0.009783295631408692, 0.00985804843902588, 0.009797632217407226, 0.009854975700378419, 0.009850879669189454, 0.009860095977783203, 0.009906175613403321, 0.009882623672485352, 0.009892864227294922, 0.009843711853027343, 0.009903103828430175, 0.009829376220703125, 0.009843744277954102, 0.009823200225830078, 0.00984985637664795, 0.009851903915405273, 0.009959424018859863, 0.009811967849731444, 0.009869312286376953, 0.010674176216125488, 0.010117119789123535, 0.010446847915649414, 0.010617856025695802, 0.009958399772644042, 0.00991539192199707, 0.009918463706970216, 0.009891839981079101, 0.00981606388092041, 0.009902079582214356, 0.009844736099243164, 0.009888768196105957, 0.009869312286376953, 0.00988265609741211, 0.009848799705505371, 0.009912320137023926, 0.009912320137023926, 0.009906175613403321, 0.009834495544433594, 0.009885696411132813, 0.009868288040161132, 0.009850879669189454, 
0.009897983551025391, 0.009836544036865234, 0.009885696411132813, 0.009884672164916992, 0.009840640068054199, 0.009369600296020507, 0.009468928337097168, 0.009498623847961426, 0.009443360328674317, 0.009472000122070312, 0.00953446388244629, 0.009476096153259277, 0.009496576309204101, 0.009501728057861328, 0.009456607818603515, 0.00952627182006836, 0.009454591751098633, 0.009482239723205567, 0.009513983726501465, 0.010898431777954102, 0.01001369571685791, 0.009883647918701171, 0.00990719985961914, 0.009877504348754883, 0.009952256202697754, 0.009845760345458985, 0.009953280448913575, 0.010343423843383789, 0.010353664398193359, 0.009962495803833007, 0.009814016342163086, 0.009867263793945312, 0.009901056289672852, 0.009863167762756348, 0.009889792442321778, 0.009794560432434082, 0.009882623672485352, 0.009844736099243164, 0.009920512199401856, 0.009894911766052245, 0.009912320137023926, 0.009848832130432129, 0.009890815734863282, 0.00982630443572998, 0.009905152320861817, 0.009845760345458985, 0.009820159912109374, 0.009901056289672852, 0.009835519790649415, 0.009866239547729493, 0.009870335578918457, 0.00984985637664795, 0.009892864227294922, 0.009883711814880371, 0.009849791526794433, 0.009803775787353516, 0.009874431610107422, 0.009879551887512206, 0.009892864227294922, 0.009942015647888184, 0.009882623672485352, 0.009818112373352051, 0.009847807884216308, 0.009845760345458985, 0.009918463706970216, 0.009862144470214844, 0.009910271644592286, 0.009840640068054199, 0.009868288040161132, 0.009863167762756348, 0.009852928161621094, 0.009848832130432129, 0.01103667163848877, 0.011898880004882812, 0.01041919994354248, 0.009997311592102051, 0.009982975959777832, 0.00994099235534668, 0.00993280029296875, 0.009918463706970216, 0.009942015647888184, 0.009993215560913087, 0.009851903915405273, 0.00990822410583496, 0.009861120223999023, 0.009863167762756348, 0.009867263793945312, 0.009945088386535645, 0.009890815734863282, 0.009903103828430175, 0.009861120223999023, 0.009901056289672852, 0.009875455856323241, 0.009640959739685059, 0.00951910400390625, 0.00952627182006836, 0.009551872253417968, 0.009560064315795898, 0.009494527816772461, 0.00961740779876709, 0.009486335754394531, 0.009547776222229003, 0.009487360000610352, 0.00951296043395996, 0.009445535659790038, 0.009464672088623047, 0.00951193618774414, 0.009546751976013184, 0.009475071907043458, 0.009529343605041504, 0.009465855598449707, 0.009497599601745605, 0.00951193618774414, 0.00949350357055664, 0.009485312461853027, 0.009504768371582031, 0.00949350357055664, 0.009525247573852539, 0.009485312461853027, 0.009478143692016602, 0.009558015823364258, 0.009524224281311035, 0.009504768371582031, 0.009463808059692384, 0.009477120399475097, 0.009530367851257325, 0.009484288215637206, 0.009473024368286133, 0.009554944038391112, 0.009680895805358887, 0.009515007972717286, 0.009414655685424805, 0.00949350357055664, 0.009499648094177245, 0.009496576309204101, 0.009452544212341308, 0.00950169563293457, 0.009464896202087403, 0.009479104042053222, 0.009507840156555175, 0.009499648094177245, 0.00951910400390625, 0.009541631698608399, 0.009502719879150391, 0.009520159721374512, 0.009524191856384277, 0.009500672340393066, 0.009890815734863282, 0.009872384071350097, 0.009910271644592286, 0.009902079582214356, 0.00974233627319336, 0.009522175788879395, 0.00951296043395996, 0.009553919792175293, 0.009447423934936524, 0.009472000122070312, 0.00951296043395996, 0.009496576309204101, 0.009508864402770996, 0.00951193618774414, 0.009486335754394531, 
0.009521151542663574, 0.009491456031799317, 0.00950169563293457, 0.00951910400390625, 0.009475071907043458, 0.009553919792175293, 0.009537535667419434, 0.00951296043395996, 0.009527296066284179, 0.009464832305908203, 0.00951910400390625, 0.009498656272888184, 0.009387999534606934, 0.00941260814666748, 0.009393152236938476, 0.009352191925048828, 0.009407487869262696, 0.009512991905212402, 0.01039971160888672, 0.010033151626586915, 0.009821184158325195, 0.009893888473510743, 0.009942015647888184, 0.009880576133728027, 0.009969728469848633, 0.009854911804199219, 0.009911295890808105, 0.009895936012268066, 0.009895936012268066, 0.00982425594329834, 0.009502719879150391, 0.00954265594482422, 0.009507840156555175, 0.009502719879150391, 0.009525247573852539, 0.009488384246826171, 0.009449472427368164, 0.009554944038391112, 0.009499648094177245, 0.009560064315795898, 0.00951910400390625, 0.009495552062988282, 0.009545727729797364, 0.009502719879150391, 0.00951200008392334, 0.009416640281677246, 0.009497599601745605, 0.009500672340393066, 0.009496576309204101, 0.009480192184448242, 0.00952012825012207, 0.00949350357055664, 0.009630720138549804, 0.009671680450439453, 0.009664511680603028, 0.010683391571044922, 0.010066944122314453, 0.010007552146911621, 0.009897983551025391, 0.009928704261779785, 0.009936896324157715, 0.00960102367401123, 0.009452544212341308, 0.009488384246826171, 0.00952627182006836, 0.00950169563293457, 0.009533439636230469, 0.009423871994018555, 0.00951807975769043, 0.00953651237487793, 0.009530367851257325, 0.00940236759185791, 0.009492480278015136, 0.009508895874023437, 0.009531359672546386, 0.009496576309204101, 0.009482239723205567, 0.00952012825012207, 0.009492480278015136, 0.00941158390045166, 0.009379839897155762, 0.009354240417480468, 0.00951910400390625, 0.009477151870727538, 0.009495519638061523, 0.009504768371582031, 0.009515007972717286, 0.009482239723205567, 0.009529343605041504, 0.009463808059692384, 0.009521151542663574, 0.009504768371582031, 0.009478143692016602, 0.010062848091125488, 0.00963379192352295, 0.009532416343688965, 0.009522239685058593, 0.009495488166809082, 0.009486335754394531, 0.009513983726501465, 0.009487360000610352, 0.009495552062988282, 0.009524224281311035, 0.009492480278015136, 0.00950169563293457, 0.009440256118774413, 0.009372672080993653, 0.009347071647644043, 0.00935321617126465, 0.009373696327209472, 0.009348095893859864, 0.009464832305908203, 0.009529343605041504, 0.009466879844665528, 0.009468928337097168, 0.009521151542663574, 0.009465855598449707, 0.009483263969421387, 0.00952012825012207, 0.00953446388244629, 0.00951296043395996, 0.009476096153259277, 0.009412768363952637, 0.009438048362731934, 0.009496576309204101, 0.0095447359085083, 0.009494496345520019, 0.009475071907043458, 0.009533439636230469, 0.009505791664123535, 0.009484288215637206, 0.009502719879150391, 0.009477120399475097, 0.009482239723205567, 0.009551903724670411, 0.009455583572387695, 0.00954265594482422, 0.009478143692016602, 0.009488384246826171, 0.009545727729797364, 0.009488384246826171, 0.00949350357055664, 0.009495552062988282, 0.009553919792175293, 0.009522175788879395, 0.009543680191040039, 0.009500672340393066, 0.00955084800720215, 0.009474047660827637, 0.009375743865966797, 0.009489407539367676, 0.009498623847961426, 0.009523200035095216, 0.009499648094177245, 0.009370623588562011, 0.00939417552947998, 0.009364480018615723, 0.00935321617126465, 0.009476096153259277, 0.009517056465148926, 0.00951296043395996, 
0.009539584159851074]",tokens/s,101.82933267145687,,,,,,,, -4bit-awq-gemv-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,meta-llama/Meta-Llama-3-70B,meta-llama/Meta-Llama-3-70B,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - self.load_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3904, in from_pretrained - dispatch_model(model, **device_map_kwargs) - File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 489, in dispatch_model - model.to(device) - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 2796, in to - return super().to(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1173, in to - return self._apply(convert) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 779, in _apply - module._apply(fn) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 779, in _apply - module._apply(fn) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 779, in _apply - module._apply(fn) - [Previous line repeated 2 more times] - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 853, in _apply - self._buffers[key] = fn(buf) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1159, in convert - return t.to( -torch.cuda.OutOfMemoryError: CUDA out of memory. Tried to allocate 20.00 MiB. 
GPU - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemv-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,Qwen/Qwen1.5-14B,Qwen/Qwen1.5-14B,cuda,0,42,,,True,,,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,qwen2,MB,8197.963776,11287.396352,0.0,10701.766656,10468.923392,s,1,11.83710546875,11.83710546875,0.0,11.83710546875,11.83710546875,11.83710546875,11.83710546875,[11.83710546875],,kWh,5.804716547919119e-05,3.179892426203213e-05,8.303728865211024e-05,0.00017288337839333357,,MB,4047.36,11834.753024,0.0,11188.30592,10924.283904,s,10,2.0813307189941406,0.20813307189941405,0.0003380951007756001,0.20809423828125,0.2085631576538086,0.20871415786743164,0.20883495803833008,"[0.20821165466308594, 0.20799577331542968, 0.2077703399658203, 0.2079361572265625, 0.20809196472167968, 0.20763577270507813, 0.20809651184082031, 0.20886515808105469, 0.20819778442382814, 0.20852960205078125]",tokens/s,1229.9823265171378,kWh,2.463220360952105e-06,1.3497142884277882e-06,1.4437916642918066e-05,1.825085129229796e-05,tokens/kWh,14026742.96667107,MB,4051.623936,11836.850176,0.0,11190.403072,10924.286464,s,10,26.473201660156253,2.6473201660156254,0.00818888775603213,2.6467493896484378,2.656170361328125,2.6601845458984377,2.6633958935546875,"[2.64821142578125, 2.6306904296875, 2.646068115234375, 2.6455517578125, 2.6552783203125, 2.643073974609375, 2.64315576171875, 2.64954248046875, 2.6474306640625, 2.66419873046875]",tokens/s,23.797650472635787,kWh,3.13028375532154e-05,1.7152798305537616e-05,0.00010767909771728321,0.00015613473357603628,tokens/kWh,403497.6622887023,,s,630,26.471164867401107,0.04201772201174781,0.0005350071306868471,0.041785280227661134,0.04262635612487793,0.04282593421936035,0.04408522888183594,"[0.041831424713134766, 0.04177920150756836, 0.04192563247680664, 0.04178636932373047, 0.04163174438476563, 0.04174131011962891, 0.041611263275146484, 0.04235776138305664, 0.041711616516113284, 0.042487808227539066, 0.041627647399902344, 0.041768959045410156, 0.0425082893371582, 0.04211404800415039, 0.041695232391357424, 0.041747455596923826, 0.041637889862060545, 0.04177305603027344, 0.04273766326904297, 0.042531841278076174, 0.042382335662841795, 0.04234035110473633, 0.04178227233886719, 0.041731071472167966, 0.04212736129760742, 0.041752574920654296, 0.04165222549438476, 0.04311040115356445, 0.04176588821411133, 0.04174131011962891, 0.04176793670654297, 0.04252774429321289, 0.04169830322265625, 0.04174848175048828, 0.04154470443725586, 0.04181913757324219, 0.042264575958251956, 0.04154265594482422, 0.04170137786865234, 0.041708545684814455, 0.041545726776123046, 0.04171571350097656, 0.04192563247680664, 0.04231987380981445, 0.04242124938964844, 0.042382335662841795, 0.04259430313110352, 0.04165427017211914, 0.042177536010742187, 0.04165017700195312, 0.04229529571533203, 0.043052032470703126, 0.042327041625976565, 0.04182527923583984, 0.041981952667236325, 0.04289843368530273, 0.04153753662109375, 0.041458686828613284, 0.042177536010742187, 
0.04253593444824219, 0.042856449127197264, 0.04248473739624024, 0.04247552108764648, 0.04171059036254883, 0.04244172668457031, 0.04248064041137695, 0.042412033081054686, 0.04163481521606445, 0.041468929290771485, 0.042363903045654294, 0.04198809432983398, 0.041250816345214845, 0.04143001556396484, 0.042246143341064454, 0.042633216857910154, 0.04244172668457031, 0.04153343963623047, 0.04174540710449219, 0.04163891220092773, 0.04200755310058594, 0.04170342254638672, 0.041606143951416014, 0.04160409545898437, 0.04132352066040039, 0.04177407836914063, 0.04189388656616211, 0.04163379287719727, 0.041506816864013675, 0.04133478546142578, 0.041545726776123046, 0.04167168045043945, 0.04159590530395508, 0.041270271301269534, 0.04138905715942383, 0.04140748977661133, 0.04155084609985352, 0.041605121612548826, 0.041643009185791016, 0.04232191848754883, 0.04169830322265625, 0.04165119934082031, 0.041837566375732424, 0.041545726776123046, 0.042777599334716795, 0.04211507034301758, 0.041596927642822266, 0.04168703842163086, 0.04147507095336914, 0.0419051513671875, 0.04158259201049805, 0.041599998474121096, 0.04161228942871094, 0.04151603317260742, 0.04262604904174805, 0.04197785568237305, 0.04150374221801758, 0.041523200988769535, 0.04136959838867187, 0.04132352066040039, 0.04116582489013672, 0.04151603317260742, 0.04208230209350586, 0.041657344818115234, 0.041635841369628904, 0.04134400177001953, 0.042352638244628905, 0.04178425598144531, 0.04156415939331055, 0.04264857482910156, 0.04381388854980469, 0.04251443099975586, 0.04165222549438476, 0.04174643325805664, 0.04166963195800781, 0.04187238311767578, 0.042249214172363284, 0.04245913696289062, 0.04203007888793946, 0.041381889343261716, 0.041644031524658204, 0.041285633087158206, 0.041670654296875, 0.04159795379638672, 0.04166656112670898, 0.041855998992919925, 0.04160204696655274, 0.04267724609375, 0.041646080017089845, 0.04170137786865234, 0.04170035171508789, 0.04135935974121094, 0.04146380615234375, 0.0419420166015625, 0.04163993453979492, 0.04154265594482422, 0.04164710235595703, 0.0415467529296875, 0.0416890869140625, 0.04169420623779297, 0.043545600891113284, 0.04334592056274414, 0.04291993713378906, 0.0424192008972168, 0.04199321746826172, 0.042425342559814457, 0.042431488037109374, 0.041501697540283204, 0.0420055046081543, 0.04218675231933594, 0.042277889251708986, 0.042461185455322265, 0.041534465789794923, 0.041543678283691404, 0.04156927871704102, 0.04156825637817383, 0.041935871124267575, 0.04260147094726562, 0.04176793670654297, 0.04153241729736328, 0.041984001159667966, 0.0425799674987793, 0.04161740875244141, 0.04163071823120117, 0.04228812789916992, 0.041981952667236325, 0.04275404739379883, 0.041768959045410156, 0.04239462280273437, 0.04233830261230469, 0.0422737922668457, 0.04166451263427735, 0.04139622497558594, 0.041646080017089845, 0.0417259521484375, 0.0428328971862793, 0.04238848114013672, 0.042537982940673826, 0.042482688903808595, 0.04224204635620117, 0.04507955169677735, 0.04193075180053711, 0.04271206283569336, 0.04173311996459961, 0.04159385681152344, 0.04163481521606445, 0.041603073120117184, 0.04166451263427735, 0.04155187225341797, 0.041431041717529295, 0.04151500701904297, 0.04167987060546875, 0.04212428665161133, 0.04258508682250976, 0.04178841781616211, 0.0416102409362793, 0.04132863998413086, 0.04131327819824219, 0.041234432220458986, 0.04423372650146484, 0.042823680877685545, 0.042649600982666014, 0.0414730224609375, 0.04179046249389649, 0.041608192443847655, 0.04153548812866211, 0.04125696182250976, 
0.04174028778076172, 0.04148223876953125, 0.04130508804321289, 0.041506816864013675, 0.041575424194335936, 0.04148940658569336, 0.04155596923828125, 0.04151295852661133, 0.04249599838256836, 0.04167475128173828, 0.042240001678466796, 0.04170956802368164, 0.04174848175048828, 0.04163993453979492, 0.04134092712402344, 0.04208537673950195, 0.042797054290771484, 0.04168294525146484, 0.041567230224609376, 0.04217036819458008, 0.04248064041137695, 0.04227276611328125, 0.04194406509399414, 0.0425984001159668, 0.04357632064819336, 0.042477569580078124, 0.04164812850952149, 0.04239257431030274, 0.042543102264404296, 0.0428042221069336, 0.041768959045410156, 0.04241305541992187, 0.04173209762573242, 0.041608192443847655, 0.04188671875, 0.04246835327148438, 0.04177920150756836, 0.04166348648071289, 0.041619457244873044, 0.04196556854248047, 0.042466304779052735, 0.04243046569824219, 0.04158156967163086, 0.044470272064208984, 0.042874881744384766, 0.04251955032348633, 0.042625022888183595, 0.041998336791992184, 0.04154163360595703, 0.041659393310546876, 0.041711616516113284, 0.042668033599853515, 0.04166348648071289, 0.04214476776123047, 0.04171987152099609, 0.041786304473876955, 0.04162355041503906, 0.04177510452270508, 0.04261273574829102, 0.04234137725830078, 0.04163379287719727, 0.041816062927246093, 0.042567680358886716, 0.04239769744873047, 0.042352638244628905, 0.041799678802490234, 0.04227481460571289, 0.042343425750732425, 0.04270284652709961, 0.04256256103515625, 0.04244377517700195, 0.0425687026977539, 0.04251136016845703, 0.0426690559387207, 0.04260659027099609, 0.04174540710449219, 0.041578495025634765, 0.042281982421875, 0.041626625061035157, 0.04166144180297852, 0.041605121612548826, 0.04221747207641602, 0.042572799682617186, 0.04173209762573242, 0.04248064041137695, 0.041678848266601565, 0.04157952117919922, 0.04218982315063476, 0.04236492919921875, 0.04171263885498047, 0.04169830322265625, 0.04156620788574219, 0.04166041564941406, 0.04157952117919922, 0.0417894401550293, 0.042120193481445314, 0.04167987060546875, 0.04157747268676758, 0.04141260910034179, 0.041659393310546876, 0.04160204696655274, 0.04271615982055664, 0.0425799674987793, 0.04205670547485352, 0.04166758346557617, 0.041659393310546876, 0.04269772720336914, 0.04241408157348633, 0.04172083282470703, 0.041783294677734374, 0.042447872161865234, 0.042449920654296876, 0.04173311996459961, 0.042038272857666016, 0.041565185546875, 0.04166451263427735, 0.041393150329589845, 0.04235878372192383, 0.04167987060546875, 0.04232908630371094, 0.042559486389160156, 0.0424376335144043, 0.04201881790161133, 0.04165324783325195, 0.04225024032592774, 0.04262400054931641, 0.04153139114379883, 0.04240588760375977, 0.04398489761352539, 0.0421662712097168, 0.04152524948120117, 0.04157747268676758, 0.041768959045410156, 0.041695232391357424, 0.041629695892333986, 0.04144947052001953, 0.04161228942871094, 0.04171571350097656, 0.04181094360351562, 0.04182732772827148, 0.04171059036254883, 0.04171571350097656, 0.04164505767822266, 0.042403839111328126, 0.04275609588623047, 0.042501121520996096, 0.04256665420532227, 0.04165631866455078, 0.04181913757324219, 0.04173926544189453, 0.04168601608276367, 0.041411582946777346, 0.043215873718261716, 0.04189081573486328, 0.041714687347412106, 0.04235776138305664, 0.04172185516357422, 0.04163686370849609, 0.04166963195800781, 0.04174540710449219, 0.041708545684814455, 0.04179046249389649, 0.042313728332519535, 0.04186316680908203, 0.04173823928833008, 0.04163071823120117, 0.041611263275146484, 
0.04179148864746094, 0.04249087905883789, 0.041859073638916014, 0.04216012954711914, 0.041812992095947264, 0.04156927871704102, 0.04175769424438477, 0.0422369270324707, 0.04207820892333984, 0.04136140823364258, 0.04171366500854492, 0.04197478485107422, 0.04169113540649414, 0.0417781753540039, 0.04168806457519531, 0.041763839721679685, 0.04177407836914063, 0.041763839721679685, 0.04174233627319336, 0.04282777786254883, 0.04177305603027344, 0.04163993453979492, 0.041488384246826174, 0.04169728088378906, 0.043109375, 0.041885696411132815, 0.042071041107177735, 0.042477569580078124, 0.042501121520996096, 0.04291584014892578, 0.04264243316650391, 0.04249292755126953, 0.04264755249023437, 0.04254207992553711, 0.04248678588867188, 0.04244172668457031, 0.04152012634277344, 0.041675777435302735, 0.04170444869995117, 0.04159283065795898, 0.04165017700195312, 0.04151193618774414, 0.04213759994506836, 0.04173311996459961, 0.041565185546875, 0.041594879150390625, 0.0413829116821289, 0.0416286735534668, 0.043284481048583984, 0.04262911987304688, 0.04253081512451172, 0.041534465789794923, 0.041425918579101564, 0.04236083221435547, 0.042077182769775394, 0.041662464141845705, 0.04158566284179688, 0.04172390365600586, 0.041836544036865236, 0.04169728088378906, 0.0417781753540039, 0.04194303894042969, 0.04313292694091797, 0.04234137725830078, 0.041626625061035157, 0.04144947052001953, 0.04170751953125, 0.04196147155761719, 0.041575424194335936, 0.04173516845703125, 0.041632766723632815, 0.04141260910034179, 0.04178124618530273, 0.04165529632568359, 0.04171571350097656, 0.041527294158935545, 0.044126209259033204, 0.04213452911376953, 0.04138700866699219, 0.04160921478271484, 0.042068992614746094, 0.042379264831542966, 0.04157132720947266, 0.042415103912353515, 0.04230758285522461, 0.042638336181640625, 0.04247040176391602, 0.042548225402832034, 0.041632766723632815, 0.04248064041137695, 0.0416286735534668, 0.04182527923583984, 0.041635841369628904, 0.04171059036254883, 0.041692161560058595, 0.04172697448730469, 0.04172697448730469, 0.04211711883544922, 0.04168294525146484, 0.04355276870727539, 0.04303564834594727, 0.04195840072631836, 0.041998336791992184, 0.04248166275024414, 0.0425082893371582, 0.04250419235229492, 0.04163174438476563, 0.04269055938720703, 0.04233932876586914, 0.04217036819458008, 0.0416286735534668, 0.0425984001159668, 0.04183244705200195, 0.04142694473266602, 0.041724929809570314, 0.042714111328125, 0.041747455596923826, 0.042006526947021484, 0.04255641555786133, 0.04200447845458984, 0.04256256103515625, 0.04220620727539062, 0.041855998992919925, 0.04245913696289062, 0.04265574264526367, 0.04283084869384766, 0.04222771072387695, 0.04203417587280273, 0.04197785568237305, 0.04168601608276367, 0.04250726318359375, 0.04180377578735352, 0.04247654342651367, 0.042436607360839845, 0.04208844757080078, 0.04168601608276367, 0.04168294525146484, 0.04167168045043945, 0.04203724670410156, 0.045274112701416014, 0.041924606323242186, 0.04149248123168945, 0.04180582427978516, 0.041589759826660154, 0.04158262252807617, 0.0416317138671875, 0.04165529632568359, 0.04163993453979492, 0.04175667190551758, 0.042418174743652344, 0.0420136947631836, 0.0416286735534668, 0.04216115188598633, 0.04172390365600586, 0.04156825637817383, 0.041527294158935545, 0.04228915023803711, 0.042434558868408204, 0.042068992614746094, 0.041621505737304686, 0.041866241455078126, 0.041404415130615234, 0.042123264312744144, 0.04172390365600586, 0.04184473419189453, 0.042237953186035154, 0.041485313415527345, 0.04157241439819336, 
0.04172998428344726, 0.04142387390136719, 0.042243072509765625, 0.04205363082885742, 0.04174950408935547, 0.04245708847045898, 0.04257894515991211, 0.042218494415283206, 0.042537982940673826, 0.04175360107421875, 0.04167987060546875, 0.041728000640869144, 0.04415488052368164, 0.04277964782714844, 0.042387454986572266, 0.042246143341064454, 0.04254105758666992, 0.04170956802368164, 0.042016769409179686, 0.04179251098632813, 0.04167679977416992, 0.04165222549438476, 0.041606143951416014, 0.04158771133422851, 0.04236800003051758, 0.04257894515991211, 0.04233830261230469, 0.041559040069580076, 0.04165427017211914, 0.04171059036254883, 0.04177407836914063, 0.04303462219238281, 0.042780670166015625, 0.041619457244873044, 0.0424898567199707, 0.042559486389160156, 0.04171571350097656, 0.04276633453369141, 0.042575870513916016, 0.04424703979492187, 0.043261951446533206, 0.043153408050537106, 0.042651649475097655, 0.04244377517700195, 0.041777153015136716, 0.04254924774169922, 0.04246015930175781, 0.04173516845703125, 0.042656768798828126, 0.04205875015258789, 0.04190105438232422, 0.04252262496948242, 0.0425687026977539, 0.041768959045410156, 0.0417259521484375, 0.04165222549438476, 0.04234854507446289, 0.042534912109375, 0.042689537048339846, 0.042003456115722655, 0.04190924835205078, 0.04248166275024414, 0.04244275283813476, 0.04269977569580078, 0.04189081573486328, 0.04324966430664062, 0.04175564956665039, 0.04227174377441406, 0.04241100692749023]",tokens/s,23.799481554959314,,,,,,,, -4bit-awq-gemv-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,mistralai/Mistral-7B-v0.1,mistralai/Mistral-7B-v0.1,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run - report = scenario.run(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 105, in run - _ = backend.generate(self.inputs, self.config.generate_kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context - return func(*args, **kwargs) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 400, in generate - return self.pretrained_model.generate(**inputs, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context - return func(*args, **kwargs) - File 
""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 1914, in generate - result = self._sample( - File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2651, in _sample - outputs = self( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mistral/modeling_mistral.py"", line 1200, in forward - outputs = self.model( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mistral/modeling_mistral.py"", line 976, in forward - layer_outputs = decoder_layer( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mistral/modeling_mistral.py"", line 718, in forward - hidden_states, self_attn_weights, present_key_value = self.self_attn( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mistral/modeling_mistral.py"", line 325, in forward - query_states = self.q_proj(hidden_states) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context - return func(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/gemv.py"", line 162, in forward - assert AWQ_INSTALLED, ( -AssertionError: AWQ kernels could not be loaded. 
Please install them from https://github.com/casper-hansen/AutoAWQ_kernels - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemv-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,google/gemma-7b,google/gemma-7b,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 304, in hf_raise_for_status - response.raise_for_status() - File ""/usr/local/lib/python3.10/dist-packages/requests/models.py"", line 1024, in raise_for_status - raise HTTPError(http_error_msg, response=self) -requests.exceptions.HTTPError: 403 Client Error: Forbidden for url: https://huggingface.co/google/gemma-7b/resolve/main/config.json - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1722, in _get_metadata_or_catch_error - metadata = get_hf_file_metadata(url=url, proxies=proxies, timeout=etag_timeout, headers=headers) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1645, in get_hf_file_metadata - r = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 372, in _request_wrapper - response = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 396, in _request_wrapper - hf_raise_for_status(response) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 367, in hf_raise_for_status - raise HfHubHTTPError(message, response=response) from e -huggingface_hub.utils._errors.HfHubHTTPError: (Request ID: Root=1-66948191-30cb704d77ac1aa928a61db8;fe0bf4d6-477a-4fed-ada3-5ca129fe865f) - -403 Forbidden: Please enable access to public gated repositories in your fine-grained token settings to view this repository.. -Cannot access content at: https://huggingface.co/google/gemma-7b/resolve/main/config.json. -If you are trying to create or update content,make sure you have a token with the `write` role. 
- -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1221, in hf_hub_download - return _hf_hub_download_to_cache_dir( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1325, in _hf_hub_download_to_cache_dir - _raise_on_head_call_error(head_call_error, force_download, local_files_only) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1826, in _raise_on_head_call_error - raise LocalEntryNotFoundError( -huggingface_hub.utils._errors.LocalEntryNotFoundError: An error happened while trying to locate the file on the Hub and we cannot find the requested files in the local cache. Please check your connection and try again or make sure your Internet connection is on. - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 445, in cached_file - raise EnvironmentError( -OSError: We couldn't connect to 'https://huggingface.co' to load this file, couldn't find it in the cached files and it looks like google/gemma-7b is not the path to a directory containing a file named config.json. -Checkout your internet connection or see how to run the library in offline mode at 'https://huggingface.co/docs/transformers/installation#offline-mode'. 
- -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemv-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,Qwen/Qwen1.5-1.8B,Qwen/Qwen1.5-1.8B,cuda,0,42,,,True,,,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,qwen2,MB,1870.671872,2926.051328,0.0,2340.421632,2285.568,s,1,8.639236328125,8.639236328125,0.0,8.639236328125,8.639236328125,8.639236328125,8.639236328125,[8.639236328125],,kWh,2.1951574133312432e-05,1.1983851122135222e-05,3.126669168007634e-05,6.5202116935524e-05,,MB,1889.316864,3305.63584,0.0,2659.188736,2578.857984,s,10,0.414264705657959,0.0414264705657959,0.00029218935193081337,0.04135747146606445,0.041857463073730464,0.04187124404907226,0.041882268829345703,"[0.0410948486328125, 0.04172515106201172, 0.04188502502441406, 0.0412204475402832, 0.04121760177612305, 0.0414944953918457, 0.041134208679199216, 0.04149542236328125, 0.041143104553222655, 0.04185440063476562]",tokens/s,6179.623716517343,kWh,4.87419639960591e-07,2.670815755022465e-07,1.9882297570207715e-06,2.7427309724836094e-06,tokens/kWh,93337626.82826519,MB,1897.476096,3305.63584,0.0,2659.188736,2578.860544,s,10,15.41090344238281,1.5410903442382815,0.017775088437081893,1.5349993896484375,1.5661140258789064,1.5667595275878907,1.5672759289550782,"[1.5477601318359375, 1.524239013671875, 1.5265804443359374, 1.519886962890625, 1.525062255859375, 1.5363245849609375, 1.567405029296875, 1.5659705810546876, 1.564000244140625, 1.5336741943359375]",tokens/s,40.88014712151036,kWh,1.8100675301774972e-05,9.918845181598138e-06,3.505807765776899e-05,6.307759814114208e-05,tokens/kWh,998769.7987331661,,s,630,15.409168292999272,0.024458997290475026,0.0005598839419083956,0.024226816177368164,0.025115853118896487,0.025268787002563475,0.02582980640411377,"[0.024197120666503907, 0.02406707191467285, 0.023945215225219727, 0.02410700798034668, 0.024123392105102538, 0.024081407546997072, 0.023957504272460937, 0.0246691837310791, 0.025274368286132814, 0.02517913627624512, 0.025041919708251953, 0.025013248443603517, 0.024863744735717775, 0.02454732894897461, 0.024851455688476562, 0.025018367767333984, 0.024978431701660156, 0.025011199951171875, 0.025027584075927735, 0.025034751892089844, 0.024987648010253907, 0.02494259262084961, 0.024961088180541994, 0.02504390335083008, 0.02512281608581543, 0.024664064407348633, 0.024647680282592774, 0.023994367599487306, 0.02409881591796875, 0.02405171203613281, 0.024070144653320313, 0.023925760269165038, 0.023826431274414063, 0.02386534309387207, 0.02404761505126953, 0.024094720840454102, 0.02389708709716797, 0.023756799697875978, 0.024001535415649415, 0.023949312210083007, 0.02405990409851074, 0.024135679244995118, 0.02411724853515625, 0.02389708709716797, 0.02387763214111328, 0.024046592712402344, 0.023813119888305666, 0.023835647583007814, 0.02515456008911133, 0.025413631439208984, 0.025358335494995117, 0.025144319534301757, 0.025150527954101564, 0.025009088516235352, 0.025060352325439454, 0.025143295288085937, 0.0250644474029541, 0.025268224716186522, 
0.025430015563964844, 0.02473574447631836, 0.02655232048034668, 0.02430771255493164, 0.024078336715698243, 0.024434688568115235, 0.024174591064453126, 0.02411315155029297, 0.024224767684936522, 0.0241080322265625, 0.02410905647277832, 0.024081407546997072, 0.024044607162475588, 0.02400147247314453, 0.024069120407104492, 0.02413670349121094, 0.024173568725585938, 0.025793535232543945, 0.02472755241394043, 0.024443904876708986, 0.024962047576904296, 0.02415001678466797, 0.023768064498901367, 0.023797760009765623, 0.023794687271118165, 0.024161279678344725, 0.02431795120239258, 0.023796735763549806, 0.023760896682739258, 0.023772159576416017, 0.024105983734130858, 0.024001535415649415, 0.02411212730407715, 0.024044544219970702, 0.024247295379638673, 0.02413260841369629, 0.02372915267944336, 0.024040447235107423, 0.02408448028564453, 0.024123392105102538, 0.02409676742553711, 0.023984128952026368, 0.023739391326904297, 0.023723007202148438, 0.023738367080688477, 0.02366361618041992, 0.023814144134521483, 0.023757823944091795, 0.023777280807495117, 0.02374143981933594, 0.023756799697875978, 0.02412851142883301, 0.024078336715698243, 0.024176639556884767, 0.024189952850341798, 0.024452096939086915, 0.02490572738647461, 0.02495692825317383, 0.025830400466918944, 0.02532454490661621, 0.024964096069335938, 0.02497331237792969, 0.02482585525512695, 0.023747583389282227, 0.023818239212036133, 0.02408038330078125, 0.02406399917602539, 0.024218624114990234, 0.024171520233154296, 0.0241530876159668, 0.024073215484619142, 0.02413363265991211, 0.023811071395874024, 0.023916543960571288, 0.02453196716308594, 0.024822784423828126, 0.024416255950927734, 0.024066047668457033, 0.02405887985229492, 0.024194047927856444, 0.02406809616088867, 0.024160255432128908, 0.025188352584838865, 0.02547302436828613, 0.02515558433532715, 0.025034751892089844, 0.025205759048461913, 0.024622079849243163, 0.024853504180908204, 0.025445375442504883, 0.024648704528808595, 0.023940095901489256, 0.023743488311767577, 0.023764991760253908, 0.02391449546813965, 0.024494112014770506, 0.024093727111816406, 0.02403219223022461, 0.02414899253845215, 0.024797183990478516, 0.024229888916015626, 0.02409676742553711, 0.024185855865478514, 0.02409164810180664, 0.024015871047973633, 0.023823360443115234, 0.02364723205566406, 0.023750656127929686, 0.024048639297485352, 0.0238919677734375, 0.0241530876159668, 0.024034303665161134, 0.024755199432373046, 0.024619007110595705, 0.024848384857177733, 0.024907808303833007, 0.023939039230346678, 0.024022016525268555, 0.023797760009765623, 0.023816192626953125, 0.02409267234802246, 0.02372403144836426, 0.02390732765197754, 0.024056831359863282, 0.02373740768432617, 0.023680959701538086, 0.02366054344177246, 0.023773183822631837, 0.024198144912719727, 0.02405171203613281, 0.02372403144836426, 0.024161216735839843, 0.02391347122192383, 0.02369126319885254, 0.023813119888305666, 0.023621631622314454, 0.023796735763549806, 0.024102912902832032, 0.02411110305786133, 0.02411315155029297, 0.02457804870605469, 0.024358911514282225, 0.024054784774780274, 0.023993343353271485, 0.024026111602783205, 0.024040447235107423, 0.023842815399169923, 0.024237056732177735, 0.02410086441040039, 0.023763967514038087, 0.024370176315307617, 0.02406399917602539, 0.02405068778991699, 0.024014848709106446, 0.024066047668457033, 0.024407039642333983, 0.024808448791503908, 0.0249487361907959, 0.024619007110595705, 0.023820287704467775, 0.025018367767333984, 0.024437759399414064, 0.024052736282348632, 0.02410188865661621, 
0.024105983734130858, 0.024078336715698243, 0.024038400650024414, 0.02413260841369629, 0.02386534309387207, 0.02369126319885254, 0.023644159317016602, 0.023637056350708008, 0.024056768417358397, 0.02407219123840332, 0.024029184341430664, 0.02386227226257324, 0.02373017692565918, 0.024031232833862305, 0.02408857536315918, 0.02425753593444824, 0.024130559921264647, 0.023813119888305666, 0.024073215484619142, 0.02445414352416992, 0.024435712814331056, 0.024162303924560546, 0.024081407546997072, 0.02392678451538086, 0.023977983474731446, 0.023809024810791016, 0.023996416091918944, 0.024151039123535157, 0.024779775619506835, 0.025488384246826173, 0.024270912170410157, 0.024558528900146485, 0.023793664932250977, 0.0241213436126709, 0.02388787269592285, 0.02373734474182129, 0.024267776489257813, 0.024186880111694335, 0.024026111602783205, 0.02414489555358887, 0.024070144653320313, 0.024052736282348632, 0.02397295951843262, 0.0240546875, 0.024054784774780274, 0.024030208587646484, 0.023736320495605468, 0.024378368377685547, 0.024169471740722655, 0.024114175796508788, 0.02407935905456543, 0.024147968292236328, 0.0248985595703125, 0.023979007720947267, 0.023848960876464844, 0.024005632400512695, 0.023791616439819335, 0.02405580711364746, 0.024056831359863282, 0.024163328170776367, 0.02443059158325195, 0.024329216003417968, 0.023954431533813478, 0.02386534309387207, 0.023788543701171876, 0.023931903839111326, 0.023973888397216796, 0.024771583557128905, 0.02411622428894043, 0.026198015213012696, 0.024990720748901366, 0.02433433532714844, 0.025003007888793945, 0.02514739227294922, 0.02428211212158203, 0.02411212730407715, 0.02410905647277832, 0.024936447143554686, 0.02494976043701172, 0.024276992797851563, 0.02412544059753418, 0.02406707191467285, 0.02408038330078125, 0.02388479995727539, 0.023875583648681642, 0.024139776229858398, 0.02410700798034668, 0.024070144653320313, 0.023846912384033202, 0.02431590461730957, 0.024056831359863282, 0.02408038330078125, 0.02408448028564453, 0.024167423248291017, 0.024626176834106447, 0.02486579132080078, 0.024649728775024415, 0.024777727127075197, 0.02447871971130371, 0.023748607635498048, 0.023968767166137696, 0.02456268882751465, 0.024247295379638673, 0.02389606475830078, 0.024057855606079103, 0.023995391845703123, 0.02409369659423828, 0.02406399917602539, 0.023973888397216796, 0.02372812843322754, 0.024185855865478514, 0.02431488037109375, 0.025198591232299804, 0.024886272430419923, 0.025026559829711914, 0.024989696502685548, 0.024436735153198243, 0.024201215744018553, 0.024123392105102538, 0.02410188865661621, 0.02407526397705078, 0.025434112548828124, 0.024638463973999023, 0.02410086441040039, 0.02416640090942383, 0.024190975189208985, 0.024612863540649413, 0.024929279327392577, 0.024968191146850584, 0.02490777587890625, 0.024943616867065428, 0.02409779167175293, 0.024020992279052734, 0.02373324775695801, 0.02412646484375, 0.02530816078186035, 0.024952831268310546, 0.02493337631225586, 0.02492416000366211, 0.024172544479370117, 0.02384588813781738, 0.024142847061157227, 0.02411520004272461, 0.02457606315612793, 0.024269760131835936, 0.024094751358032226, 0.024126432418823243, 0.024370176315307617, 0.024229888916015626, 0.024843263626098632, 0.02493132781982422, 0.024078336715698243, 0.024259584426879883, 0.023757823944091795, 0.023789567947387694, 0.02410905647277832, 0.02503167915344238, 0.025019392013549805, 0.024666112899780275, 0.02494054412841797, 0.024984575271606444, 0.024984575271606444, 0.024809471130371095, 0.024954879760742187, 0.02494976043701172, 
0.025021440505981447, 0.024955904006958008, 0.025828351974487306, 0.028232704162597655, 0.025358335494995117, 0.024991743087768553, 0.024820735931396484, 0.024739839553833007, 0.02488934326171875, 0.024377344131469726, 0.02404249572753906, 0.023965696334838867, 0.024023040771484375, 0.02394316864013672, 0.023996416091918944, 0.023948287963867186, 0.024027135848999022, 0.02469171142578125, 0.02492313575744629, 0.02533683204650879, 0.024913919448852538, 0.02512588882446289, 0.024459264755249024, 0.025240575790405274, 0.02574950408935547, 0.024620031356811522, 0.02476748847961426, 0.024382463455200197, 0.023738367080688477, 0.024593408584594727, 0.0248668155670166, 0.026738687515258788, 0.02607411193847656, 0.025183231353759765, 0.02511359977722168, 0.02457907295227051, 0.025068544387817384, 0.02508595275878906, 0.02476748847961426, 0.023757823944091795, 0.023739391326904297, 0.02447769546508789, 0.02490163230895996, 0.024896511077880858, 0.024851455688476562, 0.024985599517822265, 0.025076736450195314, 0.0249036808013916, 0.024828927993774414, 0.025205759048461913, 0.025069568634033205, 0.024959999084472655, 0.025015296936035155, 0.025038848876953124, 0.024663040161132813, 0.02465279960632324, 0.02455046463012695, 0.024961984634399415, 0.025247743606567383, 0.025803775787353517, 0.025145343780517578, 0.02530303955078125, 0.025044992446899415, 0.025014272689819338, 0.024945663452148437, 0.02471833610534668, 0.02477670478820801, 0.02526924705505371, 0.02494259262084961, 0.02510438346862793, 0.02527948760986328, 0.02533478355407715, 0.025043968200683595, 0.025212928771972655, 0.025176063537597656, 0.025057279586791992, 0.025000959396362304, 0.024829952239990235, 0.024441856384277344, 0.02467020797729492, 0.02503987121582031, 0.024944639205932616, 0.02407935905456543, 0.02406707191467285, 0.02488012886047363, 0.02446233558654785, 0.023960575103759766, 0.024038400650024414, 0.025027584075927735, 0.0250644474029541, 0.02510745620727539, 0.024977407455444335, 0.02492620849609375, 0.024951808929443358, 0.024992767333984374, 0.025010175704956054, 0.025027584075927735, 0.02486783981323242, 0.024959999084472655, 0.025043968200683595, 0.026007551193237305, 0.024985599517822265, 0.025029632568359376, 0.02407423973083496, 0.02473676872253418, 0.025176063537597656, 0.024951808929443358, 0.02478387260437012, 0.024947711944580078, 0.024756223678588866, 0.02503987121582031, 0.0249169921875, 0.024863744735717775, 0.023967744827270508, 0.023949312210083007, 0.023994367599487306, 0.023989248275756835, 0.024219648361206055, 0.02408448028564453, 0.023977983474731446, 0.023986175537109376, 0.024666112899780275, 0.024996864318847657, 0.02511564826965332, 0.024000511169433594, 0.02405887985229492, 0.02434252738952637, 0.025118783950805666, 0.025001920700073243, 0.024936447143554686, 0.025035776138305665, 0.024985599517822265, 0.02504806327819824, 0.025012224197387696, 0.024557567596435546, 0.025046016693115233, 0.02488832092285156, 0.025034751892089844, 0.02511769676208496, 0.025276416778564452, 0.025677824020385744, 0.025159679412841796, 0.025046016693115233, 0.025153535842895508, 0.02489753532409668, 0.02372403144836426, 0.02370150375366211, 0.024155136108398437, 0.02424934387207031, 0.02428108787536621, 0.024228864669799805, 0.025033727645874023, 0.025260032653808592, 0.02509619140625, 0.025043968200683595, 0.02552729606628418, 0.024980480194091798, 0.02511052894592285, 0.025111551284790038, 0.024731647491455077, 0.024135679244995118, 0.02469478416442871, 0.025054208755493163, 0.02474291229248047, 
0.025165824890136718, 0.02509516716003418, 0.02503987121582031, 0.02524569511413574, 0.02508598327636719, 0.02501628875732422, 0.02511052894592285, 0.025136127471923828, 0.02524569511413574, 0.025011199951171875, 0.02452070426940918, 0.025009151458740234, 0.024954879760742187, 0.024969215393066405, 0.024976383209228514, 0.02493337631225586, 0.02570035171508789, 0.024609792709350587, 0.024721408843994142, 0.024972288131713868, 0.024705024719238283, 0.023747583389282227, 0.024040447235107423, 0.023792640686035156, 0.02413670349121094, 0.023998464584350586, 0.023743488311767577, 0.02389606475830078, 0.02369228744506836, 0.02391347122192383, 0.02408755111694336, 0.02394316864013672, 0.02414080047607422, 0.024171520233154296, 0.023999488830566407, 0.023988224029541014, 0.02409574317932129, 0.02409881591796875, 0.02406809616088867, 0.024851455688476562, 0.024912895202636717, 0.024818687438964843, 0.024987648010253907, 0.024894464492797853, 0.024083488464355467, 0.02408239936828613, 0.023784448623657226, 0.024013824462890625, 0.02408755111694336, 0.02405068778991699, 0.02409062385559082, 0.02385408020019531, 0.0245166072845459, 0.025019392013549805, 0.024809471130371095, 0.024796159744262695, 0.02493951988220215, 0.024621055603027343, 0.02557439994812012, 0.025251840591430662, 0.024975360870361327, 0.024978431701660156, 0.024813568115234375, 0.02487398338317871, 0.024944639205932616, 0.02407935905456543, 0.024041471481323243, 0.024062976837158204, 0.024049663543701173, 0.023842815399169923, 0.023768064498901367, 0.024062976837158204, 0.024007680892944337, 0.02412031936645508, 0.02412748718261719, 0.024143871307373048, 0.0241080322265625, 0.024105983734130858, 0.02409676742553711]",tokens/s,40.884750430444925,,,,,,,, -4bit-awq-gemv-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,meta-llama/Meta-Llama-3-8B,meta-llama/Meta-Llama-3-8B,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run - report = scenario.run(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 105, in run - _ = backend.generate(self.inputs, self.config.generate_kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context - return func(*args, **kwargs) - File 
""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 400, in generate - return self.pretrained_model.generate(**inputs, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context - return func(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 1914, in generate - result = self._sample( - File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2651, in _sample - outputs = self( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1174, in forward - outputs = self.model( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 978, in forward - layer_outputs = decoder_layer( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 718, in forward - hidden_states, self_attn_weights, present_key_value = self.self_attn( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 414, in forward - query_states = self.q_proj(hidden_states) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context - return func(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/gemv.py"", line 162, in forward - assert AWQ_INSTALLED, ( -AssertionError: AWQ kernels could not be loaded. 
Please install them from https://github.com/casper-hansen/AutoAWQ_kernels - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemv-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,microsoft/phi-1_5,microsoft/phi-1_5,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run - report = scenario.run(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 105, in run - _ = backend.generate(self.inputs, self.config.generate_kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context - return func(*args, **kwargs) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 400, in generate - return self.pretrained_model.generate(**inputs, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context - return func(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 1914, in generate - result = self._sample( - File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2651, in _sample - outputs = self( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/phi/modeling_phi.py"", line 1268, in forward - outputs = self.model( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/phi/modeling_phi.py"", line 1064, in forward - layer_outputs = decoder_layer( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File 
""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/phi/modeling_phi.py"", line 804, in forward - attn_outputs, self_attn_weights, present_key_value = self.self_attn( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/phi/modeling_phi.py"", line 435, in forward - query_states = self.q_proj(hidden_states) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context - return func(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/gemv.py"", line 162, in forward - assert AWQ_INSTALLED, ( -AssertionError: AWQ kernels could not be loaded. Please install them from https://github.com/casper-hansen/AutoAWQ_kernels - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemv-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,togethercomputer/RedPajama-INCITE-Base-3B-v1,togethercomputer/RedPajama-INCITE-Base-3B-v1,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run - report = scenario.run(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 105, in run - _ = backend.generate(self.inputs, self.config.generate_kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context - return func(*args, **kwargs) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 400, in generate - return self.pretrained_model.generate(**inputs, **kwargs) - File 
""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context - return func(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 1914, in generate - result = self._sample( - File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2651, in _sample - outputs = self( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1097, in forward - outputs = self.gpt_neox( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 988, in forward - outputs = layer( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 753, in forward - attention_layer_outputs = self.attention( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 339, in forward - query, key, value, present = self._attn_projections_and_rope( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 224, in _attn_projections_and_rope - qkv = self.query_key_value(hidden_states) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context - return func(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/gemv.py"", line 162, in forward - assert AWQ_INSTALLED, ( -AssertionError: AWQ kernels could not be loaded. 
Please install them from https://github.com/casper-hansen/AutoAWQ_kernels - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemv-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,Qwen/Qwen2-beta-72B,Qwen/Qwen2-beta-72B,cuda,0,42,,,True,,,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run - report = scenario.run(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run - self.run_model_loading_tracking(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking - backend.load() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load - self.load_transformers_model() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model - self.load_transformers_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights - self.load_transformers_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained - self.pretrained_model = self.automodel_loader.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4034, in from_pretrained - dispatch_model(model, **device_map_kwargs) - File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 494, in dispatch_model - model.to(device) - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 2905, in to - return super().to(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1174, in to - return self._apply(convert) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply - module._apply(fn) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply - module._apply(fn) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply - 
module._apply(fn) - [Previous line repeated 2 more times] - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 854, in _apply - self._buffers[key] = fn(buf) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1160, in convert - return t.to( -torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 96.00 MiB. GPU 0 has a total capacity of 22.18 GiB of which 68.50 MiB is free. Process 128115 has 22.11 GiB memory in use. Of the allocated memory 21.86 GiB is allocated by PyTorch, and 10.74 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) - -",qwen2,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemv-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,meta-llama/Llama-2-70b-hf,meta-llama/Llama-2-70b-hf,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - self.load_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3904, in from_pretrained - dispatch_model(model, **device_map_kwargs) - File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 489, in dispatch_model - model.to(device) - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 2796, in to - return super().to(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1173, in to - 
return self._apply(convert) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 779, in _apply - module._apply(fn) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 779, in _apply - module._apply(fn) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 779, in _apply - module._apply(fn) - [Previous line repeated 2 more times] - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 853, in _apply - self._buffers[key] = fn(buf) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1159, in convert - return t.to( -torch.cuda.OutOfMemoryError: CUDA out of memory. Tried to allocate 112.00 MiB. GPU - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemv-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,meta-llama/Llama-2-7b-hf,meta-llama/Llama-2-7b-hf,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run - report = scenario.run(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 105, in run - _ = backend.generate(self.inputs, self.config.generate_kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context - return func(*args, **kwargs) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 400, in generate - return self.pretrained_model.generate(**inputs, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context - return func(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 1914, in generate - result = self._sample( - File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2651, in _sample - outputs = self( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 
1174, in forward - outputs = self.model( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 978, in forward - layer_outputs = decoder_layer( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 718, in forward - hidden_states, self_attn_weights, present_key_value = self.self_attn( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 414, in forward - query_states = self.q_proj(hidden_states) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context - return func(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/gemv.py"", line 162, in forward - assert AWQ_INSTALLED, ( -AssertionError: AWQ kernels could not be loaded. 
Please install them from https://github.com/casper-hansen/AutoAWQ_kernels - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemv-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,r,r,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 304, in hf_raise_for_status - response.raise_for_status() - File ""/usr/local/lib/python3.10/dist-packages/requests/models.py"", line 1024, in raise_for_status - raise HTTPError(http_error_msg, response=self) -requests.exceptions.HTTPError: 404 Client Error: Not Found for url: https://huggingface.co/r/resolve/main/config.json - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1221, in hf_hub_download - return _hf_hub_download_to_cache_dir( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1325, in _hf_hub_download_to_cache_dir - _raise_on_head_call_error(head_call_error, force_download, local_files_only) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1823, in _raise_on_head_call_error - raise head_call_error - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1722, in _get_metadata_or_catch_error - metadata = get_hf_file_metadata(url=url, proxies=proxies, timeout=etag_timeout, headers=headers) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1645, in get_hf_file_metadata - r = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 372, in _request_wrapper - response = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 396, in _request_wrapper - hf_raise_for_status(response) - File 
""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status - raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-66949133-1763ea6738638c21768845eb;c6347db2-66bb-45eb-b8b7-e60822dbdd0a) - -Repository Not Found for url: https://huggingface.co/r/resolve/main/config.json. -Please make sure you specified the correct `repo_id` and `repo_type`. -If you are trying to access a private or gated repo, make sure you are authenticated. - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 425, in cached_file - raise EnvironmentError( -OSError: r is not a local folder and is not a valid model identifier listed on 'https://huggingface.co/models' -If this is a private repository, make sure to pass a token having permission to this repo either by logging in with `huggingface-cli login` or by passing `token=` - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemv-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,google/recurrentgemma-7b,google/recurrentgemma-7b,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File 
""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 304, in hf_raise_for_status - response.raise_for_status() - File ""/usr/local/lib/python3.10/dist-packages/requests/models.py"", line 1024, in raise_for_status - raise HTTPError(http_error_msg, response=self) -requests.exceptions.HTTPError: 404 Client Error: Not Found for url: https://huggingface.co/google/recurrentgemma-7b/resolve/main/config.json - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1221, in hf_hub_download - return _hf_hub_download_to_cache_dir( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1325, in _hf_hub_download_to_cache_dir - _raise_on_head_call_error(head_call_error, force_download, local_files_only) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1823, in _raise_on_head_call_error - raise head_call_error - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1722, in _get_metadata_or_catch_error - metadata = get_hf_file_metadata(url=url, proxies=proxies, timeout=etag_timeout, headers=headers) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1645, in get_hf_file_metadata - r = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 372, in _request_wrapper - response = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 396, in _request_wrapper - hf_raise_for_status(response) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status - raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-66948272-5ba0203a4f38e7e2422d20e0;37272a59-d7ff-496c-ab1f-986badf65d9c) - -Repository Not Found for url: https://huggingface.co/google/recurrentgemma-7b/resolve/main/config.json. -Please make sure you specified the correct `repo_id` and `repo_type`. -If you are trying to access a private or gated repo, make sure you are authenticated. 
- -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 425, in cached_file - raise EnvironmentError( -OSError: google/recurrentgemma-7b is not a local folder and is not a valid model identifier listed on 'https://huggingface.co/models' -If this is a private repository, make sure to pass a token having permission to this repo either by logging in with `huggingface-cli login` or by passing `token=` - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemv-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,huggyllama/llama-13b,huggyllama/llama-13b,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run - report = scenario.run(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 105, in run - _ = backend.generate(self.inputs, 
self.config.generate_kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context - return func(*args, **kwargs) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 400, in generate - return self.pretrained_model.generate(**inputs, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context - return func(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 1914, in generate - result = self._sample( - File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2651, in _sample - outputs = self( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1174, in forward - outputs = self.model( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 978, in forward - layer_outputs = decoder_layer( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 718, in forward - hidden_states, self_attn_weights, present_key_value = self.self_attn( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 414, in forward - query_states = self.q_proj(hidden_states) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context - return func(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/gemv.py"", line 162, in forward - assert AWQ_INSTALLED, ( -AssertionError: AWQ kernels could not be loaded. 
Please install them from https://github.com/casper-hansen/AutoAWQ_kernels - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemv-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,Deci/DeciCoder-1b,Deci/DeciCoder-1b,cuda,0,42,,,True,,,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run - report = scenario.run(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run - self.run_model_loading_tracking(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking - backend.load() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load - self.load_transformers_model() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model - self.load_transformers_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights - self.load_transformers_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained - self.pretrained_model = self.automodel_loader.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 559, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained - model = cls(config, *model_args, **model_kwargs) - File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 248, in __init__ - self.model = DeciCoderModel(config) - File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 215, in __init__ - self.layers = nn.ModuleList([DeciCoderDecoderLayer(config) for _ in range(config.num_hidden_layers)]) - File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 215, in - self.layers = nn.ModuleList([DeciCoderDecoderLayer(config) for _ in 
range(config.num_hidden_layers)]) - File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 181, in __init__ - self.self_attn = DeciCoderAttention(config=config) - File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 54, in __init__ - self._init_rope() - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1729, in __getattr__ - raise AttributeError(f""'{type(self).__name__}' object has no attribute '{name}'"") -AttributeError: 'DeciCoderAttention' object has no attribute '_init_rope' - -",llama,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemv-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,google/recurrentgemma-2b,google/recurrentgemma-2b,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 304, in hf_raise_for_status - response.raise_for_status() - File ""/usr/local/lib/python3.10/dist-packages/requests/models.py"", line 1024, in raise_for_status - raise HTTPError(http_error_msg, response=self) -requests.exceptions.HTTPError: 403 Client Error: Forbidden for url: https://huggingface.co/google/recurrentgemma-2b/resolve/main/config.json - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1722, in _get_metadata_or_catch_error - metadata = get_hf_file_metadata(url=url, proxies=proxies, timeout=etag_timeout, headers=headers) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1645, in get_hf_file_metadata - r = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 372, in _request_wrapper - response = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 396, in _request_wrapper - hf_raise_for_status(response) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 367, in hf_raise_for_status - raise HfHubHTTPError(message, 
response=response) from e -huggingface_hub.utils._errors.HfHubHTTPError: (Request ID: Root=1-669481f8-2e51f5f66252b0f32e379376;c3945018-fff2-4be5-8f45-759d29442b9e) - -403 Forbidden: Please enable access to public gated repositories in your fine-grained token settings to view this repository.. -Cannot access content at: https://huggingface.co/google/recurrentgemma-2b/resolve/main/config.json. -If you are trying to create or update content,make sure you have a token with the `write` role. - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1221, in hf_hub_download - return _hf_hub_download_to_cache_dir( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1325, in _hf_hub_download_to_cache_dir - _raise_on_head_call_error(head_call_error, force_download, local_files_only) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1826, in _raise_on_head_call_error - raise LocalEntryNotFoundError( -huggingface_hub.utils._errors.LocalEntryNotFoundError: An error happened while trying to locate the file on the Hub and we cannot find the requested files in the local cache. Please check your connection and try again or make sure your Internet connection is on. - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 445, in cached_file - raise EnvironmentError( -OSError: We couldn't connect to 'https://huggingface.co' to load this file, couldn't find it in the cached files and it looks like google/recurrentgemma-2b is not the path to a directory containing a file named config.json. 
-Checkout your internet connection or see how to run the library in offline mode at 'https://huggingface.co/docs/transformers/installation#offline-mode'. - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemv-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,v,v,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 304, in hf_raise_for_status - response.raise_for_status() - File ""/usr/local/lib/python3.10/dist-packages/requests/models.py"", line 1024, in raise_for_status - raise HTTPError(http_error_msg, response=self) -requests.exceptions.HTTPError: 404 Client Error: Not Found for url: https://huggingface.co/v/resolve/main/config.json - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1221, in hf_hub_download - return _hf_hub_download_to_cache_dir( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1325, in _hf_hub_download_to_cache_dir - _raise_on_head_call_error(head_call_error, force_download, local_files_only) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1823, in _raise_on_head_call_error - raise head_call_error - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1722, in _get_metadata_or_catch_error - metadata = get_hf_file_metadata(url=url, proxies=proxies, timeout=etag_timeout, headers=headers) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1645, in get_hf_file_metadata - r = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 372, in _request_wrapper - response = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 396, in _request_wrapper - 
hf_raise_for_status(response) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status - raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-669494ad-6d9704883731337648b27f8a;e3cad4c8-cb4e-4089-83b2-bd05e47a0d0a) - -Repository Not Found for url: https://huggingface.co/v/resolve/main/config.json. -Please make sure you specified the correct `repo_id` and `repo_type`. -If you are trying to access a private or gated repo, make sure you are authenticated. - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 425, in cached_file - raise EnvironmentError( -OSError: v is not a local folder and is not a valid model identifier listed on 'https://huggingface.co/models' -If this is a private repository, make sure to pass a token having permission to this repo either by logging in with `huggingface-cli login` or by passing `token=` - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemv-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,EleutherAI/pythia-2.7b,EleutherAI/pythia-2.7b,cuda,0,42,,,True,,,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA 
A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,gpt_neox,MB,2219.29472,2693.267456,0.0,2107.63776,1984.899072,s,1,8.07941796875,8.07941796875,0.0,8.07941796875,8.07941796875,8.07941796875,8.07941796875,[8.07941796875],,kWh,1.5028711875011898e-05,8.216453060667464e-06,1.993140483402822e-05,4.3176569769707585e-05,,MB,2296.000512,3020.423168,0.0,2373.976064,2248.105984,s,10,0.5104333076477051,0.05104333076477051,2.7234986790954617e-05,0.05104279899597168,0.05107532196044922,0.051079325103759766,0.0510825276184082,"[0.05098089599609375, 0.05103238296508789, 0.051039390563964844, 0.051074432373046874, 0.05104620742797852, 0.05106159973144531, 0.05102822494506836, 0.05105452728271485, 0.05103231811523438, 0.05108332824707031]",tokens/s,5015.346690045708,kWh,6.035149534226233e-07,3.306935201467783e-07,3.0965004930713578e-06,4.03070896664076e-06,tokens/kWh,63512399.95711062,MB,2306.875392,3104.309248,0.0,2457.862144,2341.374976,s,10,14.956755004882812,1.4956755004882811,0.004073202479353452,1.4950446777343749,1.501366223144531,1.5021018127441406,1.5026902844238281,"[1.4986026611328125, 1.50283740234375, 1.48974072265625, 1.4960809326171876, 1.4940084228515624, 1.4967984619140624, 1.493192626953125, 1.5012027587890624, 1.490566650390625, 1.493724365234375]",tokens/s,42.12143608652605,kWh,1.866631163782661e-05,1.0229210070554808e-05,4.110086819812981e-05,6.999638990651122e-05,tokens/kWh,900046.4178816113,,s,630,14.95447038841248,0.02373725458478171,0.0003560359478983154,0.023631360054016115,0.02425487289428711,0.024484096813201905,0.02513468450546265,"[0.02387455940246582, 0.023734272003173826, 0.023976959228515626, 0.023739391326904297, 0.023609344482421874, 0.023665664672851562, 0.023649280548095702, 0.023781375885009767, 0.02348646354675293, 0.023580671310424805, 0.023548927307128906, 0.02364825630187988, 0.024044544219970702, 0.024534015655517577, 0.02430771255493164, 0.023925760269165038, 0.023639039993286134, 0.023610368728637695, 0.023464960098266603, 0.023618560791015625, 0.02350592041015625, 0.023610368728637695, 0.023706623077392578, 0.0237127685546875, 0.023646207809448243, 0.023628799438476563, 0.023588863372802735, 0.02393497657775879, 0.025391103744506836, 0.02484121513366699, 0.024518655776977538, 0.024370176315307617, 0.02411008071899414, 0.024369152069091796, 0.024429567337036134, 0.0236759033203125, 0.023410688400268553, 0.023377920150756838, 0.023592960357666014, 0.023557119369506836, 0.023540735244750977, 0.023649280548095702, 0.023649280548095702, 0.02366054344177246, 0.023330816268920897, 0.024204288482666016, 0.02406399917602539, 0.02367283248901367, 0.023610368728637695, 0.023571456909179687, 0.023478271484375, 0.023645183563232423, 0.023610368728637695, 0.023607295989990236, 0.02366054344177246, 0.023677951812744142, 0.023604223251342774, 0.023645183563232423, 0.023657472610473632, 0.023602176666259765, 0.023640064239501952, 0.02352230453491211, 0.023634944915771484, 0.023650304794311523, 0.02473369598388672, 0.023964672088623046, 0.023621631622314454, 0.024383487701416014, 0.023764991760253908, 0.023842815399169923, 0.02391449546813965, 0.023242752075195314, 0.023608320236206053, 0.02327347183227539, 0.024238079071044923, 0.023908351898193358, 0.023916543960571288, 0.023965696334838867, 0.023545856475830077, 0.023561216354370116, 0.02348441505432129, 0.024385536193847656, 0.024730623245239256, 0.02390323257446289, 0.023556095123291015, 0.024235008239746093, 0.02406809616088867, 0.02414182472229004, 0.023979007720947267, 0.023588863372802735, 
0.023423999786376954, 0.023542783737182618, 0.023517183303833008, 0.02349363136291504, 0.023780351638793946, 0.023621631622314454, 0.023567359924316408, 0.023638015747070314, 0.023572479248046875, 0.023572479248046875, 0.02364723205566406, 0.02348236846923828, 0.023572479248046875, 0.024632320404052735, 0.02531942367553711, 0.024594432830810548, 0.02429542350769043, 0.025590784072875978, 0.024653823852539062, 0.023998464584350586, 0.023580671310424805, 0.023395328521728515, 0.02352742385864258, 0.023549951553344727, 0.024254463195800782, 0.02427903938293457, 0.023591936111450194, 0.023537664413452147, 0.023583744049072267, 0.023611391067504883, 0.02331648063659668, 0.023339008331298827, 0.023546880722045898, 0.023540735244750977, 0.023653375625610353, 0.023584768295288085, 0.023762943267822266, 0.02366361618041992, 0.023602176666259765, 0.023517183303833008, 0.02354380798339844, 0.023593984603881835, 0.02351820755004883, 0.023557119369506836, 0.023625728607177734, 0.023617536544799804, 0.023631872177124022, 0.02372096061706543, 0.023574527740478517, 0.023625728607177734, 0.023624704360961913, 0.02345369529724121, 0.023629823684692384, 0.023769088745117187, 0.023585792541503905, 0.023593984603881835, 0.023596031188964844, 0.023629823684692384, 0.023585792541503905, 0.023607295989990236, 0.023572479248046875, 0.02366054344177246, 0.0234833927154541, 0.023633920669555664, 0.023589887619018556, 0.02449612808227539, 0.02332159996032715, 0.023375871658325196, 0.023578624725341796, 0.023644159317016602, 0.023572479248046875, 0.023627775192260742, 0.023428096771240234, 0.023632896423339843, 0.023646207809448243, 0.023630847930908205, 0.023953407287597657, 0.023811071395874024, 0.0237076473236084, 0.023644159317016602, 0.023617536544799804, 0.02366464042663574, 0.023591936111450194, 0.023567359924316408, 0.023610368728637695, 0.023673856735229492, 0.023815168380737304, 0.02369024085998535, 0.023629823684692384, 0.02370457649230957, 0.023658496856689453, 0.023661567687988282, 0.023645183563232423, 0.023565311431884766, 0.023624704360961913, 0.02370355224609375, 0.02367180824279785, 0.02370969581604004, 0.024044544219970702, 0.02366361618041992, 0.023588863372802735, 0.023665664672851562, 0.02352230453491211, 0.023646207809448243, 0.023662591934204103, 0.02367692756652832, 0.023590911865234376, 0.023625728607177734, 0.023658496856689453, 0.02355200004577637, 0.023600128173828124, 0.023662591934204103, 0.02351923179626465, 0.02364723205566406, 0.023649280548095702, 0.023640064239501952, 0.023576576232910155, 0.0241582088470459, 0.023871488571166992, 0.02367283248901367, 0.023577600479125976, 0.023610368728637695, 0.02369024085998535, 0.023565311431884766, 0.023597055435180665, 0.023654399871826173, 0.02368409538269043, 0.023650304794311523, 0.023634944915771484, 0.023617536544799804, 0.023604223251342774, 0.023609344482421874, 0.02345062446594238, 0.023331840515136718, 0.02350284767150879, 0.023801855087280274, 0.023686143875122072, 0.023617536544799804, 0.02375372886657715, 0.023824384689331055, 0.024285184860229493, 0.02551603126525879, 0.024475648880004884, 0.0242739200592041, 0.024662015914916992, 0.024169471740722655, 0.024040447235107423, 0.024572927474975585, 0.023813119888305666, 0.0234967041015625, 0.023604223251342774, 0.02371788787841797, 0.023829504013061522, 0.02353459167480469, 0.02350182342529297, 0.023152639389038086, 0.023418880462646483, 0.023228416442871092, 0.023417856216430662, 0.02435686492919922, 0.023948287963867186, 0.023521280288696288, 0.02350592041015625, 
0.023096319198608398, 0.023550975799560548, 0.023610368728637695, 0.024231935501098634, 0.023802879333496094, 0.02370355224609375, 0.023634944915771484, 0.023459840774536132, 0.02328780746459961, 0.023368703842163087, 0.023669759750366212, 0.023636991500854493, 0.023649280548095702, 0.02369945526123047, 0.023585792541503905, 0.023565311431884766, 0.02365542411804199, 0.023613439559936524, 0.023638015747070314, 0.02364313507080078, 0.023609344482421874, 0.023718912124633788, 0.02405068778991699, 0.023940095901489256, 0.02366873550415039, 0.023581695556640626, 0.02367487907409668, 0.023598079681396485, 0.023669759750366212, 0.023568384170532225, 0.02366873550415039, 0.023564287185668945, 0.02371993637084961, 0.023752704620361328, 0.02368716812133789, 0.023612415313720703, 0.024160255432128908, 0.023879680633544922, 0.023606271743774415, 0.023645183563232423, 0.023793664932250977, 0.023622655868530275, 0.023583744049072267, 0.023568384170532225, 0.02370560073852539, 0.023813119888305666, 0.025040895462036132, 0.023760896682739258, 0.02370047950744629, 0.023612415313720703, 0.0236810245513916, 0.023599103927612306, 0.023582719802856447, 0.02375372886657715, 0.0243507194519043, 0.024292352676391602, 0.02388991928100586, 0.023658496856689453, 0.02369536018371582, 0.023734272003173826, 0.02369331169128418, 0.023666688919067383, 0.023589887619018556, 0.023628799438476563, 0.023546880722045898, 0.023611391067504883, 0.023601152420043944, 0.023669759750366212, 0.023590911865234376, 0.023592960357666014, 0.02348748779296875, 0.023545856475830077, 0.0235284481048584, 0.023646207809448243, 0.023558143615722657, 0.023640064239501952, 0.024027135848999022, 0.023553024291992186, 0.023649280548095702, 0.02327347183227539, 0.023631872177124022, 0.023610368728637695, 0.023432191848754884, 0.023561216354370116, 0.02365132713317871, 0.023617536544799804, 0.024491008758544923, 0.025397247314453125, 0.024171520233154296, 0.023841791152954102, 0.02353459167480469, 0.02367692756652832, 0.023524351119995117, 0.023599103927612306, 0.023563264846801758, 0.023847936630249023, 0.023576576232910155, 0.02434048080444336, 0.023806976318359374, 0.02365235137939453, 0.025172992706298827, 0.02390630340576172, 0.0237260799407959, 0.023626752853393555, 0.023619583129882812, 0.023682048797607422, 0.023666688919067383, 0.023940095901489256, 0.024199167251586915, 0.023772159576416017, 0.02352230453491211, 0.023558143615722657, 0.023590911865234376, 0.023666688919067383, 0.023600128173828124, 0.023706623077392578, 0.02370150375366211, 0.02365951919555664, 0.02364723205566406, 0.024769535064697267, 0.024011775970458983, 0.02369536018371582, 0.023631872177124022, 0.023642112731933593, 0.023584768295288085, 0.023734272003173826, 0.023971839904785155, 0.023624704360961913, 0.02349875259399414, 0.023624704360961913, 0.023612415313720703, 0.023601152420043944, 0.023433216094970705, 0.023508991241455078, 0.023579647064208984, 0.023540735244750977, 0.023544832229614256, 0.023609344482421874, 0.023603200912475586, 0.023626752853393555, 0.02354380798339844, 0.023631872177124022, 0.023590911865234376, 0.023386112213134767, 0.023570432662963867, 0.023576576232910155, 0.023758848190307616, 0.02370150375366211, 0.023677951812744142, 0.02365235137939453, 0.023649280548095702, 0.023550975799560548, 0.023570432662963867, 0.023355392456054686, 0.02347315216064453, 0.023586816787719726, 0.023771135330200196, 0.023596031188964844, 0.023963647842407225, 0.023541759490966797, 0.023738367080688477, 0.02349056053161621, 0.023572479248046875, 
0.02349465560913086, 0.023427072525024413, 0.023547903060913086, 0.023472127914428712, 0.023423999786376954, 0.02472652816772461, 0.02451251220703125, 0.02429849624633789, 0.02333286476135254, 0.023390207290649414, 0.023595008850097656, 0.02330931282043457, 0.023395328521728515, 0.023609344482421874, 0.023588863372802735, 0.023829504013061522, 0.023571456909179687, 0.02364825630187988, 0.023564287185668945, 0.02395136070251465, 0.024632320404052735, 0.024937471389770507, 0.023835647583007814, 0.024818687438964843, 0.023991296768188477, 0.02436403274536133, 0.02351411247253418, 0.023567359924316408, 0.02365235137939453, 0.02349465560913086, 0.023204864501953124, 0.023538688659667968, 0.023629823684692384, 0.023763967514038087, 0.023758848190307616, 0.02386534309387207, 0.024422399520874022, 0.02433024024963379, 0.023580671310424805, 0.02351513671875, 0.02352025604248047, 0.023949312210083007, 0.024267776489257813, 0.023547903060913086, 0.024052736282348632, 0.02444697570800781, 0.0241582088470459, 0.02366361618041992, 0.02391551971435547, 0.02355200004577637, 0.023872512817382813, 0.02386636734008789, 0.023631872177124022, 0.023567359924316408, 0.02385408020019531, 0.023525375366210938, 0.023613439559936524, 0.023557119369506836, 0.02364313507080078, 0.023532543182373047, 0.023583744049072267, 0.02411929512023926, 0.024012800216674804, 0.02443059158325195, 0.024218624114990234, 0.02431385612487793, 0.02408755111694336, 0.02430668830871582, 0.023855104446411132, 0.023644159317016602, 0.023786495208740235, 0.023582719802856447, 0.023576576232910155, 0.02370150375366211, 0.024612863540649413, 0.02365132713317871, 0.023595008850097656, 0.023979007720947267, 0.024384511947631835, 0.02369536018371582, 0.02389504051208496, 0.023629823684692384, 0.02365235137939453, 0.02346598434448242, 0.023650304794311523, 0.023617536544799804, 0.02407935905456543, 0.02390630340576172, 0.023611391067504883, 0.023243776321411135, 0.023442432403564452, 0.023405567169189453, 0.023187456130981447, 0.023189504623413085, 0.02332159996032715, 0.02342911911010742, 0.023193599700927735, 0.023262208938598632, 0.0237127685546875, 0.026452991485595705, 0.02478489685058594, 0.023967744827270508, 0.02348543930053711, 0.023405567169189453, 0.023560192108154295, 0.023653375625610353, 0.023480319976806642, 0.02365644836425781, 0.023524351119995117, 0.023567359924316408, 0.023566335678100587, 0.02367897605895996, 0.02372198486328125, 0.023622655868530275, 0.02351820755004883, 0.02366361618041992, 0.023504896163940428, 0.023636991500854493, 0.024022016525268555, 0.023738367080688477, 0.023578624725341796, 0.023631872177124022, 0.023565311431884766, 0.023568384170532225, 0.023587839126586914, 0.023633920669555664, 0.023607295989990236, 0.023750656127929686, 0.02370457649230957, 0.023758848190307616, 0.023581695556640626, 0.02371583938598633, 0.023605247497558594, 0.024571903228759767, 0.023559167861938478, 0.023653375625610353, 0.023618560791015625, 0.023608320236206053, 0.02349260711669922, 0.023621631622314454, 0.023564287185668945, 0.023582719802856447, 0.023561216354370116, 0.023611391067504883, 0.023604223251342774, 0.02352025604248047, 0.023639039993286134, 0.0237127685546875, 0.023623680114746092, 0.023642112731933593, 0.023654399871826173, 0.023776256561279296, 0.023669759750366212, 0.023464960098266603, 0.024556543350219725, 0.02449305534362793, 0.02434662437438965, 0.02411622428894043, 0.023579647064208984, 0.02368819236755371, 0.023640064239501952, 0.02348543930053711, 0.023517183303833008, 0.02351411247253418, 
0.023547903060913086, 0.02352742385864258, 0.023557119369506836, 0.023793664932250977, 0.023612415313720703, 0.023558143615722657, 0.023561216354370116, 0.023521280288696288, 0.023573503494262696, 0.023537664413452147, 0.023410688400268553, 0.023565311431884766, 0.023650304794311523, 0.023561216354370116, 0.02352332878112793, 0.02354380798339844, 0.02353561592102051, 0.02351103973388672, 0.02350284767150879, 0.0235100154876709, 0.023563264846801758, 0.023431167602539063, 0.023649280548095702, 0.023639039993286134, 0.023564287185668945, 0.02353459167480469, 0.023565311431884766, 0.023638015747070314, 0.023592960357666014, 0.0239052791595459, 0.02434662437438965, 0.023748607635498048, 0.02434048080444336, 0.02431795120239258, 0.024258560180664062, 0.02428006362915039, 0.02369331169128418, 0.023585792541503905, 0.024498176574707032, 0.023954431533813478, 0.023536640167236327, 0.023517183303833008, 0.02348543930053711, 0.023560192108154295, 0.023536640167236327, 0.02347724723815918, 0.02351923179626465, 0.023397375106811523, 0.023793664932250977, 0.023618560791015625]",tokens/s,42.12787104036514,,,,,,,, -4bit-awq-gemv-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,Qwen/Qwen-7B,Qwen/Qwen-7B,cuda,0,42,,,True,,,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run - report = scenario.run(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run - self.run_model_loading_tracking(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking - backend.load() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load - self.load_transformers_model() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model - self.load_transformers_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights - self.load_transformers_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained - self.pretrained_model = self.automodel_loader.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 551, in 
from_pretrained - model_class = get_class_from_dynamic_module( - File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 502, in get_class_from_dynamic_module - final_module = get_cached_module_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 327, in get_cached_module_file - modules_needed = check_imports(resolved_module_file) - File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 182, in check_imports - raise ImportError( -ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. Run `pip install transformers_stream_generator` - -",qwen,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemv-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,microsoft/rho-math-1b-v0.1,microsoft/rho-math-1b-v0.1,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run - report = scenario.run(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 105, in run - _ = backend.generate(self.inputs, self.config.generate_kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context - return func(*args, **kwargs) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 400, in generate - return self.pretrained_model.generate(**inputs, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context - return func(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 1914, in generate - result = self._sample( - File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2651, in _sample - outputs = self( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1174, in 
forward - outputs = self.model( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 978, in forward - layer_outputs = decoder_layer( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 718, in forward - hidden_states, self_attn_weights, present_key_value = self.self_attn( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 414, in forward - query_states = self.q_proj(hidden_states) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context - return func(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/gemv.py"", line 162, in forward - assert AWQ_INSTALLED, ( -AssertionError: AWQ kernels could not be loaded. 
Please install them from https://github.com/casper-hansen/AutoAWQ_kernels - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemv-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,01-ai/Yi-6B,01-ai/Yi-6B,cuda,0,42,,,True,,,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,llama,MB,3571.769344,4698.144768,0.0,4112.515072,3976.487424,s,1,9.855740234375,9.855740234375,0.0,9.855740234375,9.855740234375,9.855740234375,9.855740234375,[9.855740234375],,kWh,3.6308621462498696e-05,1.9884481013586087e-05,5.1292541034000244e-05,0.00010748564351008503,,MB,1635.278848,4928.831488,0.0,4282.384384,4102.201856,s,10,0.9053240051269531,0.09053240051269532,0.00010158058827284038,0.09050239944458008,0.0906130500793457,0.09071006278991699,0.09078767295837402,"[0.09051849365234375, 0.09050972747802734, 0.09080707550048828, 0.09049507141113282, 0.09046514892578125, 0.090560546875, 0.09059149169921875, 0.09044169616699219, 0.09045875549316407, 0.0904759979248047]",tokens/s,2827.7169118486067,kWh,1.0700106918919092e-06,5.862005205422938e-07,5.7939260565585405e-06,7.450137268992744e-06,tokens/kWh,34361782.978880756,MB,1693.261824,4937.220096,0.0,4290.772992,4102.204416,s,10,17.299482543945313,1.7299482543945313,0.007222546225050691,1.7299981079101563,1.7388112182617186,1.7398288269042967,1.7406429138183592,"[1.7270947265625, 1.722298828125, 1.7330452880859375, 1.72133154296875, 1.7374984130859374, 1.7385850830078124, 1.7329014892578125, 1.740846435546875, 1.7203094482421875, 1.7255712890625]",tokens/s,36.41727423925146,kWh,2.0392048094358107e-05,1.117528544164445e-05,5.345984356864157e-05,8.502717710464414e-05,tokens/kWh,740939.5695033486,,s,630,17.29753907966614,0.027456411237565297,0.0004766836156949038,0.02727833652496338,0.028189080810546874,0.028405248546600342,0.029300120906829843,"[0.027303936004638672, 0.02726092720031738, 0.027196416854858397, 0.027427839279174804, 0.027347967147827147, 0.027275264739990233, 0.027266080856323243, 0.02720867156982422, 0.027274240493774415, 0.028618751525878908, 0.027238399505615234, 0.027133951187133788, 0.027501567840576172, 0.02728550338745117, 0.027244543075561522, 0.02730905532836914, 0.02816716766357422, 0.027197439193725585, 0.027213823318481444, 0.02726092720031738, 0.0269434871673584, 0.027181055068969725, 0.027286527633666992, 0.02714931106567383, 0.02733670425415039, 0.027291648864746092, 0.027216896057128907, 0.027282432556152345, 0.02732339286804199, 0.027337728500366212, 0.027403263092041014, 0.027407360076904298, 0.02732339286804199, 0.02730188751220703, 0.027433984756469725, 0.02752102470397949, 0.027228160858154295, 0.02733670425415039, 0.027287551879882813, 0.0275281925201416, 0.02736128044128418, 0.027430912017822266, 0.027204608917236327, 0.027231231689453125, 0.027371519088745116, 0.02773811149597168, 0.029039615631103514, 0.028693504333496093, 0.028442623138427735, 0.02797875213623047, 0.027254783630371093, 0.02733465576171875, 0.027249664306640626, 0.02732646369934082, 0.027226112365722657, 
0.027356159210205077, 0.027217920303344727, 0.02717695999145508, 0.027222015380859374, 0.02735820770263672, 0.027313152313232423, 0.027256832122802735, 0.027256832122802735, 0.02729471969604492, 0.028022783279418945, 0.028251136779785156, 0.028399616241455077, 0.027431936264038087, 0.02772172737121582, 0.02735103988647461, 0.02912563133239746, 0.028022783279418945, 0.027394048690795897, 0.027304960250854493, 0.027251712799072264, 0.027233280181884766, 0.027266048431396486, 0.0271779842376709, 0.027487232208251954, 0.027253759384155272, 0.02725273513793945, 0.027165695190429686, 0.027228160858154295, 0.027380735397338866, 0.028270591735839845, 0.027241472244262696, 0.027488256454467775, 0.02717184066772461, 0.027188224792480467, 0.027116544723510744, 0.02718720054626465, 0.027251712799072264, 0.027227136611938478, 0.027190271377563476, 0.027286527633666992, 0.027280384063720704, 0.027356159210205077, 0.027280384063720704, 0.027266048431396486, 0.027182079315185546, 0.027272192001342774, 0.027291648864746092, 0.027348991394042968, 0.02722822380065918, 0.027279296875, 0.02717900848388672, 0.027478015899658204, 0.027173887252807616, 0.02714521598815918, 0.0271278076171875, 0.027040767669677734, 0.027123712539672853, 0.02709503936767578, 0.027091968536376954, 0.027026432037353516, 0.027131904602050783, 0.027099136352539063, 0.02711039924621582, 0.027001855850219726, 0.0270960636138916, 0.02716364860534668, 0.027075584411621095, 0.027021312713623048, 0.027074560165405274, 0.0271646728515625, 0.027257856369018556, 0.028672000885009766, 0.02819584083557129, 0.028342271804809572, 0.028454912185668944, 0.02830335998535156, 0.027619327545166016, 0.027204608917236327, 0.027225088119506836, 0.02716262435913086, 0.027239423751831054, 0.027209728240966798, 0.02838835144042969, 0.027926528930664062, 0.027256832122802735, 0.027653215408325195, 0.027270048141479493, 0.02722822380065918, 0.030658496856689452, 0.028640256881713868, 0.028300287246704102, 0.027256832122802735, 0.027337728500366212, 0.027272192001342774, 0.027511808395385744, 0.027270143508911132, 0.027245567321777343, 0.02710323143005371, 0.02731827163696289, 0.02730803108215332, 0.027354112625122072, 0.027272192001342774, 0.0273623046875, 0.027248640060424805, 0.027198463439941405, 0.027303936004638672, 0.02813542366027832, 0.027830272674560546, 0.027182079315185546, 0.027223039627075195, 0.0273438720703125, 0.027247615814208984, 0.02730086326599121, 0.027133951187133788, 0.026756095886230468, 0.026836992263793946, 0.02713907241821289, 0.02716160011291504, 0.027538431167602538, 0.027188224792480467, 0.028240896224975585, 0.027604991912841798, 0.0272988166809082, 0.027395072937011718, 0.02733260726928711, 0.027224063873291016, 0.027207679748535156, 0.02716979217529297, 0.027189247131347655, 0.027371519088745116, 0.02735001564025879, 0.027131904602050783, 0.02714726448059082, 0.026856447219848634, 0.026953727722167968, 0.026784767150878908, 0.026894336700439454, 0.02692915153503418, 0.02694144058227539, 0.026928127288818358, 0.02692095947265625, 0.027241472244262696, 0.028194816589355468, 0.03015372848510742, 0.028482559204101563, 0.028294143676757814, 0.02731827163696289, 0.027250688552856447, 0.027158527374267577, 0.02721177673339844, 0.02714419174194336, 0.02730291175842285, 0.027254783630371093, 0.027465728759765624, 0.027708415985107423, 0.027238399505615234, 0.027683839797973633, 0.02733670425415039, 0.027238399505615234, 0.027235328674316408, 0.02720256042480469, 0.027108352661132814, 0.027227136611938478, 0.027229183197021483, 
0.02735820770263672, 0.027290624618530275, 0.027222015380859374, 0.027286527633666992, 0.02736742401123047, 0.027378688812255858, 0.02733363151550293, 0.027274240493774415, 0.027231231689453125, 0.02726911926269531, 0.027201536178588868, 0.027411455154418944, 0.027258880615234377, 0.02715545654296875, 0.027240447998046875, 0.02712883186340332, 0.027464704513549806, 0.027131904602050783, 0.027064319610595702, 0.02720256042480469, 0.027165695190429686, 0.02716979217529297, 0.027188224792480467, 0.027106304168701172, 0.027196416854858397, 0.02714931106567383, 0.02755788803100586, 0.02738380813598633, 0.027190271377563476, 0.027150335311889647, 0.027198463439941405, 0.02717184066772461, 0.027206655502319335, 0.027578367233276366, 0.028019712448120116, 0.02716262435913086, 0.027233280181884766, 0.0272988166809082, 0.02728550338745117, 0.027224063873291016, 0.027140096664428712, 0.027225088119506836, 0.02718617630004883, 0.02714419174194336, 0.027275264739990233, 0.027325439453125, 0.027299840927124022, 0.02731110382080078, 0.027251712799072264, 0.0271278076171875, 0.02710425567626953, 0.02712985610961914, 0.027204608917236327, 0.027204608917236327, 0.027215871810913086, 0.02736844825744629, 0.02734182357788086, 0.027768831253051757, 0.028221439361572266, 0.028095487594604493, 0.027447296142578126, 0.027658239364624023, 0.02733670425415039, 0.027422719955444336, 0.02718617630004883, 0.02717081642150879, 0.027283456802368163, 0.027254783630371093, 0.02736025619506836, 0.02713907241821289, 0.02716364860534668, 0.027215871810913086, 0.027257856369018556, 0.027185152053833008, 0.02718003273010254, 0.027219968795776366, 0.02719436836242676, 0.027142143249511717, 0.02754252815246582, 0.028237823486328126, 0.029371391296386717, 0.029501440048217774, 0.028474367141723633, 0.028370943069458008, 0.028433408737182617, 0.028140544891357422, 0.02818662452697754, 0.02816204833984375, 0.02811801528930664, 0.028267520904541016, 0.028036096572875976, 0.028188671112060547, 0.0271779842376709, 0.027658239364624023, 0.02817945671081543, 0.027699199676513672, 0.027495424270629884, 0.027792383193969726, 0.02791935920715332, 0.027271167755126953, 0.02749951934814453, 0.02859519958496094, 0.028200960159301756, 0.028257280349731444, 0.027261951446533202, 0.02728447914123535, 0.027291648864746092, 0.027196416854858397, 0.027826175689697266, 0.027280384063720704, 0.027185152053833008, 0.02735820770263672, 0.027455488204956056, 0.02775961685180664, 0.028264448165893553, 0.02713702392578125, 0.027837440490722655, 0.027684864044189454, 0.027131904602050783, 0.027527168273925783, 0.02796031951904297, 0.02727628707885742, 0.02716262435913086, 0.027146240234375, 0.028249088287353515, 0.027907072067260744, 0.028453887939453124, 0.02860748863220215, 0.027247615814208984, 0.027674623489379883, 0.02726092720031738, 0.02816819190979004, 0.028222463607788087, 0.027860992431640624, 0.027259904861450194, 0.027279359817504883, 0.0271646728515625, 0.02780467224121094, 0.02754867172241211, 0.027699199676513672, 0.028760128021240235, 0.027262975692749023, 0.027167680740356446, 0.027181055068969725, 0.027501567840576172, 0.02714419174194336, 0.02730803108215332, 0.027130880355834962, 0.02719436836242676, 0.027364351272583007, 0.027587583541870117, 0.028069952011108398, 0.02723423957824707, 0.02737766456604004, 0.027259904861450194, 0.027199487686157226, 0.027174911499023437, 0.027846656799316406, 0.02816409683227539, 0.028017663955688478, 0.028039167404174805, 0.027271167755126953, 0.02774835205078125, 0.0272988166809082, 
0.027403263092041014, 0.027297792434692384, 0.02731827163696289, 0.027347967147827147, 0.027271167755126953, 0.02728550338745117, 0.028677120208740234, 0.02975436782836914, 0.02872831916809082, 0.02840985679626465, 0.027844608306884764, 0.027266048431396486, 0.027543552398681642, 0.027291648864746092, 0.027259904861450194, 0.027322368621826174, 0.027313152313232423, 0.02728447914123535, 0.028193792343139647, 0.027583488464355467, 0.02721177673339844, 0.027201536178588868, 0.027385856628417967, 0.027216896057128907, 0.027222015380859374, 0.02731622314453125, 0.027304960250854493, 0.027212799072265623, 0.027219968795776366, 0.02730291175842285, 0.027667455673217774, 0.028489728927612305, 0.028297216415405273, 0.027275264739990233, 0.027277376174926756, 0.027587520599365235, 0.028058624267578124, 0.02777190399169922, 0.027232255935668945, 0.027207679748535156, 0.02733875274658203, 0.027303936004638672, 0.027270143508911132, 0.027281408309936524, 0.027423744201660157, 0.027196416854858397, 0.027251712799072264, 0.027240447998046875, 0.02731520080566406, 0.027414527893066407, 0.027238399505615234, 0.027201536178588868, 0.027140096664428712, 0.02716979217529297, 0.027197439193725585, 0.02721177673339844, 0.027114496231079102, 0.02716364860534668, 0.027206655502319335, 0.02717081642150879, 0.027122688293457032, 0.027229183197021483, 0.027184127807617187, 0.02717081642150879, 0.02731110382080078, 0.027322368621826174, 0.027185152053833008, 0.02712063980102539, 0.027586559295654296, 0.027181055068969725, 0.027238399505615234, 0.027282432556152345, 0.027257856369018556, 0.027238399505615234, 0.027242496490478517, 0.027064319610595702, 0.0281343994140625, 0.028279808044433592, 0.027238399505615234, 0.02716057586669922, 0.027255807876586914, 0.027235328674316408, 0.027216896057128907, 0.027337728500366212, 0.02735923194885254, 0.027183103561401366, 0.02717900848388672, 0.027241472244262696, 0.02735206413269043, 0.029619199752807617, 0.028624895095825196, 0.0285296630859375, 0.028069887161254883, 0.028218368530273437, 0.028228607177734375, 0.028232704162597655, 0.028428287506103517, 0.028284927368164063, 0.02728447914123535, 0.027322368621826174, 0.02733363151550293, 0.028192768096923827, 0.02755072021484375, 0.027197439193725585, 0.027578367233276366, 0.027150335311889647, 0.027373567581176757, 0.02723027229309082, 0.028202943801879883, 0.030661632537841797, 0.02869964790344238, 0.027724800109863282, 0.02834022331237793, 0.027645952224731447, 0.027212799072265623, 0.02731110382080078, 0.027228160858154295, 0.027214847564697265, 0.027247615814208984, 0.02813132858276367, 0.028283903121948242, 0.028177408218383788, 0.027831296920776367, 0.02720256042480469, 0.027511808395385744, 0.02734694480895996, 0.027339775085449217, 0.02718720054626465, 0.02679091262817383, 0.027237375259399413, 0.027227136611938478, 0.027198463439941405, 0.027212799072265623, 0.027219968795776366, 0.027467775344848632, 0.027243520736694334, 0.027626495361328125, 0.027180063247680665, 0.027163616180419924, 0.02730086326599121, 0.02718720054626465, 0.027356159210205077, 0.027263999938964844, 0.027237375259399413, 0.027115520477294923, 0.027784191131591796, 0.027414527893066407, 0.02731007957458496, 0.027243520736694334, 0.02717184066772461, 0.027213823318481444, 0.027288576126098633, 0.02731622314453125, 0.027405311584472656, 0.0273438720703125, 0.02721177673339844, 0.027254783630371093, 0.027108352661132814, 0.02720358467102051, 0.027215871810913086, 0.027182079315185546, 0.027199487686157226, 0.027167743682861328, 
0.02718617630004883, 0.027076608657836915, 0.027076608657836915, 0.02718003273010254, 0.02715443229675293, 0.027328512191772462, 0.027381759643554687, 0.027442176818847655, 0.02718617630004883, 0.027224063873291016, 0.027200511932373047, 0.027644927978515626, 0.027207679748535156, 0.027254783630371093, 0.02709401512145996, 0.02712678337097168, 0.02713907241821289, 0.027259904861450194, 0.028177408218383788, 0.027625471115112304, 0.027489280700683592, 0.027511808395385744, 0.027265024185180665, 0.027232255935668945, 0.02731622314453125, 0.02733875274658203, 0.02733158493041992, 0.02733670425415039, 0.027442176818847655, 0.027347967147827147, 0.02729471969604492, 0.02740019226074219, 0.027266048431396486, 0.027266048431396486, 0.027272192001342774, 0.027215871810913086, 0.027314176559448244, 0.027209728240966798, 0.027254783630371093, 0.027283456802368163, 0.027339775085449217, 0.02758246421813965, 0.027275264739990233, 0.027190271377563476, 0.027687936782836913, 0.027287551879882813, 0.027406335830688477, 0.02718726348876953, 0.027167680740356446, 0.027249664306640626, 0.027386880874633788, 0.028064767837524415, 0.028828672409057617, 0.028431360244750976, 0.027436031341552734, 0.027321344375610353, 0.02735923194885254, 0.027234304428100587, 0.02727731132507324, 0.027370496749877928, 0.02736947250366211, 0.027224063873291016, 0.027356159210205077, 0.027228160858154295, 0.027258880615234377, 0.02731827163696289, 0.027092031478881836, 0.027544511795043945, 0.02751283264160156, 0.0272988166809082, 0.02712063980102539, 0.02720256042480469, 0.0271329288482666, 0.027232255935668945, 0.027231231689453125, 0.02812928009033203, 0.027421695709228516, 0.02771046447753906, 0.027496448516845705, 0.027188224792480467, 0.02760704040527344, 0.027213823318481444, 0.02730803108215332, 0.027191295623779296]",tokens/s,36.42136590057409,,,,,,,, -4bit-awq-gemv-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,EleutherAI/gpt-neo-2.7B,EleutherAI/gpt-neo-2.7B,cuda,0,42,,,True,,,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,gpt_neo,MB,2185.474048,2405.957632,0.0,1820.327936,1730.89792,s,1,9.31283984375,9.31283984375,0.0,9.31283984375,9.31283984375,9.31283984375,9.31283984375,[9.31283984375],,kWh,2.9247845110426677e-05,1.6014344026589252e-05,4.000447644797456e-05,8.526666558499049e-05,,MB,2296.741888,2561.14688,0.0,1914.699776,1884.530688,s,10,0.533465503692627,0.0533465503692627,0.00020314210763551458,0.053408206939697264,0.053463033294677734,0.05346487693786621,0.05346635185241699,"[0.052748607635498046, 0.053344638824462894, 0.053379550933837894, 0.053466720581054686, 0.05341667175292969, 0.05346262359619141, 0.053365184783935544, 0.05345171356201172, 0.053399742126464846, 
0.053430049896240235]",tokens/s,4798.81076148276,kWh,6.239251309210459e-07,3.41876362173787e-07,3.0482261227894385e-06,4.014027615884271e-06,tokens/kWh,63776342.491257235,MB,2304.851968,2728.91904,0.0,2082.471936,1949.917184,s,10,11.859364013671874,1.1859364013671874,0.012307585142669468,1.1877008666992188,1.1954744628906249,1.195595751953125,1.195692783203125,"[1.190955810546875, 1.195717041015625, 1.193032958984375, 1.18560009765625, 1.195447509765625, 1.1870438232421876, 1.18835791015625, 1.1506485595703124, 1.1858232421875, 1.186737060546875]",tokens/s,53.12257885614395,kWh,1.371293954678779e-05,7.514365955050336e-06,3.193855186661198e-05,5.316585736845009e-05,tokens/kWh,1184971.0155786132,,s,630,11.856444419860832,0.018819753047398156,0.00036565822899875197,0.018889728546142577,0.019070054435729983,0.019266304397583006,0.02011877368927002,"[0.018519039154052733, 0.018754560470581053, 0.019099647521972657, 0.01887948799133301, 0.018911231994628908, 0.018946048736572265, 0.01904025650024414, 0.01869004821777344, 0.018911231994628908, 0.01883135986328125, 0.018906112670898437, 0.01886412811279297, 0.018874368667602538, 0.01886207962036133, 0.018885631561279297, 0.01856716728210449, 0.018948095321655273, 0.01883750343322754, 0.018856960296630858, 0.019119104385375976, 0.018914304733276367, 0.01878835105895996, 0.018934783935546876, 0.018893823623657227, 0.018573312759399413, 0.018949119567871094, 0.0190382080078125, 0.019162111282348633, 0.019145727157592773, 0.01883647918701172, 0.01887948799133301, 0.01887846374511719, 0.018893823623657227, 0.01886720085144043, 0.018856960296630858, 0.01889587211608887, 0.01880575942993164, 0.019002368927001953, 0.019098623275756836, 0.019070976257324217, 0.018932735443115235, 0.018953216552734374, 0.018930688858032226, 0.018886655807495118, 0.01886207962036133, 0.018916351318359375, 0.018950143814086915, 0.01878835105895996, 0.01905356788635254, 0.018948095321655273, 0.018914304733276367, 0.01883545684814453, 0.018932735443115235, 0.01904742431640625, 0.019135488510131835, 0.018893823623657227, 0.01883135986328125, 0.01884160041809082, 0.018877439498901367, 0.018877439498901367, 0.018759679794311524, 0.018880512237548826, 0.018832384109497072, 0.018769920349121092, 0.018889728546142577, 0.018888704299926756, 0.018944000244140623, 0.018865152359008788, 0.01883135986328125, 0.018884607315063476, 0.01882316780090332, 0.018231296539306642, 0.01859584045410156, 0.018743295669555664, 0.01830297660827637, 0.018272256851196288, 0.018832384109497072, 0.018945024490356444, 0.018762752532958983, 0.018856960296630858, 0.01880678367614746, 0.018807807922363282, 0.018778112411499022, 0.01889587211608887, 0.018861055374145508, 0.018926591873168946, 0.01904435157775879, 0.01902284812927246, 0.01905356788635254, 0.0190699520111084, 0.018894847869873048, 0.01887846374511719, 0.018869247436523438, 0.02067353630065918, 0.020199424743652345, 0.01967820739746094, 0.019313663482666017, 0.018856960296630858, 0.01904844856262207, 0.01902592086791992, 0.019083263397216797, 0.018917375564575196, 0.01903001594543457, 0.01899929618835449, 0.019583999633789064, 0.01942323112487793, 0.018921472549438476, 0.01901568031311035, 0.01899622344970703, 0.0192225284576416, 0.018926591873168946, 0.018888704299926756, 0.01904128074645996, 0.018970624923706055, 0.019106815338134766, 0.01886617660522461, 0.018942975997924806, 0.018908159255981445, 0.01887948799133301, 0.018941951751708985, 0.018976768493652343, 0.018897920608520507, 0.018918399810791017, 0.018957311630249024, 
0.01882624053955078, 0.01902592086791992, 0.018948095321655273, 0.018952192306518553, 0.019062784194946288, 0.018910207748413087, 0.01903513526916504, 0.019019775390625, 0.018998271942138673, 0.018985984802246093, 0.018935808181762694, 0.018928640365600585, 0.019154943466186524, 0.019054592132568358, 0.019216384887695313, 0.018930688858032226, 0.018985984802246093, 0.01887948799133301, 0.018955263137817382, 0.01905766487121582, 0.018981887817382814, 0.018927616119384767, 0.019017728805541992, 0.01889587211608887, 0.018959360122680666, 0.019002368927001953, 0.01837772750854492, 0.018249727249145507, 0.018892799377441406, 0.018914304733276367, 0.01925017547607422, 0.019078144073486326, 0.018938880920410156, 0.018897920608520507, 0.018910207748413087, 0.01837772750854492, 0.018280448913574218, 0.01826918411254883, 0.018291711807250977, 0.018326528549194337, 0.018471935272216796, 0.018913280487060546, 0.01899622344970703, 0.01898700714111328, 0.018471935272216796, 0.018692096710205077, 0.01904844856262207, 0.019207168579101562, 0.019590143203735352, 0.020571136474609376, 0.019268608093261717, 0.019056640625, 0.018905088424682616, 0.01899519920349121, 0.0189040641784668, 0.018868223190307617, 0.018975744247436522, 0.01900851249694824, 0.018966527938842775, 0.018931711196899414, 0.018947071075439453, 0.01901875114440918, 0.01901875114440918, 0.018914304733276367, 0.019096576690673828, 0.018366464614868162, 0.018331647872924805, 0.01823232078552246, 0.01824665641784668, 0.018546688079833985, 0.018844671249389648, 0.01926348876953125, 0.018903039932250978, 0.018918399810791017, 0.018791423797607423, 0.018928640365600585, 0.018808832168579103, 0.01883545684814453, 0.018824192047119142, 0.0188723201751709, 0.019119104385375976, 0.018991104125976564, 0.018330623626708984, 0.018184192657470705, 0.018573312759399413, 0.018936832427978514, 0.01881907272338867, 0.018757631301879883, 0.01881497573852539, 0.01903001594543457, 0.01887948799133301, 0.01887948799133301, 0.018861055374145508, 0.01886310386657715, 0.018832384109497072, 0.018840576171875, 0.018951168060302736, 0.01884876823425293, 0.018917375564575196, 0.018883583068847656, 0.0188723201751709, 0.01879347229003906, 0.018890752792358398, 0.018900991439819336, 0.018759679794311524, 0.018882560729980468, 0.01882009506225586, 0.01899519920349121, 0.018913280487060546, 0.01886310386657715, 0.018257919311523436, 0.018276351928710938, 0.018719743728637696, 0.018880512237548826, 0.018777088165283205, 0.01929523277282715, 0.018971647262573242, 0.019091455459594727, 0.018954240798950195, 0.018899967193603515, 0.018946048736572265, 0.018899967193603515, 0.018964479446411133, 0.0190382080078125, 0.018948095321655273, 0.018852863311767578, 0.018944000244140623, 0.018900991439819336, 0.018390016555786134, 0.018989055633544923, 0.019530752182006835, 0.019145727157592773, 0.018997247695922852, 0.018946048736572265, 0.018898944854736328, 0.018865152359008788, 0.018933759689331055, 0.018923519134521484, 0.018742271423339844, 0.018860031127929687, 0.01887539291381836, 0.018891775131225585, 0.018929664611816405, 0.018861055374145508, 0.018916351318359375, 0.018975744247436522, 0.019006464004516603, 0.018731008529663085, 0.018888704299926756, 0.01886310386657715, 0.018947071075439453, 0.018892799377441406, 0.019002368927001953, 0.018921472549438476, 0.018958335876464845, 0.018929664611816405, 0.01864089584350586, 0.01904332733154297, 0.019005439758300782, 0.018933759689331055, 0.018903039932250978, 0.018940927505493164, 0.018945024490356444, 0.018882560729980468, 
0.01887027168273926, 0.019009536743164062, 0.01903001594543457, 0.019578880310058593, 0.01941094398498535, 0.018935808181762694, 0.018874368667602538, 0.018832384109497072, 0.018971647262573242, 0.018914304733276367, 0.01884160041809082, 0.0188723201751709, 0.018883583068847656, 0.018549760818481444, 0.01883955192565918, 0.018874368667602538, 0.018899967193603515, 0.018911231994628908, 0.01887539291381836, 0.01928704071044922, 0.019612672805786133, 0.020131839752197265, 0.019363840103149413, 0.0189040641784668, 0.019019775390625, 0.018803712844848632, 0.018956287384033203, 0.018900991439819336, 0.018960384368896483, 0.018953216552734374, 0.018869247436523438, 0.01884979248046875, 0.018900991439819336, 0.018921472549438476, 0.01882316780090332, 0.018457599639892578, 0.01882931137084961, 0.018898944854736328, 0.018742271423339844, 0.018893823623657227, 0.01884774398803711, 0.01884979248046875, 0.018774015426635742, 0.018913280487060546, 0.01879347229003906, 0.018861055374145508, 0.018791423797607423, 0.018922496795654296, 0.018952192306518553, 0.0188272647857666, 0.01885491180419922, 0.01879756736755371, 0.01903206443786621, 0.018917375564575196, 0.018921472549438476, 0.019092479705810548, 0.019323904037475585, 0.01883033561706543, 0.01882931137084961, 0.01840640068054199, 0.018356224060058594, 0.018316287994384766, 0.018349056243896485, 0.018295808792114256, 0.0184586238861084, 0.018784255981445314, 0.018784255981445314, 0.01883545684814453, 0.01881088066101074, 0.018979839324951172, 0.018979839324951172, 0.018994176864624023, 0.018949119567871094, 0.018957311630249024, 0.01877299118041992, 0.018955263137817382, 0.01899622344970703, 0.01905971145629883, 0.01881497573852539, 0.018912256240844725, 0.0189040641784668, 0.018975744247436522, 0.018868223190307617, 0.018860031127929687, 0.018889728546142577, 0.01885593605041504, 0.018877439498901367, 0.018899967193603515, 0.018874368667602538, 0.0188272647857666, 0.018292736053466797, 0.018347007751464844, 0.018264064788818358, 0.01829478454589844, 0.018328575134277342, 0.018257919311523436, 0.0182609920501709, 0.01823846435546875, 0.018231296539306642, 0.01822105598449707, 0.018283519744873047, 0.018328575134277342, 0.018938880920410156, 0.018971647262573242, 0.019294208526611328, 0.019340288162231444, 0.018876415252685547, 0.019121152877807617, 0.018985984802246093, 0.018942975997924806, 0.01901875114440918, 0.019167232513427734, 0.018944000244140623, 0.01908121681213379, 0.018919424057006837, 0.018976768493652343, 0.019118080139160155, 0.018876415252685547, 0.01883955192565918, 0.018909183502197266, 0.019160064697265625, 0.01901260757446289, 0.018886655807495118, 0.018933759689331055, 0.018889728546142577, 0.018949119567871094, 0.018972671508789063, 0.018945024490356444, 0.01884671974182129, 0.01883033561706543, 0.01900032043457031, 0.018928640365600585, 0.018935808181762694, 0.018934783935546876, 0.018906112670898437, 0.018872352600097658, 0.019017696380615234, 0.018930688858032226, 0.019346431732177736, 0.019718143463134767, 0.019737600326538086, 0.01902694320678711, 0.019334144592285156, 0.018970624923706055, 0.018937856674194335, 0.018911231994628908, 0.018976768493652343, 0.019084287643432618, 0.019002368927001953, 0.0188538875579834, 0.018520063400268554, 0.018494464874267577, 0.018526208877563476, 0.018315263748168945, 0.018307071685791015, 0.018340864181518556, 0.01819343948364258, 0.018216928482055663, 0.018361343383789062, 0.018256895065307616, 0.018347007751464844, 0.01822105598449707, 0.018312192916870116, 0.018217983245849608, 
0.018168832778930662, 0.018358272552490236, 0.01804902458190918, 0.018233343124389647, 0.01820262336730957, 0.018215936660766603, 0.018231296539306642, 0.018215936660766603, 0.018159616470336915, 0.0182476806640625, 0.018249727249145507, 0.01822822380065918, 0.018395135879516602, 0.01821183967590332, 0.018173952102661133, 0.01822412872314453, 0.018113536834716795, 0.018214912414550782, 0.018284543991088868, 0.01820364761352539, 0.018143232345581056, 0.018229248046875, 0.018242559432983398, 0.018367488861083983, 0.01821900749206543, 0.018320383071899413, 0.018176000595092775, 0.018198528289794923, 0.018181119918823242, 0.01819340705871582, 0.018101247787475586, 0.01822719955444336, 0.018242559432983398, 0.01822412872314453, 0.018265087127685545, 0.01820979118347168, 0.0182476806640625, 0.01821286392211914, 0.01821900749206543, 0.018307071685791015, 0.01822719955444336, 0.018291711807250977, 0.01822822380065918, 0.01824358367919922, 0.018317312240600587, 0.01843814468383789, 0.018315263748168945, 0.018256895065307616, 0.018334720611572267, 0.01824051284790039, 0.018539520263671876, 0.018928640365600585, 0.018380800247192384, 0.018471935272216796, 0.018156543731689453, 0.018327552795410155, 0.018323455810546875, 0.01825382423400879, 0.018576383590698242, 0.018907136917114258, 0.01902592086791992, 0.018940927505493164, 0.01887846374511719, 0.018885631561279297, 0.01848320007324219, 0.01824460792541504, 0.0182794246673584, 0.01819545555114746, 0.018241535186767577, 0.02024448013305664, 0.02008678436279297, 0.01986764717102051, 0.01900339126586914, 0.018869247436523438, 0.018927616119384767, 0.018889728546142577, 0.018951168060302736, 0.0189040641784668, 0.018908159255981445, 0.01884774398803711, 0.01883135986328125, 0.018912256240844725, 0.018972671508789063, 0.018911231994628908, 0.018900991439819336, 0.018787328720092773, 0.018959360122680666, 0.019002368927001953, 0.018894847869873048, 0.018921472549438476, 0.01903001594543457, 0.018922496795654296, 0.019146751403808594, 0.019198976516723632, 0.01861427116394043, 0.018947071075439453, 0.01903104019165039, 0.018893823623657227, 0.018937856674194335, 0.018881536483764647, 0.018883583068847656, 0.019056640625, 0.01902899169921875, 0.018945024490356444, 0.01906073570251465, 0.019056640625, 0.01902079963684082, 0.018907136917114258, 0.018326528549194337, 0.018291711807250977, 0.018388992309570314, 0.018747392654418944, 0.018501632690429686, 0.018307071685791015, 0.01827020835876465, 0.019110912322998046, 0.019449855804443358, 0.01898700714111328, 0.01881292724609375, 0.01920102310180664, 0.01899929618835449, 0.018994176864624023, 0.01887539291381836, 0.01886617660522461, 0.01822412872314453, 0.018300928115844727, 0.018619392395019533, 0.018551807403564453, 0.018324480056762696, 0.018345983505249023, 0.018268159866333008, 0.018375680923461913, 0.018259967803955078, 0.018311168670654295, 0.018304000854492186, 0.01836953544616699, 0.018257919311523436, 0.018993152618408202, 0.018981887817382814, 0.01903206443786621, 0.01899622344970703, 0.018981887817382814, 0.019042303085327148, 0.018933759689331055, 0.019341312408447265, 0.01884671974182129, 0.01883647918701172, 0.018885631561279297, 0.018868223190307617, 0.018288639068603514, 0.01823232078552246, 0.01843507194519043, 0.018423807144165038, 0.02104115104675293, 0.02104934310913086, 0.01928704071044922, 0.018945024490356444, 0.01883955192565918, 0.01882111930847168, 0.01888768005371094, 0.01887027168273926, 0.01901260757446289, 0.018948095321655273, 0.018892799377441406, 0.018942975997924806, 
0.01887539291381836, 0.018743295669555664, 0.01824870491027832, 0.018197504043579102, 0.018757631301879883, 0.018906112670898437, 0.01883750343322754, 0.01880575942993164, 0.018885631561279297, 0.018911231994628908, 0.01902694320678711, 0.018902015686035157, 0.018889728546142577]",tokens/s,53.13566004194995,,,,,,,, -4bit-awq-gemv-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,facebook/opt-66b,facebook/opt-66b,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - self.load_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3904, in from_pretrained - dispatch_model(model, **device_map_kwargs) - File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 489, in dispatch_model - model.to(device) - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 2796, in to - return super().to(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1173, in to - return self._apply(convert) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 779, in _apply - module._apply(fn) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 779, in _apply - module._apply(fn) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 779, in _apply - module._apply(fn) - [Previous line repeated 2 more times] - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 853, in _apply - self._buffers[key] = fn(buf) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1159, in 
convert - return t.to( -torch.cuda.OutOfMemoryError: CUDA out of memory. Tried to allocate 162.00 MiB. GPU - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemv-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,0,0,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 304, in hf_raise_for_status - response.raise_for_status() - File ""/usr/local/lib/python3.10/dist-packages/requests/models.py"", line 1024, in raise_for_status - raise HTTPError(http_error_msg, response=self) -requests.exceptions.HTTPError: 404 Client Error: Not Found for url: https://huggingface.co/0/resolve/main/config.json - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1221, in hf_hub_download - return _hf_hub_download_to_cache_dir( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1325, in _hf_hub_download_to_cache_dir - _raise_on_head_call_error(head_call_error, force_download, local_files_only) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1823, in _raise_on_head_call_error - raise head_call_error - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1722, in _get_metadata_or_catch_error - metadata = get_hf_file_metadata(url=url, proxies=proxies, timeout=etag_timeout, headers=headers) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1645, in get_hf_file_metadata - r = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 372, in _request_wrapper - response = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 396, in _request_wrapper - hf_raise_for_status(response) - File 
""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status - raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-6694950d-516c3f0169bcae4b776073fe;1bda9735-8ee9-483f-938a-6b820cbd2888) - -Repository Not Found for url: https://huggingface.co/0/resolve/main/config.json. -Please make sure you specified the correct `repo_id` and `repo_type`. -If you are trying to access a private or gated repo, make sure you are authenticated. - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 425, in cached_file - raise EnvironmentError( -OSError: 0 is not a local folder and is not a valid model identifier listed on 'https://huggingface.co/models' -If this is a private repository, make sure to pass a token having permission to this repo either by logging in with `huggingface-cli login` or by passing `token=` - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemv-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,facebook/xglm-4.5B,facebook/xglm-4.5B,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File 
""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - self.load_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3704, in from_pretrained - config = cls._autoset_attn_implementation( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1481, in _autoset_attn_implementation - cls._check_and_enable_flash_attn_2( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1572, in _check_and_enable_flash_attn_2 - raise ValueError( -ValueError: XGLMForCausalLM does not support Flash Attention 2.0 yet. Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmpa90x580i/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemv-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,TencentARC/Mistral_Pro_8B_v0.1,TencentARC/Mistral_Pro_8B_v0.1,cuda,0,42,,,True,,,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,,mistral,MB,5167.005696,5651.300352,0.0,5058.330624,5057.441792,s,1,11.886142578125,11.886142578125,0.0,11.886142578125,11.886142578125,11.886142578125,11.886142578125,[11.886142578125],,kWh,5.229486804166692e-05,2.8643061660510323e-05,7.713506170803353e-05,0.00015807299141021075,,MB,1787.86304,5869.40416,0.0,5215.617024,5189.707776,s,10,1.416440368652344,0.14164403686523436,0.00019977528858270777,0.141651123046875,0.1417861602783203,0.14194380035400392,0.14206991241455078,"[0.14165728759765625, 0.1414171142578125, 0.1421014404296875, 0.14150236511230468, 0.1415879364013672, 0.14174421691894531, 0.14169187927246094, 0.14164495849609374, 0.14175112915039062, 
0.141342041015625]",tokens/s,1807.3475288166796,kWh,1.6712592625978554e-06,9.157171516506274e-07,9.091638728704317e-06,1.16786151429528e-05,tokens/kWh,21920407.24575786,MB,1802.993664,5888.278528,0.0,5234.491392,5189.710336,s,10,28.587490234374997,2.8587490234375,0.03934892104982959,2.8792010498046876,2.894743920898437,2.8968621948242186,2.8985568139648437,"[2.814942138671875, 2.83842578125, 2.817991455078125, 2.78344189453125, 2.885580078125, 2.894273193359375, 2.89898046875, 2.89154296875, 2.889490234375, 2.872822021484375]",tokens/s,22.03761137598771,kWh,3.388295063809687e-05,1.8569112315533414e-05,8.59223539490961e-05,0.00013837441690272637,tokens/kWh,455286.471373443,,s,630,28.584751132965106,0.04537262084597633,0.0008435274484009045,0.045644289016723634,0.04622922554016113,0.04650603542327881,0.047311912765502936,"[0.046450687408447267, 0.04572467041015625, 0.04554035186767578, 0.045695999145507815, 0.045195262908935545, 0.04413030242919922, 0.045949951171875, 0.04513689422607422, 0.04431155014038086, 0.04420915222167969, 0.04407910537719727, 0.04419481658935547, 0.04420915222167969, 0.04427673721313476, 0.04414156723022461, 0.0442716178894043, 0.04419276809692383, 0.044083198547363284, 0.04605644989013672, 0.04462387084960937, 0.044943359375, 0.04443545532226562, 0.04427468872070312, 0.044082176208496096, 0.04398284912109375, 0.044058624267578124, 0.04410060882568359, 0.04409958267211914, 0.04432179260253906, 0.045722625732421876, 0.04470476913452148, 0.04415488052368164, 0.04403507232666016, 0.04494438552856445, 0.04571340942382812, 0.045687808990478515, 0.04413849639892578, 0.04491980743408203, 0.04414361572265625, 0.045861888885498046, 0.04528844833374023, 0.044183551788330076, 0.04441600036621094, 0.045088768005371094, 0.0443422737121582, 0.04409446334838867, 0.04401971054077149, 0.04401356887817383, 0.044229633331298826, 0.04427673721313476, 0.04413542556762695, 0.04416409683227539, 0.04475904083251953, 0.046020606994628906, 0.044252159118652344, 0.04402073669433594, 0.04409036636352539, 0.04463411331176758, 0.04550041580200195, 0.045794303894042966, 0.04561203384399414, 0.044862464904785154, 0.04410265731811523, 0.04498124694824219, 0.04435353469848633, 0.043930622100830076, 0.04555263900756836, 0.046069759368896485, 0.04505395126342773, 0.04684185409545898, 0.046069759368896485, 0.04554751968383789, 0.04555878448486328, 0.04537548828125, 0.045499393463134766, 0.04537855911254883, 0.04535398483276367, 0.0455813102722168, 0.045886463165283206, 0.04566527938842774, 0.045881343841552735, 0.045682689666748044, 0.04564275360107422, 0.04579840087890625, 0.045434879302978515, 0.046031871795654294, 0.0462366714477539, 0.04533555221557617, 0.04411904144287109, 0.04593561553955078, 0.04566732788085937, 0.04559769439697266, 0.04508467102050781, 0.044055553436279295, 0.044055553436279295, 0.044214271545410154, 0.04402483367919922, 0.04401766586303711, 0.04405759811401367, 0.04409958267211914, 0.04405964660644531, 0.044014591217041016, 0.0439552001953125, 0.04458700942993164, 0.044068862915039066, 0.04421017456054688, 0.04542156982421875, 0.04609228897094726, 0.046228511810302735, 0.046390239715576174, 0.04538470458984375, 0.046615550994873044, 0.04559360122680664, 0.04443033599853516, 0.04418048095703125, 0.044028926849365234, 0.044030975341796875, 0.04406476974487305, 0.044093441009521485, 0.044052478790283206, 0.044006401062011716, 0.044816383361816405, 0.04541747283935547, 0.045338623046875, 0.045228031158447264, 0.04417846298217774, 0.04439039993286133, 0.04558233642578125, 
0.04580966567993164, 0.0459417610168457, 0.04527718353271484, 0.044009471893310545, 0.04426342391967773, 0.043979774475097655, 0.044044288635253906, 0.0439818229675293, 0.04388249588012695, 0.04401356887817383, 0.04389990234375, 0.04388351821899414, 0.04400230407714844, 0.04400332641601563, 0.04389683151245117, 0.04389785766601562, 0.04380057525634766, 0.043947006225585936, 0.04390604782104492, 0.043815937042236325, 0.04395110321044922, 0.04387942504882812, 0.04421017456054688, 0.04722175979614258, 0.04560486221313476, 0.045383678436279294, 0.04451327896118164, 0.04398796844482422, 0.04547993469238281, 0.04406784057617188, 0.044148735046386715, 0.044523521423339846, 0.04440883255004883, 0.045330432891845705, 0.044254207611083986, 0.04562432098388672, 0.045281280517578126, 0.045385726928710936, 0.045437950134277344, 0.04534067153930664, 0.0454389762878418, 0.044450817108154295, 0.044598270416259765, 0.044080127716064454, 0.044052478790283206, 0.044801025390625, 0.045505535125732424, 0.04658585739135742, 0.046258174896240234, 0.045265918731689454, 0.044763137817382816, 0.04522086334228516, 0.044088321685791014, 0.04407398223876953, 0.04411801528930664, 0.0448983039855957, 0.04637491226196289, 0.045758464813232425, 0.04522188949584961, 0.04419686508178711, 0.04571033477783203, 0.04442931365966797, 0.04418867111206055, 0.04416819381713867, 0.04405452728271484, 0.04419481658935547, 0.04440371322631836, 0.0451778564453125, 0.04441292953491211, 0.044203041076660156, 0.04475593566894531, 0.044286975860595705, 0.044197887420654294, 0.044050430297851564, 0.04422553634643555, 0.04418048095703125, 0.04409446334838867, 0.04410060882568359, 0.04405350494384765, 0.04405145645141602, 0.04401971054077149, 0.04405452728271484, 0.04412313461303711, 0.04410163116455078, 0.04428492736816406, 0.04419686508178711, 0.044278785705566405, 0.04410265731811523, 0.044283905029296876, 0.04434022521972656, 0.044788734436035156, 0.04441907119750976, 0.04395315170288086, 0.04396236801147461, 0.04384460830688477, 0.04411084747314453, 0.04391731262207031, 0.04398899078369141, 0.04394803237915039, 0.04395622253417969, 0.04401971054077149, 0.04386099243164063, 0.043976703643798826, 0.043865089416503904, 0.04396953582763672, 0.04396543884277344, 0.043947006225585936, 0.044153854370117186, 0.044112895965576174, 0.04418867111206055, 0.0441610221862793, 0.043990016937255856, 0.044034046173095705, 0.04443340682983398, 0.04497510528564453, 0.044371967315673826, 0.04404633712768555, 0.04402175903320313, 0.04396543884277344, 0.04397055816650391, 0.04394496154785156, 0.044088321685791014, 0.04402483367919922, 0.04509286499023438, 0.045859840393066405, 0.045541374206542966, 0.04561203384399414, 0.04555263900756836, 0.04506316757202149, 0.04376883316040039, 0.04403712081909179, 0.04583116912841797, 0.045413375854492184, 0.0460871696472168, 0.046260223388671876, 0.045638656616210936, 0.046235649108886716, 0.046996479034423826, 0.046148609161376954, 0.045690879821777344, 0.04564070510864258, 0.045832191467285156, 0.04573798370361328, 0.04566527938842774, 0.0456181755065918, 0.04564070510864258, 0.04602163314819336, 0.045843456268310545, 0.04603289413452148, 0.045843456268310545, 0.045758464813232425, 0.04566835021972656, 0.04584960174560547, 0.04600320053100586, 0.04573593521118164, 0.045764606475830076, 0.04633599853515625, 0.04631347274780274, 0.045813758850097655, 0.045692928314208986, 0.045568000793457034, 0.04530995178222656, 0.0452147216796875, 0.045879295349121094, 0.045840385437011716, 0.04569804763793945, 0.04597760009765625, 
0.045707263946533204, 0.04594588851928711, 0.045921249389648436, 0.04595404815673828, 0.045797374725341795, 0.04581478500366211, 0.04548303985595703, 0.045899742126464846, 0.04610355377197266, 0.04612710571289062, 0.04606259155273437, 0.046192638397216795, 0.0467589111328125, 0.04610355377197266, 0.04592127990722656, 0.04587417602539062, 0.04607385635375977, 0.04585881423950195, 0.04586393737792969, 0.045810688018798826, 0.04398284912109375, 0.04499763107299805, 0.04606771087646484, 0.04514508819580078, 0.045162494659423826, 0.04537446212768555, 0.045902847290039066, 0.04578713607788086, 0.0457891845703125, 0.04568473434448242, 0.04569497680664063, 0.046714881896972656, 0.04626739120483398, 0.045797374725341795, 0.0456181755065918, 0.04574003219604492, 0.0458158073425293, 0.045818878173828126, 0.045861888885498046, 0.04576051330566406, 0.04562636947631836, 0.04752588653564453, 0.046676990509033206, 0.046134273529052736, 0.04595404815673828, 0.04601958465576172, 0.04629708862304688, 0.045900798797607424, 0.04620083236694336, 0.04595609664916992, 0.045777919769287106, 0.045830142974853515, 0.04573183822631836, 0.04599398422241211, 0.04647628784179687, 0.046020606994628906, 0.045623294830322264, 0.04553318405151367, 0.04562124633789062, 0.04567244720458984, 0.04597862243652344, 0.04568576049804687, 0.045608959197998046, 0.04539699172973633, 0.045704193115234375, 0.04558438491821289, 0.04572774505615235, 0.04632371139526367, 0.047764480590820314, 0.04707328033447265, 0.04628889465332031, 0.046017536163330076, 0.04595302581787109, 0.046458881378173826, 0.04601651382446289, 0.04577280044555664, 0.046513153076171876, 0.04639539337158203, 0.046074878692626955, 0.046367774963378905, 0.046045150756835934, 0.045917182922363284, 0.04578201675415039, 0.044391422271728515, 0.0452044792175293, 0.045813758850097655, 0.045876224517822264, 0.04586700820922852, 0.04796416091918945, 0.04637491226196289, 0.045911041259765625, 0.046017536163330076, 0.046031871795654294, 0.04593971252441406, 0.045859840393066405, 0.046279678344726564, 0.046838783264160154, 0.04677836990356445, 0.04630531311035156, 0.045975521087646486, 0.0460840950012207, 0.04592127990722656, 0.04589567947387695, 0.045818878173828126, 0.0458260498046875, 0.04592947387695313, 0.046124031066894534, 0.0459048957824707, 0.046058494567871096, 0.04698112106323242, 0.046543872833251954, 0.04598681640625, 0.04558848190307617, 0.04506521606445312, 0.0459233283996582, 0.0457154541015625, 0.04582092666625977, 0.04589363098144531, 0.04658892822265625, 0.04617216110229492, 0.045930496215820314, 0.045706241607666016, 0.04589977645874024, 0.045868030548095705, 0.045876224517822264, 0.045603839874267575, 0.045722625732421876, 0.04573286437988281, 0.045706241607666016, 0.04689408111572266, 0.04745113754272461, 0.046279678344726564, 0.0460687370300293, 0.04582195281982422, 0.04574720001220703, 0.045692928314208986, 0.04580966567993164, 0.04580454254150391, 0.045871105194091794, 0.0458076171875, 0.04658380889892578, 0.046448638916015625, 0.04556902313232422, 0.04597964859008789, 0.045897727966308595, 0.04569804763793945, 0.04554547119140625, 0.0457523193359375, 0.045646846771240236, 0.045840385437011716, 0.04595711898803711, 0.045620223999023435, 0.04566425704956055, 0.04568678283691406, 0.04563251113891602, 0.04581273651123047, 0.045672481536865234, 0.045689823150634766, 0.04584447860717773, 0.04572467041015625, 0.04655615997314453, 0.046150657653808595, 0.04571852874755859, 0.04564275360107422, 0.04562124633789062, 0.04568473434448242, 0.045638656616210936, 
0.045551616668701174, 0.04568576049804687, 0.045725696563720705, 0.04572979354858398, 0.045706241607666016, 0.046048255920410154, 0.04587724685668945, 0.046132225036621094, 0.045841407775878903, 0.045794303894042966, 0.045870079040527346, 0.04571852874755859, 0.04744704055786133, 0.046284801483154295, 0.04577689743041992, 0.04652646255493164, 0.04648038482666016, 0.04582092666625977, 0.045824001312255856, 0.04589875030517578, 0.045832191467285156, 0.04561920166015625, 0.04601241683959961, 0.0456376953125, 0.04597139358520508, 0.04585369491577149, 0.04582912063598633, 0.04609024047851563, 0.045948928833007815, 0.045841407775878903, 0.045195262908935545, 0.04636876678466797, 0.046383102416992186, 0.04592025756835937, 0.04580966567993164, 0.045832191467285156, 0.04588032150268555, 0.04589875030517578, 0.04686336135864258, 0.046080001831054686, 0.04586598587036133, 0.04564582443237305, 0.045780990600585936, 0.04587519836425781, 0.04584550476074219, 0.046104576110839846, 0.04607385635375977, 0.04614348983764648, 0.045778942108154294, 0.045813758850097655, 0.045797374725341795, 0.04563455963134765, 0.0457625617980957, 0.04569804763793945, 0.045649921417236325, 0.045859840393066405, 0.04597350311279297, 0.04634726333618164, 0.04601139068603516, 0.04566220855712891, 0.045557758331298825, 0.04569497680664063, 0.04561612701416016, 0.045777919769287106, 0.045628414154052735, 0.045649921417236325, 0.04566016006469727, 0.04570111846923828, 0.045639678955078124, 0.045625343322753906, 0.045515777587890625, 0.04588851165771484, 0.0458158073425293, 0.04550451278686524, 0.045587455749511716, 0.04566527938842774, 0.04572159957885742, 0.04570316696166992, 0.04576870346069336, 0.04563455963134765, 0.046655487060546875, 0.04598988723754883, 0.04735795211791992, 0.046729217529296874, 0.046511104583740234, 0.04586393737792969, 0.04588544082641602, 0.04558643341064453, 0.04595404815673828, 0.04604006576538086, 0.0458342399597168, 0.04577996826171875, 0.045646846771240236, 0.04573286437988281, 0.0456888313293457, 0.04556492614746094, 0.045646846771240236, 0.045641727447509765, 0.045728767395019534, 0.04571852874755859, 0.04600115203857422, 0.04568473434448242, 0.046499839782714845, 0.046203903198242184, 0.04612300872802735, 0.04571340942382812, 0.04734873580932617, 0.046459903717041014, 0.046134273529052736, 0.045889537811279295, 0.045676544189453126, 0.045690879821777344, 0.04580556869506836, 0.045861888885498046, 0.04605644989013672, 0.04576665496826172, 0.04563251113891602, 0.045943809509277345, 0.045884414672851564, 0.045851646423339845, 0.04563046264648438, 0.04568064117431641, 0.047083518981933595, 0.04628582382202148, 0.045413375854492184, 0.04455321502685547, 0.04400844955444336, 0.04406476974487305, 0.04486963272094727, 0.04561407852172852, 0.04620800018310547, 0.04568473434448242, 0.045497344970703124, 0.04544102478027344, 0.04563148880004883, 0.044698623657226565, 0.04541747283935547, 0.04541747283935547, 0.04558848190307617, 0.04675481414794922, 0.04560588836669922, 0.045358081817626954, 0.04521881484985352, 0.0452863998413086, 0.045590526580810545, 0.04534374237060547, 0.045352958679199216, 0.045341697692871094, 0.045538303375244144, 0.04536012649536133, 0.04528947067260742, 0.0463001594543457, 0.04589363098144531, 0.045418495178222655, 0.04537139129638672, 0.0454205436706543, 0.045620223999023435, 0.04536012649536133, 0.04550860977172851, 0.04532735824584961, 0.04538675308227539, 0.0456099853515625, 0.04563251113891602, 0.045474815368652347, 0.04542259216308594, 0.04544716644287109, 
0.04534067153930664, 0.045464576721191405]",tokens/s,22.039723105143935,,,,,,,, -4bit-awq-gemv-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,Salesforce/codegen-6B-nl,Salesforce/codegen-6B-nl,cuda,0,42,,,True,,,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run - report = scenario.run(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run - self.run_model_loading_tracking(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking - backend.load() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load - self.load_transformers_model() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model - self.load_transformers_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights - self.load_transformers_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained - self.pretrained_model = self.automodel_loader.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3826, in from_pretrained - config = cls._autoset_attn_implementation( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1556, in _autoset_attn_implementation - cls._check_and_enable_flash_attn_2( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1647, in _check_and_enable_flash_attn_2 - raise ValueError( -ValueError: CodeGenForCausalLM does not support Flash Attention 2.0 yet. 
Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmpkj1bzu63/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new - -",codegen,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemv-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,facebook/opt-125m,facebook/opt-125m,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run - report = scenario.run(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 105, in run - _ = backend.generate(self.inputs, self.config.generate_kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context - return func(*args, **kwargs) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 400, in generate - return self.pretrained_model.generate(**inputs, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context - return func(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 1914, in generate - result = self._sample( - File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2651, in _sample - outputs = self( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 1118, in forward - outputs = self.model.decoder( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 884, in forward - layer_outputs = decoder_layer( - File 
""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 525, in forward - hidden_states, self_attn_weights, present_key_value = self.self_attn( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 292, in forward - query_states = self.q_proj(hidden_states) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context - return func(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/gemv.py"", line 162, in forward - assert AWQ_INSTALLED, ( -AssertionError: AWQ kernels could not be loaded. Please install them from https://github.com/casper-hansen/AutoAWQ_kernels - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemv-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,Qwen/Qwen1.5-110B,Qwen/Qwen1.5-110B,cuda,0,42,,,True,,,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run - report = scenario.run(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run - self.run_model_loading_tracking(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking - backend.load() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load - self.load_transformers_model() - File 
""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model - self.load_transformers_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights - self.load_transformers_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained - self.pretrained_model = self.automodel_loader.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4034, in from_pretrained - dispatch_model(model, **device_map_kwargs) - File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 494, in dispatch_model - model.to(device) - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 2905, in to - return super().to(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1174, in to - return self._apply(convert) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply - module._apply(fn) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply - module._apply(fn) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply - module._apply(fn) - [Previous line repeated 2 more times] - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 854, in _apply - self._buffers[key] = fn(buf) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1160, in convert - return t.to( -torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 32.00 MiB. GPU 0 has a total capacity of 22.18 GiB of which 8.50 MiB is free. Process 90579 has 22.17 GiB memory in use. Of the allocated memory 21.91 GiB is allocated by PyTorch, and 17.87 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) - -",qwen2,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemv-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,B,B,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 304, in hf_raise_for_status - response.raise_for_status() - File ""/usr/local/lib/python3.10/dist-packages/requests/models.py"", line 1024, in raise_for_status - raise HTTPError(http_error_msg, response=self) -requests.exceptions.HTTPError: 404 Client Error: Not Found for url: https://huggingface.co/B/resolve/main/config.json - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1221, in hf_hub_download - return _hf_hub_download_to_cache_dir( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1325, in _hf_hub_download_to_cache_dir - _raise_on_head_call_error(head_call_error, force_download, local_files_only) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1823, in _raise_on_head_call_error - raise head_call_error - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1722, in _get_metadata_or_catch_error - metadata = get_hf_file_metadata(url=url, proxies=proxies, timeout=etag_timeout, headers=headers) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1645, in get_hf_file_metadata - r = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 372, in _request_wrapper - response = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 396, in _request_wrapper - hf_raise_for_status(response) - File 
""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status - raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-669493f7-2380cd24135617be1ae4dd2d;c2b5372d-44f8-49c6-9a7a-a9413a12d142) - -Repository Not Found for url: https://huggingface.co/B/resolve/main/config.json. -Please make sure you specified the correct `repo_id` and `repo_type`. -If you are trying to access a private or gated repo, make sure you are authenticated. - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 425, in cached_file - raise EnvironmentError( -OSError: B is not a local folder and is not a valid model identifier listed on 'https://huggingface.co/models' -If this is a private repository, make sure to pass a token having permission to this repo either by logging in with `huggingface-cli login` or by passing `token=` - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemv-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,t,t,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File 
""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 304, in hf_raise_for_status - response.raise_for_status() - File ""/usr/local/lib/python3.10/dist-packages/requests/models.py"", line 1024, in raise_for_status - raise HTTPError(http_error_msg, response=self) -requests.exceptions.HTTPError: 404 Client Error: Not Found for url: https://huggingface.co/t/resolve/main/config.json - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1221, in hf_hub_download - return _hf_hub_download_to_cache_dir( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1325, in _hf_hub_download_to_cache_dir - _raise_on_head_call_error(head_call_error, force_download, local_files_only) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1823, in _raise_on_head_call_error - raise head_call_error - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1722, in _get_metadata_or_catch_error - metadata = get_hf_file_metadata(url=url, proxies=proxies, timeout=etag_timeout, headers=headers) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1645, in get_hf_file_metadata - r = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 372, in _request_wrapper - response = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 396, in _request_wrapper - hf_raise_for_status(response) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status - raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-669490e2-2b6888404018696802e4f36e;bd4a434d-36d8-4fcd-9dfa-8c23f1940452) - -Repository Not Found for url: https://huggingface.co/t/resolve/main/config.json. -Please make sure you specified the correct `repo_id` and `repo_type`. -If you are trying to access a private or gated repo, make sure you are authenticated. 
- -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 425, in cached_file - raise EnvironmentError( -OSError: t is not a local folder and is not a valid model identifier listed on 'https://huggingface.co/models' -If this is a private repository, make sure to pass a token having permission to this repo either by logging in with `huggingface-cli login` or by passing `token=` - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemv-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,stabilityai/stablelm-base-alpha-3b,stabilityai/stablelm-base-alpha-3b,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run - report = scenario.run(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 105, in run - _ = backend.generate(self.inputs, 
self.config.generate_kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context - return func(*args, **kwargs) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 400, in generate - return self.pretrained_model.generate(**inputs, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context - return func(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 1914, in generate - result = self._sample( - File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2651, in _sample - outputs = self( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1097, in forward - outputs = self.gpt_neox( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 988, in forward - outputs = layer( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 753, in forward - attention_layer_outputs = self.attention( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 339, in forward - query, key, value, present = self._attn_projections_and_rope( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 224, in _attn_projections_and_rope - qkv = self.query_key_value(hidden_states) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context - return func(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/gemv.py"", line 162, in forward - assert AWQ_INSTALLED, ( -AssertionError: AWQ kernels could not be loaded. 
Please install them from https://github.com/casper-hansen/AutoAWQ_kernels - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemv-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,Salesforce/codegen-16B-nl,Salesforce/codegen-16B-nl,cuda,0,42,,,True,,,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run - report = scenario.run(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run - self.run_model_loading_tracking(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking - backend.load() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load - self.load_transformers_model() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model - self.load_transformers_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights - self.load_transformers_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained - self.pretrained_model = self.automodel_loader.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3826, in from_pretrained - config = cls._autoset_attn_implementation( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1556, in _autoset_attn_implementation - cls._check_and_enable_flash_attn_2( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1647, in _check_and_enable_flash_attn_2 - raise ValueError( -ValueError: CodeGenForCausalLM does not support Flash Attention 2.0 yet. 
Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmpgml24945/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new - -",codegen,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemv-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,facebook/opt-6.7b,facebook/opt-6.7b,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run - report = scenario.run(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 105, in run - _ = backend.generate(self.inputs, self.config.generate_kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context - return func(*args, **kwargs) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 400, in generate - return self.pretrained_model.generate(**inputs, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context - return func(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 1914, in generate - result = self._sample( - File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2651, in _sample - outputs = self( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 1118, in forward - outputs = self.model.decoder( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 884, in forward - layer_outputs = decoder_layer( - File 
""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 525, in forward - hidden_states, self_attn_weights, present_key_value = self.self_attn( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 292, in forward - query_states = self.q_proj(hidden_states) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context - return func(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/gemv.py"", line 162, in forward - assert AWQ_INSTALLED, ( -AssertionError: AWQ kernels could not be loaded. Please install them from https://github.com/casper-hansen/AutoAWQ_kernels - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-exllama-v2-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,.,.,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config 
- return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 373, in cached_file - raise EnvironmentError( -OSError: . does not appear to have a file named config.json. Checkout 'https://huggingface.co/./tree/None' for available files. - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,2,64,1,,, -4bit-awq-exllama-v2-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,l,l,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 304, in hf_raise_for_status - response.raise_for_status() - File ""/usr/local/lib/python3.10/dist-packages/requests/models.py"", line 1024, in raise_for_status - raise HTTPError(http_error_msg, response=self) -requests.exceptions.HTTPError: 404 Client Error: Not Found for url: https://huggingface.co/l/resolve/main/config.json - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1221, in hf_hub_download - return _hf_hub_download_to_cache_dir( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1325, in _hf_hub_download_to_cache_dir - _raise_on_head_call_error(head_call_error, force_download, local_files_only) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1823, in _raise_on_head_call_error - raise head_call_error - File 
""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1722, in _get_metadata_or_catch_error - metadata = get_hf_file_metadata(url=url, proxies=proxies, timeout=etag_timeout, headers=headers) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1645, in get_hf_file_metadata - r = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 372, in _request_wrapper - response = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 396, in _request_wrapper - hf_raise_for_status(response) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status - raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-669491cd-4323898874a15e627bcd9c3b;531a0a54-8fe3-4618-a4a6-d0b2f3737c4f) - -Repository Not Found for url: https://huggingface.co/l/resolve/main/config.json. -Please make sure you specified the correct `repo_id` and `repo_type`. -If you are trying to access a private or gated repo, make sure you are authenticated. - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 425, in cached_file - raise EnvironmentError( -OSError: l is not a local folder and is not a valid model identifier listed on 'https://huggingface.co/models' -If this is a private repository, make sure to pass a token having permission to this repo either by logging in with `huggingface-cli login` or by passing `token=` - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,2,64,1,,, 
-4bit-awq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,Qwen/Qwen1.5-32B,Qwen/Qwen1.5-32B,cuda,0,42,,,True,,,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,,qwen2,MB,17851.572224,24081.072128,0.0,23444.062208,22019.972096,s,1,18.33216015625,18.33216015625,0.0,18.33216015625,18.33216015625,18.33216015625,18.33216015625,[18.33216015625],,kWh,0.00013568049681735868,7.433786205753354e-05,0.0002875982856340026,0.0004976166445088948,,MB,4439.01952,24146.08384,0.0,23496.491008,20926.605824,s,10,4.720280609130859,0.4720280609130859,2.8841172402019554e-05,0.4720277099609375,0.47206148681640625,0.4720615966796875,0.4720616845703125,"[0.47205691528320315, 0.4720334167480469, 0.4720091552734375, 0.4720035400390625, 0.47206170654296875, 0.47206146240234376, 0.47202200317382814, 0.47204571533203127, 0.47196603393554687, 0.47202066040039065]",tokens/s,542.3406386154171,kWh,5.5797554813762945e-06,3.05731043978886e-06,2.405832732745454e-05,3.269539324861969e-05,tokens/kWh,7829849.240635992,MB,4443.283456,23003.136,0.0,22347.251712,20926.608384,s,10,35.85977270507812,3.585977270507813,0.02018542601305503,3.58726513671875,3.6088769775390626,3.614168103027344,3.618401003417969,"[3.599657958984375, 3.5899755859375, 3.619459228515625, 3.55729345703125, 3.553048583984375, 3.5975693359375, 3.57366552734375, 3.5845546875, 3.57684716796875, 3.607701171875]",tokens/s,17.568432605006034,kWh,4.2156425097582095e-05,2.3104369792553154e-05,0.00014768986562654584,0.0002129506605166811,tokens/kWh,295843.17722773633,,s,630,35.857160175323465,0.05691612726241823,0.00098438600131968,0.05653196907043457,0.058163302612304686,0.05840870323181152,0.05968214096069336,"[0.05711872100830078, 0.0561080322265625, 0.05633331298828125, 0.05640806579589844, 0.05600153732299805, 0.055989246368408206, 0.05589913558959961, 0.05581619262695312, 0.05590528106689453, 0.05830963134765625, 0.05920358276367187, 0.05859942245483398, 0.05826047897338867, 0.05801881790161133, 0.05789593505859375, 0.058336254119873046, 0.05816320037841797, 0.057998336791992185, 0.057869312286376956, 0.055926784515380856, 0.05590220642089844, 0.05599846267700195, 0.056025089263916014, 0.05603225708007813, 0.05615513610839844, 0.055926784515380856, 0.05596876907348633, 0.057780223846435545, 0.05929574584960937, 0.057643009185791017, 0.05595340728759766, 0.057180160522460936, 0.05660774230957031, 0.056288257598876956, 0.056197120666503904, 0.058636287689208984, 0.05603839874267578, 0.057419776916503906, 0.0569804801940918, 0.058000385284423826, 0.05793689727783203, 0.05825228881835937, 0.05812223815917969, 0.05635583877563476, 0.056615936279296876, 0.058169345855712894, 0.05812736129760742, 0.05697433471679687, 0.058057727813720705, 0.05603430557250977, 0.057267200469970705, 0.05648588943481445, 0.05724160003662109, 0.05804032135009766, 0.05600665664672851, 0.05796966552734375, 0.057847808837890625, 0.057613311767578126, 0.05653094482421875, 0.05791027069091797, 0.05799935913085937, 0.05609062576293945, 0.05757132720947265, 0.057057281494140626, 0.05631795120239258, 
0.056251392364501954, 0.056242176055908207, 0.05607628631591797, 0.058159103393554686, 0.05697228622436523, 0.05607014465332031, 0.0564316177368164, 0.05609471893310547, 0.05670502471923828, 0.05614080047607422, 0.05609676742553711, 0.05602816009521484, 0.05765529632568359, 0.05705420684814453, 0.056774654388427735, 0.05697843170166016, 0.05791743850708008, 0.057970687866210936, 0.057919486999511716, 0.057807872772216794, 0.05655244827270508, 0.056288257598876956, 0.05714636611938476, 0.05653299331665039, 0.05810892868041992, 0.05605580902099609, 0.05658931350708008, 0.05624319839477539, 0.057057281494140626, 0.05728768157958984, 0.05814169692993164, 0.05754880142211914, 0.057763839721679686, 0.05774131011962891, 0.05765529632568359, 0.057606143951416014, 0.05663334274291992, 0.057649150848388675, 0.05796966552734375, 0.0577710075378418, 0.05551001739501953, 0.05583052825927735, 0.05575372695922851, 0.05574553680419922, 0.0557844467163086, 0.056809471130371096, 0.057613311767578126, 0.05797683334350586, 0.05620019149780273, 0.05773311996459961, 0.05791231918334961, 0.05599641418457031, 0.057799678802490234, 0.057818111419677735, 0.05656576156616211, 0.05622784042358398, 0.058651649475097656, 0.05825331115722656, 0.05643571090698242, 0.05731840133666992, 0.05672447967529297, 0.058982398986816405, 0.05763686370849609, 0.05836800003051758, 0.057145343780517575, 0.05841100692749023, 0.056883201599121094, 0.05808947372436524, 0.056659969329833984, 0.05614182281494141, 0.05730713653564453, 0.058172416687011716, 0.05805977630615235, 0.05715763092041016, 0.05830144119262695, 0.05806796646118164, 0.058071041107177736, 0.05812531280517578, 0.0580208625793457, 0.05826047897338867, 0.05861273574829102, 0.05804339218139649, 0.05819801712036133, 0.05746278381347656, 0.05780070495605469, 0.05842227172851563, 0.05772083282470703, 0.05709823989868164, 0.058278911590576174, 0.057294849395751954, 0.057148414611816405, 0.058552318572998044, 0.05841100692749023, 0.058142719268798826, 0.05724160003662109, 0.05658931350708008, 0.05801574325561523, 0.057474048614501956, 0.05588787078857422, 0.05791231918334961, 0.058055679321289064, 0.05862604904174805, 0.05976166534423828, 0.059431934356689455, 0.05848371124267578, 0.05635379028320313, 0.05631590270996094, 0.05614591979980469, 0.05611724853515625, 0.056134654998779294, 0.056011775970458984, 0.056207359313964846, 0.056085502624511716, 0.05605376052856445, 0.05601792144775391, 0.05607526397705078, 0.055992321014404295, 0.056008705139160155, 0.05584691238403321, 0.05691392135620117, 0.05638860702514648, 0.05826047897338867, 0.059309055328369144, 0.05643468856811523, 0.05819289779663086, 0.05620121765136719, 0.056005630493164066, 0.05606195068359375, 0.055943168640136716, 0.057078784942626956, 0.057047039031982424, 0.05587251281738281, 0.05589811325073242, 0.05650841522216797, 0.057985023498535154, 0.057929729461669924, 0.059698177337646485, 0.05653094482421875, 0.05579980850219726, 0.05586943817138672, 0.05595443344116211, 0.05580083084106445, 0.05594112014770508, 0.05671628952026367, 0.056368129730224606, 0.05609369659423828, 0.055979007720947264, 0.05606195068359375, 0.05608345413208008, 0.0558551025390625, 0.05596979141235352, 0.05613363265991211, 0.05619507217407226, 0.0564213752746582, 0.055858177185058595, 0.05624934387207031, 0.058087425231933595, 0.05637222290039062, 0.05649612808227539, 0.05606911849975586, 0.056180736541748044, 0.056592384338378904, 0.0578600959777832, 0.05599334335327148, 0.05627084732055664, 0.056169471740722655, 0.05603123092651367, 
0.056008705139160155, 0.055975936889648435, 0.056231937408447265, 0.05691187286376953, 0.05611110305786133, 0.05615206527709961, 0.056220672607421876, 0.056068096160888675, 0.05667942428588867, 0.05789593505859375, 0.05799731063842774, 0.05603430557250977, 0.05590528106689453, 0.05602816009521484, 0.05548134231567383, 0.055959552764892576, 0.05596364974975586, 0.056068096160888675, 0.05670912170410156, 0.05820108795166016, 0.05650022506713867, 0.056153087615966796, 0.056018943786621096, 0.05601279830932617, 0.0560261116027832, 0.05777920150756836, 0.05805670547485352, 0.05964287948608398, 0.05830144119262695, 0.05833318328857422, 0.05714739227294922, 0.057022464752197265, 0.056272895812988284, 0.05620121765136719, 0.056010753631591796, 0.05602099227905273, 0.05591449737548828, 0.055949310302734374, 0.05590630340576172, 0.05605478286743164, 0.05607731246948242, 0.05608038330078125, 0.05599846267700195, 0.05634764862060547, 0.056446975708007815, 0.05606399917602539, 0.05598515319824219, 0.05612236785888672, 0.05637222290039062, 0.056167423248291014, 0.05575065612792969, 0.05756415939331055, 0.055962623596191405, 0.05600460815429688, 0.055790592193603515, 0.05598822402954102, 0.05588684844970703, 0.056095745086669924, 0.05581619262695312, 0.05611724853515625, 0.05584998321533203, 0.05584691238403321, 0.05591244888305664, 0.05602201461791992, 0.055826431274414064, 0.05591551971435547, 0.055795711517333986, 0.056251392364501954, 0.05602918243408203, 0.05631999969482422, 0.05593600082397461, 0.05612134552001953, 0.05591961669921875, 0.055967742919921876, 0.05599641418457031, 0.05627391815185547, 0.056220672607421876, 0.05621760177612305, 0.05605068969726563, 0.056289279937744144, 0.05793484878540039, 0.05816012954711914, 0.057970687866210936, 0.057527294158935545, 0.056493057250976565, 0.056095745086669924, 0.05800447845458984, 0.05997055816650391, 0.05973811340332031, 0.05793484878540039, 0.05624729537963867, 0.05808230209350586, 0.056169471740722655, 0.05669683074951172, 0.05608755111694336, 0.05587353515625, 0.056118270874023435, 0.055947265625, 0.05594828796386719, 0.05618175888061523, 0.056043521881103515, 0.05602816009521484, 0.055946239471435545, 0.05595033645629883, 0.05598003387451172, 0.057404415130615234, 0.05796249771118164, 0.0579317741394043, 0.061483009338378906, 0.059597824096679686, 0.059200511932373044, 0.05817446517944336, 0.05627084732055664, 0.05611110305786133, 0.056292350769042966, 0.05586431884765625, 0.05618175888061523, 0.05779455947875976, 0.05747814559936523, 0.057148414611816405, 0.05626163101196289, 0.05584896087646484, 0.056807422637939455, 0.05787136077880859, 0.05832806396484375, 0.058011646270751956, 0.05687807846069336, 0.05584998321533203, 0.05824512100219727, 0.05802905654907226, 0.058068992614746094, 0.05791436767578125, 0.05636505508422852, 0.05588684844970703, 0.05592268753051758, 0.05855744171142578, 0.05704499053955078, 0.05686374282836914, 0.05729177474975586, 0.05543833541870117, 0.056837120056152345, 0.05584076690673828, 0.05806387329101562, 0.0578600959777832, 0.05627391815185547, 0.056973312377929686, 0.05650022506713867, 0.055947265625, 0.05706547164916992, 0.05693132781982422, 0.05769625473022461, 0.05663334274291992, 0.05601587295532227, 0.057915393829345706, 0.0566640625, 0.05782015991210938, 0.05801574325561523, 0.05649612808227539, 0.05599027252197265, 0.055651329040527345, 0.05577830505371094, 0.05604044723510742, 0.0569620475769043, 0.05572198486328125, 0.05587148666381836, 0.05589606475830078, 0.056062976837158204, 0.05707468795776367, 
0.05600460815429688, 0.05636198425292969, 0.05725183868408203, 0.05718220901489258, 0.0564213752746582, 0.057059326171875, 0.055818241119384764, 0.057985023498535154, 0.057591808319091796, 0.05818675231933594, 0.05812940979003906, 0.057896961212158204, 0.05573529434204102, 0.05603123092651367, 0.056948734283447267, 0.0562083854675293, 0.056546302795410154, 0.05708697509765625, 0.05590323257446289, 0.055790592193603515, 0.05740851211547852, 0.056221694946289064, 0.05671321487426758, 0.05592473602294922, 0.05609983825683594, 0.05768396759033203, 0.057987071990966796, 0.056068096160888675, 0.055700481414794924, 0.055913471221923826, 0.056185855865478515, 0.05627084732055664, 0.05744025421142578, 0.05724979019165039, 0.05645721435546875, 0.05619814300537109, 0.055965694427490234, 0.05700812911987305, 0.057940990447998046, 0.057869312286376956, 0.058164222717285156, 0.05696307373046875, 0.056231937408447265, 0.05694976043701172, 0.05772697448730469, 0.05589503860473633, 0.05616844940185547, 0.05615206527709961, 0.056095745086669924, 0.057437183380126954, 0.05802598571777344, 0.057981952667236325, 0.055981056213378906, 0.055981056213378906, 0.05595852661132812, 0.05585715103149414, 0.0573306884765625, 0.05595340728759766, 0.05606399917602539, 0.05591551971435547, 0.05582131195068359, 0.05738598251342773, 0.057809921264648435, 0.057734142303466796, 0.056025089263916014, 0.05754265594482422, 0.05690982437133789, 0.05791743850708008, 0.05803519821166992, 0.056030208587646485, 0.05799116897583008, 0.05785702514648437, 0.057867263793945314, 0.05774233627319336, 0.05597491073608398, 0.05633740615844727, 0.0581396484375, 0.05692927932739258, 0.05751705551147461, 0.06029721450805664, 0.05874790573120117, 0.05623603057861328, 0.056420352935791014, 0.05624115371704102, 0.05768806457519531, 0.05689753723144531, 0.056376319885253906, 0.056030208587646485, 0.05605990219116211, 0.05593190383911133, 0.055951358795166016, 0.05605887985229492, 0.05620019149780273, 0.05612953567504883, 0.056714241027832034, 0.05588070297241211, 0.057262081146240235, 0.05789593505859375, 0.05731532669067383, 0.05590016174316406, 0.05972889709472656, 0.057706497192382813, 0.05635379028320313, 0.056036350250244144, 0.05659648132324219, 0.056940544128417966, 0.05806182479858398, 0.057234432220458986, 0.056174591064453126, 0.05605887985229492, 0.05642956924438477, 0.05878579330444336, 0.05626265716552734, 0.056471553802490235, 0.05654425430297851, 0.05623603057861328, 0.0559185905456543, 0.056081409454345706, 0.05635583877563476, 0.05622988891601562, 0.056151039123535154, 0.05604761505126953, 0.055944190979003904, 0.056730625152587894, 0.05795328140258789, 0.05791027069091797, 0.05827993774414063, 0.056134654998779294, 0.0579420166015625, 0.05753753662109375, 0.05717708969116211, 0.05813862228393555, 0.05918310546875, 0.05634764862060547, 0.056387584686279295, 0.05621145629882812, 0.057422847747802735, 0.05716377639770508, 0.05731020736694336, 0.05614182281494141, 0.05650022506713867, 0.05706444931030273, 0.055890945434570315, 0.05588787078857422, 0.05588582229614258, 0.05599846267700195, 0.05690163040161133, 0.05646950531005859, 0.05599129486083984, 0.05636710357666016, 0.05767475128173828, 0.05634355163574219, 0.05620019149780273, 0.05609983825683594, 0.056035327911376956, 0.05597798538208008, 0.05766656112670898, 0.05586739349365234, 0.056938495635986325, 0.05616230392456055, 0.056753150939941405, 0.057106433868408205, 0.05625856018066406, 0.057592830657958984, 0.058229759216308595, 0.05794918441772461, 0.05628211212158203, 
0.05700198364257812, 0.05761740875244141, 0.05617766571044922, 0.05624422454833984, 0.056118270874023435, 0.05606195068359375, 0.057115646362304685, 0.05840588760375977, 0.05793894577026367, 0.05796249771118164, 0.05600972747802734, 0.056019966125488284, 0.055812095642089846, 0.05599334335327148, 0.05583462524414062, 0.056043521881103515, 0.055926784515380856, 0.05592166519165039, 0.05632614517211914, 0.05624934387207031, 0.05604556655883789, 0.05653401565551758, 0.05614899063110351, 0.056182785034179686, 0.05912063980102539, 0.057455615997314455, 0.058106880187988284, 0.05877350234985351, 0.058294273376464846, 0.058270721435546874, 0.05703782272338867, 0.058102783203125, 0.05767270278930664, 0.057744384765625, 0.05775667190551758, 0.058014720916748044, 0.05609062576293945, 0.0579051513671875, 0.057870334625244144, 0.057422847747802735, 0.056957950592041014, 0.05830246353149414, 0.05812428665161133, 0.056123390197753906, 0.05794918441772461, 0.058295295715332034, 0.05810995101928711, 0.05811609649658203, 0.05820620727539062, 0.057780223846435545, 0.05611315155029297, 0.05794611358642578, 0.05629951858520508, 0.05775872039794922, 0.05809254455566406, 0.05801574325561523, 0.058060798645019535, 0.05778636932373047, 0.05770751953125, 0.058537982940673826, 0.05624319839477539, 0.05720883178710937, 0.05837823867797851]",tokens/s,17.569712629768134,,,2,64,1,,, -4bit-awq-exllama-v2-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,openai-community/gpt2,openai-community/gpt2,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.42.3,,0.31.0,,,,1.20.0,,,,0.11.1,,,,,,,,,,,,,,,,,,,,,,,,,,,MB,1228.386304,1002.962944,0.0,356.51584,319.013888,s,25,0.17499311971664427,0.006999724788665772,0.0002502547192134345,0.006900447845458985,0.007165145587921143,0.007245798397064208,0.007853303222656248,"[0.008038944244384765, 0.006923200130462647, 0.006900447845458985, 0.006933184146881104, 0.006880576133728027, 0.006815231800079345, 0.006844160079956055, 0.006788415908813477, 0.0067693119049072265, 0.006920479774475098, 0.006882304191589355, 0.0071567678451538086, 0.006879392147064209, 0.007167232036590576, 0.006890048027038574, 0.0068763837814331055, 0.007155519962310791, 0.007082911968231201, 0.0070314879417419434, 0.007011424064636231, 0.0068913278579711915, 0.007162015914916992, 0.006900288105010986, 0.006826623916625977, 0.007265439987182617]",tokens/s,36572.86646676812,kWh,8.150109774308188e-08,4.46547292366166e-08,1.7293998509524707e-07,2.9909581207494555e-07,tokens/kWh,855913020.7274622,MB,1228.386304,1002.962944,0.0,356.51584,319.016448,s,25,10.13888803100586,0.40555552124023436,0.006311312712275679,0.4059725341796875,0.41398695068359376,0.41548115234375,0.41856573608398434,"[0.41943405151367186, 0.4079971618652344, 0.39821878051757814, 0.39863568115234377, 0.39658810424804686, 0.39883779907226563, 0.39907968139648436, 0.39881124877929686, 0.41027752685546875, 0.3991168212890625, 0.39967874145507815, 0.4059725341796875, 0.4137551574707031, 0.4085873718261719, 0.39897418212890623, 0.4062608947753906, 0.40200985717773435, 0.40696990966796875, 
0.41320684814453124, 0.4053135986328125, 0.40550439453125, 0.4070886535644531, 0.4086114807128906, 0.4141414794921875, 0.4158160705566406]",tokens/s,155.3424788974366,kWh,4.94671478769522e-06,2.710110287520569e-06,8.266001951238741e-06,1.5922827026454533e-05,tokens/kWh,3956583.833720634,,s,1574,10.298755065441126,0.006543046420229436,0.0009065478681006509,0.006355999946594238,0.0066198528289794925,0.006672076821327209,0.013541457796096801,"[0.007553023815155029, 0.007541759967803955, 0.007498752117156982, 0.007409664154052734, 0.007941120147705078, 0.0069253120422363285, 0.006652927875518798, 0.00662937593460083, 0.006646783828735352, 0.006633471965789795, 0.006604800224304199, 0.0067983360290527345, 0.006624256134033203, 0.006653952121734619, 0.006639616012573242, 0.0066406397819519045, 0.006626304149627686, 0.006649856090545654, 0.006614016056060791, 0.006621183872222901, 0.006628352165222168, 0.006662144184112549, 0.006616064071655273, 0.006665215969085693, 0.006718463897705078, 0.00659660816192627, 0.006651904106140137, 0.006834176063537598, 0.006624256134033203, 0.006623231887817383, 0.006614016056060791, 0.006594560146331787, 0.006635519981384277, 0.006584320068359375, 0.006608895778656006, 0.006612991809844971, 0.0066304001808166506, 0.006595583915710449, 0.0066078720092773435, 0.006628352165222168, 0.006616064071655273, 0.0066119680404663084, 0.006525951862335205, 0.006498303890228272, 0.006526976108551025, 0.006487040042877197, 0.006495232105255127, 0.006487040042877197, 0.006569983959197998, 0.006379519939422608, 0.006252543926239014, 0.006254591941833496, 0.006284287929534912, 0.00632422399520874, 0.006354944229125976, 0.0063211522102355954, 0.006364160060882569, 0.006338560104370118, 0.0063201279640197755, 0.006326272010803223, 0.006315008163452148, 0.006311935901641846, 0.013566975593566894, 0.006316031932830811, 0.006298624038696289, 0.0063201279640197755, 0.006344704151153564, 0.006319104194641113, 0.006331391811370849, 0.006323200225830078, 0.006335487842559814, 0.006322175979614258, 0.006358016014099121, 0.006372352123260498, 0.006303743839263916, 0.006379519939422608, 0.006315008163452148, 0.00636518383026123, 0.006347775936126709, 0.00662937593460083, 0.007802879810333252, 0.007903232097625732, 0.007887872219085693, 0.007824384212493896, 0.006624256134033203, 0.006551551818847656, 0.006576128005981445, 0.006462463855743408, 0.006359039783477783, 0.006407167911529541, 0.006405119895935059, 0.00643071985244751, 0.0064204797744750975, 0.006385663986206055, 0.006385663986206055, 0.006342656135559082, 0.006355967998504639, 0.006334464073181153, 0.006367231845855713, 0.006338560104370118, 0.006322175979614258, 0.006336512088775635, 0.006360064029693604, 0.0063272957801818845, 0.00633241605758667, 0.006339583873748779, 0.006359039783477783, 0.006326272010803223, 0.0064542717933654785, 0.006344704151153564, 0.006348800182342529, 0.006339583873748779, 0.006306816101074219, 0.006323200225830078, 0.006344704151153564, 0.006472703933715821, 0.006535168170928955, 0.006359039783477783, 0.006425600051879882, 0.006412288188934326, 0.006355967998504639, 0.006393856048583985, 0.006352896213531494, 0.0063836159706115725, 0.006378496170043945, 0.01365503978729248, 0.006351871967315674, 0.006328320026397705, 0.006339583873748779, 0.0063211522102355954, 0.006352896213531494, 0.006337535858154297, 0.006345727920532227, 0.006335487842559814, 0.006315008163452148, 0.006355967998504639, 0.0063508801460266115, 0.00633135986328125, 0.006347775936126709, 0.006371327877044678, 0.006358016014099121, 
0.006330368041992188, 0.006339583873748779, 0.00633241605758667, 0.006341631889343262, 0.006363135814666748, 0.006351871967315674, 0.00637440013885498, 0.00643071985244751, 0.006362112045288086, 0.006343679904937744, 0.006339583873748779, 0.006322175979614258, 0.0062975997924804685, 0.00626585578918457, 0.006291456222534179, 0.006246399879455566, 0.006270976066589356, 0.006244351863861084, 0.006285312175750732, 0.006254591941833496, 0.006260735988616943, 0.006264832019805908, 0.006291456222534179, 0.006273024082183838, 0.006239232063293457, 0.00628223991394043, 0.006227968215942382, 0.006294528007507324, 0.0062740478515625, 0.006308864116668702, 0.006285312175750732, 0.006392831802368164, 0.006340608119964599, 0.006330368041992188, 0.006354944229125976, 0.006335487842559814, 0.006323200225830078, 0.006337535858154297, 0.006237184047698975, 0.00626585578918457, 0.006254591941833496, 0.006262784004211426, 0.006262784004211426, 0.006273024082183838, 0.006322175979614258, 0.006329343795776367, 0.0063498239517211915, 0.013574144363403321, 0.006351871967315674, 0.0063272957801818845, 0.006416384220123291, 0.006345727920532227, 0.006398975849151611, 0.006466559886932373, 0.006325247764587402, 0.006358016014099121, 0.006358016014099121, 0.006358016014099121, 0.006342656135559082, 0.006338560104370118, 0.006326272010803223, 0.006348800182342529, 0.006322175979614258, 0.006341631889343262, 0.0063170561790466305, 0.006346784114837647, 0.0063200960159301755, 0.006329343795776367, 0.006400000095367431, 0.006338560104370118, 0.0063569917678833006, 0.006446080207824707, 0.006358016014099121, 0.0063569917678833006, 0.006323200225830078, 0.006325247764587402, 0.006337535858154297, 0.006341631889343262, 0.006340608119964599, 0.00636518383026123, 0.006339583873748779, 0.006331391811370849, 0.0063498239517211915, 0.006315008163452148, 0.006341631889343262, 0.006371327877044678, 0.006300672054290772, 0.006342656135559082, 0.0063201279640197755, 0.006342656135559082, 0.006293504238128662, 0.006366208076477051, 0.006284287929534912, 0.006259712219238281, 0.0062740478515625, 0.006264832019805908, 0.006250495910644531, 0.006248447895050049, 0.006259712219238281, 0.0062679038047790524, 0.006238207817077636, 0.006242303848266601, 0.006231040000915527, 0.0062494721412658695, 0.006238207817077636, 0.006262784004211426, 0.00633241605758667, 0.0062576642036437985, 0.006259712219238281, 0.006255616188049316, 0.013544447898864746, 0.006300672054290772, 0.006345727920532227, 0.006344704151153564, 0.006329343795776367, 0.006352896213531494, 0.0063201279640197755, 0.006367231845855713, 0.006322175979614258, 0.006341631889343262, 0.006329343795776367, 0.00638976001739502, 0.006337535858154297, 0.006329343795776367, 0.006345727920532227, 0.0063498239517211915, 0.0063539199829101565, 0.006387712001800537, 0.006310912132263183, 0.006340608119964599, 0.006301695823669433, 0.0062576642036437985, 0.0062341117858886715, 0.006278143882751465, 0.006254591941833496, 0.006247424125671387, 0.0062975997924804685, 0.006230016231536865, 0.0062638077735900875, 0.006244351863861084, 0.0062679038047790524, 0.006277120113372803, 0.006237184047698975, 0.006244351863861084, 0.006210559844970703, 0.006259712219238281, 0.006266880035400391, 0.0063283839225769046, 0.006219711780548096, 0.006268928050994873, 0.0062566399574279785, 0.006260735988616943, 0.006245376110076905, 0.006250495910644531, 0.006285312175750732, 0.006333439826965332, 0.006364160060882569, 0.006348800182342529, 0.006300672054290772, 0.006351871967315674, 0.006240255832672119, 
0.006242303848266601, 0.0062464637756347655, 0.006262720108032227, 0.006248447895050049, 0.0062228479385375976, 0.006261760234832763, 0.0063211522102355954, 0.0062576642036437985, 0.006209536075592041, 0.006245376110076905, 0.006276095867156982, 0.006243328094482422, 0.013425663948059082, 0.006345727920532227, 0.006366208076477051, 0.006371327877044678, 0.006328320026397705, 0.006311935901641846, 0.00632422399520874, 0.0063539199829101565, 0.006340608119964599, 0.006342656135559082, 0.006334464073181153, 0.006346752166748047, 0.006329343795776367, 0.006348800182342529, 0.006338560104370118, 0.006318079948425293, 0.006333439826965332, 0.006388735771179199, 0.0063272957801818845, 0.006350912094116211, 0.006325183868408203, 0.006355967998504639, 0.0063211522102355954, 0.006333439826965332, 0.006351871967315674, 0.006306816101074219, 0.006325247764587402, 0.006345727920532227, 0.0063805441856384275, 0.006335487842559814, 0.006355967998504639, 0.006302720069885254, 0.006245376110076905, 0.006280191898345947, 0.006252543926239014, 0.0063498239517211915, 0.006242303848266601, 0.006284287929534912, 0.006289408206939697, 0.006280191898345947, 0.006346752166748047, 0.0063498239517211915, 0.006347775936126709, 0.0063508481979370115, 0.006347775936126709, 0.006300672054290772, 0.0062197761535644535, 0.006250495910644531, 0.006247424125671387, 0.006277120113372803, 0.006301695823669433, 0.006284287929534912, 0.006262784004211426, 0.00626585578918457, 0.006363135814666748, 0.006362112045288086, 0.006379519939422608, 0.00636627197265625, 0.006358975887298584, 0.0064133119583129885, 0.006367231845855713, 0.006362112045288086, 0.00636518383026123, 0.013474847793579102, 0.006304736137390137, 0.006323232173919678, 0.006257631778717041, 0.00624128007888794, 0.006244351863861084, 0.006248447895050049, 0.006277120113372803, 0.006258687973022461, 0.006303743839263916, 0.0062740478515625, 0.0063272957801818845, 0.006352896213531494, 0.006315008163452148, 0.006352896213531494, 0.006358016014099121, 0.006330368041992188, 0.006330368041992188, 0.006344704151153564, 0.00636518383026123, 0.0063211522102355954, 0.006347775936126709, 0.0063294081687927245, 0.006420415878295898, 0.006358016014099121, 0.006328320026397705, 0.006343679904937744, 0.006364160060882569, 0.006335487842559814, 0.006344704151153564, 0.006355967998504639, 0.006371327877044678, 0.00636518383026123, 0.0063211522102355954, 0.006343679904937744, 0.006344704151153564, 0.0063201279640197755, 0.006239232063293457, 0.006293504238128662, 0.00632422399520874, 0.006340608119964599, 0.0063539199829101565, 0.006364160060882569, 0.006326272010803223, 0.006331391811370849, 0.006387712001800537, 0.006437888145446777, 0.00633241605758667, 0.006347775936126709, 0.006355967998504639, 0.006359039783477783, 0.006352896213531494, 0.006370304107666015, 0.006326272010803223, 0.006337535858154297, 0.006359072208404541, 0.006336480140686035, 0.0063272957801818845, 0.0063498239517211915, 0.006315040111541748, 0.0063200960159301755, 0.00628223991394043, 0.006285312175750732, 0.013402112007141113, 0.0063170561790466305, 0.006387712001800537, 0.00633241605758667, 0.006262784004211426, 0.00626585578918457, 0.006268928050994873, 0.00626585578918457, 0.006294528007507324, 0.006299647808074951, 0.006277120113372803, 0.006385663986206055, 0.006337567806243897, 0.006363103866577149, 0.006330431938171387, 0.006377439975738525, 0.006355936050415039, 0.006333439826965332, 0.006363135814666748, 0.006334496021270752, 0.006326240062713623, 0.0064542717933654785, 0.006329343795776367, 
0.006344704151153564, 0.006341631889343262, 0.006333439826965332, 0.006271999835968017, 0.006259712219238281, 0.0062638077735900875, 0.006252543926239014, 0.006246399879455566, 0.0062576642036437985, 0.006258687973022461, 0.0062566399574279785, 0.006334464073181153, 0.0063498239517211915, 0.006312960147857666, 0.006330368041992188, 0.006300672054290772, 0.006333439826965332, 0.006349887847900391, 0.006418367862701416, 0.006323200225830078, 0.006313983917236328, 0.006328320026397705, 0.006315008163452148, 0.006339583873748779, 0.0063498239517211915, 0.006355967998504639, 0.0063539199829101565, 0.006322175979614258, 0.0063272957801818845, 0.006322175979614258, 0.006319104194641113, 0.006360064029693604, 0.006335487842559814, 0.006340608119964599, 0.0063201279640197755, 0.006338560104370118, 0.006394879817962646, 0.006402048110961914, 0.006336512088775635, 0.0063211522102355954, 0.013421567916870117, 0.006616064071655273, 0.006685696125030518, 0.006583295822143555, 0.006564864158630371, 0.0065771517753601075, 0.006532095909118653, 0.006549503803253174, 0.006567935943603515, 0.006543360233306885, 0.0065484800338745115, 0.00653004789352417, 0.006536191940307618, 0.006542335987091064, 0.0065710082054138185, 0.006550528049468994, 0.006557695865631104, 0.0066344962120056155, 0.00652185583114624, 0.006583295822143555, 0.006567935943603515, 0.006520832061767578, 0.006569983959197998, 0.006535168170928955, 0.006551551818847656, 0.0065474557876586915, 0.006527999877929688, 0.006554624080657959, 0.006463488101959228, 0.006458367824554443, 0.006569983959197998, 0.006790143966674805, 0.006398975849151611, 0.006395904064178467, 0.007122943878173828, 0.007017471790313721, 0.006656000137329102, 0.006595583915710449, 0.006564864158630371, 0.006590464115142822, 0.006599679946899414, 0.006574079990386963, 0.006592512130737305, 0.0067348480224609375, 0.006368256092071533, 0.006345727920532227, 0.006308864116668702, 0.006351871967315674, 0.006345727920532227, 0.006338560104370118, 0.006392831802368164, 0.0063498239517211915, 0.006330368041992188, 0.006302720069885254, 0.00633241605758667, 0.006333439826965332, 0.006315008163452148, 0.006326272010803223, 0.006310912132263183, 0.0063211522102355954, 0.006340672016143798, 0.00630675220489502, 0.006346752166748047, 0.013609984397888183, 0.006345727920532227, 0.006343679904937744, 0.0063272957801818845, 0.006323200225830078, 0.006330368041992188, 0.006333439826965332, 0.006354944229125976, 0.006346752166748047, 0.006335487842559814, 0.006337535858154297, 0.006348800182342529, 0.0063272957801818845, 0.006331391811370849, 0.006347775936126709, 0.006339583873748779, 0.006310912132263183, 0.006319104194641113, 0.006375423908233643, 0.006299647808074951, 0.006300735950469971, 0.006322112083435059, 0.006328320026397705, 0.006300672054290772, 0.006435840129852295, 0.006328320026397705, 0.006347775936126709, 0.006405119895935059, 0.006336512088775635, 0.006370304107666015, 0.0063508481979370115, 0.006328320026397705, 0.006347775936126709, 0.006347775936126709, 0.0063201279640197755, 0.006325247764587402, 0.0063170561790466305, 0.0063272957801818845, 0.006304768085479737, 0.006363135814666748, 0.006377471923828125, 0.006348800182342529, 0.006338592052459717, 0.006314976215362549, 0.006348800182342529, 0.0063170561790466305, 0.00628223991394043, 0.006262784004211426, 0.006254591941833496, 0.006262784004211426, 0.0062904319763183595, 0.0062904319763183595, 0.006266880035400391, 0.006288383960723877, 0.00630790376663208, 0.006268864154815674, 0.00628223991394043, 
0.0062576642036437985, 0.006338560104370118, 0.0065218877792358396, 0.006366176128387451, 0.0063569917678833006, 0.006330368041992188, 0.013532159805297851, 0.006331391811370849, 0.006326272010803223, 0.006303743839263916, 0.006318079948425293, 0.006369279861450195, 0.006342656135559082, 0.006354944229125976, 0.006341631889343262, 0.0063569917678833006, 0.006308864116668702, 0.0063610877990722655, 0.006354944229125976, 0.006339583873748779, 0.006340608119964599, 0.00636518383026123, 0.006388735771179199, 0.0063508481979370115, 0.006334464073181153, 0.0063272957801818845, 0.006301695823669433, 0.006336512088775635, 0.006295551776885986, 0.006325247764587402, 0.0063272957801818845, 0.006309887886047363, 0.006323200225830078, 0.006300672054290772, 0.006334464073181153, 0.006348800182342529, 0.006479872226715088, 0.00636521577835083, 0.006327263832092285, 0.006336512088775635, 0.006636544227600098, 0.0063508481979370115, 0.006322175979614258, 0.006299647808074951, 0.0063498239517211915, 0.006301695823669433, 0.00633241605758667, 0.006334464073181153, 0.006330368041992188, 0.006387712001800537, 0.006308864116668702, 0.006339583873748779, 0.006313983917236328, 0.00632422399520874, 0.00633241605758667, 0.006369279861450195, 0.006307839870452881, 0.006307839870452881, 0.006362112045288086, 0.006336512088775635, 0.006307839870452881, 0.006328320026397705, 0.006291456222534179, 0.006311935901641846, 0.006340640068054199, 0.0062863039970397945, 0.006346752166748047, 0.006344704151153564, 0.006330368041992188, 0.01417625617980957, 0.006680575847625733, 0.007311359882354736, 0.00695091199874878, 0.0065710082054138185, 0.006619135856628418, 0.006626304149627686, 0.0066109437942504885, 0.006896639823913574, 0.007411712169647216, 0.00667852783203125, 0.006569983959197998, 0.006575104236602783, 0.006576128005981445, 0.006584320068359375, 0.006497280120849609, 0.006366208076477051, 0.006243328094482422, 0.006244351863861084, 0.006262784004211426, 0.00626585578918457, 0.006271999835968017, 0.006341631889343262, 0.006309887886047363, 0.006328320026397705, 0.0063272957801818845, 0.006322175979614258, 0.006492159843444824, 0.006329343795776367, 0.0063508481979370115, 0.0063170561790466305, 0.0063211522102355954, 0.006322175979614258, 0.006313983917236328, 0.006367231845855713, 0.00633241605758667, 0.006331391811370849, 0.006316031932830811, 0.006325247764587402, 0.006329343795776367, 0.006300672054290772, 0.006355967998504639, 0.006323200225830078, 0.006359039783477783, 0.0063610877990722655, 0.006348800182342529, 0.006340608119964599, 0.006331391811370849, 0.006360064029693604, 0.0063539199829101565, 0.006341631889343262, 0.0063272957801818845, 0.006293504238128662, 0.006461440086364746, 0.0063508481979370115, 0.006352896213531494, 0.0063272957801818845, 0.006330368041992188, 0.006352896213531494, 0.00638976001739502, 0.006411263942718506, 0.006364192008972168, 0.006347743988037109, 0.013545536041259765, 0.006293439865112304, 0.006326272010803223, 0.006346752166748047, 0.006339583873748779, 0.006386688232421875, 0.006369279861450195, 0.006340608119964599, 0.006345727920532227, 0.006348800182342529, 0.006553599834442139, 0.0074414081573486324, 0.007172095775604248, 0.008360960006713868, 0.007012351989746094, 0.0066938881874084475, 0.006558720111846924, 0.006568960189819336, 0.006591487884521485, 0.006663167953491211, 0.0065781760215759275, 0.006591519832611084, 0.006590432167053223, 0.006583295822143555, 0.006527999877929688, 0.006648831844329834, 0.00658841609954834, 0.006606847763061524, 0.006754303932189941, 
0.006639616012573242, 0.006535168170928955, 0.006597631931304931, 0.006586368083953857, 0.00657919979095459, 0.00657203197479248, 0.006638591766357422, 0.006512639999389648, 0.006337535858154297, 0.0063272957801818845, 0.0063569917678833006, 0.0063539199829101565, 0.006335487842559814, 0.006384640216827392, 0.006345727920532227, 0.006359039783477783, 0.006347775936126709, 0.006338560104370118, 0.006344704151153564, 0.006316031932830811, 0.00638156795501709, 0.006494207859039307, 0.006662144184112549, 0.006584320068359375, 0.006542335987091064, 0.006576128005981445, 0.006589439868927002, 0.006760447978973389, 0.006642687797546387, 0.00658739185333252, 0.006532127857208252, 0.006598624229431152, 0.006594560146331787, 0.00657203197479248, 0.014103551864624024, 0.00659660816192627, 0.0065781760215759275, 0.006557695865631104, 0.006559743881225586, 0.00657919979095459, 0.00658739185333252, 0.006556672096252441, 0.006576128005981445, 0.006602752208709717, 0.00658841609954834, 0.006584320068359375, 0.0066007041931152345, 0.006558720111846924, 0.006586368083953857, 0.006576128005981445, 0.006555647850036621, 0.0065669121742248536, 0.006532095909118653, 0.006586368083953857, 0.006643712043762207, 0.0065710082054138185, 0.006590464115142822, 0.006565887928009034, 0.006562816143035889, 0.006546432018280029, 0.0065484800338745115, 0.006560768127441406, 0.006562816143035889, 0.006585343837738037, 0.0065710082054138185, 0.006556672096252441, 0.006556672096252441, 0.006586368083953857, 0.006822912216186523, 0.0065484800338745115, 0.006464511871337891, 0.00638976001739502, 0.006238207817077636, 0.006276095867156982, 0.0064245758056640625, 0.0063569917678833006, 0.00637337589263916, 0.006312960147857666, 0.006323200225830078, 0.006300672054290772, 0.006334464073181153, 0.006323200225830078, 0.006363135814666748, 0.006346752166748047, 0.006334464073181153, 0.006334464073181153, 0.006323200225830078, 0.0063508481979370115, 0.0063498239517211915, 0.006342656135559082, 0.006339583873748779, 0.006335487842559814, 0.006343679904937744, 0.006415359973907471, 0.0063610877990722655, 0.006379519939422608, 0.0063508481979370115, 0.013540351867675781, 0.006306816101074219, 0.006302720069885254, 0.006303743839263916, 0.006346752166748047, 0.006351871967315674, 0.006336512088775635, 0.006335487842559814, 0.006315040111541748, 0.0063835840225219724, 0.0063211522102355954, 0.0063539199829101565, 0.006347775936126709, 0.00632422399520874, 0.006372352123260498, 0.0063272957801818845, 0.006354944229125976, 0.006307839870452881, 0.006311935901641846, 0.006351871967315674, 0.0063272957801818845, 0.006333439826965332, 0.00632422399520874, 0.006336512088775635, 0.006343679904937744, 0.006391808032989502, 0.0064849920272827145, 0.0063272957801818845, 0.006334464073181153, 0.006308864116668702, 0.006337535858154297, 0.006375423908233643, 0.00632422399520874, 0.006312960147857666, 0.006348800182342529, 0.00633241605758667, 0.0063498239517211915, 0.006281216144561768, 0.006343679904937744, 0.0063201279640197755, 0.006323200225830078, 0.006322175979614258, 0.006307839870452881, 0.006308864116668702, 0.006297632217407227, 0.006309855937957764, 0.0062873601913452145, 0.006243328094482422, 0.006319104194641113, 0.006339583873748779, 0.006302720069885254, 0.006308864116668702, 0.00653004789352417, 0.006338560104370118, 0.006243328094482422, 0.006260735988616943, 0.006223872184753418, 0.0062740478515625, 0.0062873601913452145, 0.006325247764587402, 0.006342656135559082, 0.00633241605758667, 0.00633241605758667, 0.013538304328918458, 
0.006305791854858398, 0.0063201279640197755, 0.006355967998504639, 0.0063272957801818845, 0.0071905279159545895, 0.006692863941192627, 0.006627327919006347, 0.006545407772064209, 0.006565887928009034, 0.0065669121742248536, 0.006585343837738037, 0.006595583915710449, 0.006563839912414551, 0.006583295822143555, 0.006498303890228272, 0.0066078720092773435, 0.006591487884521485, 0.006567935943603515, 0.006550528049468994, 0.006534143924713135, 0.0067010560035705566, 0.006738944053649902, 0.0066447358131408694, 0.006623231887817383, 0.0065484800338745115, 0.006585343837738037, 0.0066007041931152345, 0.0065669121742248536, 0.0063170561790466305, 0.006325247764587402, 0.006352896213531494, 0.0063508481979370115, 0.006322175979614258, 0.00633241605758667, 0.006326272010803223, 0.006341631889343262, 0.006310912132263183, 0.006316031932830811, 0.006348800182342529, 0.006289408206939697, 0.006318079948425293, 0.006301695823669433, 0.006330368041992188, 0.006312960147857666, 0.0063508481979370115, 0.006307839870452881, 0.006326335906982422, 0.006351808071136475, 0.006346752166748047, 0.006372384071350098, 0.006464479923248291, 0.0062975997924804685, 0.006333439826965332, 0.006339583873748779, 0.006326303958892822, 0.00633955192565918, 0.006329343795776367, 0.006329343795776367, 0.00638976001739502, 0.006347775936126709, 0.006343679904937744, 0.006351871967315674, 0.013858816146850587, 0.006318079948425293, 0.006269951820373535, 0.006247424125671387, 0.0062566399574279785, 0.006277120113372803, 0.0063201279640197755, 0.00633241605758667, 0.006351871967315674, 0.006341631889343262, 0.006334464073181153, 0.006367231845855713, 0.0063201279640197755, 0.006375423908233643, 0.006359039783477783, 0.006351871967315674, 0.006368256092071533, 0.006331391811370849, 0.006333439826965332, 0.006348800182342529, 0.006318079948425293, 0.006363135814666748, 0.006392831802368164, 0.006328320026397705, 0.006382656097412109, 0.006395840167999268, 0.0063508481979370115, 0.006355967998504639, 0.0063907837867736815, 0.0065382399559021, 0.006352896213531494, 0.006335487842559814, 0.006323200225830078, 0.006363135814666748, 0.0063201279640197755, 0.006360064029693604, 0.006364160060882569, 0.006319104194641113, 0.006338560104370118, 0.0063211522102355954, 0.006339583873748779, 0.0063610877990722655, 0.006337535858154297, 0.006345727920532227, 0.0063498239517211915, 0.006408192157745361, 0.006341631889343262, 0.0063498239517211915, 0.006362112045288086, 0.006359039783477783, 0.006348800182342529, 0.006322175979614258, 0.006319104194641113, 0.006351871967315674, 0.006311935901641846, 0.006342656135559082, 0.006316031932830811, 0.006887423992156983, 0.0067338237762451176, 0.006602752208709717, 0.006619135856628418, 0.006602752208709717, 0.006591487884521485, 0.01399500846862793, 0.006564864158630371, 0.00659660816192627, 0.006553599834442139, 0.00653926420211792, 0.006597631931304931, 0.0065781760215759275, 0.00653926420211792, 0.006567935943603515, 0.006497280120849609, 0.006352896213531494, 0.006359039783477783, 0.006344704151153564, 0.006344704151153564, 0.0063610877990722655, 0.0063539199829101565, 0.006341631889343262, 0.0063211522102355954, 0.006342656135559082, 0.006333439826965332, 0.006345727920532227, 0.006341631889343262, 0.00632422399520874, 0.006307839870452881, 0.006315008163452148, 0.006328320026397705, 0.006459392070770263, 0.006576128005981445, 0.006601727962493896, 0.0065484800338745115, 0.006592512130737305, 0.006593535900115967, 0.006565887928009034, 0.0066119680404663084, 0.006568960189819336, 
0.006563839912414551, 0.006405119895935059, 0.006418432235717773, 0.006535168170928955, 0.006617087841033936, 0.006565887928009034, 0.006543360233306885, 0.006558720111846924, 0.006552639961242676, 0.0065985918045043945, 0.006584320068359375, 0.006687744140625, 0.006435840129852295, 0.006351871967315674, 0.0063272957801818845, 0.006329343795776367, 0.006331391811370849, 0.006339583873748779, 0.006336512088775635, 0.006336512088775635, 0.006397952079772949, 0.006358016014099121, 0.006367231845855713, 0.006323200225830078, 0.006620160102844238, 0.006355967998504639, 0.006335519790649414, 0.006356959819793701, 0.013624320030212403, 0.0063170561790466305, 0.00628223991394043, 0.0063201279640197755, 0.006336512088775635, 0.006379519939422608, 0.006331391811370849, 0.006304800033569336, 0.006493152141571045, 0.006665215969085693, 0.0066979842185974124, 0.006715392112731934, 0.006619135856628418, 0.006852608203887939, 0.007166975975036621, 0.006658048152923584, 0.006584320068359375, 0.006662144184112549, 0.00658739185333252, 0.006594560146331787, 0.006597631931304931, 0.006562816143035889, 0.0065484800338745115, 0.006445055961608887, 0.0063569917678833006, 0.006468607902526856, 0.006880256175994873, 0.006615039825439453, 0.006583295822143555, 0.006616064071655273, 0.006603775978088379, 0.006628352165222168, 0.006658048152923584, 0.006810624122619629, 0.006584320068359375, 0.006451200008392334, 0.006351871967315674, 0.006369344234466553, 0.006613952159881592, 0.006575104236602783, 0.0063508481979370115, 0.006345727920532227, 0.0064737281799316405, 0.0065812478065490725, 0.006590464115142822, 0.006584320068359375, 0.006568960189819336, 0.00658841609954834, 0.006594560146331787, 0.006601727962493896, 0.006545407772064209, 0.006574079990386963, 0.006575104236602783, 0.006573056221008301, 0.006582272052764892, 0.006558720111846924, 0.006612991809844971, 0.006558720111846924, 0.0065710082054138185, 0.006575104236602783, 0.006565887928009034, 0.006576128005981445, 0.006585343837738037, 0.013684736251831055, 0.006346752166748047, 0.006388735771179199, 0.006403071880340576, 0.006362112045288086, 0.0063272957801818845, 0.0064102401733398436, 0.006333439826965332, 0.006565887928009034, 0.006631423950195312, 0.006559743881225586, 0.006612991809844971, 0.00657203197479248, 0.006412288188934326, 0.006355967998504639, 0.006387712001800537, 0.00633241605758667, 0.006366208076477051, 0.006305823802947998, 0.0063794879913330075, 0.0063508481979370115, 0.006348800182342529, 0.0064225602149963375, 0.006326240062713623, 0.006553599834442139, 0.0065781760215759275, 0.0065146880149841305, 0.006550528049468994, 0.006556672096252441, 0.006688767910003662, 0.006586368083953857, 0.006553599834442139, 0.006549503803253174, 0.006561791896820069, 0.006567935943603515, 0.006459392070770263, 0.006468607902526856, 0.0064737281799316405, 0.00643993616104126, 0.006464511871337891, 0.0064471039772033695, 0.0064471039772033695, 0.006284287929534912, 0.006376448154449463, 0.00628326416015625, 0.0062904319763183595, 0.006239232063293457, 0.006311935901641846, 0.006388735771179199, 0.00626585578918457, 0.006298624038696289, 0.0062638077735900875, 0.006499328136444092, 0.006605823993682861, 0.006582335948944092, 0.006577087879180908, 0.006540319919586182, 0.00632316780090332, 0.006334464073181153, 0.006347775936126709, 0.0063211522102355954, 0.006299647808074951, 0.006298624038696289, 0.013540351867675781, 0.006284287929534912, 0.0063498239517211915, 0.006307839870452881, 0.006331391811370849, 0.006343679904937744, 0.006295551776885986, 
0.006497280120849609, 0.006576128005981445, 0.006319104194641113, 0.006334464073181153, 0.006356031894683838, 0.006310848236083985, 0.00637443208694458, 0.006343647956848144, 0.0063569917678833006, 0.006362112045288086, 0.006288383960723877, 0.006318079948425293, 0.006367231845855713, 0.0063201279640197755, 0.006336512088775635, 0.006334464073181153, 0.006516736030578613, 0.0065177597999572755, 0.006330368041992188, 0.006339583873748779, 0.006331391811370849, 0.0063508481979370115, 0.006376448154449463, 0.006322175979614258, 0.006355967998504639, 0.006339583873748779, 0.0062873601913452145, 0.006277120113372803, 0.006306816101074219, 0.006441984176635742, 0.006354944229125976, 0.006400000095367431, 0.0064481601715087895, 0.006589407920837402, 0.006568960189819336, 0.006575104236602783, 0.006586368083953857, 0.00658841609954834, 0.0065669121742248536, 0.006593535900115967, 0.006583295822143555, 0.006557695865631104, 0.006591487884521485, 0.006527999877929688, 0.0065669121742248536, 0.006463488101959228, 0.006529024124145508, 0.006575104236602783, 0.006545407772064209, 0.0065382399559021, 0.00658022403717041, 0.006534143924713135, 0.006536191940307618, 0.006555647850036621, 0.00653926420211792, 0.00658022403717041, 0.01406668758392334, 0.006556672096252441, 0.006573056221008301, 0.006558720111846924, 0.006558720111846924, 0.006584320068359375, 0.006593535900115967, 0.006619135856628418, 0.006565919876098633, 0.006564864158630371, 0.006589407920837402, 0.0065443840026855465, 0.006532095909118653, 0.006386688232421875, 0.0063569917678833006, 0.006402048110961914, 0.006369279861450195, 0.006311935901641846, 0.006366208076477051, 0.006342656135559082, 0.006562816143035889, 0.006598656177520752, 0.006523903846740723, 0.006594560146331787, 0.006601727962493896, 0.006555647850036621, 0.006455296039581298, 0.006396927833557129, 0.006313983917236328, 0.006369279861450195, 0.006307839870452881, 0.006371327877044678, 0.0063836159706115725, 0.0065136637687683106, 0.0063836159706115725, 0.00638976001739502, 0.006556672096252441, 0.006590464115142822, 0.006540287971496582, 0.006481919765472412, 0.006435840129852295, 0.006292479991912842, 0.006289408206939697, 0.0062740478515625, 0.006262784004211426, 0.0062863359451293946, 0.0062679038047790524, 0.006250495910644531, 0.006398975849151611, 0.006347775936126709, 0.0065485119819641115, 0.006573023796081543, 0.006589439868927002, 0.006621183872222901, 0.006602752208709717, 0.006575104236602783, 0.006529024124145508, 0.0063610877990722655, 0.006348800182342529, 0.006418432235717773, 0.006340608119964599, 0.006368256092071533, 0.006372352123260498, 0.013567999839782715, 0.0064245758056640625, 0.006529024124145508, 0.006575104236602783, 0.006619135856628418, 0.006550528049468994, 0.006595583915710449, 0.006590464115142822, 0.006543360233306885, 0.006593535900115967, 0.006552576065063476, 0.006565887928009034, 0.006494207859039307, 0.006326303958892822, 0.00638153600692749, 0.006367231845855713, 0.006330368041992188, 0.006515711784362793, 0.006567935943603515, 0.006685696125030518, 0.006481919765472412, 0.006453248023986816, 0.006451200008392334, 0.006505472183227539, 0.006331391811370849, 0.006295551776885986, 0.006278143882751465, 0.0062638077735900875, 0.0062863359451293946, 0.006394879817962646, 0.00657919979095459, 0.00658841609954834, 0.006568960189819336, 0.006604800224304199, 0.00658739185333252, 0.0066109437942504885, 0.00658022403717041, 0.006546463966369629, 0.006536191940307618, 0.006556640148162842, 0.006592512130737305, 0.006557695865631104, 
0.006593535900115967, 0.0065443840026855465, 0.006582272052764892, 0.006558720111846924, 0.0065382399559021, 0.006556672096252441, 0.006585343837738037, 0.006604832172393799, 0.006500319957733154, 0.006387712001800537, 0.006351935863494873, 0.006343616008758545, 0.006391808032989502, 0.006341631889343262, 0.006362112045288086, 0.006337535858154297, 0.006540287971496582, 0.006371327877044678, 0.006310912132263183, 0.006352896213531494, 0.006352896213531494, 0.01367347240447998, 0.006591487884521485, 0.0065484800338745115, 0.006677504062652588, 0.006626304149627686, 0.006788095951080322, 0.006605823993682861, 0.0065372161865234375, 0.006333439826965332, 0.006369279861450195, 0.006322175979614258, 0.00638156795501709, 0.006378496170043945, 0.006351871967315674, 0.00636518383026123, 0.006529024124145508, 0.0066119680404663084, 0.006621183872222901, 0.006585343837738037, 0.00656492805480957, 0.006626239776611328, 0.006565887928009034, 0.0065710082054138185, 0.00659660816192627, 0.0065669121742248536, 0.006598656177520752, 0.006589439868927002, 0.006546432018280029, 0.0066109437942504885, 0.006589439868927002, 0.006534143924713135, 0.006642687797546387, 0.0066007041931152345, 0.006549503803253174, 0.006639616012573242, 0.006569983959197998, 0.006773759841918945, 0.0066406397819519045, 0.0065771517753601075, 0.006616064071655273, 0.006615039825439453, 0.006546432018280029, 0.006608895778656006, 0.0066416640281677245, 0.006534175872802734, 0.006616032123565674, 0.0066375679969787596, 0.00658739185333252, 0.006638591766357422, 0.006621183872222901, 0.006582272052764892, 0.0066375679969787596, 0.0065812478065490725, 0.006542335987091064, 0.0065382399559021, 0.006474751949310303, 0.006479872226715088, 0.0065710082054138185, 0.00653004789352417, 0.006699007987976074, 0.006642687797546387, 0.006553599834442139, 0.0066406397819519045, 0.014166015625, 0.006624256134033203, 0.006648831844329834, 0.006559743881225586, 0.006635519981384277, 0.00667852783203125, 0.00657203197479248, 0.006632448196411133, 0.006670335769653321, 0.006561791896820069, 0.006660096168518067, 0.0066344962120056155, 0.006597631931304931, 0.006661119937896728, 0.006627327919006347, 0.006591487884521485, 0.006631423950195312, 0.006584320068359375, 0.0066007041931152345, 0.006633471965789795, 0.006520832061767578, 0.006591487884521485, 0.006525951862335205, 0.006432767868041992, 0.0065413122177124024, 0.00657203197479248, 0.006455296039581298, 0.006535168170928955, 0.006466559886932373, 0.0065781760215759275, 0.006616064071655273, 0.00652288007736206, 0.006623231887817383, 0.006817791938781738, 0.006590464115142822, 0.006673408031463623, 0.006645760059356689, 0.006555647850036621, 0.006639616012573242, 0.006662144184112549, 0.00658739185333252, 0.006666240215301514, 0.0066713600158691405, 0.006501376152038574, 0.006546432018280029, 0.006534143924713135, 0.006626304149627686, 0.006622208118438721, 0.00659660816192627, 0.006631423950195312, 0.006619135856628418, 0.006562816143035889, 0.006619135856628418, 0.0065382399559021, 0.0064287037849426265, 0.006519775867462158, 0.006512639999389648, 0.00657919979095459, 0.006752255916595459, 0.006648831844329834, 0.006575104236602783, 0.006623231887817383, 0.006612991809844971]",tokens/s,152.83400663462425,,,2,64,1,,, 
-4bit-awq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,Qwen/Qwen1.5-4B,Qwen/Qwen1.5-4B,cuda,0,42,,,True,,,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,qwen2,MB,3176.660992,4874.305536,0.0,4244.635648,4125.520384,s,1,10.7049873046875,10.7049873046875,0.0,10.7049873046875,10.7049873046875,10.7049873046875,10.7049873046875,[10.7049873046875],,kWh,4.5024813095170025e-05,2.4661391924109028e-05,7.3827836840068e-05,0.00014351404185934704,,MB,3225.84576,5052.563456,0.0,4404.0192,4310.79936,s,10,0.6032301750183107,0.06032301750183104,6.657679212557612e-05,0.06030380630493164,0.060389807891845705,0.06043308029174805,0.060467698211669925,"[0.060476352691650394, 0.060281375885009765, 0.060244575500488284, 0.06028569412231445, 0.06026457595825195, 0.06032191848754883, 0.06038019180297852, 0.06035481643676758, 0.060352256774902344, 0.06026841735839844]",tokens/s,4243.8195336005765,kWh,7.131531203645886e-07,3.9077311961782073e-07,3.0582872525178496e-06,4.1622134925002585e-06,tokens/kWh,61505734.98002376,MB,3230.048256,5054.660608,0.0,4406.116352,4310.80192,s,10,20.2030576171875,2.02030576171875,0.005144696922444268,2.0198549804687502,2.024226428222656,2.028483026123047,2.0318883044433593,"[2.0160115966796877, 2.0164893798828123, 2.0189765625, 2.0132275390625, 2.0327396240234377, 2.0207333984375, 2.023280517578125, 2.022938232421875, 2.01741455078125, 2.0212462158203124]",tokens/s,31.183398668528042,kWh,2.4761853186857137e-05,1.3570144223444133e-05,5.048772780648495e-05,8.881972521678625e-05,tokens/kWh,709301.9016466568,,s,630,20.200977388381975,0.03206504347362216,0.0004498002499415609,0.03194113540649414,0.032555929565429687,0.032951859855651855,0.03396859775543213,"[0.03219046401977539, 0.03185766410827637, 0.031849472045898435, 0.031748096466064454, 0.032024574279785153, 0.03298611068725586, 0.03242905426025391, 0.03197747230529785, 0.03189043235778809, 0.03172659111022949, 0.03197644805908203, 0.03160063934326172, 0.03205836868286133, 0.03236556625366211, 0.0316753921508789, 0.03218534469604492, 0.031939584732055666, 0.03219148635864258, 0.03197235107421875, 0.0316180477142334, 0.03202560043334961, 0.0318023681640625, 0.031649791717529296, 0.031833087921142575, 0.031865856170654294, 0.03179520034790039, 0.03193548774719238, 0.03248537445068359, 0.032118785858154295, 0.03162112045288086, 0.03180646324157715, 0.031698944091796875, 0.03191910362243652, 0.031869951248168944, 0.031955968856811526, 0.03174502372741699, 0.03284377670288086, 0.03355648040771484, 0.03193343925476074, 0.032500736236572264, 0.031866880416870115, 0.03181977653503418, 0.0329881591796875, 0.03194572830200195, 0.03192934417724609, 0.0317071361541748, 0.032656383514404294, 0.03202867126464844, 0.03182899284362793, 0.03193036842346191, 0.03184332847595215, 0.031905792236328126, 0.03186278343200684, 0.03182387161254883, 0.03157606315612793, 0.031939584732055666, 0.031529983520507815, 0.03177164840698242, 0.031902719497680664, 0.03196108818054199, 0.03188121604919433, 0.03184435272216797, 0.03203891372680664, 0.03215155029296875, 
0.03173990440368652, 0.03267071914672852, 0.03226931381225586, 0.03191398429870605, 0.03226419067382812, 0.031821823120117186, 0.031887359619140625, 0.03192422485351563, 0.03181260871887207, 0.03175424003601074, 0.03179520034790039, 0.03201228713989258, 0.03222118377685547, 0.0319109115600586, 0.031938560485839845, 0.032069633483886716, 0.03176755142211914, 0.03187711906433106, 0.03180441665649414, 0.03177471923828125, 0.03198566436767578, 0.03159244728088379, 0.03173785591125488, 0.03186892890930176, 0.032097278594970705, 0.031941631317138675, 0.03371219253540039, 0.033231807708740235, 0.03299532699584961, 0.03208499145507813, 0.03187609672546387, 0.031974399566650394, 0.03183616065979004, 0.031937536239624024, 0.031834112167358396, 0.032314369201660156, 0.03181363105773926, 0.031649791717529296, 0.03133132743835449, 0.031528959274291994, 0.03184639930725098, 0.031925247192382815, 0.031373311996459964, 0.03177984046936035, 0.03177164840698242, 0.03177779197692871, 0.03165184020996094, 0.031922176361083986, 0.031527936935424807, 0.032031742095947266, 0.0320184326171875, 0.031882240295410154, 0.03236454391479492, 0.03367628860473633, 0.03249868774414062, 0.032347137451171876, 0.032036865234375, 0.03191910362243652, 0.03162419128417969, 0.031884288787841795, 0.031817728042602536, 0.03186380767822266, 0.03198464012145996, 0.03194572830200195, 0.03182080078125, 0.03196416091918945, 0.03184332847595215, 0.03178291130065918, 0.031437824249267575, 0.03177369689941406, 0.0320184326171875, 0.03193139266967773, 0.0326932487487793, 0.032075775146484374, 0.03177471923828125, 0.03212595367431641, 0.03221196746826172, 0.03180646324157715, 0.031920127868652344, 0.03212799835205078, 0.031682559967041016, 0.03180031967163086, 0.03179724884033203, 0.031851520538330076, 0.03180646324157715, 0.03202560043334961, 0.03219660949707031, 0.03189248085021973, 0.03178598403930664, 0.03194572830200195, 0.03191500854492187, 0.031904767990112305, 0.03186892890930176, 0.03213926315307617, 0.03223654556274414, 0.03210444641113281, 0.0324956169128418, 0.032418815612792966, 0.031869951248168944, 0.03208703994750976, 0.032043006896972655, 0.03186380767822266, 0.03215359878540039, 0.032661502838134765, 0.032464897155761716, 0.03177267265319824, 0.03292979049682617, 0.03197337532043457, 0.03186175918579102, 0.03190169525146484, 0.031662080764770506, 0.03401932907104492, 0.03314688110351562, 0.032026622772216795, 0.032173057556152344, 0.0319498233795166, 0.03190681648254395, 0.031908863067626955, 0.031971328735351565, 0.031677440643310545, 0.03201740646362305, 0.031855615615844726, 0.03188121604919433, 0.03191500854492187, 0.03197337532043457, 0.0319682559967041, 0.03213926315307617, 0.03201638412475586, 0.03185971260070801, 0.03183001518249512, 0.032350208282470705, 0.03171123123168945, 0.03246694564819336, 0.03378585433959961, 0.033301502227783206, 0.03262771224975586, 0.032118785858154295, 0.03186892890930176, 0.03184332847595215, 0.03194367980957031, 0.03165798377990723, 0.03167129516601563, 0.03178291130065918, 0.03191296005249023, 0.03180339241027832, 0.03193548774719238, 0.03176652717590332, 0.03178291130065918, 0.03131699180603027, 0.031834112167358396, 0.03191398429870605, 0.031898624420166014, 0.03171327972412109, 0.031959039688110355, 0.03191193580627441, 0.032026622772216795, 0.031932416915893554, 0.03197542381286621, 0.03177471923828125, 0.03204915237426758, 0.03181363105773926, 0.03161292839050293, 0.03184537506103516, 0.03189657592773437, 0.031752191543579104, 0.03185663986206055, 0.0318791675567627, 
0.03286220932006836, 0.03219558334350586, 0.03154022407531738, 0.03163545608520508, 0.03181158447265625, 0.0316682243347168, 0.03159244728088379, 0.03160883140563965, 0.03157811164855957, 0.031748096466064454, 0.032043006896972655, 0.03194367980957031, 0.03202867126464844, 0.03165388870239258, 0.031925247192382815, 0.03189760017395019, 0.031955968856811526, 0.03191910362243652, 0.03225088119506836, 0.032069633483886716, 0.031987712860107424, 0.0324136962890625, 0.03211980819702148, 0.03191398429870605, 0.03198259162902832, 0.03198361587524414, 0.032737281799316405, 0.03301990509033203, 0.032121856689453124, 0.032008190155029294, 0.03206246566772461, 0.03199385643005371, 0.03195289611816406, 0.03205120086669922, 0.03200614547729492, 0.03236044692993164, 0.031821823120117186, 0.03460403060913086, 0.034462718963623046, 0.03291545486450195, 0.032761856079101564, 0.03269734573364258, 0.03174297523498535, 0.03244236755371094, 0.03208809661865234, 0.032089054107666014, 0.0329615364074707, 0.03202969741821289, 0.03219046401977539, 0.032059391021728514, 0.032216064453125, 0.03189043235778809, 0.03204095840454101, 0.03277004623413086, 0.03177984046936035, 0.03277414321899414, 0.031591424942016604, 0.03246080017089844, 0.03239833450317383, 0.03237580871582031, 0.031784959793090824, 0.0320706558227539, 0.03196928024291992, 0.03190169525146484, 0.031898624420166014, 0.03235942459106445, 0.03198259162902832, 0.03179929542541504, 0.03231948852539063, 0.03237887954711914, 0.03271372985839844, 0.03241676712036133, 0.03186380767822266, 0.031734783172607424, 0.032353279113769534, 0.032347137451171876, 0.03160166358947754, 0.032115711212158206, 0.0319682559967041, 0.032574462890625, 0.03221811294555664, 0.03215564727783203, 0.031851520538330076, 0.03225600051879883, 0.03242393493652344, 0.03205222320556641, 0.03264409637451172, 0.031871999740600586, 0.03198259162902832, 0.03193343925476074, 0.03177267265319824, 0.03187302398681641, 0.03179417610168457, 0.03184230422973633, 0.03159552001953125, 0.03166720008850098, 0.03148287963867188, 0.03183616065979004, 0.03141427230834961, 0.031954944610595705, 0.03216998291015625, 0.03189760017395019, 0.03211264038085938, 0.03276902389526367, 0.03179520034790039, 0.031692800521850584, 0.03143680000305176, 0.032010238647460935, 0.03184435272216797, 0.03288576126098633, 0.03244339370727539, 0.03196928024291992, 0.03159347152709961, 0.03244339370727539, 0.031954944610595705, 0.03232972717285156, 0.03281203079223633, 0.03282534408569336, 0.031870975494384765, 0.03256422424316406, 0.031643648147583005, 0.03184639930725098, 0.03225395202636719, 0.032024574279785153, 0.03240345764160156, 0.03195187187194824, 0.0326379508972168, 0.03202150344848633, 0.03198975944519043, 0.031954944610595705, 0.0328724479675293, 0.03199795150756836, 0.031922176361083986, 0.03199385643005371, 0.0317388801574707, 0.03183616065979004, 0.03197235107421875, 0.03175014305114746, 0.031663103103637694, 0.031850496292114255, 0.03193139266967773, 0.03251200103759765, 0.03194367980957031, 0.03201228713989258, 0.0323768310546875, 0.032263168334960936, 0.03360153579711914, 0.032710655212402344, 0.032143360137939454, 0.03198975944519043, 0.03188121604919433, 0.0320552978515625, 0.03213312149047851, 0.03196211242675781, 0.031916032791137694, 0.03181875228881836, 0.03181363105773926, 0.03182489585876465, 0.03196211242675781, 0.03210137557983399, 0.03209523010253906, 0.03188121604919433, 0.031940607070922854, 0.03451596832275391, 0.03297382354736328, 0.031954944610595705, 0.03181260871887207, 0.031768575668334964, 
0.0318525447845459, 0.03194367980957031, 0.03239116668701172, 0.03219148635864258, 0.03194985580444336, 0.031967199325561524, 0.03178700828552246, 0.03156991958618164, 0.03198566436767578, 0.03184230422973633, 0.03232563018798828, 0.03201228713989258, 0.03194777679443359, 0.0319682559967041, 0.03210956954956055, 0.03203788757324219, 0.03193548774719238, 0.031953920364379884, 0.031493120193481446, 0.031850496292114255, 0.03265126419067383, 0.0333383674621582, 0.03251200103759765, 0.032489471435546875, 0.032004096984863284, 0.031970304489135744, 0.032039936065673826, 0.03194470405578613, 0.032198654174804685, 0.03197747230529785, 0.03199385643005371, 0.03213926315307617, 0.03198259162902832, 0.031937536239624024, 0.03243724822998047, 0.03193548774719238, 0.03227340698242188, 0.032440319061279296, 0.032008190155029294, 0.03194879913330078, 0.032056320190429685, 0.03241471862792969, 0.03213312149047851, 0.03203481674194336, 0.032271358489990236, 0.03205734252929687, 0.03200204849243164, 0.03214950561523437, 0.03181875228881836, 0.031941631317138675, 0.03180953598022461, 0.032048126220703126, 0.03209625625610352, 0.03199283218383789, 0.032045055389404296, 0.03394355010986328, 0.033225727081298825, 0.03304550552368164, 0.03213619232177734, 0.03196416091918945, 0.03196723175048828, 0.032555007934570314, 0.03177984046936035, 0.03229695892333984, 0.031644672393798826, 0.0317573127746582, 0.03164159965515137, 0.031939584732055666, 0.03191296005249023, 0.032010238647460935, 0.033124351501464845, 0.03260723114013672, 0.03175014305114746, 0.03178598403930664, 0.03181875228881836, 0.031871999740600586, 0.032077823638916016, 0.03185766410827637, 0.031927295684814457, 0.0317706241607666, 0.03180544090270996, 0.03194470405578613, 0.03196416091918945, 0.031932416915893554, 0.03191910362243652, 0.03185663986206055, 0.031732736587524416, 0.031735807418823245, 0.031817728042602536, 0.03181260871887207, 0.03184025573730469, 0.03177267265319824, 0.03184537506103516, 0.03191500854492187, 0.03235123062133789, 0.031680511474609374, 0.03163852882385254, 0.031838207244873046, 0.031953920364379884, 0.03193548774719238, 0.0318156795501709, 0.03161087989807129, 0.03492454528808594, 0.03334143829345703, 0.03294003295898437, 0.03218022537231445, 0.03194063949584961, 0.032155616760253906, 0.03258572769165039, 0.03196211242675781, 0.032775169372558595, 0.03212799835205078, 0.0320819206237793, 0.03231027221679687, 0.03268096160888672, 0.032502784729003906, 0.031936511993408204, 0.03181670379638672, 0.032102401733398435, 0.03189555168151856, 0.031936511993408204, 0.031920127868652344, 0.031834112167358396, 0.03182796859741211, 0.03296255874633789, 0.03210444641113281, 0.031909887313842776, 0.0317573127746582, 0.031902719497680664, 0.03191500854492187, 0.03216998291015625, 0.032086017608642575, 0.031513599395751955, 0.03182899284362793, 0.03342950439453125, 0.03208806228637695, 0.03219353485107422, 0.03200102233886719, 0.03178803253173828, 0.0319866886138916, 0.031888383865356446, 0.03201228713989258, 0.03175014305114746, 0.03204710388183594, 0.032198654174804685, 0.032010238647460935, 0.03186892890930176, 0.03191500854492187, 0.03180339241027832, 0.0320552978515625, 0.03181977653503418, 0.03179724884033203, 0.031869951248168944, 0.03175833511352539, 0.031926271438598636, 0.032215038299560544, 0.031665151596069335, 0.03191500854492187, 0.03184435272216797, 0.03181260871887207, 0.031848447799682614, 0.03182796859741211, 0.03186380767822266, 0.03197644805908203, 0.031693824768066405, 0.03180544090270996, 0.03189043235778809, 
0.03191705513000488, 0.03196928024291992, 0.03186483192443847, 0.03184230422973633, 0.03322880172729492, 0.033949695587158206, 0.03239014434814453, 0.03243008041381836, 0.03190169525146484, 0.03180031967163086, 0.03182489585876465, 0.03184230422973633, 0.031909887313842776, 0.03176755142211914, 0.03183103942871094, 0.03207884979248047, 0.03189452743530274, 0.0324956169128418, 0.03176652717590332, 0.031923200607299806, 0.031854591369628905, 0.03174399948120117, 0.03191193580627441, 0.03183206367492676, 0.03215564727783203, 0.03194777679443359, 0.03174092864990234, 0.03179417610168457, 0.03196416091918945, 0.031900672912597655, 0.031888383865356446, 0.03182592010498047, 0.03192831993103027, 0.032020481109619144, 0.03171123123168945, 0.03181056022644043, 0.03151872062683105, 0.031902719497680664, 0.031970304489135744, 0.03188019180297851, 0.03190169525146484, 0.03193548774719238, 0.03165593528747559, 0.031853567123413085, 0.03196108818054199, 0.03188019180297851, 0.032059391021728514, 0.031954944610595705, 0.032043006896972655, 0.03191500854492187, 0.031849472045898435, 0.031854591369628905, 0.03182899284362793, 0.03202969741821289, 0.03200614547729492, 0.032173057556152344, 0.032284671783447266, 0.032129024505615236, 0.03185868835449219, 0.03198873519897461, 0.033976318359375, 0.03409920120239258, 0.0334202880859375, 0.032361473083496094]",tokens/s,31.186609830192044,,,2,64,1,,, -4bit-awq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,Qwen/Qwen1.5-0.5B,Qwen/Qwen1.5-0.5B,cuda,0,42,,,True,,,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,qwen2,MB,1396.801536,1382.547456,0.0,752.877568,710.554112,s,1,8.11517138671875,8.11517138671875,0.0,8.11517138671875,8.11517138671875,8.11517138671875,8.11517138671875,[8.11517138671875],,kWh,1.5774114109710052e-05,8.601881958123925e-06,2.3926685808084436e-05,4.830268187591841e-05,,MB,1642.192896,1667.760128,0.0,1019.215872,949.099008,s,10,0.24559097671508787,0.024559097671508788,0.0015956013879635667,0.02380073547363281,0.025674540328979492,0.027381862068176267,0.028747719459533694,"[0.02446067237854004, 0.023687936782836913, 0.029089183807373048, 0.02449065589904785, 0.025295135498046874, 0.023545696258544923, 0.02384486389160156, 0.023756607055664063, 0.023704736709594727, 0.02371548843383789]",tokens/s,10423.835737946827,kWh,2.8011625249967337e-07,1.5349058430458803e-07,6.81116921933437e-07,1.1147237587376985e-06,tokens/kWh,229653309.16593337,MB,1668.83328,1678.245888,0.0,1027.60448,949.101568,s,10,12.067171875,1.2067171874999998,0.008759988405386547,1.2052493286132813,1.2157380249023437,1.220831231689453,1.2249057971191406,"[1.2121832275390625, 1.214606201171875, 1.2259244384765624, 1.196039306640625, 1.205164794921875, 1.20493603515625, 1.19642919921875, 1.19801806640625, 1.2053338623046874, 
1.2085367431640626]",tokens/s,52.20775891202761,kWh,1.4273664201180876e-05,7.819747827421108e-06,2.4148402941667164e-05,4.6241814970269146e-05,tokens/kWh,1362403.2715953172,,s,630,12.06501578712463,0.01915081870972164,0.00046202867557531375,0.01904332733154297,0.01978716220855713,0.019903027057647706,0.0203681591796875,"[0.019597312927246095, 0.01905971145629883, 0.018932735443115235, 0.018893823623657227, 0.018884607315063476, 0.01884364891052246, 0.018896896362304686, 0.01899519920349121, 0.018989055633544923, 0.019108863830566408, 0.019143680572509765, 0.019106815338134766, 0.019134464263916014, 0.018917375564575196, 0.0188538875579834, 0.018948095321655273, 0.018893823623657227, 0.018899967193603515, 0.01879756736755371, 0.018949119567871094, 0.0192174072265625, 0.018702335357666015, 0.01884774398803711, 0.019124223709106446, 0.019142656326293944, 0.019087360382080077, 0.018954240798950195, 0.019111936569213867, 0.019163135528564454, 0.020291584014892578, 0.020241439819335936, 0.01988502311706543, 0.01991372871398926, 0.019677183151245118, 0.019750911712646483, 0.019992576599121094, 0.02024038314819336, 0.020595712661743162, 0.019689472198486328, 0.019198976516723632, 0.019160064697265625, 0.019466239929199217, 0.019137535095214844, 0.018925567626953126, 0.018928640365600585, 0.018908159255981445, 0.019803136825561524, 0.019717119216918946, 0.019551231384277345, 0.019522560119628905, 0.02004582405090332, 0.019449855804443358, 0.018980863571166993, 0.018852863311767578, 0.018909183502197266, 0.019194879531860352, 0.01883750343322754, 0.019078144073486326, 0.018886655807495118, 0.018840576171875, 0.018759679794311524, 0.018929664611816405, 0.01945395278930664, 0.019059680938720704, 0.01886617660522461, 0.018933759689331055, 0.019005439758300782, 0.01885798454284668, 0.018811904907226562, 0.01885593605041504, 0.018832384109497072, 0.02002841567993164, 0.01969254493713379, 0.01983590316772461, 0.018936832427978514, 0.018884607315063476, 0.01958502388000488, 0.01986662483215332, 0.019765247344970704, 0.020183040618896485, 0.02003455924987793, 0.0196177921295166, 0.019723264694213868, 0.018882560729980468, 0.01886720085144043, 0.018729984283447267, 0.01899523162841797, 0.01988912010192871, 0.019171327590942384, 0.019525632858276368, 0.01915497589111328, 0.01923580741882324, 0.019108863830566408, 0.01884774398803711, 0.018861055374145508, 0.018962432861328125, 0.018948095321655273, 0.018962432861328125, 0.019021823883056642, 0.01906380844116211, 0.01886617660522461, 0.018820127487182616, 0.01894704055786133, 0.018902015686035157, 0.01883852767944336, 0.018851839065551757, 0.019503103256225587, 0.021377023696899415, 0.019557376861572266, 0.01904332733154297, 0.019129344940185547, 0.019289087295532227, 0.019825664520263672, 0.01979801559448242, 0.019273727416992188, 0.019051551818847656, 0.019283935546875, 0.019556352615356445, 0.019802112579345704, 0.019711999893188475, 0.019125247955322267, 0.018803712844848632, 0.01943449592590332, 0.019511295318603517, 0.018787328720092773, 0.019398656845092774, 0.02517913627624512, 0.019862527847290038, 0.019761152267456054, 0.0198287353515625, 0.020214784622192384, 0.021712896347045898, 0.020133888244628906, 0.019885055541992186, 0.019795967102050782, 0.01964134407043457, 0.01988403129577637, 0.019478527069091797, 0.019158016204833983, 0.019212287902832033, 0.019083263397216797, 0.019070976257324217, 0.019147775650024415, 0.01880575942993164, 0.01887948799133301, 0.01884262466430664, 0.018881536483764647, 0.018990079879760743, 0.01941913604736328, 
0.019179519653320314, 0.0188272647857666, 0.018900991439819336, 0.020152320861816408, 0.01978265571594238, 0.019796991348266603, 0.019750911712646483, 0.019749887466430666, 0.019732479095458985, 0.019759103775024413, 0.019728384017944335, 0.019324928283691405, 0.018718719482421875, 0.019086336135864256, 0.019523584365844726, 0.01966182327270508, 0.01964031982421875, 0.019602432250976562, 0.019100671768188478, 0.019107839584350587, 0.01909760093688965, 0.01905766487121582, 0.01907711982727051, 0.019138559341430664, 0.01906175994873047, 0.018947071075439453, 0.01904332733154297, 0.019108863830566408, 0.019099647521972657, 0.019106815338134766, 0.019111936569213867, 0.019113983154296875, 0.01925017547607422, 0.0192542724609375, 0.019080192565917968, 0.018960384368896483, 0.01907302474975586, 0.018964479446411133, 0.018769920349121092, 0.01942425537109375, 0.01921331214904785, 0.019219455718994142, 0.01903206443786621, 0.01899519920349121, 0.018894847869873048, 0.018927616119384767, 0.01886412811279297, 0.01906585693359375, 0.019116031646728517, 0.0190382080078125, 0.018777088165283205, 0.01904947280883789, 0.019082239151000976, 0.018928640365600585, 0.019186687469482423, 0.018911231994628908, 0.01887948799133301, 0.018780160903930664, 0.01879654312133789, 0.01882931137084961, 0.018869247436523438, 0.018877439498901367, 0.018918399810791017, 0.018695167541503906, 0.019058687210083008, 0.019178495407104493, 0.01878118324279785, 0.01881804847717285, 0.01881804847717285, 0.018930688858032226, 0.018807807922363282, 0.018893823623657227, 0.018882560729980468, 0.01886412811279297, 0.01880678367614746, 0.01905766487121582, 0.01902694320678711, 0.01908121681213379, 0.018889728546142577, 0.01883443260192871, 0.01901158332824707, 0.01907711982727051, 0.018869247436523438, 0.0190699520111084, 0.019337215423583985, 0.019080192565917968, 0.01884876823425293, 0.01885798454284668, 0.018950143814086915, 0.0188538875579834, 0.018832384109497072, 0.01971609687805176, 0.019743743896484374, 0.019158016204833983, 0.018771968841552734, 0.018938880920410156, 0.01904947280883789, 0.019145727157592773, 0.01887539291381836, 0.019091455459594727, 0.01904640007019043, 0.018890752792358398, 0.018930688858032226, 0.02008780860900879, 0.019610624313354492, 0.019076095581054688, 0.01881395149230957, 0.019042303085327148, 0.01904844856262207, 0.018782207489013672, 0.01875660705566406, 0.01880166435241699, 0.018803712844848632, 0.018959360122680666, 0.018759679794311524, 0.018775039672851563, 0.019363840103149413, 0.019164159774780275, 0.018800640106201173, 0.01878118324279785, 0.018769920349121092, 0.018740224838256835, 0.018808832168579103, 0.01880268859863281, 0.01885081672668457, 0.019160064697265625, 0.019148799896240236, 0.019082239151000976, 0.018958335876464845, 0.019178495407104493, 0.019145727157592773, 0.0190248966217041, 0.018883583068847656, 0.018913280487060546, 0.01919692802429199, 0.019786752700805665, 0.019145727157592773, 0.019150848388671874, 0.01908940887451172, 0.01923276710510254, 0.019987455368041994, 0.020373504638671876, 0.019993600845336915, 0.0192491512298584, 0.018893823623657227, 0.018965503692626954, 0.01947340774536133, 0.01983590316772461, 0.019990528106689453, 0.019920896530151368, 0.01984000015258789, 0.019317760467529296, 0.019083263397216797, 0.018981887817382814, 0.018877439498901367, 0.018883583068847656, 0.0188723201751709, 0.01883033561706543, 0.018928640365600585, 0.01880575942993164, 0.01881907272338867, 0.018778112411499022, 0.01903411293029785, 0.019054592132568358, 
0.018918399810791017, 0.018780160903930664, 0.01904844856262207, 0.01942118453979492, 0.01885593605041504, 0.01885491180419922, 0.01886412811279297, 0.01881907272338867, 0.018775039672851563, 0.01885081672668457, 0.018824192047119142, 0.018784255981445314, 0.01881497573852539, 0.019058687210083008, 0.019096576690673828, 0.01895529556274414, 0.01906377601623535, 0.019125247955322267, 0.019168256759643554, 0.01906175994873047, 0.019078144073486326, 0.019136512756347656, 0.01988198471069336, 0.019982336044311523, 0.019802112579345704, 0.01987379264831543, 0.019767295837402343, 0.01926553535461426, 0.018824192047119142, 0.01986662483215332, 0.01901670455932617, 0.018889728546142577, 0.01880473518371582, 0.0188723201751709, 0.01883955192565918, 0.018905088424682616, 0.018910207748413087, 0.01903001594543457, 0.01884671974182129, 0.018860031127929687, 0.01905971145629883, 0.018973695755004884, 0.019116031646728517, 0.018891775131225585, 0.018800640106201173, 0.019072000503540038, 0.019141632080078123, 0.019118080139160155, 0.019131391525268555, 0.019070976257324217, 0.01908531188964844, 0.019100671768188478, 0.019134464263916014, 0.019087360382080077, 0.018782207489013672, 0.019522560119628905, 0.019374080657958984, 0.018807807922363282, 0.019215360641479492, 0.01985843276977539, 0.01979084777832031, 0.019710975646972655, 0.018962432861328125, 0.018896896362304686, 0.019158016204833983, 0.019302400588989257, 0.019072000503540038, 0.0193832950592041, 0.019109888076782225, 0.019078144073486326, 0.019224576950073242, 0.019161088943481445, 0.019122175216674805, 0.01922662353515625, 0.019091455459594727, 0.019117055892944337, 0.01889587211608887, 0.019198976516723632, 0.018932735443115235, 0.01883545684814453, 0.019188735961914064, 0.018882560729980468, 0.018791423797607423, 0.018844671249389648, 0.018974720001220705, 0.01902899169921875, 0.01887129592895508, 0.01886720085144043, 0.01881395149230957, 0.01880575942993164, 0.0188723201751709, 0.01882111930847168, 0.018778112411499022, 0.018865152359008788, 0.018876415252685547, 0.018778112411499022, 0.01882931137084961, 0.01880268859863281, 0.01887539291381836, 0.01880985641479492, 0.0188590087890625, 0.01885081672668457, 0.01903513526916504, 0.019127296447753905, 0.01904128074645996, 0.01899622344970703, 0.018884607315063476, 0.019122175216674805, 0.019120128631591796, 0.018961408615112304, 0.0188272647857666, 0.018832384109497072, 0.01880985641479492, 0.018874368667602538, 0.01882009506225586, 0.018945024490356444, 0.019176448822021484, 0.019149824142456053, 0.018937856674194335, 0.018937856674194335, 0.019224576950073242, 0.019132415771484376, 0.018967552185058592, 0.01901158332824707, 0.01922969627380371, 0.01884774398803711, 0.01944268798828125, 0.018938880920410156, 0.01906073570251465, 0.019185663223266602, 0.01905356788635254, 0.019252223968505858, 0.019392511367797852, 0.019161088943481445, 0.019138559341430664, 0.019108863830566408, 0.01884160041809082, 0.01900748825073242, 0.018815999984741212, 0.01884876823425293, 0.01881907272338867, 0.018899967193603515, 0.01884364891052246, 0.01882316780090332, 0.018755584716796874, 0.019772415161132813, 0.019714048385620117, 0.018844671249389648, 0.01879654312133789, 0.018711551666259766, 0.018861055374145508, 0.018765823364257812, 0.018874368667602538, 0.01879654312133789, 0.018770944595336913, 0.01879654312133789, 0.018881536483764647, 0.01901568031311035, 0.018906112670898437, 0.01904742431640625, 0.01906380844116211, 0.01906790351867676, 0.01904742431640625, 0.019017728805541992, 0.01966796875, 
0.019912704467773438, 0.019804159164428712, 0.01965772819519043, 0.019166208267211913, 0.019359807968139648, 0.018899904251098634, 0.018911231994628908, 0.01882931137084961, 0.018889728546142577, 0.019056640625, 0.018922496795654296, 0.01886310386657715, 0.018888704299926756, 0.018783231735229493, 0.018884607315063476, 0.018856960296630858, 0.018959360122680666, 0.019086336135864256, 0.01881907272338867, 0.01881088066101074, 0.01886412811279297, 0.01879859161376953, 0.018851839065551757, 0.01881804847717285, 0.0188590087890625, 0.01886412811279297, 0.01920204734802246, 0.019017728805541992, 0.02087321662902832, 0.019612672805786133, 0.018964479446411133, 0.019098623275756836, 0.019357696533203125, 0.019150848388671874, 0.01881804847717285, 0.01883750343322754, 0.01902694320678711, 0.019042303085327148, 0.018910207748413087, 0.01886617660522461, 0.01880678367614746, 0.01879756736755371, 0.018889759063720705, 0.01873404884338379, 0.01966080093383789, 0.01968639945983887, 0.01883750343322754, 0.01890105628967285, 0.018894784927368163, 0.01881907272338867, 0.018752511978149415, 0.018913280487060546, 0.01881292724609375, 0.018808832168579103, 0.01886720085144043, 0.01882931137084961, 0.018771968841552734, 0.018716672897338867, 0.01880985641479492, 0.01879859161376953, 0.018817024230957033, 0.01882316780090332, 0.018807807922363282, 0.019500032424926757, 0.01992192077636719, 0.019284992218017577, 0.019842048645019532, 0.019800064086914062, 0.020025344848632814, 0.020333568572998048, 0.019870719909667968, 0.019733503341674806, 0.019365888595581054, 0.018932735443115235, 0.019120128631591796, 0.019090431213378906, 0.019171327590942384, 0.019135488510131835, 0.018994176864624023, 0.01883033561706543, 0.019016767501831056, 0.019187648773193358, 0.018889728546142577, 0.01876479911804199, 0.019080192565917968, 0.01904844856262207, 0.018955263137817382, 0.01884671974182129, 0.01908531188964844, 0.019125247955322267, 0.019088384628295898, 0.018915327072143554, 0.01909350395202637, 0.01887027168273926, 0.018860031127929687, 0.019532800674438477, 0.019741695404052736, 0.019833887100219726, 0.019781600952148436, 0.01992192077636719, 0.019851264953613282, 0.01985024070739746, 0.019112960815429687, 0.018942975997924806, 0.018888704299926756, 0.018883583068847656, 0.018884607315063476, 0.0188221435546875, 0.018742271423339844, 0.018989055633544923, 0.019098623275756836, 0.019153919219970703, 0.01907302474975586, 0.019082239151000976, 0.0192675838470459, 0.019119104385375976, 0.019135488510131835, 0.018882560729980468, 0.019110912322998046, 0.01909350395202637, 0.018884607315063476, 0.01927577590942383, 0.018990079879760743, 0.020578304290771485, 0.020355072021484375, 0.019804159164428712, 0.019990528106689453, 0.019891199111938478, 0.019096576690673828, 0.019108863830566408, 0.01897881507873535, 0.019132415771484376, 0.019086336135864256, 0.019145727157592773, 0.018916351318359375, 0.01886207962036133, 0.019104768753051758, 0.019133440017700197, 0.01907711982727051, 0.019121152877807617, 0.019107839584350587, 0.019130367279052735, 0.019120128631591796, 0.01881804847717285, 0.01887129592895508, 0.01880268859863281, 0.01884671974182129, 0.018902015686035157, 0.018844671249389648, 0.018770944595336913, 0.01885491180419922, 0.019002368927001953]",tokens/s,52.21708873952028,,,2,64,1,,, 
-4bit-awq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,EleutherAI/pythia-1.3b,EleutherAI/pythia-1.3b,cuda,0,42,,,True,,,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,gpt_neox,MB,1509.441536,2095.579136,0.0,1465.909248,1358.169088,s,1,8.260166015625,8.260166015625,0.0,8.260166015625,8.260166015625,8.260166015625,8.260166015625,[8.260166015625],,kWh,1.703393824513771e-05,9.294051824317972e-06,2.4997519998015694e-05,5.1325510067471374e-05,,MB,1634.729984,2120.74496,0.0,1472.200704,1356.544512,s,10,0.2552842235565186,0.025528422355651857,0.00013279813856977005,0.025471296310424805,0.025698454093933107,0.025708059406280518,0.025715743656158446,"[0.025338623046875, 0.02542505645751953, 0.025469696044921875, 0.02544553565979004, 0.025696319580078127, 0.025634464263916017, 0.02539945602416992, 0.02571766471862793, 0.025684511184692383, 0.025472896575927734]",tokens/s,10028.038412774182,kWh,2.9942645261249103e-07,1.6406946043810824e-07,1.1264481585417395e-06,1.5899440715923387e-06,tokens/kWh,161011952.91958568,MB,1635.24608,2122.842112,0.0,1472.200704,1409.969664,s,10,11.186254760742187,1.1186254760742187,0.014640775461701054,1.1233395996093751,1.1302971313476562,1.1312777160644532,1.1320621838378906,"[1.12215673828125, 1.1245224609375, 1.11765576171875, 1.0789691162109376, 1.115362548828125, 1.127617919921875, 1.111263671875, 1.13225830078125, 1.1263690185546875, 1.1300792236328125]",tokens/s,56.319117834770346,kWh,1.2574061397456098e-05,6.8880119056137154e-06,2.2952765549258788e-05,4.2414838852328594e-05,tokens/kWh,1485329.2315772001,,s,630,11.182204952239989,0.017749531670222207,0.00044354599193730573,0.017804288864135744,0.018053119659423827,0.018206720352172853,0.018867650222778323,"[0.016368640899658202, 0.016611328125, 0.016704511642456055, 0.016699392318725585, 0.017943552017211914, 0.017847295761108398, 0.017738752365112305, 0.017737728118896484, 0.017839103698730468, 0.017830911636352538, 0.017732608795166017, 0.017748992919921876, 0.01782067108154297, 0.017724416732788087, 0.017763328552246094, 0.017758207321166994, 0.01782067108154297, 0.017712127685546874, 0.01770086479187012, 0.0178155517578125, 0.01784832000732422, 0.018126848220825196, 0.017993728637695314, 0.01785958480834961, 0.017661951065063478, 0.018137088775634767, 0.01781657600402832, 0.017748992919921876, 0.01782067108154297, 0.01785036849975586, 0.01778892707824707, 0.01775103950500488, 0.017868799209594728, 0.01785651206970215, 0.017839103698730468, 0.017794048309326172, 0.017762304306030274, 0.01782579231262207, 0.01819443130493164, 0.01825382423400879, 0.018050048828125, 0.018700288772583007, 0.01882111930847168, 0.01820364761352539, 0.01785753631591797, 0.017872896194458008, 0.017747968673706056, 0.017779712677001954, 0.018142208099365235, 0.017830911636352538, 0.01781452751159668, 0.017765375137329103, 0.01780633544921875, 0.01784524726867676, 0.017952768325805665, 0.017763328552246094, 0.017803264617919923, 0.017795072555541993, 0.01779199981689453, 0.01780838394165039, 0.018098175048828127, 0.01788211250305176, 
0.017892351150512697, 0.01785753631591797, 0.01789952087402344, 0.017695743560791014, 0.018367488861083983, 0.018053119659423827, 0.01777663993835449, 0.01780531120300293, 0.017795072555541993, 0.01781452751159668, 0.017720319747924804, 0.017729536056518554, 0.017794048309326172, 0.017863679885864257, 0.017739776611328126, 0.017804288864135744, 0.01780940818786621, 0.017802240371704102, 0.017697792053222656, 0.017864704132080078, 0.01799884796142578, 0.01785139274597168, 0.017836032867431642, 0.018168832778930662, 0.017871871948242187, 0.017758207321166994, 0.017968128204345703, 0.017918975830078124, 0.017720319747924804, 0.01778483200073242, 0.01784115219116211, 0.017763328552246094, 0.017953792572021485, 0.018358272552490236, 0.018350080490112306, 0.017861631393432616, 0.017747968673706056, 0.017688575744628905, 0.017745920181274414, 0.017688575744628905, 0.017741823196411134, 0.01776742362976074, 0.017675264358520508, 0.017709056854248048, 0.017763328552246094, 0.017811456680297853, 0.017779712677001954, 0.017878015518188475, 0.017762304306030274, 0.017829887390136717, 0.017723392486572266, 0.017900543212890627, 0.017916927337646483, 0.018075647354125975, 0.01783296012878418, 0.01783705520629883, 0.01782579231262207, 0.01784524726867676, 0.01782374382019043, 0.01779199981689453, 0.017764352798461915, 0.01775103950500488, 0.017761280059814453, 0.017785856246948242, 0.016517120361328123, 0.016623615264892578, 0.0165928955078125, 0.016669696807861328, 0.017099775314331055, 0.0179814395904541, 0.017735679626464843, 0.017803264617919923, 0.018067455291748045, 0.01787392044067383, 0.01776742362976074, 0.017744895935058593, 0.01779302406311035, 0.017704959869384765, 0.017771520614624024, 0.017765375137329103, 0.017819648742675782, 0.017716224670410157, 0.017786880493164063, 0.017778688430786133, 0.017854463577270507, 0.01783193588256836, 0.017833984375, 0.017771520614624024, 0.01781657600402832, 0.018199552536010744, 0.018556928634643553, 0.017897472381591797, 0.017804288864135744, 0.017744895935058593, 0.01780121612548828, 0.017744895935058593, 0.01777459144592285, 0.01778278350830078, 0.01779199981689453, 0.01777459144592285, 0.01779097557067871, 0.017772544860839845, 0.0178288631439209, 0.017722368240356445, 0.017797119140625, 0.017720319747924804, 0.01779916763305664, 0.017935359954833984, 0.017896448135375977, 0.017746944427490235, 0.017769472122192383, 0.017768447875976562, 0.017889280319213868, 0.01787392044067383, 0.01790771293640137, 0.017821695327758787, 0.01781452751159668, 0.01778278350830078, 0.017827840805053712, 0.017786880493164063, 0.01779097557067871, 0.017763328552246094, 0.01779302406311035, 0.01775103950500488, 0.017821695327758787, 0.017748992919921876, 0.017744895935058593, 0.016914432525634765, 0.016769023895263673, 0.01663692855834961, 0.016564224243164064, 0.01657344055175781, 0.01664204788208008, 0.016684032440185546, 0.016655359268188476, 0.016615423202514648, 0.016644096374511717, 0.01663283157348633, 0.01657241630554199, 0.01664204788208008, 0.016713727951049806, 0.01658163261413574, 0.016662527084350585, 0.01660211181640625, 0.016631807327270508, 0.016635904312133788, 0.01675468826293945, 0.01702707290649414, 0.017095680236816405, 0.016678911209106445, 0.016660480499267577, 0.016706560134887697, 0.016685056686401366, 0.01660416030883789, 0.01660723114013672, 0.01663692855834961, 0.01656524848937988, 0.01666662406921387, 0.016673791885375978, 0.016648191452026367, 0.0166297607421875, 0.0166430721282959, 0.01662873649597168, 0.016649215698242188, 
0.016685056686401366, 0.017743871688842772, 0.0178155517578125, 0.017756160736083985, 0.017727487564086913, 0.017900543212890627, 0.01778278350830078, 0.01785958480834961, 0.017710079193115236, 0.017728511810302734, 0.017707008361816406, 0.017810432434082032, 0.017713151931762695, 0.017705984115600586, 0.017752063751220702, 0.017697792053222656, 0.01775923156738281, 0.017729536056518554, 0.018280448913574218, 0.017879039764404296, 0.017737728118896484, 0.01777459144592285, 0.018128896713256838, 0.01780940818786621, 0.017689599990844726, 0.017748992919921876, 0.016474111557006836, 0.0166748161315918, 0.01666662406921387, 0.016668672561645507, 0.016659456253051756, 0.016948223114013672, 0.017735679626464843, 0.017716224670410157, 0.017810432434082032, 0.017743871688842772, 0.017754112243652344, 0.01776639938354492, 0.017761280059814453, 0.017763328552246094, 0.017819648742675782, 0.01824460792541504, 0.018150400161743165, 0.01781350326538086, 0.01780735969543457, 0.01779097557067871, 0.017714176177978515, 0.017764352798461915, 0.017773567199707033, 0.017833984375, 0.01781350326538086, 0.01778278350830078, 0.017761280059814453, 0.017757183074951173, 0.01784320068359375, 0.017748992919921876, 0.018066432952880858, 0.01776742362976074, 0.017900543212890627, 0.01777561569213867, 0.017892351150512697, 0.017925119400024413, 0.017796096801757814, 0.017836032867431642, 0.017758207321166994, 0.017681407928466796, 0.017863679885864257, 0.01781862449645996, 0.017727487564086913, 0.017769472122192383, 0.017779712677001954, 0.018177024841308592, 0.017880064010620117, 0.017709056854248048, 0.017763328552246094, 0.01776639938354492, 0.017717248916625978, 0.017778688430786133, 0.01783705520629883, 0.017679359436035155, 0.01779097557067871, 0.017745920181274414, 0.017762304306030274, 0.017777664184570312, 0.017725439071655275, 0.017708032608032227, 0.017722368240356445, 0.017761280059814453, 0.017746944427490235, 0.01777459144592285, 0.018488319396972656, 0.018371583938598633, 0.017968128204345703, 0.01785958480834961, 0.017854463577270507, 0.017804288864135744, 0.017833984375, 0.01787596893310547, 0.018554880142211915, 0.018886655807495118, 0.018510847091674804, 0.01785241508483887, 0.0176680965423584, 0.017747968673706056, 0.017713151931762695, 0.017812480926513673, 0.017780736923217775, 0.017777664184570312, 0.01782374382019043, 0.0177838077545166, 0.018001920700073244, 0.01785856056213379, 0.017763328552246094, 0.01773465538024902, 0.017746944427490235, 0.01781657600402832, 0.017785856246948242, 0.01782374382019043, 0.017765375137329103, 0.017797119140625, 0.017740800857543947, 0.01780735969543457, 0.017714176177978515, 0.017760255813598632, 0.017810432434082032, 0.017764352798461915, 0.017778688430786133, 0.01816166305541992, 0.018423807144165038, 0.01789030456542969, 0.01789030456542969, 0.01777561569213867, 0.01785958480834961, 0.018117631912231445, 0.017812480926513673, 0.017796096801757814, 0.01780735969543457, 0.0177838077545166, 0.017777664184570312, 0.01778892707824707, 0.017795072555541993, 0.0178288631439209, 0.017781759262084963, 0.01784012794494629, 0.01779814338684082, 0.017846271514892577, 0.017708032608032227, 0.01789030456542969, 0.01799577522277832, 0.01787596893310547, 0.017952768325805665, 0.017802240371704102, 0.016463872909545898, 0.016698368072509767, 0.016970752716064453, 0.017368064880371094, 0.016880640029907225, 0.016657407760620118, 0.016680959701538087, 0.016669696807861328, 0.016683008193969725, 0.016669696807861328, 0.01661952018737793, 0.016563199996948243, 
0.01660416030883789, 0.01701273536682129, 0.017945600509643556, 0.01786675262451172, 0.017728511810302734, 0.017689599990844726, 0.017750015258789064, 0.01775103950500488, 0.01800294494628906, 0.01785651206970215, 0.017803264617919923, 0.017719295501708983, 0.017743871688842772, 0.017778688430786133, 0.017761280059814453, 0.017723392486572266, 0.017712127685546874, 0.01769267272949219, 0.017747968673706056, 0.01836851119995117, 0.017810432434082032, 0.017757183074951173, 0.017844224929809572, 0.0178155517578125, 0.01801523208618164, 0.017760255813598632, 0.01779814338684082, 0.017781759262084963, 0.01777663993835449, 0.017739776611328126, 0.01857535934448242, 0.01967616081237793, 0.01818726348876953, 0.018086912155151368, 0.01777459144592285, 0.017945600509643556, 0.017871871948242187, 0.017812480926513673, 0.017690624237060547, 0.017748992919921876, 0.017726463317871095, 0.017737728118896484, 0.017768447875976562, 0.017716224670410157, 0.017758207321166994, 0.01769267272949219, 0.017762304306030274, 0.017803264617919923, 0.01785958480834961, 0.018148351669311523, 0.018234367370605468, 0.01779302406311035, 0.017730560302734375, 0.017796096801757814, 0.01780019187927246, 0.01777459144592285, 0.018041856765747072, 0.01784012794494629, 0.017812480926513673, 0.01781452751159668, 0.017753087997436523, 0.01776742362976074, 0.01782067108154297, 0.017922048568725587, 0.01784524726867676, 0.017757183074951173, 0.01774284744262695, 0.017748992919921876, 0.017833984375, 0.01785958480834961, 0.017794048309326172, 0.01782579231262207, 0.019176448822021484, 0.020883455276489257, 0.019594240188598632, 0.018518016815185546, 0.018020351409912108, 0.017935359954833984, 0.01787494468688965, 0.018019327163696287, 0.01804697608947754, 0.017812480926513673, 0.017926143646240233, 0.017863679885864257, 0.017868799209594728, 0.0178155517578125, 0.01798963165283203, 0.01780940818786621, 0.017834016799926758, 0.017567712783813475, 0.017889280319213868, 0.017963008880615236, 0.01765888023376465, 0.01782374382019043, 0.01782579231262207, 0.01781862449645996, 0.01798041534423828, 0.017885183334350584, 0.017655807495117186, 0.017633279800415038, 0.01743155288696289, 0.017570816040039062, 0.017559551239013673, 0.017736703872680663, 0.017750015258789064, 0.018329599380493163, 0.018318336486816408, 0.018670591354370117, 0.018053119659423827, 0.017844224929809572, 0.017871871948242187, 0.017889280319213868, 0.018119680404663087, 0.017917951583862304, 0.016970752716064453, 0.01784217643737793, 0.01790771293640137, 0.018035711288452147, 0.01801523208618164, 0.017846271514892577, 0.017922048568725587, 0.017955839157104494, 0.017908735275268553, 0.017833984375, 0.017935359954833984, 0.017786880493164063, 0.017952768325805665, 0.0178657283782959, 0.01784832000732422, 0.017763328552246094, 0.017535999298095704, 0.01761894416809082, 0.01781452751159668, 0.017926143646240233, 0.017796096801757814, 0.017895423889160156, 0.01781760025024414, 0.017870847702026366, 0.01782476806640625, 0.017941503524780272, 0.018052095413208007, 0.017869823455810546, 0.01778892707824707, 0.017888256072998047, 0.017915903091430666, 0.017822719573974608, 0.01780940818786621, 0.0178288631439209, 0.017872896194458008, 0.017920000076293945, 0.017936384201049805, 0.017844224929809572, 0.01785139274597168, 0.01761894416809082, 0.01781452751159668, 0.01784934425354004, 0.01785856056213379, 0.017525760650634766, 0.017580032348632812, 0.01765273666381836, 0.017778688430786133, 0.018206720352172853, 0.018971647262573242, 0.018103296279907227, 0.01784832000732422, 
0.01802137565612793, 0.017905664443969727, 0.01796403121948242, 0.017880064010620117, 0.017904640197753906, 0.017872896194458008, 0.017937408447265626, 0.0178288631439209, 0.018206720352172853, 0.01794867134094238, 0.017935359954833984, 0.017915903091430666, 0.017772544860839845, 0.01785958480834961, 0.017663999557495116, 0.01779199981689453, 0.017694719314575197, 0.01820159912109375, 0.01807360076904297, 0.017829887390136717, 0.017949695587158202, 0.017918975830078124, 0.017915903091430666, 0.01779916763305664, 0.017868799209594728, 0.017839103698730468, 0.017504255294799806, 0.017861631393432616, 0.01781862449645996, 0.01780531120300293, 0.017901567459106444, 0.017893375396728514, 0.017862655639648437, 0.017914880752563478, 0.017725439071655275, 0.017911808013916015, 0.01782579231262207, 0.018670591354370117, 0.018504703521728515, 0.017931264877319338, 0.01803468894958496, 0.0178288631439209, 0.01803878402709961, 0.01786777687072754, 0.018058240890502928, 0.017862655639648437, 0.01784832000732422, 0.017795072555541993, 0.017765375137329103, 0.017779712677001954, 0.01782681655883789, 0.01783193588256836, 0.018078720092773438, 0.0184770565032959, 0.018976768493652343, 0.017935359954833984, 0.018197504043579102, 0.017846271514892577, 0.01782476806640625, 0.01777561569213867, 0.017797119140625, 0.01781862449645996, 0.01779097557067871, 0.017811456680297853, 0.017777664184570312, 0.017904640197753906, 0.017853439331054686, 0.01805721664428711, 0.01802649688720703, 0.017922048568725587, 0.01788313674926758, 0.01798041534423828, 0.018123775482177733, 0.018111488342285157, 0.01785241508483887]",tokens/s,56.33951467450076,,,2,64,1,,, -4bit-awq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,Qwen/Qwen1.5-72B,Qwen/Qwen1.5-72B,cuda,0,42,,,True,,,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run - report = scenario.run(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run - self.run_model_loading_tracking(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking - backend.load() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load - self.load_transformers_model() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in 
load_transformers_model - self.load_transformers_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights - self.load_transformers_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained - self.pretrained_model = self.automodel_loader.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4034, in from_pretrained - dispatch_model(model, **device_map_kwargs) - File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 494, in dispatch_model - model.to(device) - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 2905, in to - return super().to(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1174, in to - return self._apply(convert) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply - module._apply(fn) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply - module._apply(fn) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply - module._apply(fn) - [Previous line repeated 2 more times] - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 854, in _apply - self._buffers[key] = fn(buf) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1160, in convert - return t.to( -torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 96.00 MiB. GPU 0 has a total capacity of 22.18 GiB of which 68.50 MiB is free. Process 81236 has 22.11 GiB memory in use. Of the allocated memory 21.86 GiB is allocated by PyTorch, and 10.74 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) - -",qwen2,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,2,64,1,,, -4bit-awq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,EleutherAI/pythia-1.4b,EleutherAI/pythia-1.4b,cuda,0,42,,,True,,,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,gpt_neox,MB,1515.769856,2095.579136,0.0,1465.909248,1358.169088,s,1,8.45063671875,8.45063671875,0.0,8.45063671875,8.45063671875,8.45063671875,8.45063671875,[8.45063671875],,kWh,1.8479481442351345e-05,1.008505444836068e-05,2.9985579544034024e-05,5.8550115434746045e-05,,MB,1658.515456,2120.74496,0.0,1472.200704,1356.544512,s,10,0.25428233718872073,0.025428233718872074,2.6081651932820973e-05,0.02542748832702637,0.02545974311828613,0.025462479400634765,0.02546466842651367,"[0.025381216049194334, 0.02544771194458008, 0.025449792861938478, 0.025404800415039064, 0.025436864852905274, 0.025459135055541992, 0.025406976699829102, 0.0254652156829834, 0.025412511825561524, 0.02541811180114746]",tokens/s,10067.549434627244,kWh,2.995547716789942e-07,1.6414009802856235e-07,1.1243082130885518e-06,1.5880030827961083e-06,tokens/kWh,161208755.0543308,MB,1670.742016,2122.842112,0.0,1472.200704,1409.969664,s,10,10.883926147460938,1.0883926147460938,0.011041552613218866,1.091833984375,1.0953853149414063,1.096459503173828,1.0973188537597656,"[1.0863819580078125, 1.0951466064453126, 1.09753369140625, 1.0945120849609375, 1.094833740234375, 1.0936934814453125, 1.0895838623046874, 1.0849027099609374, 1.0899744873046875, 1.057363525390625]",tokens/s,57.883523965932994,kWh,1.2574791218319433e-05,6.890499739568811e-06,2.3368683603510703e-05,4.283397456139894e-05,tokens/kWh,1470795.1023712435,,s,630,10.87994879722595,0.01726975999559675,0.0002994262270703523,0.017321983337402345,0.017467596817016602,0.01760768003463745,0.01818432580947876,"[0.016995328903198242, 0.017358848571777344, 0.01700556755065918, 0.01741619110107422, 0.017071104049682616, 0.01722777557373047, 0.017067007064819336, 0.01705369567871094, 0.017116159439086915, 0.01699942398071289, 0.01701171112060547, 0.017163263320922852, 0.01701683235168457, 0.017296384811401368, 0.017140735626220704, 0.017273855209350587, 0.017528831481933595, 0.017353727340698243, 0.01739468765258789, 0.01702604866027832, 0.017185792922973633, 0.017370111465454103, 0.01725644874572754, 0.017180671691894533, 0.017415168762207032, 0.017385471343994142, 0.01702604866027832, 0.016962560653686523, 0.01723494338989258, 0.01716223907470703, 0.017116159439086915, 0.017108991622924806, 0.017297407150268555, 0.017503231048583985, 0.017415168762207032, 0.01745715141296387, 0.017250303268432618, 0.017156095504760743, 0.01704140853881836, 0.017146879196166993, 0.01718169593811035, 0.017479679107666016, 0.01737932777404785, 0.01704140853881836, 0.01717452812194824, 0.01704960060119629, 0.017063936233520507, 0.017063936233520507, 0.01701785659790039, 0.017168384552001953, 0.017060863494873048, 0.017052671432495118, 
0.01723289680480957, 0.01765068817138672, 0.018185216903686522, 0.017535999298095704, 0.017386495590209963, 0.01740287971496582, 0.017314815521240236, 0.017283071517944337, 0.017319936752319336, 0.017382400512695313, 0.01737215995788574, 0.017118207931518553, 0.017340415954589843, 0.017345535278320313, 0.017295360565185547, 0.01742131233215332, 0.017326080322265625, 0.017351680755615235, 0.01735577583312988, 0.017288192749023438, 0.017348608016967772, 0.017315839767456053, 0.017253376007080077, 0.01724723243713379, 0.017277952194213866, 0.0172410888671875, 0.01725951957702637, 0.01742950439453125, 0.017242111206054688, 0.017306623458862306, 0.01742950439453125, 0.01781862449645996, 0.017655807495117186, 0.017524736404418945, 0.01740595245361328, 0.017377279281616212, 0.017273855209350587, 0.01735577583312988, 0.01740083122253418, 0.01738137626647949, 0.017343488693237305, 0.017442815780639647, 0.017336320877075196, 0.017337343215942384, 0.017449983596801756, 0.017341440200805663, 0.017435647964477538, 0.017353727340698243, 0.017458175659179686, 0.017335296630859375, 0.017350656509399414, 0.017320959091186524, 0.017333248138427734, 0.017346559524536134, 0.017304576873779298, 0.017368064880371094, 0.01734758377075195, 0.017308671951293944, 0.017376256942749024, 0.01765273666381836, 0.018239488601684572, 0.017719295501708983, 0.017320959091186524, 0.017374208450317383, 0.017335296630859375, 0.01725542449951172, 0.01722265625, 0.017357824325561523, 0.017358848571777344, 0.017427455902099608, 0.017305599212646485, 0.017323007583618166, 0.01723904037475586, 0.017339391708374022, 0.01704960060119629, 0.0174335994720459, 0.018086912155151368, 0.017571840286254883, 0.01740390396118164, 0.017217536926269532, 0.017352703094482422, 0.017310720443725586, 0.01726361656188965, 0.017289215087890625, 0.017330175399780275, 0.017354751586914064, 0.01745510482788086, 0.017258495330810548, 0.017337343215942384, 0.017308671951293944, 0.017304576873779298, 0.017246208190917968, 0.017351680755615235, 0.017427455902099608, 0.017283071517944337, 0.017295360565185547, 0.017329151153564454, 0.017357824325561523, 0.017310720443725586, 0.017398784637451172, 0.017286144256591796, 0.017518592834472657, 0.017426431655883787, 0.017335296630859375, 0.017406976699829102, 0.01738137626647949, 0.01742131233215332, 0.01741823959350586, 0.017310720443725586, 0.017359872817993165, 0.017321983337402345, 0.01798860740661621, 0.0176312313079834, 0.01801318359375, 0.017528831481933595, 0.017537023544311522, 0.01741926383972168, 0.01765376091003418, 0.01782067108154297, 0.017320959091186524, 0.01741004753112793, 0.017398784637451172, 0.0174202880859375, 0.017329151153564454, 0.017349632263183593, 0.017357824325561523, 0.017370111465454103, 0.017330175399780275, 0.017330175399780275, 0.017288192749023438, 0.017341440200805663, 0.017460224151611328, 0.01744076728820801, 0.017460224151611328, 0.017532928466796875, 0.017335296630859375, 0.017549312591552735, 0.01703321647644043, 0.01737215995788574, 0.01725542449951172, 0.017277952194213866, 0.017311744689941407, 0.017289215087890625, 0.01738444709777832, 0.017375232696533204, 0.017360895156860352, 0.017326080322265625, 0.017362943649291994, 0.017345535278320313, 0.017459199905395507, 0.017596416473388672, 0.017361919403076173, 0.01739673614501953, 0.017319936752319336, 0.017276927947998046, 0.017274879455566407, 0.01737932777404785, 0.01741619110107422, 0.017351680755615235, 0.01738137626647949, 0.01726464080810547, 0.01724415969848633, 0.017325056076049804, 0.017305599212646485, 
0.017315839767456053, 0.017408000946044923, 0.017288192749023438, 0.017274879455566407, 0.017358848571777344, 0.017273855209350587, 0.017254400253295898, 0.0176629753112793, 0.017452032089233398, 0.01738137626647949, 0.018102272033691406, 0.017713151931762695, 0.017360895156860352, 0.017516544342041016, 0.017549312591552735, 0.017588224411010742, 0.017382400512695313, 0.017273855209350587, 0.017350656509399414, 0.01724313545227051, 0.017369087219238282, 0.017341440200805663, 0.01723289680480957, 0.017373184204101562, 0.01721548843383789, 0.01742540740966797, 0.017406976699829102, 0.017345535278320313, 0.01739263916015625, 0.017354751586914064, 0.017327104568481445, 0.017260543823242186, 0.017356800079345702, 0.01724825668334961, 0.017285120010375975, 0.01738137626647949, 0.017217536926269532, 0.017406976699829102, 0.0172677116394043, 0.01740902328491211, 0.017328128814697266, 0.017730560302734375, 0.01761689567565918, 0.017319936752319336, 0.01755340766906738, 0.017331199645996095, 0.017306623458862306, 0.017333248138427734, 0.01775923156738281, 0.01757798385620117, 0.01741312026977539, 0.01742131233215332, 0.017340415954589843, 0.01745408058166504, 0.01737113571166992, 0.017406976699829102, 0.017288192749023438, 0.017359872817993165, 0.017348608016967772, 0.017329151153564454, 0.017350656509399414, 0.017367040634155274, 0.017326080322265625, 0.01739366340637207, 0.01741823959350586, 0.01741004753112793, 0.017368064880371094, 0.017375232696533204, 0.01741209602355957, 0.017391616821289063, 0.017362943649291994, 0.0172728328704834, 0.017294336318969726, 0.017295360565185547, 0.01739263916015625, 0.017283071517944337, 0.017345535278320313, 0.017369087219238282, 0.017297407150268555, 0.017321983337402345, 0.01739673614501953, 0.017469440460205078, 0.017324031829833983, 0.017385471343994142, 0.017353727340698243, 0.017406976699829102, 0.017349632263183593, 0.017325056076049804, 0.017338367462158204, 0.01724006462097168, 0.017310720443725586, 0.017305599212646485, 0.01741004753112793, 0.017370111465454103, 0.017265663146972657, 0.017348608016967772, 0.01722777557373047, 0.017300479888916014, 0.017369087219238282, 0.01822412872314453, 0.017549312591552735, 0.017344512939453126, 0.017358848571777344, 0.017346559524536134, 0.017299455642700197, 0.017271808624267578, 0.017324031829833983, 0.01721241569519043, 0.017333248138427734, 0.017510400772094727, 0.0172677116394043, 0.01738751983642578, 0.01734758377075195, 0.017313791275024415, 0.017317888259887695, 0.017271808624267578, 0.017354751586914064, 0.017298431396484376, 0.017229824066162108, 0.017094655990600584, 0.017373184204101562, 0.017344512939453126, 0.017333248138427734, 0.017358848571777344, 0.017305599212646485, 0.017351680755615235, 0.017338367462158204, 0.017385471343994142, 0.017344512939453126, 0.01742848014831543, 0.017300479888916014, 0.01717862319946289, 0.017309696197509765, 0.017184768676757813, 0.01702707290649414, 0.017310720443725586, 0.017307647705078123, 0.0175861759185791, 0.01734758377075195, 0.017303552627563477, 0.017283071517944337, 0.017326080322265625, 0.017443840026855468, 0.017312768936157227, 0.017427455902099608, 0.017341440200805663, 0.017361919403076173, 0.017364992141723632, 0.017320959091186524, 0.017377279281616212, 0.017295360565185547, 0.01744076728820801, 0.017301504135131835, 0.017374208450317383, 0.017295360565185547, 0.017557504653930665, 0.01745715141296387, 0.017369087219238282, 0.017458175659179686, 0.017378303527832033, 0.017314815521240236, 0.017370111465454103, 0.017096704483032226, 
0.01740083122253418, 0.01739776039123535, 0.01740595245361328, 0.017337343215942384, 0.0174335994720459, 0.01739571189880371, 0.017374208450317383, 0.017323007583618166, 0.017334272384643554, 0.01738137626647949, 0.017262592315673828, 0.017339391708374022, 0.01697689628601074, 0.017480703353881837, 0.017321983337402345, 0.017492992401123047, 0.017308671951293944, 0.017548288345336914, 0.01734758377075195, 0.017111040115356444, 0.01702092742919922, 0.0170700798034668, 0.017039360046386717, 0.01703321647644043, 0.017108991622924806, 0.01700864028930664, 0.017369087219238282, 0.017262592315673828, 0.017276927947998046, 0.017331199645996095, 0.017308671951293944, 0.017473535537719728, 0.017337343215942384, 0.017307647705078123, 0.017300479888916014, 0.017399808883666993, 0.017327104568481445, 0.017292287826538084, 0.017374208450317383, 0.017297407150268555, 0.017325056076049804, 0.017329151153564454, 0.01736809539794922, 0.017325023651123046, 0.01739673614501953, 0.017254400253295898, 0.017246208190917968, 0.01720012855529785, 0.017313791275024415, 0.01743052864074707, 0.017319936752319336, 0.01723391914367676, 0.0172410888671875, 0.017348608016967772, 0.01721446418762207, 0.017246208190917968, 0.01724825668334961, 0.01723187255859375, 0.01722163200378418, 0.01725132751464844, 0.01723391914367676, 0.01718169593811035, 0.016527360916137695, 0.016638975143432617, 0.016740352630615234, 0.01663283157348633, 0.016652288436889647, 0.017871871948242187, 0.018447359085083007, 0.01783500862121582, 0.017434623718261717, 0.017309696197509765, 0.017236991882324217, 0.01701888084411621, 0.016957439422607423, 0.017321983337402345, 0.017354751586914064, 0.0171909122467041, 0.017305599212646485, 0.017291263580322267, 0.017238016128540038, 0.017314815521240236, 0.017270784378051757, 0.017370111465454103, 0.017333248138427734, 0.017318912506103516, 0.017084415435791016, 0.0176680965423584, 0.017452032089233398, 0.01728102493286133, 0.017352703094482422, 0.017242111206054688, 0.017314815521240236, 0.017260543823242186, 0.017082368850708008, 0.016645120620727538, 0.01640652847290039, 0.016635904312133788, 0.016753664016723634, 0.016664575576782227, 0.016681983947753908, 0.016331775665283203, 0.016368640899658202, 0.016555007934570314, 0.016676864624023437, 0.017532928466796875, 0.017361919403076173, 0.018777088165283205, 0.018603008270263673, 0.017833984375, 0.01757798385620117, 0.017467391967773437, 0.017795072555541993, 0.01740902328491211, 0.01747148895263672, 0.017378303527832033, 0.01744076728820801, 0.017312768936157227, 0.017113088607788086, 0.017146879196166993, 0.017064960479736328, 0.01703321647644043, 0.01701068878173828, 0.01704652786254883, 0.017076223373413087, 0.017320959091186524, 0.017340415954589843, 0.017294336318969726, 0.017116159439086915, 0.01724723243713379, 0.017336320877075196, 0.017106943130493164, 0.017095680236816405, 0.017314815521240236, 0.017257471084594727, 0.017452032089233398, 0.017100799560546876, 0.017031167984008787, 0.017069055557250978, 0.01701171112060547, 0.01743257522583008, 0.01719193649291992, 0.01722675132751465, 0.01721036720275879, 0.01740902328491211, 0.017476608276367187, 0.017168384552001953, 0.017052671432495118, 0.01705779266357422, 0.017350656509399414, 0.01700249671936035, 0.017067007064819336, 0.017261568069458007, 0.017311744689941407, 0.017108991622924806, 0.017115135192871094, 0.01702707290649414, 0.017075199127197266, 0.016936960220336913, 0.017103872299194335, 0.017081344604492187, 0.01721343994140625, 0.01757798385620117, 0.017337343215942384, 
0.017335296630859375, 0.017352703094482422, 0.017300479888916014, 0.01742336082458496, 0.017076223373413087, 0.01746124839782715, 0.017064960479736328, 0.018182144165039063, 0.019156991958618166, 0.017710079193115236, 0.017293312072753905, 0.017329151153564454, 0.01725951957702637, 0.017276927947998046, 0.017398784637451172, 0.017331199645996095, 0.017252351760864256, 0.017334272384643554, 0.017366016387939453, 0.017157119750976564, 0.01766092872619629, 0.01742336082458496, 0.017378303527832033, 0.01717452812194824, 0.016508928298950197, 0.01641062355041504, 0.01637478446960449, 0.01640140724182129, 0.01640755271911621, 0.01704652786254883, 0.016955392837524414, 0.01662156867980957, 0.016727039337158203, 0.016719871520996094, 0.016727039337158203, 0.01661952018737793, 0.016942079544067384, 0.01743769645690918, 0.017310720443725586, 0.017449983596801756, 0.0173885440826416, 0.017317888259887695, 0.017246208190917968, 0.017335296630859375, 0.017378303527832033, 0.017288192749023438, 0.017250303268432618, 0.01732918357849121, 0.017259487152099608, 0.017282047271728516, 0.017055744171142577, 0.016867328643798828, 0.0164270076751709, 0.016638975143432617, 0.016664575576782227, 0.01662873649597168, 0.01663488006591797, 0.016570367813110352, 0.0166430721282959, 0.01659596824645996, 0.01657651138305664, 0.016667648315429686, 0.016693248748779296, 0.0165928955078125, 0.01660211181640625, 0.01661030387878418, 0.01660723114013672, 0.01661952018737793, 0.016656383514404297, 0.0166297607421875, 0.01661440086364746, 0.01660518455505371, 0.016611328125, 0.01662873649597168, 0.01639936065673828, 0.016358400344848634, 0.01664102363586426, 0.016615423202514648, 0.01662259292602539, 0.01663385581970215, 0.016644096374511717, 0.01665433692932129, 0.016664575576782227, 0.01660620880126953, 0.01662054443359375, 0.016668672561645507, 0.016673791885375978]",tokens/s,57.90468427210157,,,2,64,1,,, -4bit-awq-exllama-v2-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,facebook/xglm-7.5B,facebook/xglm-7.5B,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 
258, in load_model_with_no_weights - self.load_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3704, in from_pretrained - config = cls._autoset_attn_implementation( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1490, in _autoset_attn_implementation - config = cls._check_and_enable_sdpa( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1656, in _check_and_enable_sdpa - raise ValueError( -ValueError: XGLMForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,2,64,1,,, -4bit-awq-exllama-v2-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,huggyllama/llama-30b,huggyllama/llama-30b,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - self.load_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File 
""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3907, in from_pretrained - hf_quantizer.postprocess_model(model) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/base.py"", line 195, in postprocess_model - return self._process_model_after_weight_loading(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/quantizer_awq.py"", line 107, in _process_model_after_weight_loading - model = post_init_awq_exllama_modules(model, self.quantization_config.exllama_config) - File ""/usr/local/lib/python3.10/dist-packages/transformers/integrations/awq.py"", line 466, in post_init_awq_exllama_modules - model = exllamav2_post_init( - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllamav2.py"", line 198, in exllamav2_post_init - submodule.post_init(scratch_space=model.scratch_spaces[device]) - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllamav2.py"", line 81, in post_init - self.q_handle = exlv2_ext.make_q_matrix( -NameError: name 'exlv2_ext' is not defined - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,2,64,1,,, -4bit-awq-exllama-v2-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,openai-community/gpt2-large,openai-community/gpt2-large,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.42.3,,0.31.0,,,,1.20.0,,,,0.11.1,,,,,,,,,,,,,,,,,,,,,,,,,,,MB,1252.749312,2645.03296,0.0,1998.585856,1692.386816,s,10,0.18764684867858888,0.018764684867858886,0.0005992433307238454,0.01878708839416504,0.01952073516845703,0.019708079528808594,0.019857955017089843,"[0.019895423889160158, 0.018243936538696288, 0.018047775268554687, 0.01779680061340332, 0.01896006393432617, 0.01885526466369629, 0.01893507194519043, 0.018714496612548828, 0.019479103088378906, 0.01871891212463379]",tokens/s,13642.648507169439,kWh,2.0918840269540734e-07,1.1462499605261722e-07,6.201499553131089e-07,9.439633540611335e-07,tokens/kWh,271196968.5037379,MB,1253.888,2645.03296,0.0,1998.585856,1714.454528,s,10,11.419435302734374,1.1419435302734375,0.01935842230442361,1.1489760131835938,1.1544193725585938,1.1615755920410156,1.1673005676269532,"[1.1687318115234375, 1.14187158203125, 1.1200245361328125, 1.09501806640625, 1.1501046142578124, 1.143095458984375, 1.149320556640625, 1.1486314697265625, 1.1528291015625, 1.14980810546875]",tokens/s,55.16910278822167,kWh,1.3099662190341671e-05,7.1781809893895315e-06,2.638012164489513e-05,4.6657964824626325e-05,tokens/kWh,1350251.778807726,,s,629,11.567720458984363,0.018390652558003776,0.0022690962265683495,0.01818726348876953,0.018542176055908206,0.019018138122558592,0.03558961151123047,"[0.019501056671142578, 0.019162111282348633, 0.018941951751708985, 0.019066879272460938, 0.01902899169921875, 0.01900441551208496, 0.019058687210083008, 0.01902387237548828, 0.018993152618408202, 0.018852863311767578, 0.019162111282348633, 0.019190784454345702, 0.01906073570251465, 0.01916214370727539, 0.019212255477905272, 0.018974752426147462, 0.018965471267700196, 
0.019121152877807617, 0.01924300765991211, 0.019508224487304687, 0.018880512237548826, 0.018902015686035157, 0.01886617660522461, 0.018918399810791017, 0.019088384628295898, 0.018933759689331055, 0.018364416122436524, 0.018290687561035156, 0.018215936660766603, 0.018141183853149414, 0.018117631912231445, 0.018125823974609375, 0.01810534477233887, 0.018126880645751953, 0.018101215362548828, 0.018391040802001952, 0.018130943298339842, 0.018176000595092775, 0.018163711547851562, 0.018165760040283203, 0.018115583419799804, 0.018102272033691406, 0.018117631912231445, 0.018163711547851562, 0.01817190361022949, 0.018099199295043944, 0.01810534477233887, 0.018035711288452147, 0.018132991790771484, 0.018129919052124025, 0.018310144424438478, 0.01823846435546875, 0.018265087127685545, 0.0180633602142334, 0.017743871688842772, 0.017752063751220702, 0.01804287910461426, 0.018145280838012694, 0.018197504043579102, 0.01824563217163086, 0.018159616470336915, 0.018191360473632814, 0.03562803268432617, 0.017076223373413087, 0.017086463928222655, 0.017167360305786132, 0.017089536666870117, 0.017047552108764647, 0.018058240890502928, 0.018059328079223634, 0.01834899139404297, 0.018276351928710938, 0.01819647979736328, 0.018250751495361327, 0.018142208099365235, 0.018107391357421874, 0.018147327423095702, 0.01824051284790039, 0.018332672119140626, 0.018497535705566406, 0.01864089584350586, 0.01923583984375, 0.01843916893005371, 0.018267135620117187, 0.018134016036987305, 0.018089984893798827, 0.018084863662719726, 0.01804800033569336, 0.018050048828125, 0.018152448654174806, 0.018058240890502928, 0.018107391357421874, 0.018075647354125975, 0.01815449523925781, 0.01826918411254883, 0.01882828712463379, 0.01827020835876465, 0.018112512588500978, 0.01822003173828125, 0.018148351669311523, 0.01818012809753418, 0.01826505661010742, 0.018214912414550782, 0.018086912155151368, 0.01820262336730957, 0.01820569610595703, 0.018292736053466797, 0.018268159866333008, 0.018142208099365235, 0.018082815170288084, 0.01815449523925781, 0.018143232345581056, 0.01820876884460449, 0.01820364761352539, 0.018139135360717772, 0.018288639068603514, 0.01816985511779785, 0.018242559432983398, 0.018145280838012694, 0.018168832778930662, 0.01838591957092285, 0.018288639068603514, 0.01816985511779785, 0.018164735794067383, 0.01818726348876953, 0.035490814208984374, 0.017146879196166993, 0.017096704483032226, 0.017101823806762697, 0.017148927688598634, 0.017076223373413087, 0.017095680236816405, 0.01705369567871094, 0.017084415435791016, 0.01704652786254883, 0.017073152542114257, 0.017122304916381836, 0.017118207931518553, 0.016943103790283204, 0.016923648834228516, 0.016948223114013672, 0.017068031311035157, 0.01701785659790039, 0.017042463302612304, 0.017095647811889648, 0.01701785659790039, 0.017040384292602538, 0.017099775314331055, 0.017438720703125, 0.017123327255249024, 0.017154048919677735, 0.01840025520324707, 0.01818009567260742, 0.018229248046875, 0.018310144424438478, 0.018306079864501952, 0.01845859146118164, 0.01840640068054199, 0.01824870491027832, 0.01821696090698242, 0.018378751754760742, 0.018288639068603514, 0.018337791442871093, 0.018119680404663087, 0.018142240524291992, 0.018222047805786134, 0.018165760040283203, 0.018128896713256838, 0.018109439849853515, 0.018035711288452147, 0.018190336227416993, 0.018142208099365235, 0.018572288513183592, 0.01823846435546875, 0.018312192916870116, 0.01840127944946289, 0.01828659248352051, 0.018241535186767577, 0.018165760040283203, 0.018241535186767577, 0.018564096450805666, 
0.018354175567626953, 0.018292736053466797, 0.018290687561035156, 0.01822003173828125, 0.018307071685791015, 0.017810432434082032, 0.018223104476928712, 0.035160064697265625, 0.01702911949157715, 0.01702809524536133, 0.01705881690979004, 0.01706598472595215, 0.016983039855957033, 0.017069055557250978, 0.016987136840820313, 0.017040384292602538, 0.017047552108764647, 0.017079296112060546, 0.01699839973449707, 0.01716633605957031, 0.017104896545410156, 0.017161216735839844, 0.017048576354980468, 0.017089536666870117, 0.016977920532226562, 0.01703628730773926, 0.01696460723876953, 0.0170700798034668, 0.016954368591308593, 0.016942079544067384, 0.01699635124206543, 0.016995328903198242, 0.016898048400878905, 0.016713727951049806, 0.016722944259643553, 0.01681305694580078, 0.017180671691894533, 0.017088512420654296, 0.01701888084411621, 0.01700147247314453, 0.017062911987304686, 0.01698508834838867, 0.01697177505493164, 0.01697279930114746, 0.016986112594604492, 0.017044479370117188, 0.01701068878173828, 0.016991231918334963, 0.016957439422607423, 0.017006591796875, 0.016970752716064453, 0.018117631912231445, 0.018298879623413086, 0.01826201629638672, 0.018158592224121094, 0.018311168670654295, 0.01823846435546875, 0.018192384719848635, 0.018215936660766603, 0.018215936660766603, 0.018316287994384766, 0.017958911895751953, 0.017957887649536132, 0.018362367630004883, 0.018308095932006836, 0.01840742492675781, 0.01827020835876465, 0.01828659248352051, 0.018313215255737304, 0.018189311981201172, 0.037544960021972655, 0.01821286392211914, 0.018301952362060548, 0.018177024841308592, 0.018341888427734376, 0.018166784286499024, 0.01823027229309082, 0.01816985511779785, 0.018239488601684572, 0.018156543731689453, 0.018299903869628906, 0.018185216903686522, 0.01816268730163574, 0.018096128463745118, 0.018284543991088868, 0.018226175308227538, 0.018324480056762696, 0.01860812759399414, 0.018324480056762696, 0.0178155517578125, 0.017785856246948242, 0.018095104217529297, 0.01842278480529785, 0.01823539161682129, 0.017905664443969727, 0.01782681655883789, 0.0179783992767334, 0.018200544357299803, 0.018266111373901366, 0.01816166305541992, 0.01824665641784668, 0.018141183853149414, 0.018144256591796876, 0.018190336227416993, 0.01824358367919922, 0.018172927856445312, 0.01820159912109375, 0.018255872726440428, 0.018272256851196288, 0.018358272552490236, 0.018521087646484375, 0.018334720611572267, 0.018185216903686522, 0.018242559432983398, 0.01826918411254883, 0.01823027229309082, 0.018207744598388673, 0.018310176849365235, 0.019415008544921876, 0.018423807144165038, 0.01822003173828125, 0.018291711807250977, 0.018289663314819335, 0.018283519744873047, 0.0181790714263916, 0.01822208023071289, 0.018185216903686522, 0.01886412811279297, 0.018226175308227538, 0.018282495498657226, 0.018313215255737304, 0.018164735794067383, 0.01845043182373047, 0.03739648056030274, 0.018181119918823242, 0.018241535186767577, 0.01823027229309082, 0.01821392059326172, 0.018124767303466797, 0.018082815170288084, 0.01809715270996094, 0.018166784286499024, 0.018148351669311523, 0.01822719955444336, 0.018189311981201172, 0.018256895065307616, 0.018296831130981444, 0.018318336486816408, 0.018317312240600587, 0.018249727249145507, 0.01822105598449707, 0.01821388816833496, 0.018130943298339842, 0.01822719955444336, 0.018092031478881835, 0.018159616470336915, 0.018121728897094725, 0.018221088409423828, 0.01806332778930664, 0.018075647354125975, 0.01804083251953125, 0.018119680404663087, 0.01819545555114746, 0.018163711547851562, 
0.01859686470031738, 0.017810432434082032, 0.017913856506347657, 0.018059263229370116, 0.018198528289794923, 0.01818828773498535, 0.017778688430786133, 0.017869823455810546, 0.017741823196411134, 0.018295808792114256, 0.017896448135375977, 0.017772544860839845, 0.017969152450561524, 0.01826304054260254, 0.018215936660766603, 0.018177024841308592, 0.01818623924255371, 0.01823744010925293, 0.018198528289794923, 0.01825382423400879, 0.01824051284790039, 0.01818726348876953, 0.01818726348876953, 0.017923072814941408, 0.01808793640136719, 0.018089984893798827, 0.0182108154296875, 0.018126848220825196, 0.018173952102661133, 0.018095104217529297, 0.01818009567260742, 0.018108415603637695, 0.03722854232788086, 0.018264064788818358, 0.018149375915527344, 0.018223104476928712, 0.01821183967590332, 0.018185216903686522, 0.01822412872314453, 0.01826918411254883, 0.018124799728393554, 0.018149375915527344, 0.01822208023071289, 0.01824563217163086, 0.018125823974609375, 0.018198528289794923, 0.018142208099365235, 0.018223104476928712, 0.018132991790771484, 0.018152448654174806, 0.018113536834716795, 0.018110464096069336, 0.018173952102661133, 0.018098175048828127, 0.01864396858215332, 0.01861631965637207, 0.01824870491027832, 0.01820876884460449, 0.018185216903686522, 0.0181790714263916, 0.018264064788818358, 0.018883583068847656, 0.018126848220825196, 0.018206720352172853, 0.01845964813232422, 0.01884876823425293, 0.01919385528564453, 0.018539520263671876, 0.018249727249145507, 0.01821183967590332, 0.018140159606933593, 0.018144256591796876, 0.018119680404663087, 0.018159616470336915, 0.018124799728393554, 0.018163711547851562, 0.018130943298339842, 0.01814630317687988, 0.01801625633239746, 0.018094079971313477, 0.018101247787475586, 0.018257919311523436, 0.018185216903686522, 0.018156543731689453, 0.018214912414550782, 0.0180633602142334, 0.018174976348876954, 0.018197504043579102, 0.01824870491027832, 0.01803264045715332, 0.018129919052124025, 0.018128896713256838, 0.01823744010925293, 0.018160640716552736, 0.018131967544555663, 0.03765350341796875, 0.01818422317504883, 0.018552799224853516, 0.018185216903686522, 0.018251775741577148, 0.018109439849853515, 0.018101247787475586, 0.018156543731689453, 0.018367488861083983, 0.018241535186767577, 0.018173952102661133, 0.01819545555114746, 0.01821286392211914, 0.0182476806640625, 0.018231296539306642, 0.018241535186767577, 0.01845043182373047, 0.018157567977905274, 0.018182144165039063, 0.01813811111450195, 0.018151424407958985, 0.018158592224121094, 0.018131967544555663, 0.018102272033691406, 0.018387968063354493, 0.018963455200195312, 0.019009536743164062, 0.01837772750854492, 0.018300928115844727, 0.01823027229309082, 0.018131967544555663, 0.018241535186767577, 0.018181119918823242, 0.018173952102661133, 0.01818009567260742, 0.018192384719848635, 0.018124799728393554, 0.018095104217529297, 0.018111488342285157, 0.01816985511779785, 0.018111488342285157, 0.018273279190063475, 0.018086912155151368, 0.018127872467041017, 0.01804800033569336, 0.018068479537963866, 0.01807257652282715, 0.018143232345581056, 0.01836953544616699, 0.018771968841552734, 0.018332672119140626, 0.018215936660766603, 0.018234367370605468, 0.0180633602142334, 0.018104320526123048, 0.018113536834716795, 0.01809715270996094, 0.018191360473632814, 0.018207744598388673, 0.018197504043579102, 0.018155519485473632, 0.01821183967590332, 0.018310144424438478, 0.03782963180541992, 0.018319360733032225, 0.0182609920501709, 0.01858252716064453, 0.01844633674621582, 0.01822105598449707, 
0.01816268730163574, 0.018199552536010744, 0.01820569610595703, 0.018272256851196288, 0.018266111373901366, 0.018281471252441405, 0.018259967803955078, 0.018241535186767577, 0.018298879623413086, 0.018288639068603514, 0.018290687561035156, 0.018226175308227538, 0.01784115219116211, 0.017913856506347657, 0.018182144165039063, 0.018107391357421874, 0.01822003173828125, 0.018142208099365235, 0.018151424407958985, 0.018266111373901366, 0.01818726348876953, 0.018129919052124025, 0.01813811111450195, 0.01824051284790039, 0.01821696090698242, 0.018259967803955078, 0.02001919937133789, 0.01999564743041992, 0.018327552795410155, 0.01823232078552246, 0.018370559692382812, 0.018197504043579102, 0.01824051284790039, 0.018125823974609375, 0.018531328201293946, 0.01827737617492676, 0.01825382423400879, 0.01817190361022949, 0.018314239501953124, 0.01825484848022461, 0.018183168411254884, 0.01818623924255371, 0.018306047439575195, 0.018114559173583983, 0.01822412872314453, 0.01819443130493164, 0.018314239501953124, 0.018771968841552734, 0.01904025650024414, 0.01839411163330078, 0.018292736053466797, 0.018183168411254884, 0.017855487823486327, 0.01780838394165039, 0.018231296539306642, 0.018173952102661133, 0.018111488342285157, 0.037116928100585936, 0.018333696365356447, 0.01808896064758301, 0.01819545555114746, 0.018264064788818358, 0.01824665641784668, 0.01822003173828125, 0.018165760040283203, 0.018125823974609375, 0.018256895065307616, 0.018242559432983398, 0.01821900749206543, 0.01814630317687988, 0.018191360473632814, 0.018185216903686522, 0.01820876884460449, 0.018250751495361327, 0.01823539161682129, 0.018181119918823242, 0.01821183967590332, 0.018050048828125, 0.018000896453857423, 0.01803264045715332, 0.018182144165039063, 0.01789952087402344, 0.017763328552246094, 0.018566144943237304, 0.018084863662719726, 0.018151424407958985, 0.018081792831420897, 0.018124799728393554, 0.018530303955078126, 0.01840332794189453, 0.018327552795410155, 0.018323455810546875, 0.018280448913574218, 0.018316287994384766, 0.018251775741577148, 0.018215936660766603, 0.01822003173828125, 0.018335744857788085, 0.018206720352172853, 0.018151424407958985, 0.01820364761352539, 0.018156543731689453, 0.018185216903686522, 0.01818726348876953, 0.018096128463745118, 0.018185216903686522, 0.018701311111450195, 0.018449407577514648, 0.018172927856445312, 0.018129919052124025, 0.01816783905029297, 0.018184160232543944, 0.01818009567260742, 0.01823027229309082, 0.018265087127685545, 0.01904640007019043, 0.018732032775878905, 0.01906073570251465, 0.01848320007324219, 0.018150400161743165]",tokens/s,54.375449530462205,,,2,64,1,,, -4bit-awq-exllama-v2-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,tiiuae/falcon-180B,tiiuae/falcon-180B,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", 
line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 304, in hf_raise_for_status - response.raise_for_status() - File ""/usr/local/lib/python3.10/dist-packages/requests/models.py"", line 1024, in raise_for_status - raise HTTPError(http_error_msg, response=self) -requests.exceptions.HTTPError: 403 Client Error: Forbidden for url: https://huggingface.co/tiiuae/falcon-180B/resolve/main/config.json - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1722, in _get_metadata_or_catch_error - metadata = get_hf_file_metadata(url=url, proxies=proxies, timeout=etag_timeout, headers=headers) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1645, in get_hf_file_metadata - r = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 372, in _request_wrapper - response = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 396, in _request_wrapper - hf_raise_for_status(response) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 367, in hf_raise_for_status - raise HfHubHTTPError(message, response=response) from e -huggingface_hub.utils._errors.HfHubHTTPError: (Request ID: Root=1-66949939-3096e9a55b8312ce20cef7c4;4e145226-4519-4501-b294-75923264eb8d) - -403 Forbidden: Please enable access to public gated repositories in your fine-grained token settings to view this repository.. -Cannot access content at: https://huggingface.co/tiiuae/falcon-180B/resolve/main/config.json. -If you are trying to create or update content,make sure you have a token with the `write` role. - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1221, in hf_hub_download - return _hf_hub_download_to_cache_dir( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1325, in _hf_hub_download_to_cache_dir - _raise_on_head_call_error(head_call_error, force_download, local_files_only) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1826, in _raise_on_head_call_error - raise LocalEntryNotFoundError( -huggingface_hub.utils._errors.LocalEntryNotFoundError: An error happened while trying to locate the file on the Hub and we cannot find the requested files in the local cache. Please check your connection and try again or make sure your Internet connection is on. 
- -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 445, in cached_file - raise EnvironmentError( -OSError: We couldn't connect to 'https://huggingface.co' to load this file, couldn't find it in the cached files and it looks like tiiuae/falcon-180B is not the path to a directory containing a file named config.json. -Checkout your internet connection or see how to run the library in offline mode at 'https://huggingface.co/docs/transformers/installation#offline-mode'. 
- -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,2,64,1,,, -4bit-awq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,Deci/DeciLM-7B,Deci/DeciLM-7B,cuda,0,42,,,True,,,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 613, in resolve_trust_remote_code - answer = input( -EOFError: EOF when reading a line - -During handling of the above exception, another exception occurred: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run - report = scenario.run(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run - self.run_model_loading_tracking(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking - backend.load() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load - self.load_transformers_model() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 149, in load_transformers_model - self.create_no_weights_model() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 269, in create_no_weights_model - meta_model = self.automodel_loader.from_config(self.pretrained_config) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 419, in from_config - trust_remote_code = resolve_trust_remote_code( - File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 626, in resolve_trust_remote_code - raise ValueError( -ValueError: The repository for Deci/DeciLM-7B contains custom code which must be executed to correctly load the model. You can inspect the repository content at https://hf.co/Deci/DeciLM-7B. -Please pass the argument `trust_remote_code=True` to allow custom code to be run. 
- -",deci,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,2,64,1,,, -4bit-awq-exllama-v2-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,facebook/opt-2.7b,facebook/opt-2.7b,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - self.load_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3704, in from_pretrained - config = cls._autoset_attn_implementation( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1490, in _autoset_attn_implementation - config = cls._check_and_enable_sdpa( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1656, in _check_and_enable_sdpa - raise ValueError( -ValueError: OPTForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. 
Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,2,64,1,,, -4bit-awq-exllama-v2-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,huggyllama/llama-7b,huggyllama/llama-7b,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - self.load_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3907, in from_pretrained - hf_quantizer.postprocess_model(model) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/base.py"", line 195, in postprocess_model - return self._process_model_after_weight_loading(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/quantizer_awq.py"", line 107, in _process_model_after_weight_loading - model = post_init_awq_exllama_modules(model, self.quantization_config.exllama_config) - File ""/usr/local/lib/python3.10/dist-packages/transformers/integrations/awq.py"", line 466, in post_init_awq_exllama_modules - model = exllamav2_post_init( - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllamav2.py"", line 198, in exllamav2_post_init - submodule.post_init(scratch_space=model.scratch_spaces[device]) - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllamav2.py"", line 81, in post_init - self.q_handle = exlv2_ext.make_q_matrix( -NameError: name 'exlv2_ext' is not defined - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,2,64,1,,, 
-4bit-awq-exllama-v2-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,facebook/opt-13b,facebook/opt-13b,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - self.load_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3704, in from_pretrained - config = cls._autoset_attn_implementation( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1490, in _autoset_attn_implementation - config = cls._check_and_enable_sdpa( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1656, in _check_and_enable_sdpa - raise ValueError( -ValueError: OPTForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. 
Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,2,64,1,,, -4bit-awq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,EleutherAI/pythia-70m,EleutherAI/pythia-70m,cuda,0,42,,,True,,,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,gpt_neox,MB,881.999872,793.247744,0.0,163.577856,154.631168,s,1,7.28373876953125,7.28373876953125,0.0,7.28373876953125,7.28373876953125,7.28373876953125,7.28373876953125,[7.28373876953125],,kWh,5.464717209031278e-06,2.9616524999527983e-06,8.250562156031371e-06,1.6676931865015448e-05,,MB,1478.12352,849.870848,0.0,201.326592,187.147776,s,31,0.19229372930526736,0.006203023525976365,0.00017604218529213432,0.006219744205474853,0.0063179202079772945,0.006419680118560791,0.006719564771652222,"[0.006811935901641846, 0.00631763219833374, 0.006032608032226563, 0.006262720108032227, 0.006204959869384766, 0.005906400203704834, 0.006078112125396729, 0.0063179202079772945, 0.006219744205474853, 0.0062347202301025395, 0.006139488220214844, 0.006180543899536133, 0.006035103797912598, 0.0060404157638549805, 0.005904096126556397, 0.005993631839752198, 0.005978879928588868, 0.006282944202423096, 0.006224480152130127, 0.006213568210601807, 0.006241504192352295, 0.006504032135009765, 0.006309855937957764, 0.006037951946258545, 0.006163839817047119, 0.006313663959503174, 0.006200160026550293, 0.006286911964416504, 0.006281599998474121, 0.006238976001739502, 0.006335328102111817]",tokens/s,41270.196530442016,kWh,7.251388492343344e-08,3.973401547000189e-08,1.3047858470474252e-07,2.4272648509817785e-07,tokens/kWh,1054685070.3022922,MB,1504.763904,849.870848,0.0,201.326592,187.150336,s,31,9.85742626953125,0.3179814925655242,0.004613224399903838,0.31892181396484376,0.3227403259277344,0.32337921142578124,0.32524371948242187,"[0.32589801025390625, 0.31470050048828124, 0.3230413818359375, 0.31892181396484376, 0.3198345947265625, 0.31346591186523437, 0.31803076171875, 0.3199458618164063, 0.3212616577148438, 0.32177685546875, 0.3179394836425781, 0.3167702941894531, 0.3173262634277344, 0.3073642883300781, 0.311357666015625, 0.3076251220703125, 0.3155120544433594, 0.32083917236328124, 0.3179750366210938, 0.31946258544921874, 0.3212673950195313, 0.320938232421875, 0.3209807739257812, 0.30938262939453126, 0.31652609252929687, 0.3206273193359375, 0.31149087524414065, 0.323717041015625, 0.3218089599609375, 0.3188973083496094, 0.3227403259277344]",tokens/s,198.1247383038119,kWh,3.602785801124831e-06,1.9733679597738814e-06,5.701159077233574e-06,1.1277312838132287e-05,tokens/kWh,5586437.203992105,,s,1953,9.844856830120094,0.005040889313937576,0.0001280728287460182,0.0050657281875610355,0.005153791904449463,0.00519966697692871,0.005470085048675537,"[0.005288959980010987, 0.00531763219833374, 0.005863423824310303, 0.005473279953002929, 0.005248000144958496, 0.005132287979125977, 0.005091328144073487, 0.005100543975830078, 0.0051404800415039064, 0.005206016063690186, 
0.005329919815063477, 0.005408768177032471, 0.00531763219833374, 0.005315584182739258, 0.005429247856140137, 0.005575679779052735, 0.005501952171325684, 0.005450751781463623, 0.005125120162963868, 0.005107711791992187, 0.0051363840103149415, 0.0051066880226135255, 0.005100543975830078, 0.005079040050506592, 0.005047296047210693, 0.0050421757698059086, 0.005028863906860351, 0.005081088066101074, 0.005132287979125977, 0.005082111835479736, 0.005089280128479004, 0.005161983966827393, 0.005044223785400391, 0.0050135040283203125, 0.004978687763214112, 0.0051435518264770505, 0.005089280128479004, 0.00506879997253418, 0.005092351913452148, 0.005130239963531494, 0.005096447944641113, 0.0050728960037231445, 0.005115903854370117, 0.005096447944641113, 0.005083136081695557, 0.0050800638198852536, 0.005119999885559082, 0.005082111835479736, 0.005082111835479736, 0.005078015804290771, 0.005117951869964599, 0.005093376159667969, 0.00506982421875, 0.005100543975830078, 0.0051363840103149415, 0.005105663776397705, 0.005074944019317627, 0.005107711791992187, 0.005082111835479736, 0.005163008213043213, 0.005100543975830078, 0.005242879867553711, 0.00511897611618042, 0.005073919773101807, 0.005083136081695557, 0.005094399929046631, 0.0050769920349121095, 0.005070847988128662, 0.005341184139251709, 0.005194752216339111, 0.005119999885559082, 0.005192704200744629, 0.0054876160621643065, 0.005388288021087646, 0.005121024131774903, 0.005156864166259765, 0.005114880084991455, 0.0051036162376403805, 0.0051036162376403805, 0.005149695873260498, 0.005100543975830078, 0.005092351913452148, 0.005063680171966553, 0.005071872234344482, 0.005117951869964599, 0.005079040050506592, 0.004973567962646484, 0.0048895998001098635, 0.004895743846893311, 0.0048865280151367185, 0.004888576030731201, 0.004944896221160889, 0.004901887893676758, 0.004908031940460205, 0.004900864124298096, 0.004927487850189209, 0.0049090561866760255, 0.004880383968353271, 0.004869120121002197, 0.004924416065216064, 0.004900864124298096, 0.004894720077514648, 0.004893695831298828, 0.004901887893676758, 0.004922368049621582, 0.0048865280151367185, 0.004897791862487793, 0.004891647815704346, 0.004944896221160889, 0.004903935909271241, 0.004894720077514648, 0.0050063362121582035, 0.004892672061920166, 0.0049203200340270994, 0.004936704158782959, 0.005005343914031983, 0.004858848094940186, 0.0048496642112731934, 0.004828159809112549, 0.004839424133300781, 0.0048261117935180665, 0.004828159809112549, 0.004863999843597412, 0.004840447902679444, 0.004887551784515381, 0.004892672061920166, 0.0048772478103637695, 0.004904960155487061, 0.004947968006134033, 0.005082111835479736, 0.005192704200744629, 0.005108736038208008, 0.005062655925750732, 0.005044223785400391, 0.005041152000427246, 0.004944896221160889, 0.005449728012084961, 0.0055101442337036136, 0.005550079822540284, 0.005544960021972656, 0.005386240005493164, 0.005604351997375488, 0.005371903896331787, 0.005082111835479736, 0.005180416107177735, 0.00501043176651001, 0.0050094079971313476, 0.004981760025024414, 0.0050728960037231445, 0.005090303897857666, 0.004996096134185791, 0.005114880084991455, 0.005189631938934326, 0.0051036162376403805, 0.005097472190856934, 0.005094399929046631, 0.005086207866668701, 0.005172224044799805, 0.005093376159667969, 0.005134335994720459, 0.005102591991424561, 0.0050800638198852536, 0.005097472190856934, 0.0051066880226135255, 0.005074944019317627, 0.00511897611618042, 0.0051404800415039064, 0.005058559894561767, 0.005064703941345215, 0.005082143783569336, 
0.005104671955108642, 0.00508512020111084, 0.0050800638198852536, 0.005092351913452148, 0.005083136081695557, 0.005086207866668701, 0.005062655925750732, 0.005245952129364013, 0.0051138558387756345, 0.0050841598510742185, 0.005044288158416748, 0.005088191986083984, 0.005023744106292725, 0.004986879825592041, 0.00499507188796997, 0.005111807823181152, 0.0050800638198852536, 0.005075967788696289, 0.005127168178558349, 0.005083136081695557, 0.005091328144073487, 0.005099520206451416, 0.005082111835479736, 0.005119999885559082, 0.005038080215454102, 0.005037055969238281, 0.005104640007019043, 0.0051435518264770505, 0.005107711791992187, 0.004984831809997559, 0.0050063362121582035, 0.005024767875671386, 0.005001215934753418, 0.0049827837944030765, 0.005001215934753418, 0.005017600059509277, 0.004975615978240967, 0.005017600059509277, 0.0049909758567810054, 0.005088255882263183, 0.005075967788696289, 0.005085184097290039, 0.005091360092163086, 0.005063648223876953, 0.00506982421875, 0.0050728960037231445, 0.005121024131774903, 0.0051138558387756345, 0.005088255882263183, 0.005053440093994141, 0.00510975980758667, 0.005071872234344482, 0.005058559894561767, 0.004985856056213379, 0.0049797120094299315, 0.004981760025024414, 0.004968448162078858, 0.005008384227752686, 0.00499507188796997, 0.004893695831298828, 0.004952064037322998, 0.0050657281875610355, 0.005096479892730713, 0.005051360130310058, 0.00506060791015625, 0.005161983966827393, 0.005087232112884522, 0.005056511878967285, 0.005085184097290039, 0.005123072147369385, 0.005088255882263183, 0.005078080177307129, 0.005066688060760498, 0.00510975980758667, 0.005064703941345215, 0.005044223785400391, 0.005099520206451416, 0.005078015804290771, 0.00506060791015625, 0.00505241584777832, 0.005081088066101074, 0.0050728960037231445, 0.005073919773101807, 0.005164031982421875, 0.005079040050506592, 0.005131264209747314, 0.005078015804290771, 0.005079040050506592, 0.0050800638198852536, 0.005108736038208008, 0.005085184097290039, 0.005075967788696289, 0.0050769920349121095, 0.005093376159667969, 0.005057536125183106, 0.0050728960037231445, 0.005128191947937012, 0.0050769920349121095, 0.005085184097290039, 0.0050728960037231445, 0.0051138558387756345, 0.005054463863372802, 0.004978687763214112, 0.004989952087402344, 0.0050135040283203125, 0.004965375900268554, 0.0049920320510864254, 0.005105663776397705, 0.005056479930877686, 0.005024767875671386, 0.005005311965942383, 0.005035007953643799, 0.005004288196563721, 0.005001215934753418, 0.005035007953643799, 0.005128191947937012, 0.005085184097290039, 0.005115903854370117, 0.005088255882263183, 0.005121024131774903, 0.005089280128479004, 0.005081088066101074, 0.00510975980758667, 0.0051036162376403805, 0.005079040050506592, 0.005085184097290039, 0.00510975980758667, 0.005067776203155518, 0.005073919773101807, 0.005057536125183106, 0.005039103984832764, 0.004984831809997559, 0.005000192165374756, 0.004988927841186524, 0.0050360321998596195, 0.00515993595123291, 0.005164031982421875, 0.005137407779693603, 0.0050769920349121095, 0.005089280128479004, 0.005061632156372071, 0.005097472190856934, 0.0050657281875610355, 0.005057536125183106, 0.005062655925750732, 0.004876287937164306, 0.004876287937164306, 0.004908031940460205, 0.004996096134185791, 0.004878335952758789, 0.004833280086517334, 0.004836351871490479, 0.004836351871490479, 0.004901887893676758, 0.004809728145599365, 0.004960256099700928, 0.0048752641677856446, 0.0049090561866760255, 0.00496230411529541, 0.004943871974945068, 0.004940800189971924, 
0.004891647815704346, 0.004851712226867676, 0.00480460786819458, 0.004863999843597412, 0.004900864124298096, 0.004893695831298828, 0.0048855037689208985, 0.004888576030731201, 0.004908031940460205, 0.004984831809997559, 0.004884479999542236, 0.00487116813659668, 0.004884479999542236, 0.004923391819000244, 0.004902912139892578, 0.0048752641677856446, 0.004918272018432617, 0.004921343803405762, 0.004880383968353271, 0.004880383968353271, 0.004883456230163574, 0.0050462718009948735, 0.004907008171081543, 0.0048895998001098635, 0.004874239921569825, 0.004881408214569092, 0.004894720077514648, 0.0049090561866760255, 0.005846015930175781, 0.005434432029724121, 0.005887936115264893, 0.005517312049865723, 0.0051363840103149415, 0.005054463863372802, 0.005093376159667969, 0.005141503810882568, 0.005115903854370117, 0.00499507188796997, 0.0048855037689208985, 0.0048752641677856446, 0.004824063777923584, 0.004828159809112549, 0.0048261117935180665, 0.004849728107452393, 0.005176256179809571, 0.005102591991424561, 0.005026815891265869, 0.004874239921569825, 0.004892672061920166, 0.004880383968353271, 0.004899839878082276, 0.00489577579498291, 0.004873184204101563, 0.004872191905975342, 0.00501043176651001, 0.00487116813659668, 0.0048855037689208985, 0.0048793601989746095, 0.005292031764984131, 0.0051773438453674315, 0.005085184097290039, 0.005082111835479736, 0.005122047901153564, 0.0050503678321838375, 0.00515174388885498, 0.0050421757698059086, 0.005035007953643799, 0.005087232112884522, 0.005099520206451416, 0.005121024131774903, 0.005328927993774414, 0.0051281599998474125, 0.005175295829772949, 0.00511078405380249, 0.00506879997253418, 0.005087232112884522, 0.005133376121520996, 0.0050974078178405765, 0.005132287979125977, 0.005078015804290771, 0.005088255882263183, 0.00506060791015625, 0.005111807823181152, 0.005101568222045898, 0.005174272060394287, 0.005071872234344482, 0.005054463863372802, 0.005107711791992187, 0.0050657281875610355, 0.005100543975830078, 0.00515174388885498, 0.005088255882263183, 0.005083136081695557, 0.005108895778656006, 0.0051003842353820805, 0.005067776203155518, 0.0050800638198852536, 0.005088255882263183, 0.005123072147369385, 0.0050800638198852536, 0.005029888153076172, 0.00501043176651001, 0.005018623828887939, 0.005031936168670655, 0.004976640224456787, 0.00481382417678833, 0.004830207824707031, 0.00480460786819458, 0.004829184055328369, 0.004863999843597412, 0.00506060791015625, 0.005081088066101074, 0.005067776203155518, 0.005054463863372802, 0.0050657281875610355, 0.005096447944641113, 0.0050728960037231445, 0.00517632007598877, 0.005054463863372802, 0.005253119945526123, 0.005075967788696289, 0.00506879997253418, 0.005183487892150879, 0.005104640007019043, 0.005090303897857666, 0.005100543975830078, 0.005135359764099121, 0.0050954241752624516, 0.005179391860961914, 0.005181439876556396, 0.005008384227752686, 0.0049725441932678225, 0.004981760025024414, 0.005028863906860351, 0.00505241584777832, 0.0050657281875610355, 0.005067776203155518, 0.005102591991424561, 0.005083136081695557, 0.00506879997253418, 0.005210112094879151, 0.005190656185150146, 0.005134335994720459, 0.0050462718009948735, 0.005051392078399658, 0.005007359981536865, 0.004992000102996826, 0.0049827837944030765, 0.0051446080207824705, 0.004986847877502441, 0.004963327884674072, 0.004989952087402344, 0.005107711791992187, 0.005127168178558349, 0.005067776203155518, 0.0050063362121582035, 0.0050022401809692385, 0.004976640224456787, 0.004981760025024414, 0.004977663993835449, 0.005123072147369385, 
0.005087232112884522, 0.0051138558387756345, 0.005055488109588623, 0.00515993595123291, 0.0051159682273864745, 0.0050646400451660155, 0.005098495960235596, 0.005021696090698242, 0.005067776203155518, 0.004977663993835449, 0.0050124797821044925, 0.0050728960037231445, 0.005078015804290771, 0.005059584140777588, 0.005076000213623047, 0.005079008102416992, 0.005059584140777588, 0.005071872234344482, 0.005063680171966553, 0.005107711791992187, 0.005079040050506592, 0.005056511878967285, 0.0050954241752624516, 0.005341184139251709, 0.0050728960037231445, 0.005167103767395019, 0.005182464122772217, 0.005091328144073487, 0.005093376159667969, 0.005001215934753418, 0.005066751956939697, 0.005093376159667969, 0.005129216194152832, 0.005116928100585938, 0.0050657281875610355, 0.005070847988128662, 0.00506060791015625, 0.005150720119476319, 0.004981760025024414, 0.005100543975830078, 0.0050032639503479, 0.005037055969238281, 0.004994048118591309, 0.004976640224456787, 0.005081088066101074, 0.005256192207336426, 0.0050728960037231445, 0.00515993595123291, 0.005138432025909424, 0.005079040050506592, 0.0050728960037231445, 0.005093376159667969, 0.005112832069396973, 0.005180416107177735, 0.005075967788696289, 0.0050954241752624516, 0.00510975980758667, 0.005054463863372802, 0.0051333122253417966, 0.005187583923339844, 0.005075967788696289, 0.005062655925750732, 0.005056511878967285, 0.005090303897857666, 0.005054463863372802, 0.005274623870849609, 0.005129216194152832, 0.005073919773101807, 0.005066751956939697, 0.005063680171966553, 0.00511078405380249, 0.005063680171966553, 0.0050800638198852536, 0.005044223785400391, 0.005091328144073487, 0.00537497615814209, 0.0050954241752624516, 0.0051363840103149415, 0.0050728960037231445, 0.005063680171966553, 0.005070847988128662, 0.005061632156372071, 0.005094399929046631, 0.005201920032501221, 0.005081088066101074, 0.005087232112884522, 0.0051138558387756345, 0.00515993595123291, 0.0053606400489807126, 0.005380095958709717, 0.005121024131774903, 0.005066751956939697, 0.005057536125183106, 0.00506060791015625, 0.00511078405380249, 0.005026815891265869, 0.0050094079971313476, 0.005025792121887207, 0.0050094079971313476, 0.005041152000427246, 0.00510265588760376, 0.005044159889221192, 0.005041152000427246, 0.00530944013595581, 0.005124095916748047, 0.004969471931457519, 0.004976640224456787, 0.005021696090698242, 0.00499507188796997, 0.0050728960037231445, 0.0050657281875610355, 0.005088255882263183, 0.005061632156372071, 0.005049344062805176, 0.005053440093994141, 0.005093376159667969, 0.00506982421875, 0.0051404800415039064, 0.00506982421875, 0.0050954241752624516, 0.005053440093994141, 0.005082111835479736, 0.005081088066101074, 0.005207071781158448, 0.005090271949768067, 0.005071872234344482, 0.005073919773101807, 0.004996096134185791, 0.005086207866668701, 0.005064703941345215, 0.005098495960235596, 0.005058559894561767, 0.005063680171966553, 0.005089344024658203, 0.005092288017272949, 0.0055848960876464845, 0.005126143932342529, 0.005128191947937012, 0.0049500160217285155, 0.004930560111999512, 0.0050432000160217285, 0.005105663776397705, 0.005075967788696289, 0.0050503678321838375, 0.005048319816589355, 0.005059584140777588, 0.005090303897857666, 0.005047296047210693, 0.00505241584777832, 0.0051968002319335935, 0.0050769920349121095, 0.005091328144073487, 0.005067776203155518, 0.005004288196563721, 0.005033984184265137, 0.005544960021972656, 0.005122047901153564, 0.005104640007019043, 0.005067776203155518, 0.005054463863372802, 0.005128191947937012, 
0.005467135906219483, 0.0050728960037231445, 0.005094399929046631, 0.0048865280151367185, 0.004874239921569825, 0.0048752641677856446, 0.004883456230163574, 0.004914175987243652, 0.004881408214569092, 0.004872191905975342, 0.004878335952758789, 0.0049387521743774416, 0.005025792121887207, 0.004874239921569825, 0.004833375930786133, 0.004871071815490723, 0.004925439834594727, 0.004887551784515381, 0.004894720077514648, 0.00487014389038086, 0.005154816150665284, 0.005067776203155518, 0.005047296047210693, 0.005057536125183106, 0.005125120162963868, 0.005053440093994141, 0.00506879997253418, 0.005064703941345215, 0.005104671955108642, 0.005077983856201172, 0.00506879997253418, 0.005105663776397705, 0.005055488109588623, 0.005144576072692871, 0.005089280128479004, 0.005262335777282715, 0.005128191947937012, 0.0050800638198852536, 0.005021696090698242, 0.004996096134185791, 0.004988927841186524, 0.004987904071807861, 0.005063680171966553, 0.005056511878967285, 0.00506982421875, 0.005071872234344482, 0.005079040050506592, 0.0051066880226135255, 0.005088255882263183, 0.005061632156372071, 0.004974592208862305, 0.00501043176651001, 0.004980735778808594, 0.004907008171081543, 0.004903935909271241, 0.004925439834594727, 0.00501043176651001, 0.0050728960037231445, 0.00516096019744873, 0.005107711791992187, 0.005047296047210693, 0.005097472190856934, 0.0050165758132934574, 0.005059584140777588, 0.005089280128479004, 0.0050841598510742185, 0.005082111835479736, 0.0051066880226135255, 0.005061632156372071, 0.005107711791992187, 0.005127168178558349, 0.0050800638198852536, 0.005048319816589355, 0.00506982421875, 0.005115903854370117, 0.005083136081695557, 0.0050800638198852536, 0.005081088066101074, 0.005132287979125977, 0.005000192165374756, 0.0049827837944030765, 0.004976640224456787, 0.004993023872375488, 0.004835328102111816, 0.0049725441932678225, 0.0049459199905395506, 0.005033984184265137, 0.004895743846893311, 0.004944896221160889, 0.004876287937164306, 0.0049203200340270994, 0.004882431983947754, 0.004863999843597412, 0.00486297607421875, 0.0048558077812194825, 0.0049387521743774416, 0.004923391819000244, 0.00506879997253418, 0.0050769920349121095, 0.0051066880226135255, 0.0050544958114624025, 0.005047264099121093, 0.005097472190856934, 0.00487014389038086, 0.004907008171081543, 0.004859903812408447, 0.004883456230163574, 0.004881408214569092, 0.0049192957878112795, 0.004933631896972656, 0.004824063777923584, 0.004799488067626953, 0.004800511837005615, 0.00491315221786499, 0.004901887893676758, 0.004880383968353271, 0.004882431983947754, 0.004902912139892578, 0.004915200233459473, 0.004869120121002197, 0.0048855037689208985, 0.004882431983947754, 0.004906015872955322, 0.004995039939880371, 0.005488639831542969, 0.005156864166259765, 0.0051404800415039064, 0.005102591991424561, 0.0051773438453674315, 0.00516812801361084, 0.005089280128479004, 0.005090303897857666, 0.005083136081695557, 0.0051773438453674315, 0.005098495960235596, 0.005070847988128662, 0.005140511989593506, 0.005107679843902588, 0.005073919773101807, 0.005102591991424561, 0.005192704200744629, 0.005097472190856934, 0.005092351913452148, 0.0051066880226135255, 0.005111807823181152, 0.0050421757698059086, 0.00506982421875, 0.005101568222045898, 0.005069888114929199, 0.0050574722290039065, 0.0050462718009948735, 0.005090303897857666, 0.005056511878967285, 0.005063680171966553, 0.005063680171966553, 0.0051066880226135255, 0.005081088066101074, 0.005062655925750732, 0.005078015804290771, 0.00506982421875, 0.0050841598510742185, 
0.005004288196563721, 0.005041152000427246, 0.005088255882263183, 0.005098495960235596, 0.005064703941345215, 0.004872223854064941, 0.0048537278175354, 0.0048455681800842285, 0.004811776161193848, 0.004802559852600098, 0.004808703899383545, 0.00480460786819458, 0.004892672061920166, 0.004859903812408447, 0.004915200233459473, 0.004867072105407715, 0.0048895998001098635, 0.0048895998001098635, 0.004872191905975342, 0.004881408214569092, 0.004888576030731201, 0.00490499210357666, 0.004955103874206543, 0.004873248100280761, 0.004882400035858154, 0.004929535865783692, 0.004881408214569092, 0.004967423915863037, 0.004894720077514648, 0.004876287937164306, 0.004901887893676758, 0.004893695831298828, 0.0048752641677856446, 0.00487014389038086, 0.004925439834594727, 0.00488044786453247, 0.004869056224822998, 0.004880383968353271, 0.004877312183380127, 0.0049090561866760255, 0.004857855796813965, 0.00481279993057251, 0.0047964158058166504, 0.004827136039733886, 0.0048148479461669925, 0.004809728145599365, 0.004831264019012451, 0.004936704158782959, 0.004890592098236084, 0.004872191905975342, 0.00480460786819458, 0.004808703899383545, 0.004881408214569092, 0.004901887893676758, 0.004876287937164306, 0.004880383968353271, 0.004884479999542236, 0.004903935909271241, 0.004884479999542236, 0.004868095874786377, 0.004868095874786377, 0.0048752641677856446, 0.004898816108703613, 0.004895743846893311, 0.004863999843597412, 0.004860928058624267, 0.004890624046325683, 0.004883456230163574, 0.004880383968353271, 0.00491315221786499, 0.0048865280151367185, 0.004880383968353271, 0.005107711791992187, 0.005026815891265869, 0.005055488109588623, 0.004992000102996826, 0.004988927841186524, 0.005066783905029297, 0.005088223934173584, 0.005057568073272705, 0.005138400077819824, 0.005078015804290771, 0.005070847988128662, 0.0050841598510742185, 0.004960256099700928, 0.004898816108703613, 0.004894720077514648, 0.004894720077514648, 0.004908031940460205, 0.004914175987243652, 0.004887551784515381, 0.004896768093109131, 0.004899839878082276, 0.004921343803405762, 0.0049192957878112795, 0.0049909758567810054, 0.0048895998001098635, 0.004924416065216064, 0.00499507188796997, 0.0048815679550170896, 0.0048157119750976566, 0.00491315221786499, 0.0049192957878112795, 0.004893695831298828, 0.004899839878082276, 0.004881408214569092, 0.004973567962646484, 0.004914175987243652, 0.004888576030731201, 0.004882431983947754, 0.004918272018432617, 0.004900864124298096, 0.004896768093109131, 0.004884479999542236, 0.004897791862487793, 0.004923456192016602, 0.004886464118957519, 0.0048865280151367185, 0.0048752641677856446, 0.004973567962646484, 0.004905983924865722, 0.004880383968353271, 0.004882431983947754, 0.00501964807510376, 0.004884479999542236, 0.00486195182800293, 0.004985856056213379, 0.004876287937164306, 0.0049203200340270994, 0.004858880043029785, 0.004874239921569825, 0.004866047859191895, 0.004873248100280761, 0.0048854718208312985, 0.004911104202270508, 0.00481279993057251, 0.004811776161193848, 0.004846591949462891, 0.004810751914978028, 0.004837376117706299, 0.004831232070922851, 0.00481382417678833, 0.004827136039733886, 0.0048220157623291016, 0.004865024089813232, 0.0048855037689208985, 0.004936704158782959, 0.004946944236755371, 0.004899839878082276, 0.004881408214569092, 0.004788224220275879, 0.004877312183380127, 0.004869120121002197, 0.004939775943756103, 0.004891647815704346, 0.004878335952758789, 0.004905983924865722, 0.00501043176651001, 0.004907008171081543, 0.0048865280151367185, 0.004838399887084961, 
0.0048261117935180665, 0.004904960155487061, 0.0048855037689208985, 0.0048865280151367185, 0.004876287937164306, 0.004903935909271241, 0.0048793601989746095, 0.004824063777923584, 0.004876287937164306, 0.0049428482055664065, 0.004929535865783692, 0.004867072105407715, 0.004853759765625, 0.0048793601989746095, 0.0049162240028381345, 0.004863999843597412, 0.00487116813659668, 0.004857855796813965, 0.004867072105407715, 0.004893695831298828, 0.004877312183380127, 0.004869120121002197, 0.004873216152191162, 0.004859903812408447, 0.004905983924865722, 0.004877312183380127, 0.004860928058624267, 0.004883488178253174, 0.004899807929992676, 0.004874239921569825, 0.0048865280151367185, 0.0048855037689208985, 0.004892672061920166, 0.0048865280151367185, 0.004908031940460205, 0.004868095874786377, 0.005048319816589355, 0.004877312183380127, 0.004878335952758789, 0.004830207824707031, 0.004848639965057373, 0.004882431983947754, 0.004880383968353271, 0.004898816108703613, 0.004893695831298828, 0.004935679912567138, 0.004882431983947754, 0.004866047859191895, 0.004900864124298096, 0.004869120121002197, 0.0048793601989746095, 0.004877312183380127, 0.00490399980545044, 0.004902847766876221, 0.004872191905975342, 0.004874239921569825, 0.00486195182800293, 0.004897791862487793, 0.004878335952758789, 0.004863999843597412, 0.004867072105407715, 0.004929535865783692, 0.005184512138366699, 0.005062655925750732, 0.004952064037322998, 0.004944896221160889, 0.004841472148895264, 0.004820991992950439, 0.005071872234344482, 0.004941823959350586, 0.00532480001449585, 0.005129216194152832, 0.005090303897857666, 0.005181439876556396, 0.005075967788696289, 0.005122047901153564, 0.005090303897857666, 0.00521727991104126, 0.005097472190856934, 0.005093376159667969, 0.005156864166259765, 0.005087232112884522, 0.005149695873260498, 0.005227519989013672, 0.005246975898742676, 0.005191679954528809, 0.005100607872009277, 0.005163968086242676, 0.0050657281875610355, 0.005051392078399658, 0.005048319816589355, 0.005161983966827393, 0.0051138558387756345, 0.005089280128479004, 0.005178463935852051, 0.005066720008850098, 0.005062655925750732, 0.005142528057098389, 0.0050503678321838375, 0.005135359764099121, 0.005070847988128662, 0.005081088066101074, 0.005117951869964599, 0.005086207866668701, 0.0050769920349121095, 0.005083136081695557, 0.005070847988128662, 0.0051333122253417966, 0.005088255882263183, 0.005126143932342529, 0.0051066880226135255, 0.00510975980758667, 0.005182464122772217, 0.0050841598510742185, 0.005111807823181152, 0.005053440093994141, 0.0050503678321838375, 0.005081088066101074, 0.005078015804290771, 0.0050462718009948735, 0.005073919773101807, 0.005116928100585938, 0.005058559894561767, 0.004981760025024414, 0.004985856056213379, 0.005051392078399658, 0.005091328144073487, 0.005059584140777588, 0.0050462718009948735, 0.00510975980758667, 0.005044223785400391, 0.005189631938934326, 0.005187615871429443, 0.005103583812713623, 0.0050954241752624516, 0.00506060791015625, 0.00510975980758667, 0.0050124797821044925, 0.005032959938049316, 0.00506879997253418, 0.00516096019744873, 0.005126143932342529, 0.005086207866668701, 0.005101568222045898, 0.005058559894561767, 0.005071872234344482, 0.0050657281875610355, 0.005115903854370117, 0.005058559894561767, 0.005064703941345215, 0.005075967788696289, 0.00511078405380249, 0.00506982421875, 0.005063680171966553, 0.005093376159667969, 0.005092351913452148, 0.005070847988128662, 0.005051392078399658, 0.00517632007598877, 0.0050954241752624516, 0.005079040050506592, 
0.005051392078399658, 0.005049344062805176, 0.0051138558387756345, 0.005059584140777588, 0.005186560153961181, 0.005063680171966553, 0.005102591991424561, 0.0049827837944030765, 0.0049725441932678225, 0.004968448162078858, 0.005000192165374756, 0.00496230411529541, 0.0049530878067016604, 0.004976672172546387, 0.005089248180389404, 0.0050769920349121095, 0.005066751956939697, 0.00510975980758667, 0.005079040050506592, 0.0050769920349121095, 0.005078015804290771, 0.004931583881378174, 0.004896768093109131, 0.004895743846893311, 0.004850687980651855, 0.004832255840301514, 0.004927487850189209, 0.004910079956054687, 0.004915200233459473, 0.004901887893676758, 0.004933631896972656, 0.004901887893676758, 0.004922368049621582, 0.004902912139892578, 0.004907008171081543, 0.005379072189331055, 0.005182464122772217, 0.005180416107177735, 0.005128191947937012, 0.005075967788696289, 0.0050800638198852536, 0.005169151782989502, 0.005089312076568603, 0.005079008102416992, 0.005075967788696289, 0.005099520206451416, 0.005112832069396973, 0.004980735778808594, 0.004986879825592041, 0.005001215934753418, 0.004980735778808594, 0.004974592208862305, 0.005145599842071533, 0.0051404800415039064, 0.0053534722328186036, 0.005088255882263183, 0.00511078405380249, 0.005073919773101807, 0.005065760135650635, 0.0050759358406066895, 0.005115903854370117, 0.005088255882263183, 0.005085184097290039, 0.005044223785400391, 0.005038080215454102, 0.005083136081695557, 0.005074944019317627, 0.0050841598510742185, 0.0050728960037231445, 0.0050657281875610355, 0.005071872234344482, 0.005066751956939697, 0.0050360321998596195, 0.0050094079971313476, 0.00506879997253418, 0.005062655925750732, 0.005104640007019043, 0.005082111835479736, 0.0050769920349121095, 0.005062687873840332, 0.00510972785949707, 0.005091328144073487, 0.0050800638198852536, 0.005130239963531494, 0.005108736038208008, 0.00506060791015625, 0.005085184097290039, 0.005098495960235596, 0.005081088066101074, 0.005082111835479736, 0.005067776203155518, 0.005114880084991455, 0.005086207866668701, 0.005082111835479736, 0.005171199798583984, 0.0050954241752624516, 0.0050800638198852536, 0.0050800638198852536, 0.005083136081695557, 0.00506060791015625, 0.0050462718009948735, 0.005055488109588623, 0.005100543975830078, 0.005025792121887207, 0.004967423915863037, 0.004967487812042236, 0.005014463901519776, 0.004967423915863037, 0.00497049617767334, 0.004957183837890625, 0.00506879997253418, 0.005053440093994141, 0.005001215934753418, 0.004977663993835449, 0.0050217280387878415, 0.004983776092529297, 0.005008384227752686, 0.005111807823181152, 0.0052408318519592285, 0.0051036162376403805, 0.0050841598510742185, 0.005102591991424561, 0.004973567962646484, 0.005101568222045898, 0.005099520206451416, 0.005099520206451416, 0.005095456123352051, 0.005120992183685303, 0.005055488109588623, 0.00511078405380249, 0.005101568222045898, 0.005098495960235596, 0.005038080215454102, 0.00495411205291748, 0.005056575775146484, 0.0049673600196838376, 0.004985856056213379, 0.0050206718444824215, 0.00497049617767334, 0.005070847988128662, 0.0050657281875610355, 0.005173247814178467, 0.005091392040252686, 0.005239744186401368, 0.005221375942230225, 0.0050954241752624516, 0.005127168178558349, 0.00506060791015625, 0.0051435518264770505, 0.0050954241752624516, 0.005062655925750732, 0.0051066880226135255, 0.0050800638198852536, 0.005137407779693603, 0.00506879997253418, 0.005083136081695557, 0.005128191947937012, 0.005070847988128662, 0.00511078405380249, 0.005100543975830078, 
0.005152768135070801, 0.005058559894561767, 0.005089280128479004, 0.005197824001312256, 0.005100543975830078, 0.005146624088287354, 0.005081088066101074, 0.005057536125183106, 0.005198847770690918, 0.00515993595123291, 0.0050954241752624516, 0.005032959938049316, 0.005096447944641113, 0.005130239963531494, 0.005127168178558349, 0.005104703903198242, 0.005075903892517089, 0.005097472190856934, 0.0050063362121582035, 0.00501964807510376, 0.0050032958984375, 0.005092319965362549, 0.005230591773986816, 0.005101568222045898, 0.005128191947937012, 0.005085184097290039, 0.005081088066101074, 0.005123072147369385, 0.005123104095458984, 0.005120992183685303, 0.005176352024078369, 0.0050759358406066895, 0.0050800638198852536, 0.005105663776397705, 0.005070847988128662, 0.0050780482292175294, 0.005117919921875, 0.004986879825592041, 0.004987904071807861, 0.004994048118591309, 0.005006400108337403, 0.005048255920410156, 0.005094399929046631, 0.0049909758567810054, 0.005014527797698975, 0.0050462718009948735, 0.0050769920349121095, 0.0050657281875610355, 0.005351424217224121, 0.0051138558387756345, 0.005089280128479004, 0.005096447944641113, 0.005097472190856934, 0.0051476478576660155, 0.005073919773101807, 0.005088255882263183, 0.0050841598510742185, 0.005125120162963868, 0.005045248031616211, 0.005017600059509277, 0.004983808040618896, 0.004987904071807861, 0.004981760025024414, 0.005023744106292725, 0.004984864234924316, 0.005083104133605957, 0.005091328144073487, 0.005186560153961181, 0.0053975038528442385, 0.005108736038208008, 0.00515993595123291, 0.0051036162376403805, 0.005114880084991455, 0.005107776165008545, 0.005091263771057129, 0.005085184097290039, 0.005091360092163086, 0.00510972785949707, 0.004978687763214112, 0.0049909758567810054, 0.004983808040618896, 0.005071872234344482, 0.004997119903564453, 0.0049920320510864254, 0.005299168109893799, 0.005228544235229492, 0.005126143932342529, 0.005186560153961181, 0.0051404800415039064, 0.005129216194152832, 0.005097472190856934, 0.005083136081695557, 0.005055488109588623, 0.005105663776397705, 0.005058559894561767, 0.005090303897857666, 0.005091328144073487, 0.005115903854370117, 0.005064703941345215, 0.005093376159667969, 0.005386240005493164, 0.00521727991104126, 0.005784575939178467, 0.00586243200302124, 0.005216224193572998, 0.005142528057098389, 0.005059584140777588, 0.005114880084991455, 0.005083136081695557, 0.005180416107177735, 0.005152768135070801, 0.005083136081695557, 0.005158912181854248, 0.005153791904449463, 0.005171199798583984, 0.0050657281875610355, 0.005039103984832764, 0.005047296047210693, 0.004974592208862305, 0.005047296047210693, 0.0050124797821044925, 0.00502784013748169, 0.005262335777282715, 0.005139520168304443, 0.0051803522109985355, 0.0050841598510742185, 0.005278719902038574, 0.005102591991424561, 0.0051660799980163576, 0.005067776203155518, 0.005074944019317627, 0.005104640007019043, 0.004980735778808594, 0.004978687763214112, 0.004957183837890625, 0.00501145601272583, 0.00506060791015625, 0.00506879997253418, 0.005041152000427246, 0.0050022401809692385, 0.004985856056213379, 0.005008384227752686, 0.0050135040283203125, 0.0050165758132934574, 0.004984831809997559, 0.004914175987243652, 0.004827136039733886, 0.004865024089813232, 0.004829184055328369, 0.004832255840301514, 0.00496230411529541, 0.004908031940460205, 0.005038080215454102, 0.0049162240028381345, 0.004853759765625, 0.004830207824707031, 0.004881408214569092, 0.004892672061920166, 0.004923391819000244, 0.004865024089813232, 0.0048220157623291016, 
0.0048895998001098635, 0.00486297607421875, 0.004847616195678711, 0.004819968223571777, 0.004819968223571777, 0.00481382417678833, 0.004832255840301514, 0.004842495918273926, 0.0048158721923828125, 0.004881408214569092, 0.004881408214569092, 0.004923391819000244, 0.004877312183380127, 0.004877312183380127, 0.004892672061920166, 0.0049203200340270994, 0.004952064037322998, 0.0049489917755126955, 0.004868095874786377, 0.0048752641677856446, 0.004911104202270508, 0.004891647815704346, 0.004944896221160889, 0.004884479999542236, 0.0048895998001098635, 0.004895743846893311, 0.004872191905975342, 0.004878335952758789, 0.004868095874786377, 0.004914175987243652, 0.0048793601989746095, 0.004876287937164306, 0.004882463932037354, 0.0049049282073974606, 0.004890624046325683, 0.0049162240028381345, 0.004873216152191162, 0.004926496028900147, 0.004842463970184326, 0.004806655883789063, 0.004789247989654541, 0.004832255840301514, 0.004836351871490479, 0.004989952087402344, 0.005047296047210693, 0.0050063362121582035, 0.005093376159667969, 0.0050022401809692385, 0.004964352130889893, 0.004989952087402344, 0.005028863906860351, 0.004998144149780274, 0.005062655925750732, 0.0050421757698059086, 0.005122047901153564, 0.005092351913452148, 0.0051036481857299806, 0.0051353278160095215, 0.005053440093994141, 0.005116928100585938, 0.005121024131774903, 0.005062655925750732, 0.005073919773101807, 0.00511187219619751, 0.005150688171386719, 0.005062623977661133, 0.0050841598510742185, 0.00506879997253418, 0.005088255882263183, 0.005071872234344482, 0.005100543975830078, 0.005088255882263183, 0.004923391819000244, 0.004825088024139404, 0.004860928058624267, 0.0048568320274353025, 0.004827136039733886, 0.004825088024139404, 0.0048220157623291016, 0.004846591949462891, 0.004928512096405029, 0.004993023872375488, 0.004888576030731201, 0.00490502405166626, 0.004904895782470703, 0.0048752641677856446, 0.004877312183380127, 0.0048855037689208985, 0.005037055969238281, 0.004872191905975342, 0.0048865280151367185, 0.004876287937164306, 0.0049192957878112795, 0.0048855037689208985, 0.004787199974060059, 0.004903935909271241, 0.004899839878082276, 0.004922368049621582, 0.004873216152191162, 0.0048855037689208985, 0.004959231853485108, 0.004900864124298096, 0.004868095874786377, 0.005386240005493164, 0.00516812801361084, 0.005180416107177735, 0.0052070398330688475, 0.0052633600234985355, 0.005636096000671387, 0.005389311790466309, 0.005171199798583984, 0.005165056228637695, 0.00501145601272583, 0.005017600059509277, 0.005071872234344482, 0.005135359764099121, 0.005083136081695557, 0.005121024131774903, 0.005074944019317627, 0.005089280128479004, 0.005139455795288086, 0.00506982421875, 0.0050811200141906735, 0.005115871906280518, 0.005089280128479004, 0.005056511878967285, 0.005161983966827393, 0.005139455795288086, 0.0051066880226135255, 0.00505241584777832, 0.005093376159667969, 0.005089280128479004, 0.005062655925750732, 0.005067776203155518, 0.005099520206451416, 0.005064703941345215, 0.00506982421875, 0.0050841598510742185, 0.005099520206451416, 0.005070847988128662, 0.005070847988128662, 0.005096447944641113, 0.005026815891265869, 0.00506982421875, 0.005076000213623047, 0.005101535797119141, 0.005107711791992187, 0.0050769920349121095, 0.005085184097290039, 0.00510975980758667, 0.00506060791015625, 0.005090303897857666, 0.0050769920349121095, 0.005114880084991455, 0.005097472190856934, 0.005051392078399658, 0.005079040050506592, 0.00511084794998169, 0.005082047939300537, 0.005040128231048584, 0.005107711791992187, 
0.005090303897857666, 0.0050769920349121095, 0.00506982421875, 0.005126143932342529, 0.005078015804290771, 0.005057536125183106, 0.005089280128479004, 0.005063680171966553, 0.005057536125183106, 0.005063680171966553, 0.005083136081695557, 0.0050432000160217285, 0.005088255882263183, 0.00506060791015625, 0.005108736038208008, 0.00506879997253418, 0.005054463863372802, 0.005049344062805176, 0.0050954241752624516, 0.005092351913452148, 0.005064703941345215, 0.005099520206451416, 0.005081088066101074, 0.005164031982421875, 0.005079040050506592, 0.0049909758567810054, 0.005028863906860351, 0.0050063362121582035, 0.004998144149780274, 0.0048855037689208985, 0.00501145601272583, 0.005097504138946533, 0.004946911811828613, 0.004831232070922851, 0.004857855796813965, 0.0048895998001098635, 0.004893695831298828, 0.004896768093109131, 0.004896768093109131, 0.004985856056213379, 0.004901887893676758, 0.004888576030731201, 0.004890624046325683, 0.004934656143188477, 0.004911104202270508, 0.004888576030731201, 0.0048895998001098635, 0.004905983924865722, 0.0050022401809692385, 0.004931583881378174, 0.004830207824707031, 0.004905983924865722, 0.004905983924865722, 0.004880383968353271, 0.004865024089813232, 0.004874239921569825, 0.0050124797821044925, 0.0049162240028381345, 0.00487014389038086, 0.0048895998001098635, 0.004882431983947754, 0.004910079956054687, 0.0048895998001098635, 0.00491212797164917, 0.0048793601989746095, 0.004897791862487793, 0.004911104202270508, 0.004819968223571777, 0.004809728145599365, 0.004824063777923584, 0.0048527359962463375, 0.004939775943756103, 0.004905983924865722, 0.004895743846893311, 0.004877312183380127, 0.004918272018432617, 0.00487116813659668, 0.004872191905975342, 0.0050769920349121095, 0.005153791904449463, 0.0050032639503479, 0.00506982421875, 0.0050841598510742185, 0.005135359764099121, 0.005099552154541016, 0.005084127902984619, 0.005071872234344482, 0.005064703941345215, 0.0051138558387756345, 0.005062655925750732, 0.005073984146118164, 0.005007296085357666, 0.005008384227752686, 0.005703680038452149, 0.00520908784866333, 0.005150720119476319, 0.0050800638198852536, 0.005142528057098389, 0.005254144191741943, 0.0051363840103149415, 0.005139455795288086, 0.005182464122772217, 0.00522547197341919, 0.005097472190856934, 0.005167168140411377, 0.005123007774353028, 0.005073919773101807, 0.0051066880226135255, 0.005091392040252686, 0.005179327964782715, 0.0050124797821044925, 0.005000192165374756, 0.0050728960037231445, 0.005067808151245117, 0.0051322560310363766, 0.005078015804290771, 0.005181439876556396, 0.005091328144073487, 0.005101568222045898, 0.0052111358642578124, 0.005107711791992187, 0.005258240222930908, 0.005075967788696289, 0.0051968002319335935, 0.005073919773101807, 0.005087232112884522, 0.005138432025909424, 0.005090303897857666, 0.00507916784286499, 0.005080959796905518, 0.005119999885559082, 0.005099520206451416, 0.005123104095458984, 0.005149663925170899, 0.005130239963531494, 0.005182464122772217, 0.005126143932342529, 0.005156864166259765, 0.005213183879852295, 0.005088255882263183, 0.005190656185150146, 0.005161983966827393, 0.005132287979125977, 0.005075967788696289, 0.005310463905334473, 0.005125120162963868, 0.005111807823181152, 0.005083136081695557, 0.005101568222045898, 0.005129216194152832, 0.005096447944641113, 0.005114880084991455, 0.005051392078399658, 0.0050421757698059086, 0.004984831809997559, 0.005071872234344482, 0.005096447944641113, 0.005100543975830078, 0.005102591991424561, 0.00520908784866333, 0.005175295829772949, 
0.005075967788696289, 0.0052008957862854, 0.005206016063690186, 0.005088255882263183, 0.005081088066101074, 0.005062655925750732, 0.0051036162376403805, 0.005158912181854248, 0.004996096134185791, 0.005071872234344482, 0.005115903854370117, 0.005111807823181152, 0.005251071929931641, 0.005163008213043213, 0.005038080215454102, 0.005025792121887207, 0.005079040050506592, 0.00511190414428711, 0.005153696060180664, 0.00506060791015625, 0.005104640007019043, 0.005102591991424561, 0.005066751956939697, 0.005158912181854248, 0.0050800638198852536, 0.005124095916748047, 0.005153791904449463, 0.005125120162963868, 0.005197824001312256, 0.004986879825592041, 0.005035007953643799, 0.004989952087402344, 0.005030911922454834, 0.005030911922454834, 0.005096479892730713, 0.005450719833374023, 0.00510975980758667, 0.005074944019317627, 0.004993023872375488, 0.004983808040618896, 0.004987904071807861, 0.0050022401809692385, 0.00511897611618042, 0.005098495960235596, 0.005066751956939697, 0.00517632007598877, 0.005344255924224854, 0.005083136081695557, 0.00502784013748169, 0.005074944019317627, 0.005108736038208008, 0.005055488109588623, 0.004955135822296143, 0.0050769920349121095, 0.0050800638198852536, 0.005214208126068115, 0.005008384227752686, 0.004976640224456787, 0.00499507188796997, 0.005004288196563721, 0.004957183837890625, 0.004976640224456787, 0.004983808040618896, 0.0049192957878112795, 0.004967423915863037, 0.0049530878067016604, 0.004872191905975342, 0.004922368049621582, 0.00487014389038086, 0.0048865280151367185, 0.004869120121002197, 0.005135359764099121, 0.0050800638198852536, 0.00505241584777832, 0.005056511878967285, 0.005093376159667969, 0.005100543975830078, 0.005059584140777588, 0.005102591991424561, 0.005074944019317627, 0.005099520206451416, 0.005158912181854248, 0.005165056228637695, 0.005228544235229492, 0.0050769920349121095, 0.005114880084991455, 0.0050462718009948735, 0.0050165758132934574, 0.004969471931457519, 0.0050124797821044925, 0.005104640007019043, 0.0050657281875610355, 0.005079040050506592, 0.0051701760292053225, 0.00506879997253418, 0.005087232112884522, 0.005073919773101807, 0.005091328144073487, 0.005149695873260498, 0.005073919773101807, 0.0050800638198852536, 0.005090303897857666, 0.005070847988128662, 0.005175295829772949, 0.005126143932342529, 0.0050769920349121095, 0.005139455795288086, 0.005062655925750732, 0.005098495960235596, 0.005091328144073487, 0.005112832069396973, 0.005004288196563721, 0.005066751956939697, 0.005093376159667969, 0.005139455795288086, 0.005094399929046631, 0.005082111835479736, 0.005058559894561767, 0.005005311965942383, 0.004883456230163574, 0.005049344062805176, 0.005062655925750732, 0.005062655925750732, 0.005090303897857666, 0.005008384227752686, 0.005091328144073487, 0.004984831809997559, 0.0050206718444824215, 0.00516096019744873, 0.005350399971008301, 0.005100543975830078, 0.00511078405380249, 0.00506879997253418, 0.005063680171966553, 0.005082111835479736, 0.005107711791992187, 0.0050728960037231445, 0.005093376159667969, 0.005055488109588623, 0.005092351913452148, 0.005075967788696289, 0.005224448204040527, 0.005142528057098389, 0.005274623870849609, 0.00510975980758667, 0.005102591991424561, 0.005112832069396973, 0.005100543975830078, 0.005193727970123291, 0.005108736038208008, 0.005062655925750732, 0.005083136081695557, 0.005131296157836914, 0.005314527988433838, 0.005055488109588623, 0.005015552043914795, 0.00506879997253418, 0.004958208084106445, 0.004997119903564453, 0.005064703941345215, 0.005017600059509277, 
0.005040128231048584, 0.005175295829772949, 0.005171199798583984, 0.005089280128479004, 0.005290048122406006, 0.0050984320640563964, 0.005400576114654541, 0.005251071929931641, 0.005997568130493164, 0.005180416107177735, 0.005078015804290771, 0.005061632156372071, 0.005111807823181152]",tokens/s,198.37769443480852,,,2,64,1,,, -4bit-awq-exllama-v2-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,meta-llama/Llama-2-13b-hf,meta-llama/Llama-2-13b-hf,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - self.load_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3907, in from_pretrained - hf_quantizer.postprocess_model(model) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/base.py"", line 195, in postprocess_model - return self._process_model_after_weight_loading(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/quantizer_awq.py"", line 107, in _process_model_after_weight_loading - model = post_init_awq_exllama_modules(model, self.quantization_config.exllama_config) - File ""/usr/local/lib/python3.10/dist-packages/transformers/integrations/awq.py"", line 466, in post_init_awq_exllama_modules - model = exllamav2_post_init( - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllamav2.py"", line 198, in exllamav2_post_init - submodule.post_init(scratch_space=model.scratch_spaces[device]) - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllamav2.py"", line 81, in post_init - self.q_handle = exlv2_ext.make_q_matrix( -NameError: name 'exlv2_ext' is not defined - 
-",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,2,64,1,,, -4bit-awq-exllama-v2-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,x,x,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 304, in hf_raise_for_status - response.raise_for_status() - File ""/usr/local/lib/python3.10/dist-packages/requests/models.py"", line 1024, in raise_for_status - raise HTTPError(http_error_msg, response=self) -requests.exceptions.HTTPError: 404 Client Error: Not Found for url: https://huggingface.co/x/resolve/main/config.json - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1221, in hf_hub_download - return _hf_hub_download_to_cache_dir( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1325, in _hf_hub_download_to_cache_dir - _raise_on_head_call_error(head_call_error, force_download, local_files_only) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1823, in _raise_on_head_call_error - raise head_call_error - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1722, in _get_metadata_or_catch_error - metadata = get_hf_file_metadata(url=url, proxies=proxies, timeout=etag_timeout, headers=headers) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1645, in get_hf_file_metadata - r = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 372, in _request_wrapper - response = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 396, in _request_wrapper - hf_raise_for_status(response) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status - raise RepositoryNotFoundError(message, 
response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-669492d5-4c63eff13ee60a906843596b;f6e40f1f-80fc-4fae-bd4e-a4385b8b20e2) - -Repository Not Found for url: https://huggingface.co/x/resolve/main/config.json. -Please make sure you specified the correct `repo_id` and `repo_type`. -If you are trying to access a private or gated repo, make sure you are authenticated. - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 425, in cached_file - raise EnvironmentError( -OSError: x is not a local folder and is not a valid model identifier listed on 'https://huggingface.co/models' -If this is a private repository, make sure to pass a token having permission to this repo either by logging in with `huggingface-cli login` or by passing `token=` - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,2,64,1,,, -4bit-awq-exllama-v2-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,/,/,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File 
""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 373, in cached_file - raise EnvironmentError( -OSError: / does not appear to have a file named config.json. Checkout 'https://huggingface.co///tree/None' for available files. - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,2,64,1,,, -4bit-awq-exllama-v2-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,togethercomputer/RedPajama-INCITE-Base-7B-v0.1,togethercomputer/RedPajama-INCITE-Base-7B-v0.1,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - self.load_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in 
load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3907, in from_pretrained - hf_quantizer.postprocess_model(model) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/base.py"", line 195, in postprocess_model - return self._process_model_after_weight_loading(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/quantizer_awq.py"", line 107, in _process_model_after_weight_loading - model = post_init_awq_exllama_modules(model, self.quantization_config.exllama_config) - File ""/usr/local/lib/python3.10/dist-packages/transformers/integrations/awq.py"", line 466, in post_init_awq_exllama_modules - model = exllamav2_post_init( - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllamav2.py"", line 198, in exllamav2_post_init - submodule.post_init(scratch_space=model.scratch_spaces[device]) - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllamav2.py"", line 81, in post_init - self.q_handle = exlv2_ext.make_q_matrix( -NameError: name 'exlv2_ext' is not defined - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,2,64,1,,, -4bit-awq-exllama-v2-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,facebook/xglm-564M,facebook/xglm-564M,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - self.load_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File 
""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3704, in from_pretrained - config = cls._autoset_attn_implementation( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1490, in _autoset_attn_implementation - config = cls._check_and_enable_sdpa( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1656, in _check_and_enable_sdpa - raise ValueError( -ValueError: XGLMForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,2,64,1,,, -4bit-awq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,EleutherAI/gpt-neo-125m,EleutherAI/gpt-neo-125m,cuda,0,42,,,True,,,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run - report = scenario.run(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run - self.run_model_loading_tracking(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking - backend.load() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load - self.load_transformers_model() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model - self.load_transformers_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights - self.load_transformers_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained - self.pretrained_model = self.automodel_loader.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return 
model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3826, in from_pretrained - config = cls._autoset_attn_implementation( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1565, in _autoset_attn_implementation - config = cls._check_and_enable_sdpa( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1731, in _check_and_enable_sdpa - raise ValueError( -ValueError: GPTNeoForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` - -",gpt_neo,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,2,64,1,,, -4bit-awq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,EleutherAI/gpt-j-6b,EleutherAI/gpt-j-6b,cuda,0,42,,,True,,,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run - report = scenario.run(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run - self.run_model_loading_tracking(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking - backend.load() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load - self.load_transformers_model() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model - self.load_transformers_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights - self.load_transformers_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained - self.pretrained_model = self.automodel_loader.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in 
from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3826, in from_pretrained - config = cls._autoset_attn_implementation( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1565, in _autoset_attn_implementation - config = cls._check_and_enable_sdpa( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1731, in _check_and_enable_sdpa - raise ValueError( -ValueError: GPTJForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` - -",gptj,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,2,64,1,,, -4bit-awq-exllama-v2-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,stabilityai/stablelm-2-12b,stabilityai/stablelm-2-12b,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - self.load_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3907, in from_pretrained - hf_quantizer.postprocess_model(model) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/base.py"", line 195, in postprocess_model - return self._process_model_after_weight_loading(model, **kwargs) - File 
""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/quantizer_awq.py"", line 107, in _process_model_after_weight_loading - model = post_init_awq_exllama_modules(model, self.quantization_config.exllama_config) - File ""/usr/local/lib/python3.10/dist-packages/transformers/integrations/awq.py"", line 466, in post_init_awq_exllama_modules - model = exllamav2_post_init( - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllamav2.py"", line 198, in exllamav2_post_init - submodule.post_init(scratch_space=model.scratch_spaces[device]) - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllamav2.py"", line 81, in post_init - self.q_handle = exlv2_ext.make_q_matrix( -NameError: name 'exlv2_ext' is not defined - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,2,64,1,,, -4bit-awq-exllama-v2-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,google/gemma-2b,google/gemma-2b,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 304, in hf_raise_for_status - response.raise_for_status() - File ""/usr/local/lib/python3.10/dist-packages/requests/models.py"", line 1024, in raise_for_status - raise HTTPError(http_error_msg, response=self) -requests.exceptions.HTTPError: 403 Client Error: Forbidden for url: https://huggingface.co/google/gemma-2b/resolve/main/config.json - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1722, in _get_metadata_or_catch_error - metadata = get_hf_file_metadata(url=url, proxies=proxies, timeout=etag_timeout, headers=headers) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1645, in get_hf_file_metadata - r = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 372, in _request_wrapper - response = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 396, in _request_wrapper - hf_raise_for_status(response) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 367, in hf_raise_for_status - raise HfHubHTTPError(message, 
response=response) from e -huggingface_hub.utils._errors.HfHubHTTPError: (Request ID: Root=1-66948123-12ddcba264ebf794745fb4e4;bd785dea-81ac-4dc7-8166-5244943f2562) - -403 Forbidden: Please enable access to public gated repositories in your fine-grained token settings to view this repository.. -Cannot access content at: https://huggingface.co/google/gemma-2b/resolve/main/config.json. -If you are trying to create or update content,make sure you have a token with the `write` role. - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1221, in hf_hub_download - return _hf_hub_download_to_cache_dir( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1325, in _hf_hub_download_to_cache_dir - _raise_on_head_call_error(head_call_error, force_download, local_files_only) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1826, in _raise_on_head_call_error - raise LocalEntryNotFoundError( -huggingface_hub.utils._errors.LocalEntryNotFoundError: An error happened while trying to locate the file on the Hub and we cannot find the requested files in the local cache. Please check your connection and try again or make sure your Internet connection is on. - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 445, in cached_file - raise EnvironmentError( -OSError: We couldn't connect to 'https://huggingface.co' to load this file, couldn't find it in the cached files and it looks like google/gemma-2b is not the path to a directory containing a file named config.json. 
-Checkout your internet connection or see how to run the library in offline mode at 'https://huggingface.co/docs/transformers/installation#offline-mode'. - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,2,64,1,,, -4bit-awq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,EleutherAI/polyglot-ko-12.8b,EleutherAI/polyglot-ko-12.8b,cuda,0,42,,,True,,,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,gpt_neox,MB,7366.88128,9907.470336,0.0,9277.800448,8679.633408,s,1,12.158142578125,12.158142578125,0.0,12.158142578125,12.158142578125,12.158142578125,12.158142578125,[12.158142578125],,kWh,6.323571049860499e-05,3.4641201854791755e-05,0.00012445343289602118,0.00022233034524941792,,MB,1762.660352,9926.344704,0.0,9277.800448,8205.395968,s,10,1.9098154296874998,0.19098154296874997,0.0002167594556476354,0.19106138610839843,0.19117670745849608,0.19118121871948243,0.1911848277282715,"[0.19053935241699219, 0.19099842834472655, 0.19100965881347656, 0.19111888122558593, 0.19118572998046876, 0.19117570495605468, 0.19058979797363282, 0.19105839538574218, 0.19106437683105468, 0.19107510375976564]",tokens/s,1340.4436681186983,kWh,2.2543492006157755e-06,1.2352781991274313e-06,8.936364591433833e-06,1.242599199117704e-05,tokens/kWh,20601976.903073046,MB,1766.739968,9926.344704,0.0,9277.800448,8483.12576,s,10,17.838481201171874,1.7838481201171876,0.025357932737637656,1.7940809326171876,1.80654384765625,1.8068367431640624,1.8070710595703126,"[1.750405029296875, 1.7350150146484375, 1.7812454833984375, 1.7959024658203124, 1.806478759765625, 1.7580172119140625, 1.7922593994140625, 1.80635498046875, 1.8056732177734376, 1.807129638671875]",tokens/s,35.31690803130778,kWh,2.107878181813527e-05,1.1551657277863218e-05,6.588380637576533e-05,9.851424547176382e-05,tokens/kWh,639501.421325478,,s,630,17.835157503128034,0.028309773814488973,0.0005945001054447777,0.028513792037963868,0.028836761856079102,0.029094553089141845,0.029686619510650634,"[0.027720703125, 0.02753228759765625, 0.02755276870727539, 0.027489280700683592, 0.027494400024414063, 0.027321344375610353, 0.027475967407226562, 0.027505664825439452, 0.027457536697387694, 0.02751283264160156, 0.027664384841918944, 0.02776780891418457, 0.027632640838623046, 0.027817983627319336, 0.028161024093627928, 0.027478015899658204, 0.027455488204956056, 0.02747494316101074, 0.027571199417114257, 0.027503616333007814, 0.027458560943603515, 0.027481088638305663, 0.027633663177490234, 0.02735513687133789, 0.02735513687133789, 0.02756505584716797, 0.02749235153198242, 0.027455488204956056, 0.027428863525390625, 0.027460607528686523, 0.027511808395385744, 0.02857881546020508, 0.028911615371704103, 0.027621376037597657, 0.028239871978759764, 0.027535360336303712, 0.027527168273925783, 0.028286975860595705, 0.027570175170898437, 0.027462656021118165, 0.027527168273925783, 0.028048383712768556, 0.028237823486328126, 0.028506111145019532, 0.028413951873779295, 0.02775449562072754, 0.02752204895019531, 0.028446720123291015, 0.028721151351928712, 
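
The google/gemma-2b failure above is an authentication problem rather than connectivity: the Hub returned 403 Forbidden for a gated repository, and transformers only surfaced it as an OSError after exhausting the local cache. A sketch of the usual fix, assuming a fine-grained token with read access to public gated repositories; the token value is a placeholder.

```python
from huggingface_hub import login

# Authenticate the process before any hf_hub_download call resolves config.json;
# the token must have "public gated repositories" read access enabled.
login(token="hf_...")  # placeholder, not a real token
```
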
0.02857676887512207, 0.027637760162353517, 0.028416000366210937, 0.027825151443481445, 0.028231679916381838, 0.027886592864990234, 0.027644927978515626, 0.02750873565673828, 0.027459583282470702, 0.02850201606750488, 0.02779955291748047, 0.02749849510192871, 0.02754764747619629, 0.028881919860839843, 0.02831667137145996, 0.027463680267333986, 0.027321344375610353, 0.027339775085449217, 0.027497472763061522, 0.02759884834289551, 0.02750668716430664, 0.02735103988647461, 0.027283456802368163, 0.027423744201660157, 0.027674623489379883, 0.027482112884521483, 0.027399168014526368, 0.027456512451171877, 0.02790809631347656, 0.02772377586364746, 0.02729471969604492, 0.027464704513549806, 0.027854848861694335, 0.02798182487487793, 0.027370496749877928, 0.027427839279174804, 0.027314176559448244, 0.0273623046875, 0.027444223403930663, 0.02752921676635742, 0.027381759643554687, 0.02856959915161133, 0.028302335739135744, 0.027568128585815428, 0.02746675109863281, 0.027449344635009764, 0.027445247650146484, 0.027630592346191408, 0.027616256713867186, 0.027405311584472656, 0.027397119522094726, 0.02753126335144043, 0.02716160011291504, 0.027421695709228516, 0.02734489631652832, 0.027473920822143554, 0.027433984756469725, 0.027365375518798828, 0.027227136611938478, 0.027365375518798828, 0.027417600631713866, 0.027463680267333986, 0.0273623046875, 0.027371519088745116, 0.027382783889770508, 0.027373567581176757, 0.027385856628417967, 0.02735308837890625, 0.027446271896362305, 0.02717184066772461, 0.027849727630615235, 0.027967487335205078, 0.0275230712890625, 0.02756096076965332, 0.027594751358032226, 0.028609535217285157, 0.027591680526733397, 0.027463680267333986, 0.027447296142578126, 0.027423744201660157, 0.027282432556152345, 0.027452415466308593, 0.027518976211547853, 0.02753331184387207, 0.027476991653442383, 0.027429887771606445, 0.027415552139282227, 0.027441152572631834, 0.027410432815551757, 0.02750771141052246, 0.028014591217041016, 0.029293567657470702, 0.028695552825927735, 0.02855423927307129, 0.02854195213317871, 0.028846080780029298, 0.028556287765502928, 0.028439552307128906, 0.0285347843170166, 0.028630016326904296, 0.029106176376342774, 0.02920038414001465, 0.02856959915161133, 0.028872703552246092, 0.028729343414306642, 0.028232704162597655, 0.02753331184387207, 0.027464704513549806, 0.027453439712524414, 0.027511808395385744, 0.027441152572631834, 0.027469823837280274, 0.027808767318725586, 0.02812518310546875, 0.028479488372802734, 0.02853887939453125, 0.028681215286254884, 0.02874367904663086, 0.02857676887512207, 0.028516351699829103, 0.028596223831176756, 0.02857676887512207, 0.028498943328857423, 0.028511232376098632, 0.028505088806152344, 0.028637184143066406, 0.02856959915161133, 0.028548095703125, 0.028677120208740234, 0.028473344802856446, 0.028606464385986328, 0.02874982452392578, 0.028491775512695314, 0.028472320556640625, 0.028641279220581056, 0.028660736083984374, 0.028951551437377928, 0.02938982391357422, 0.028669952392578125, 0.028794879913330077, 0.027489280700683592, 0.028180479049682617, 0.028420095443725587, 0.02833305549621582, 0.028443647384643556, 0.028494848251342773, 0.028294143676757814, 0.02831564712524414, 0.028486656188964843, 0.028446720123291015, 0.027885568618774413, 0.02831155204772949, 0.028395519256591797, 0.028520448684692383, 0.028297216415405273, 0.028521472930908204, 0.02840985679626465, 0.02854707145690918, 0.02853887939453125, 0.02860032081604004, 0.02836787223815918, 0.028479488372802734, 0.02855014419555664, 0.028513280868530274, 
0.028507135391235353, 0.02853785514831543, 0.029054975509643553, 0.029073408126831055, 0.028619775772094725, 0.028410879135131836, 0.02854707145690918, 0.028452863693237306, 0.028432384490966797, 0.028447744369506835, 0.02850918388366699, 0.02856959915161133, 0.028663808822631837, 0.028461055755615236, 0.02850201606750488, 0.028465152740478516, 0.028470272064208983, 0.02857574462890625, 0.028664831161499024, 0.028676095962524413, 0.028614656448364258, 0.028478464126586913, 0.02856345558166504, 0.028499967575073244, 0.028493824005126952, 0.028503040313720703, 0.028590080261230468, 0.028494848251342773, 0.028483583450317384, 0.0286167049407959, 0.02817843246459961, 0.02860339164733887, 0.02837196731567383, 0.02857369613647461, 0.028636159896850585, 0.028609535217285157, 0.028649471282958985, 0.028835840225219726, 0.029092863082885743, 0.02837708854675293, 0.02854707145690918, 0.02894643211364746, 0.028535808563232422, 0.028472320556640625, 0.028676095962524413, 0.028609535217285157, 0.030657535552978517, 0.029276159286499022, 0.028812288284301758, 0.028721151351928712, 0.028641279220581056, 0.028424192428588867, 0.02858291244506836, 0.028626943588256838, 0.02857779121398926, 0.028572671890258788, 0.028513280868530274, 0.02857164764404297, 0.028387327194213868, 0.028383232116699218, 0.02841804885864258, 0.028712959289550782, 0.028678144454956055, 0.028556287765502928, 0.02852454376220703, 0.028511232376098632, 0.028589056015014647, 0.029232128143310547, 0.029831167221069335, 0.028911615371704103, 0.02873139190673828, 0.028424192428588867, 0.02855526351928711, 0.028710912704467774, 0.028606464385986328, 0.028856319427490236, 0.028916736602783204, 0.028536832809448243, 0.028472320556640625, 0.028601343154907227, 0.02874880027770996, 0.028648448944091798, 0.02857369613647461, 0.028469247817993162, 0.02852249526977539, 0.028645376205444335, 0.029452287673950195, 0.031117311477661135, 0.02919424057006836, 0.028681215286254884, 0.028499967575073244, 0.02835558319091797, 0.028491775512695314, 0.02793267250061035, 0.028108800888061523, 0.02733260726928711, 0.0274913272857666, 0.028444671630859376, 0.028611583709716795, 0.029683712005615235, 0.027830272674560546, 0.028024831771850587, 0.027866111755371094, 0.02751283264160156, 0.027639808654785155, 0.02756505584716797, 0.02752102470397949, 0.027570175170898437, 0.027458560943603515, 0.027423744201660157, 0.02739302444458008, 0.027495424270629884, 0.027554815292358398, 0.027543552398681642, 0.027475967407226562, 0.02751487922668457, 0.027413503646850586, 0.027561983108520507, 0.027493375778198242, 0.02990185546875, 0.02922287940979004, 0.028050432205200194, 0.02820812797546387, 0.028477439880371092, 0.02835353660583496, 0.02854195213317871, 0.028104703903198244, 0.028050432205200194, 0.028645376205444335, 0.028485631942749022, 0.028496896743774414, 0.02888707160949707, 0.02755580711364746, 0.028631040573120117, 0.02835968017578125, 0.027891712188720705, 0.02832793617248535, 0.028657663345336915, 0.02793574333190918, 0.028873727798461913, 0.028200960159301756, 0.027440128326416017, 0.02728447914123535, 0.027487232208251954, 0.02752409553527832, 0.028080127716064454, 0.027472896575927733, 0.027272192001342774, 0.027992063522338868, 0.028298240661621094, 0.029716480255126954, 0.029499391555786132, 0.02780364799499512, 0.027181055068969725, 0.027266048431396486, 0.02731929588317871, 0.027347967147827147, 0.02719539260864258, 0.027069440841674806, 0.027048959732055664, 0.027241472244262696, 0.02754047966003418, 0.02733875274658203, 0.027510784149169923, 
0.02755686378479004, 0.028256256103515624, 0.027671552658081053, 0.027495424270629884, 0.028207103729248048, 0.030671871185302735, 0.029289472579956056, 0.028652544021606444, 0.028515327453613282, 0.028467199325561524, 0.028645376205444335, 0.028624895095825196, 0.028624895095825196, 0.028641279220581056, 0.02858393669128418, 0.028610559463500978, 0.028285951614379884, 0.028442623138427735, 0.02870681571960449, 0.02874880027770996, 0.02857881546020508, 0.02819891166687012, 0.02710527992248535, 0.02740121650695801, 0.027453439712524414, 0.02728447914123535, 0.027471872329711915, 0.02753126335144043, 0.027462656021118165, 0.02756403160095215, 0.027657215118408202, 0.027865087509155274, 0.02772787284851074, 0.027603967666625977, 0.028620800018310546, 0.02876313591003418, 0.029095935821533202, 0.02933964729309082, 0.028611583709716795, 0.028908544540405274, 0.029128704071044922, 0.02872012710571289, 0.028737535476684572, 0.028638208389282226, 0.028648448944091798, 0.028729343414306642, 0.0285296630859375, 0.02893414306640625, 0.028941312789916993, 0.028646400451660156, 0.028734464645385743, 0.028696575164794923, 0.02856243133544922, 0.028809215545654295, 0.028815359115600587, 0.028687360763549805, 0.0285665283203125, 0.028620800018310546, 0.028613632202148437, 0.02855526351928711, 0.028618751525878908, 0.028481536865234375, 0.028642303466796876, 0.028564479827880858, 0.028594175338745118, 0.02858598327636719, 0.02912665557861328, 0.02901094436645508, 0.028638208389282226, 0.02856345558166504, 0.028580863952636718, 0.028483583450317384, 0.02837196731567383, 0.02856959915161133, 0.028545024871826172, 0.02857779121398926, 0.028495872497558594, 0.02820403289794922, 0.028625919342041017, 0.028511232376098632, 0.02856755256652832, 0.02956185531616211, 0.02898636817932129, 0.02871500778198242, 0.028508159637451173, 0.02854911994934082, 0.028647424697875977, 0.028633087158203126, 0.028270591735839845, 0.02840985679626465, 0.028835840225219726, 0.028906496047973632, 0.028536832809448243, 0.02877235221862793, 0.028845056533813477, 0.02854400062561035, 0.02850918388366699, 0.02860851287841797, 0.028459007263183594, 0.02859110450744629, 0.028742656707763672, 0.02878156852722168, 0.029411327362060546, 0.02916966438293457, 0.028808191299438478, 0.02869862365722656, 0.028619775772094725, 0.02859929656982422, 0.028452863693237306, 0.02858598327636719, 0.028742656707763672, 0.02873139190673828, 0.028655616760253907, 0.02869964790344238, 0.02874060821533203, 0.028894208908081056, 0.028786687850952147, 0.02873958396911621, 0.02871603202819824, 0.028644351959228515, 0.028681215286254884, 0.02793369674682617, 0.028605440139770507, 0.02878976058959961, 0.02855526351928711, 0.02860851287841797, 0.02876518440246582, 0.02874880027770996, 0.028734464645385743, 0.02858291244506836, 0.028799999237060548, 0.02913689613342285, 0.02930790328979492, 0.028899328231811523, 0.02894745635986328, 0.028703744888305665, 0.028770303726196288, 0.028709888458251953, 0.02874367904663086, 0.02860748863220215, 0.028672000885009766, 0.028733440399169922, 0.028660736083984374, 0.028613632202148437, 0.028601343154907227, 0.028461055755615236, 0.02857881546020508, 0.028614656448364258, 0.028651519775390624, 0.02850201606750488, 0.028651519775390624, 0.028633087158203126, 0.028493824005126952, 0.02870681571960449, 0.02839449691772461, 0.028433408737182617, 0.028679168701171875, 0.02856038475036621, 0.028685312271118164, 0.028648479461669922, 0.02850710487365723, 0.02840166473388672, 0.02857574462890625, 0.028519424438476562, 
0.02835148811340332, 0.028664831161499024, 0.02851020812988281, 0.028932096481323243, 0.02949836730957031, 0.02874470329284668, 0.02855936050415039, 0.02858598327636719, 0.028527616500854492, 0.028619775772094725, 0.028652544021606444, 0.02856550407409668, 0.02852556800842285, 0.02857574462890625, 0.028653568267822265, 0.028495872497558594, 0.028647424697875977, 0.028665855407714845, 0.02875187110900879, 0.028596223831176756, 0.028536832809448243, 0.028619775772094725, 0.02860032081604004, 0.028588031768798827, 0.02856550407409668, 0.028718080520629883, 0.028588031768798827, 0.028846080780029298, 0.028632064819335938, 0.028413951873779295, 0.0290119686126709, 0.028818431854248046, 0.02873855972290039, 0.029478912353515626, 0.029057024002075195, 0.028499967575073244, 0.028632064819335938, 0.02851020812988281, 0.029442047119140623, 0.029090816497802735, 0.028580863952636718, 0.028644351959228515, 0.028594175338745118, 0.028611583709716795, 0.02873036766052246, 0.02856243133544922, 0.028479488372802734, 0.02852556800842285, 0.02855731201171875, 0.028602367401123048, 0.02851430320739746, 0.028470272064208983, 0.028539903640747072, 0.028640256881713868, 0.028487680435180664, 0.028685312271118164, 0.028637184143066406, 0.028686336517333984, 0.028618751525878908, 0.028653568267822265, 0.02859929656982422, 0.02854297637939453, 0.028513280868530274, 0.028527616500854492, 0.02856755256652832, 0.028648448944091798, 0.028646400451660156, 0.028553216934204102, 0.028612607955932616, 0.029114368438720704, 0.02893926429748535, 0.028564479827880858, 0.028641279220581056, 0.02854195213317871, 0.028468223571777345, 0.02856755256652832, 0.02873855972290039, 0.02968780708312988, 0.029005823135375978, 0.028704767227172853, 0.028726272583007813, 0.028588031768798827, 0.028467199325561524, 0.02856038475036621, 0.02855014419555664, 0.028511232376098632, 0.02872217559814453, 0.028466175079345703, 0.028503040313720703]",tokens/s,35.32348956769831,,,2,64,1,,, -4bit-awq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,Qwen/Qwen-72B,Qwen/Qwen-72B,cuda,0,42,,,True,,,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 613, in resolve_trust_remote_code - answer = input( -EOFError: EOF when reading a line - -During handling of the above exception, another exception occurred: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target - report = 
worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run - report = scenario.run(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run - self.run_model_loading_tracking(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking - backend.load() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load - self.load_transformers_model() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 149, in load_transformers_model - self.create_no_weights_model() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 269, in create_no_weights_model - meta_model = self.automodel_loader.from_config(self.pretrained_config) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 419, in from_config - trust_remote_code = resolve_trust_remote_code( - File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 626, in resolve_trust_remote_code - raise ValueError( -ValueError: The repository for Qwen/Qwen-72B contains custom code which must be executed to correctly load the model. You can inspect the repository content at https://hf.co/Qwen/Qwen-72B. -Please pass the argument `trust_remote_code=True` to allow custom code to be run. - -",qwen,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,2,64,1,,, -4bit-awq-exllama-v2-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,i,i,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 304, in hf_raise_for_status - response.raise_for_status() - File ""/usr/local/lib/python3.10/dist-packages/requests/models.py"", line 1024, in raise_for_status - raise HTTPError(http_error_msg, response=self) -requests.exceptions.HTTPError: 404 Client Error: Not Found for url: https://huggingface.co/i/resolve/main/config.json - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File 
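
The Qwen/Qwen-72B failure above chains two errors: `resolve_trust_remote_code` tried to prompt on stdin, which a spawned benchmark worker does not have, so `input()` hit EOF and the ValueError followed. A minimal sketch of the non-interactive fix the message itself names; the model id is taken from the failing row.

```python
from transformers import AutoModelForCausalLM

# Passing trust_remote_code=True up front skips the interactive prompt entirely,
# allowing the repository's custom modeling code to run in a headless process.
model = AutoModelForCausalLM.from_pretrained(
    "Qwen/Qwen-72B",
    trust_remote_code=True,
)
```
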
""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1221, in hf_hub_download - return _hf_hub_download_to_cache_dir( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1325, in _hf_hub_download_to_cache_dir - _raise_on_head_call_error(head_call_error, force_download, local_files_only) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1823, in _raise_on_head_call_error - raise head_call_error - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1722, in _get_metadata_or_catch_error - metadata = get_hf_file_metadata(url=url, proxies=proxies, timeout=etag_timeout, headers=headers) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1645, in get_hf_file_metadata - r = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 372, in _request_wrapper - response = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 396, in _request_wrapper - hf_raise_for_status(response) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status - raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-6694902a-69084a0a681811a04cd716e3;e2d4feea-4322-4a56-a342-2d36f9978e50) - -Repository Not Found for url: https://huggingface.co/i/resolve/main/config.json. -Please make sure you specified the correct `repo_id` and `repo_type`. -If you are trying to access a private or gated repo, make sure you are authenticated. 
- -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 425, in cached_file - raise EnvironmentError( -OSError: i is not a local folder and is not a valid model identifier listed on 'https://huggingface.co/models' -If this is a private repository, make sure to pass a token having permission to this repo either by logging in with `huggingface-cli login` or by passing `token=` - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,2,64,1,,, -4bit-awq-exllama-v2-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,stabilityai/stablelm-3b-4e1t,stabilityai/stablelm-3b-4e1t,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File 
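
The OSError above traces back to a plainly invalid model id (`i`), which the Hub reported as RepositoryNotFoundError. A hedged sketch of a fail-fast check a benchmark sweep could run before launching a worker; `HfApi.model_info` is a real huggingface_hub call, while the wrapper name is hypothetical.

```python
from huggingface_hub import HfApi
from huggingface_hub.utils import RepositoryNotFoundError

def repo_exists(model_id: str) -> bool:
    """Return False instead of crashing a worker process on a bad repo id."""
    try:
        HfApi().model_info(model_id)
        return True
    except RepositoryNotFoundError:
        return False

print(repo_exists("i"))  # False for the id that failed above
```
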
""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - self.load_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3907, in from_pretrained - hf_quantizer.postprocess_model(model) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/base.py"", line 195, in postprocess_model - return self._process_model_after_weight_loading(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/quantizer_awq.py"", line 107, in _process_model_after_weight_loading - model = post_init_awq_exllama_modules(model, self.quantization_config.exllama_config) - File ""/usr/local/lib/python3.10/dist-packages/transformers/integrations/awq.py"", line 466, in post_init_awq_exllama_modules - model = exllamav2_post_init( - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllamav2.py"", line 198, in exllamav2_post_init - submodule.post_init(scratch_space=model.scratch_spaces[device]) - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllamav2.py"", line 81, in post_init - self.q_handle = exlv2_ext.make_q_matrix( -NameError: name 'exlv2_ext' is not defined - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,2,64,1,,, -4bit-awq-exllama-v2-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,tiiuae/falcon-rw-1b,tiiuae/falcon-rw-1b,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 613, in resolve_trust_remote_code - answer = input( -EOFError: EOF when reading a line - -During handling of the above exception, another exception occurred: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File 
""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 231, in load_model_with_no_weights - self.create_no_weights_model() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 213, in create_no_weights_model - meta_model = self.automodel_class.from_config(self.pretrained_config) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 419, in from_config - trust_remote_code = resolve_trust_remote_code( - File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 626, in resolve_trust_remote_code - raise ValueError( -ValueError: The repository for tiiuae/falcon-rw-1b contains custom code which must be executed to correctly load the model. You can inspect the repository content at https://hf.co/tiiuae/falcon-rw-1b. -Please pass the argument `trust_remote_code=True` to allow custom code to be run. - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,2,64,1,,, -4bit-awq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,Qwen/Qwen-14B,Qwen/Qwen-14B,cuda,0,42,,,True,,,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 613, in resolve_trust_remote_code - answer = input( -EOFError: EOF when reading a line - -During handling of the above exception, another exception occurred: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run - report = scenario.run(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run - self.run_model_loading_tracking(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking - backend.load() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load - self.load_transformers_model() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 149, in load_transformers_model - self.create_no_weights_model() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 269, in create_no_weights_model - meta_model = self.automodel_loader.from_config(self.pretrained_config) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 419, in from_config - trust_remote_code 
= resolve_trust_remote_code( - File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 626, in resolve_trust_remote_code - raise ValueError( -ValueError: The repository for Qwen/Qwen-14B contains custom code which must be executed to correctly load the model. You can inspect the repository content at https://hf.co/Qwen/Qwen-14B. -Please pass the argument `trust_remote_code=True` to allow custom code to be run. - -",qwen,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,2,64,1,,, -4bit-awq-exllama-v2-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,stabilityai/stablelm-2-1_6b,stabilityai/stablelm-2-1_6b,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - self.load_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3907, in from_pretrained - hf_quantizer.postprocess_model(model) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/base.py"", line 195, in postprocess_model - return self._process_model_after_weight_loading(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/quantizer_awq.py"", line 107, in _process_model_after_weight_loading - model = post_init_awq_exllama_modules(model, self.quantization_config.exllama_config) - File ""/usr/local/lib/python3.10/dist-packages/transformers/integrations/awq.py"", line 466, in post_init_awq_exllama_modules - model = exllamav2_post_init( - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllamav2.py"", line 198, in exllamav2_post_init - submodule.post_init(scratch_space=model.scratch_spaces[device]) - File 
""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllamav2.py"", line 81, in post_init - self.q_handle = exlv2_ext.make_q_matrix( -NameError: name 'exlv2_ext' is not defined - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,2,64,1,,, -4bit-awq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,EleutherAI/pythia-6.7b,EleutherAI/pythia-6.7b,cuda,0,42,,,True,,,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,gpt_neox,MB,4215.996416,6159.859712,0.0,5530.189824,5138.859008,s,1,10.4622021484375,10.4622021484375,0.0,10.4622021484375,10.4622021484375,10.4622021484375,10.4622021484375,[10.4622021484375],,kWh,4.195050872846575e-05,2.297644391721924e-05,7.469644864593894e-05,0.00013962340129162394,,MB,1580.6464,6182.928384,0.0,5534.384128,4844.878336,s,10,1.0460218505859376,0.10460218505859376,2.3022558125100377e-05,0.10460023880004883,0.10462384414672851,0.10463841819763184,0.10465007743835449,"[0.10459203338623047, 0.10462060546875, 0.10461103820800781, 0.10456646728515626, 0.10457350158691406, 0.10460031890869141, 0.10465299224853515, 0.1046091537475586, 0.10460015869140625, 0.1045955810546875]",tokens/s,2447.367613368684,kWh,1.2362764626880661e-06,6.774188099963368e-07,5.065397570833592e-06,6.979092843517995e-06,tokens/kWh,36680985.0133698,MB,1601.302528,6185.025536,0.0,5534.384128,5015.826432,s,10,14.292628051757811,1.4292628051757812,0.0031193081143531527,1.4291558227539063,1.4325808837890623,1.4335771240234374,1.4343741162109374,"[1.4345733642578125, 1.4294637451171874, 1.42894873046875, 1.4274144287109376, 1.4291986083984376, 1.4323594970703124, 1.422193359375, 1.4280084228515626, 1.4313548583984375, 1.429113037109375]",tokens/s,44.078667528363894,kWh,1.6903775451269285e-05,9.263159987366053e-06,4.365036362396167e-05,6.9817299062597e-05,tokens/kWh,902355.1590489811,,s,630,14.290340862274157,0.022683080733768523,0.00030317742396786674,0.022617088317871094,0.022989926147460937,0.023299277114868164,0.023845304622650148,"[0.02263654327392578, 0.022584320068359375, 0.02254745674133301, 0.022683647155761717, 0.02330112075805664, 0.023085056304931642, 0.022756351470947265, 0.023334911346435547, 0.022616064071655274, 0.022937599182128905, 0.022573055267333983, 0.02260479927062988, 0.022833152770996092, 0.022231039047241212, 0.022692863464355468, 0.022586368560791017, 0.022664192199707032, 0.022666240692138673, 0.023235584259033205, 0.02329702377319336, 0.022674432754516603, 0.02269900894165039, 0.02267852783203125, 0.023009279251098632, 0.02249113655090332, 0.022326271057128907, 0.02232729530334473, 0.022312959671020507, 0.02369331169128418, 0.024885248184204102, 0.023757823944091795, 0.022608896255493165, 0.022563840866088865, 0.0224901123046875, 0.022588415145874022, 0.02249830436706543, 0.022541311264038084, 0.022640640258789063, 0.02289459228515625, 0.022716415405273437, 0.02268057632446289, 0.02272051239013672, 0.022622207641601562, 0.022261760711669923, 0.02234163284301758, 0.022960128784179686, 0.02287718391418457, 0.022569984436035157, 
0.023378944396972655, 0.02290176010131836, 0.022534143447875975, 0.022200319290161134, 0.02287513542175293, 0.022785024642944338, 0.02267852783203125, 0.02252288055419922, 0.02223411178588867, 0.02208563232421875, 0.02291097640991211, 0.022304767608642577, 0.022322175979614257, 0.023993343353271485, 0.023323648452758788, 0.02270515251159668, 0.02242355155944824, 0.022603776931762694, 0.022615039825439453, 0.022269952774047853, 0.022420480728149415, 0.02243174362182617, 0.022573055267333983, 0.02268876838684082, 0.022617088317871094, 0.022592512130737305, 0.02265190315246582, 0.022626304626464845, 0.02263859176635742, 0.022754304885864256, 0.02267750358581543, 0.022581247329711913, 0.022697984695434572, 0.022776832580566408, 0.02264575958251953, 0.02262015914916992, 0.022658048629760744, 0.02272972869873047, 0.02275328063964844, 0.022938623428344726, 0.02247065544128418, 0.02250444793701172, 0.022814720153808594, 0.023029760360717775, 0.023179264068603517, 0.022674432754516603, 0.022611967086791994, 0.023194623947143556, 0.022675455093383787, 0.023037952423095705, 0.022540288925170897, 0.022384639739990234, 0.022784000396728517, 0.022801408767700194, 0.022495231628417968, 0.022648832321166993, 0.022683647155761717, 0.02269081687927246, 0.02267136001586914, 0.022619136810302733, 0.022606847763061523, 0.02267238426208496, 0.022624256134033204, 0.022607872009277344, 0.02270207977294922, 0.022715391159057616, 0.023389184951782226, 0.023350271224975586, 0.022895616531372072, 0.02248908805847168, 0.02269491195678711, 0.0228853759765625, 0.022576128005981445, 0.022761472702026365, 0.022365184783935548, 0.02234982490539551, 0.02272051239013672, 0.022633472442626954, 0.02284851264953613, 0.023616512298583983, 0.022575103759765625, 0.02289356803894043, 0.023250944137573244, 0.022666240692138673, 0.022329343795776366, 0.022676479339599608, 0.022617088317871094, 0.022635520935058592, 0.022575103759765625, 0.02260479927062988, 0.02266726493835449, 0.022616064071655274, 0.023608320236206053, 0.02284339141845703, 0.022585344314575196, 0.0225218563079834, 0.022569984436035157, 0.022172672271728516, 0.022584320068359375, 0.022589439392089843, 0.023022592544555662, 0.02251366424560547, 0.022737920761108397, 0.022742015838623047, 0.022808576583862306, 0.023020544052124024, 0.023178239822387696, 0.02391859245300293, 0.023451648712158202, 0.022701055526733398, 0.022343679428100584, 0.022189056396484375, 0.0223242244720459, 0.022615039825439453, 0.022890495300292968, 0.02226688003540039, 0.02244812774658203, 0.022562816619873048, 0.022683647155761717, 0.022589439392089843, 0.022599679946899414, 0.022379520416259766, 0.02211123275756836, 0.022510591506958007, 0.022559743881225586, 0.022534143447875975, 0.02307788848876953, 0.022575103759765625, 0.022766592025756836, 0.02264678382873535, 0.022585344314575196, 0.022542335510253905, 0.022633472442626954, 0.022227968215942383, 0.022478847503662108, 0.022376447677612304, 0.022535167694091796, 0.022657024383544923, 0.02266009521484375, 0.022600704193115235, 0.02261299133300781, 0.02264678382873535, 0.022598655700683593, 0.02263039970397949, 0.02269491195678711, 0.022600704193115235, 0.02271334457397461, 0.023587839126586914, 0.022738943099975584, 0.02239897537231445, 0.02262835121154785, 0.022681600570678712, 0.02268569564819336, 0.022602752685546876, 0.022709247589111328, 0.022673408508300782, 0.02266316795349121, 0.02268876838684082, 0.0226375675201416, 0.022627328872680662, 0.022656000137329102, 0.022553600311279298, 0.022936576843261718, 0.0227194881439209, 
0.022656000137329102, 0.022616064071655274, 0.02305536079406738, 0.022583295822143554, 0.02265395164489746, 0.02270207977294922, 0.022665216445922853, 0.022641664505004884, 0.022603776931762694, 0.022562816619873048, 0.022895616531372072, 0.023169023513793945, 0.023040000915527343, 0.022773759841918945, 0.0222873592376709, 0.0222873592376709, 0.022566911697387695, 0.022756351470947265, 0.022611967086791994, 0.0225218563079834, 0.022597631454467772, 0.022819839477539062, 0.02271334457397461, 0.02243174362182617, 0.02251468849182129, 0.022777856826782225, 0.02268262481689453, 0.02261299133300781, 0.022610944747924806, 0.022582271575927734, 0.022618112564086915, 0.02247475242614746, 0.022733823776245117, 0.02260479927062988, 0.022601728439331056, 0.02265497589111328, 0.02260479927062988, 0.022329343795776366, 0.02226483154296875, 0.022236160278320313, 0.0225218563079834, 0.022571008682250978, 0.022714368820190428, 0.02271334457397461, 0.022558719635009765, 0.022561792373657227, 0.022607872009277344, 0.02231705665588379, 0.022399999618530272, 0.022627328872680662, 0.02249318313598633, 0.02305023956298828, 0.023855104446411132, 0.023633920669555664, 0.022932479858398438, 0.022378496170043945, 0.02264371109008789, 0.02270412826538086, 0.02260479927062988, 0.022520832061767578, 0.022610944747924806, 0.022632448196411133, 0.022666240692138673, 0.022227968215942383, 0.022421503067016603, 0.022533119201660155, 0.02342911911010742, 0.022684671401977538, 0.022501375198364256, 0.022536191940307617, 0.022487039566040038, 0.022590463638305663, 0.022572032928466795, 0.022782976150512696, 0.022609920501708985, 0.02249830436706543, 0.022594560623168947, 0.022545408248901368, 0.022588415145874022, 0.022253568649291993, 0.022226943969726562, 0.022210559844970702, 0.023194623947143556, 0.023821311950683592, 0.023992319107055664, 0.02345267105102539, 0.022524927139282228, 0.02288640022277832, 0.022565887451171874, 0.022559743881225586, 0.022566911697387695, 0.0225218563079834, 0.02247270393371582, 0.022520832061767578, 0.022577152252197266, 0.022549503326416014, 0.023120895385742187, 0.022378496170043945, 0.022289407730102538, 0.022373376846313478, 0.02252390480041504, 0.0225218563079834, 0.02295091247558594, 0.022376447677612304, 0.022358015060424806, 0.02307072067260742, 0.022615039825439453, 0.02269491195678711, 0.02260479927062988, 0.022578176498413087, 0.022563840866088865, 0.022758399963378906, 0.022579200744628908, 0.022591487884521484, 0.022592512130737305, 0.02268876838684082, 0.02234060859680176, 0.022683647155761717, 0.022590463638305663, 0.022692863464355468, 0.022579200744628908, 0.02264371109008789, 0.02270412826538086, 0.022609920501708985, 0.02290278434753418, 0.02266316795349121, 0.022511615753173828, 0.022582271575927734, 0.022586368560791017, 0.02263859176635742, 0.022599679946899414, 0.02269388771057129, 0.02225868797302246, 0.022576128005981445, 0.022565887451171874, 0.02342911911010742, 0.023427072525024413, 0.022872064590454103, 0.022656000137329102, 0.02268569564819336, 0.022303743362426756, 0.02245427131652832, 0.022634496688842775, 0.02255564880371094, 0.022592512130737305, 0.022640640258789063, 0.022675455093383787, 0.022588415145874022, 0.02260479927062988, 0.022692863464355468, 0.023812095642089845, 0.024258560180664062, 0.023545856475830077, 0.023244800567626952, 0.022968320846557616, 0.022402048110961914, 0.022377471923828125, 0.022707199096679686, 0.022731775283813475, 0.02264678382873535, 0.02255564880371094, 0.022632448196411133, 0.02253209686279297, 0.02265190315246582, 
0.022591487884521484, 0.02409062385559082, 0.0229171199798584, 0.022634496688842775, 0.022861824035644532, 0.022602752685546876, 0.02246963119506836, 0.02230886459350586, 0.022182912826538087, 0.022533119201660155, 0.02283417510986328, 0.022559743881225586, 0.02247372817993164, 0.022207487106323243, 0.02262015914916992, 0.022610944747924806, 0.022601728439331056, 0.022608896255493165, 0.02264371109008789, 0.02215116882324219, 0.02224947166442871, 0.022626304626464845, 0.02262118339538574, 0.02262118339538574, 0.022632448196411133, 0.022576128005981445, 0.02259660720825195, 0.022503423690795898, 0.02224127960205078, 0.022222848892211915, 0.022041599273681642, 0.022271999359130858, 0.022345727920532226, 0.022371328353881836, 0.022537216186523438, 0.022510591506958007, 0.022595584869384764, 0.02250752067565918, 0.022543359756469726, 0.022603776931762694, 0.022581247329711913, 0.022987775802612305, 0.022837247848510742, 0.022534143447875975, 0.022616064071655274, 0.022956031799316406, 0.022587392807006838, 0.02266828727722168, 0.022562816619873048, 0.022614015579223632, 0.022537216186523438, 0.022593536376953126, 0.02244607925415039, 0.022586368560791017, 0.022932479858398438, 0.023319551467895508, 0.02262937545776367, 0.02264473533630371, 0.022565887451171874, 0.022607872009277344, 0.022674432754516603, 0.02264678382873535, 0.022684671401977538, 0.022483968734741212, 0.02263039970397949, 0.02287615966796875, 0.02265497589111328, 0.022683647155761717, 0.022610944747924806, 0.02253209686279297, 0.022830080032348633, 0.022559743881225586, 0.022602752685546876, 0.022551551818847656, 0.022585344314575196, 0.02263039970397949, 0.022428672790527345, 0.02244812774658203, 0.022563840866088865, 0.02259660720825195, 0.022606847763061523, 0.023213056564331053, 0.022658048629760744, 0.02287820816040039, 0.02308710479736328, 0.02267136001586914, 0.022584320068359375, 0.022675455093383787, 0.022639616012573242, 0.02265907287597656, 0.022800384521484376, 0.022599679946899414, 0.022617088317871094, 0.022583295822143554, 0.023228416442871092, 0.022832128524780275, 0.022792192459106447, 0.022815744400024415, 0.022305791854858398, 0.02242252731323242, 0.02265190315246582, 0.02265907287597656, 0.02270207977294922, 0.022579200744628908, 0.022632448196411133, 0.022568960189819336, 0.0226693115234375, 0.02267136001586914, 0.02307379150390625, 0.022707199096679686, 0.022615039825439453, 0.022573055267333983, 0.02267852783203125, 0.02261299133300781, 0.022722560882568358, 0.02293452835083008, 0.02290790367126465, 0.022379520416259766, 0.022582271575927734, 0.022616064071655274, 0.022589439392089843, 0.022575103759765625, 0.022269952774047853, 0.022386688232421875, 0.02267955207824707, 0.022786048889160155, 0.022586368560791017, 0.02255564880371094, 0.02263039970397949, 0.022769664764404295, 0.022616064071655274, 0.022563840866088865, 0.022640640258789063, 0.023069696426391603, 0.023385087966918947, 0.022763519287109374, 0.022589439392089843, 0.022922239303588866, 0.022545408248901368, 0.022509567260742186, 0.022597631454467772, 0.022577152252197266, 0.022495231628417968, 0.022770687103271483, 0.02269696044921875, 0.022632448196411133, 0.022540288925170897, 0.02287718391418457, 0.0232806396484375, 0.022906879425048828, 0.0228351993560791, 0.02267955207824707, 0.022610944747924806, 0.02269081687927246, 0.02266828727722168, 0.022588415145874022, 0.022557695388793944, 0.02265907287597656, 0.022635520935058592, 0.022560768127441407, 0.022619136810302733, 0.0228351993560791, 0.022603776931762694, 0.022552576065063477, 
0.02269388771057129, 0.022589439392089843, 0.022550527572631835, 0.022564863204956053, 0.022975488662719725, 0.022573055267333983, 0.022611967086791994, 0.02267136001586914, 0.022557695388793944, 0.022626304626464845, 0.022814720153808594, 0.022606847763061523, 0.022609920501708985, 0.023786495208740235, 0.023185407638549805, 0.022616064071655274, 0.022619136810302733, 0.023137279510498047, 0.02309939193725586, 0.022589439392089843, 0.022849536895751952, 0.022687744140625, 0.022458368301391602, 0.022564863204956053, 0.022556671142578123, 0.02263039970397949, 0.02243174362182617, 0.023657472610473632, 0.022956031799316406, 0.022520832061767578, 0.022829055786132812, 0.02251571273803711, 0.022600704193115235, 0.022625280380249024, 0.022572032928466795, 0.022524927139282228, 0.022548479080200197, 0.022615039825439453, 0.022615039825439453, 0.022806528091430665, 0.022569984436035157, 0.022328319549560546, 0.022551551818847656, 0.022948863983154297, 0.022649856567382814, 0.022510591506958007, 0.022589439392089843, 0.0226375675201416, 0.022569984436035157, 0.02265497589111328, 0.022634496688842775, 0.022432767868041992, 0.02252288055419922, 0.023525375366210938, 0.022787071228027343, 0.022565887451171874, 0.022639616012573242, 0.02285158348083496, 0.02264473533630371, 0.02260479927062988, 0.02270310401916504, 0.022545408248901368, 0.02248089599609375, 0.022587392807006838, 0.022581247329711913, 0.02268262481689453, 0.022552576065063477, 0.022568960189819336, 0.022799392700195313, 0.02292732810974121, 0.022967296600341795, 0.022796287536621093, 0.02288435173034668, 0.02267136001586914, 0.02260479927062988, 0.02261299133300781, 0.022565887451171874, 0.022574079513549804, 0.022740991592407226, 0.02233241653442383, 0.02264473533630371, 0.0226693115234375, 0.022808576583862306, 0.022732799530029296, 0.02261299133300781, 0.022640640258789063, 0.022687744140625, 0.023088127136230468, 0.022589439392089843, 0.02262937545776367]",tokens/s,44.0857223821141,,,2,64,1,,, -4bit-awq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,01-ai/Yi-34B,01-ai/Yi-34B,cuda,0,42,,,True,,,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,llama,MB,17675.65312,22507.159552,0.0,21877.489664,21024.863232,s,1,19.64417578125,19.64417578125,0.0,19.64417578125,19.64417578125,19.64417578125,19.64417578125,[19.64417578125],,kWh,0.00015207049023263723,8.333210563762821e-05,0.0003275291509119982,0.0005629317467822637,,MB,4720.267264,22588.94848,0.0,21940.404224,19728.708096,s,10,5.17315283203125,0.517315283203125,8.61923824888662e-05,0.5173192138671875,0.5173928771972656,0.517405484008789,0.5174155694580078,"[0.5173900756835937, 0.5171132202148437, 0.5173878173828125, 0.517301025390625, 0.5172383422851563, 0.5172855224609375, 0.5174180908203125, 0.5173809814453125, 0.5173003540039063, 
0.51733740234375]",tokens/s,494.86262693592414,kWh,6.115633787291759e-06,3.3510956131233365e-06,2.4504519603600616e-05,3.3971249004015714e-05,tokens/kWh,7535784.155882477,MB,4720.267264,22616.211456,0.0,21965.570048,19728.710656,s,10,30.491866943359376,3.049186694335938,0.020204914310888337,3.0420996093749997,3.072567529296875,3.0852365234375,3.09537171875,"[3.029458984375, 3.02744091796875, 3.04214697265625, 3.04478369140625, 3.04050439453125, 3.03822265625, 3.04205224609375, 3.059599365234375, 3.069752197265625, 3.097905517578125]",tokens/s,20.661247183397,kWh,3.5773163422986045e-05,1.9606038733830987e-05,0.00014045555680879812,0.00019583475896561516,tokens/kWh,321699.785741619,,s,630,30.489251838684073,0.048395637839181084,0.0007300847840336027,0.04813823890686035,0.04966195106506348,0.050019686126708984,0.05109001293182373,"[0.04851609420776367, 0.04784640121459961, 0.04784025573730469, 0.04782700729370117, 0.047875007629394534, 0.04792627334594726, 0.047922176361083986, 0.04793446350097656, 0.04797337722778321, 0.0479109115600586, 0.047887359619140625, 0.04793753433227539, 0.04798668670654297, 0.04794060897827149, 0.047982593536376954, 0.0479918098449707, 0.04794265747070312, 0.04793241500854492, 0.04795904159545898, 0.04798668670654297, 0.04800614547729492, 0.04795904159545898, 0.0482979850769043, 0.048059391021728515, 0.04796518325805664, 0.04795596694946289, 0.04798054504394531, 0.048039936065673826, 0.04797542572021484, 0.04796108627319336, 0.04795699310302735, 0.04800614547729492, 0.048113662719726565, 0.048113662719726565, 0.04809830474853516, 0.04812083053588867, 0.04806860733032227, 0.04806246566772461, 0.048123905181884766, 0.04819148635864258, 0.04809625625610352, 0.04815052795410156, 0.04813721466064453, 0.04818841552734375, 0.04810444641113281, 0.04857651138305664, 0.04828672027587891, 0.04825702285766602, 0.04811161422729492, 0.0481710090637207, 0.048161792755126956, 0.04820787048339844, 0.04818534469604492, 0.048290817260742185, 0.04827340698242188, 0.048290817260742185, 0.04821094512939453, 0.04820684814453125, 0.04823244857788086, 0.048302078247070314, 0.04816896057128906, 0.04822937774658203, 0.04819353485107422, 0.048194561004638675, 0.04791603088378906, 0.047800319671630856, 0.047903743743896485, 0.047852542877197264, 0.047963134765625, 0.04788633728027344, 0.047922176361083986, 0.04788531112670898, 0.047922176361083986, 0.047884288787841796, 0.04797030258178711, 0.047833087921142575, 0.04796108627319336, 0.04786483383178711, 0.048031742095947266, 0.04787712097167969, 0.04795084762573242, 0.047876094818115236, 0.04799692916870117, 0.04786175918579102, 0.048026622772216795, 0.04792422485351563, 0.048023551940917966, 0.0478996467590332, 0.04796211242675781, 0.04794265747070312, 0.048048126220703126, 0.047941631317138675, 0.048069633483886716, 0.04787712097167969, 0.048039936065673826, 0.04802560043334961, 0.048092193603515625, 0.0480214729309082, 0.048161792755126956, 0.048072704315185545, 0.04812902450561524, 0.04808703994750976, 0.04822630310058594, 0.04804915237426758, 0.04815769577026367, 0.04815769577026367, 0.04821299362182617, 0.04808396911621094, 0.04816486358642578, 0.048113662719726565, 0.04826726531982422, 0.048110591888427735, 0.04823654556274414, 0.048161792755126956, 0.04828876876831055, 0.048132095336914066, 0.04825497436523438, 0.04811775970458984, 0.0482979850769043, 0.048145408630371096, 0.04825088119506836, 0.04815359878540039, 0.04832460784912109, 0.048145408630371096, 0.04825395202636719, 0.048161792755126956, 0.0483061752319336, 
0.047870975494384765, 0.047857662200927735, 0.047925247192382815, 0.047958015441894535, 0.04798463821411133, 0.05109862518310547, 0.04832665634155273, 0.048020481109619144, 0.04809011077880859, 0.04793446350097656, 0.04792729568481445, 0.047920127868652344, 0.047935489654541016, 0.04792934417724609, 0.04812492752075195, 0.04800921630859375, 0.04801638412475586, 0.048115711212158206, 0.04807680130004883, 0.04798668670654297, 0.048091136932373046, 0.04804403305053711, 0.04805017471313477, 0.04799795150756836, 0.04801331329345703, 0.04807680130004883, 0.04811468887329102, 0.050723838806152347, 0.04822732925415039, 0.04812083053588867, 0.048105472564697264, 0.04818227386474609, 0.04818739318847656, 0.0481525764465332, 0.04834611129760742, 0.04820377731323242, 0.04817407989501953, 0.04820991897583008, 0.04825702285766602, 0.04817203140258789, 0.04824063873291016, 0.04818841552734375, 0.04826521682739258, 0.04819660949707031, 0.04818739318847656, 0.048215038299560545, 0.048279552459716796, 0.04816384124755859, 0.05101772689819336, 0.04879052734375, 0.04835532760620117, 0.04820889663696289, 0.04825190353393555, 0.04820377731323242, 0.04828876876831055, 0.04827238464355469, 0.048333824157714846, 0.0483583984375, 0.04842086410522461, 0.04826009750366211, 0.04826726531982422, 0.04825395202636719, 0.04897484970092773, 0.04801945495605469, 0.047876094818115236, 0.04797542572021484, 0.04802764892578125, 0.04804403305053711, 0.047952896118164064, 0.048061439514160156, 0.04794265747070312, 0.04800102233886719, 0.04799078369140625, 0.04801126480102539, 0.04790169525146484, 0.04804915237426758, 0.04793241500854492, 0.048121856689453124, 0.04787814331054688, 0.04795084762573242, 0.04892979049682617, 0.04895436859130859, 0.04798054504394531, 0.04799795150756836, 0.047900672912597655, 0.04803379058837891, 0.047951873779296876, 0.04798054504394531, 0.047890430450439454, 0.048043006896972655, 0.04791398239135742, 0.047971328735351565, 0.047865856170654295, 0.048004096984863284, 0.04947148895263672, 0.04843008041381836, 0.048126976013183595, 0.04823756790161133, 0.04810649490356445, 0.04818841552734375, 0.04822220611572266, 0.05057843017578125, 0.05106892776489258, 0.04830003356933594, 0.04817203140258789, 0.04824576187133789, 0.048097278594970705, 0.0482949104309082, 0.048906238555908206, 0.04838195037841797, 0.04812595367431641, 0.04824063873291016, 0.048132095336914066, 0.04827545547485351, 0.04813926315307617, 0.04822016143798828, 0.048126976013183595, 0.04832563018798828, 0.048271358489990236, 0.04828672027587891, 0.04828057479858398, 0.048530433654785154, 0.048292865753173826, 0.05105152130126953, 0.049259521484375, 0.04825702285766602, 0.04803583908081055, 0.04797337722778321, 0.04786175918579102, 0.04788940811157227, 0.04807987213134766, 0.048056320190429686, 0.04815564727783203, 0.04792422485351563, 0.05110681533813476, 0.04812799835205078, 0.047941631317138675, 0.047915008544921874, 0.0479447021484375, 0.04947353744506836, 0.04821913528442383, 0.04795596694946289, 0.047900672912597655, 0.047900672912597655, 0.047949825286865234, 0.04794675064086914, 0.04794367980957031, 0.048094207763671876, 0.04801331329345703, 0.04797235107421875, 0.047938560485839846, 0.04797439956665039, 0.048020481109619144, 0.04807372665405273, 0.047911937713623044, 0.049600513458251956, 0.0494202880859375, 0.04823654556274414, 0.048113662719726565, 0.04809011077880859, 0.048092159271240234, 0.048140289306640625, 0.04806041717529297, 0.04809318542480469, 0.04813721466064453, 0.04809014511108398, 0.048151519775390624, 
0.04820172882080078, 0.04820889663696289, 0.048115711212158206, 0.048126976013183595, 0.048143360137939455, 0.04825702285766602, 0.04815871810913086, 0.04825190353393555, 0.04824063873291016, 0.04832153701782226, 0.05079347229003906, 0.04840959930419922, 0.048277503967285154, 0.048330753326416016, 0.04822528076171875, 0.04827340698242188, 0.048231422424316404, 0.048293888092041014, 0.0481976318359375, 0.04820889663696289, 0.04820070266723633, 0.0504453125, 0.048023551940917966, 0.04782592010498047, 0.04796108627319336, 0.04786995315551758, 0.047941631317138675, 0.047874046325683595, 0.047941631317138675, 0.047895553588867185, 0.04795391845703125, 0.047854591369628906, 0.04797337722778321, 0.047833087921142575, 0.04800102233886719, 0.04788019180297851, 0.04803276824951172, 0.04797439956665039, 0.0479447021484375, 0.04789657592773437, 0.04803276824951172, 0.047890430450439454, 0.05080166244506836, 0.0481607666015625, 0.04806758499145508, 0.047951873779296876, 0.048023551940917966, 0.047949825286865234, 0.04813619232177734, 0.04797337722778321, 0.04803891372680664, 0.047963134765625, 0.048123905181884766, 0.04813312149047851, 0.04819046401977539, 0.04811468887329102, 0.04828364944458008, 0.048121856689453124, 0.04813312149047851, 0.04805222320556641, 0.04820889663696289, 0.04806655883789063, 0.048192512512207034, 0.04809011077880859, 0.05173657608032227, 0.04840857696533203, 0.048279552459716796, 0.04815155029296875, 0.048244735717773435, 0.04810956954956055, 0.04818636703491211, 0.048159744262695314, 0.04830310440063477, 0.04818739318847656, 0.04823859024047852, 0.048146430969238284, 0.048315391540527344, 0.04818636703491211, 0.04828467178344727, 0.048246784210205076, 0.04835225677490235, 0.04817715072631836, 0.04827340698242188, 0.0481525764465332, 0.04839731216430664, 0.04789350509643555, 0.047867904663085936, 0.04812287902832031, 0.04825907135009765, 0.04810342407226562, 0.04801536178588867, 0.04799692916870117, 0.04801740646362305, 0.048814079284667966, 0.047958015441894535, 0.04853657531738281, 0.04972851181030274, 0.048146430969238284, 0.04802560043334961, 0.048048126220703126, 0.0480184326171875, 0.04796416091918945, 0.047911937713623044, 0.04899020767211914, 0.04820479965209961, 0.04801638412475586, 0.047987712860107425, 0.04797030258178711, 0.0479815673828125, 0.05018009567260742, 0.04832563018798828, 0.048110591888427735, 0.04799078369140625, 0.04795084762573242, 0.048075775146484374, 0.048091136932373046, 0.04818534469604492, 0.04815871810913086, 0.048271358489990236, 0.048192512512207034, 0.048192512512207034, 0.04807987213134766, 0.04815769577026367, 0.04820787048339844, 0.048143360137939455, 0.04819353485107422, 0.04814131164550781, 0.048198654174804685, 0.048113662719726565, 0.04813312149047851, 0.04818022537231445, 0.0482242546081543, 0.048099327087402347, 0.04816793441772461, 0.04815462493896484, 0.04824063873291016, 0.04817919921875, 0.04820172882080078, 0.04818022537231445, 0.048540672302246096, 0.048331775665283204, 0.04929024124145508, 0.048215038299560545, 0.04831027221679687, 0.0492042236328125, 0.04997017669677734, 0.04822732925415039, 0.05133926391601563, 0.05036236953735351, 0.050476032257080077, 0.049631233215332034, 0.04949606323242187, 0.04960255813598633, 0.049858558654785154, 0.05063372802734375, 0.04996300888061524, 0.04963225555419922, 0.04962713623046875, 0.04796518325805664, 0.047854591369628906, 0.04802969741821289, 0.047915008544921874, 0.048037887573242184, 0.04791398239135742, 0.04796108627319336, 0.047911937713623044, 0.048010238647460936, 
0.04791398239135742, 0.047968257904052736, 0.047882240295410154, 0.04801433563232422, 0.04818022537231445, 0.04801228713989258, 0.04925030517578125, 0.04967116928100586, 0.049713153839111325, 0.04801228713989258, 0.04789145660400391, 0.0480184326171875, 0.04803583908081055, 0.04811468887329102, 0.048121856689453124, 0.04818739318847656, 0.048036865234375, 0.048118785858154295, 0.04807475280761719, 0.04820787048339844, 0.048064510345458986, 0.04821811294555664, 0.04811673736572265, 0.048216064453125, 0.04980326461791992, 0.048467967987060545, 0.048217086791992186, 0.048323585510253904, 0.048089088439941405, 0.04830310440063477, 0.0481341438293457, 0.04825907135009765, 0.048100353240966794, 0.04820787048339844, 0.04814131164550781, 0.048366592407226565, 0.04819148635864258, 0.048669696807861325, 0.04891648101806641, 0.04832972717285156, 0.04812799835205078, 0.048263168334960936, 0.048159744262695314, 0.048323585510253904, 0.047887359619140625, 0.04899532699584961, 0.04784537506103516, 0.04848332977294922, 0.04792531204223633, 0.04793337631225586, 0.04790784072875977, 0.04793654251098633, 0.04792931365966797, 0.047906814575195314, 0.047923198699951174, 0.04789452743530274, 0.04794367980957031, 0.04814950561523437, 0.048075775146484374, 0.04984729766845703, 0.04965683364868164, 0.049716224670410154, 0.04797951889038086, 0.047922176361083986, 0.04796211242675781, 0.049701889038085936, 0.04988927841186523, 0.04792627334594726, 0.04807987213134766, 0.04793753433227539, 0.048110591888427735, 0.047951873779296876, 0.048048126220703126, 0.04801638412475586, 0.04802560043334961, 0.048059391021728515, 0.048054271697998044, 0.048333824157714846, 0.05015961456298828, 0.05030809783935547, 0.05049139022827148, 0.048161792755126956, 0.048178176879882816, 0.04812287902832031, 0.0481710090637207, 0.04809625625610352, 0.05014527893066406, 0.04999884796142578, 0.04924620819091797, 0.04970905685424805, 0.04822937774658203, 0.04814131164550781, 0.048156673431396485, 0.04815769577026367, 0.04822118377685547, 0.04919193649291992, 0.04965990447998047, 0.049759231567382815, 0.04968243026733398, 0.05066035079956055, 0.05067161560058594, 0.04931686401367188, 0.04974182510375977, 0.049708030700683595, 0.04958617782592773, 0.04954009628295898, 0.04826521682739258, 0.047935489654541016, 0.047754238128662106, 0.04793753433227539, 0.051138561248779295, 0.05159836959838867, 0.04965167999267578, 0.04978073501586914, 0.04974796676635742, 0.05040639877319336, 0.04983193588256836, 0.05034905624389648, 0.04979814529418945, 0.049355777740478515, 0.04978483200073242, 0.04986880111694336, 0.05041664123535156, 0.04957900619506836, 0.04955648040771484, 0.049430526733398435, 0.04963328170776367, 0.049503231048583986, 0.049519615173339845, 0.0495728645324707, 0.050223102569580076, 0.05003673553466797, 0.04797030258178711, 0.04809011077880859, 0.04793753433227539, 0.047993854522705076, 0.047898624420166014, 0.04858367919921875, 0.04802252960205078, 0.04813107299804688, 0.04804198455810547, 0.04819660949707031, 0.04805120086669922, 0.048145408630371096, 0.04805222320556641, 0.04818124771118164, 0.04807167816162109, 0.04819660949707031, 0.04810342407226562, 0.04901990509033203, 0.049675262451171875, 0.04823244857788086, 0.04843417739868164, 0.04835225677490235, 0.04966092681884766, 0.04911206436157227, 0.048126976013183595, 0.04891648101806641, 0.04975308990478516, 0.05003776168823242, 0.04987395095825195, 0.049745887756347654, 0.05020569610595703, 0.04993843078613281, 0.05179084777832031, 0.04976128005981445, 0.04983500671386719, 
0.049006591796875, 0.04980838394165039]",tokens/s,20.66301932672123,,,2,64,1,,, -4bit-awq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,EleutherAI/gpt-neo-1.3B,EleutherAI/gpt-neo-1.3B,cuda,0,42,,,True,,,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run - report = scenario.run(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run - self.run_model_loading_tracking(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking - backend.load() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load - self.load_transformers_model() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model - self.load_transformers_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights - self.load_transformers_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained - self.pretrained_model = self.automodel_loader.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3826, in from_pretrained - config = cls._autoset_attn_implementation( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1565, in _autoset_attn_implementation - config = cls._check_and_enable_sdpa( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1731, in _check_and_enable_sdpa - raise ValueError( -ValueError: GPTNeoForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. 
Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` - -",gpt_neo,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,2,64,1,,, -4bit-awq-exllama-v2-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,tiiuae/falcon-7b,tiiuae/falcon-7b,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 613, in resolve_trust_remote_code - answer = input( -EOFError: EOF when reading a line - -During handling of the above exception, another exception occurred: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 231, in load_model_with_no_weights - self.create_no_weights_model() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 213, in create_no_weights_model - meta_model = self.automodel_class.from_config(self.pretrained_config) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 419, in from_config - trust_remote_code = resolve_trust_remote_code( - File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 626, in resolve_trust_remote_code - raise ValueError( -ValueError: The repository for tiiuae/falcon-7b contains custom code which must be executed to correctly load the model. You can inspect the repository content at https://hf.co/tiiuae/falcon-7b. -Please pass the argument `trust_remote_code=True` to allow custom code to be run. 
- -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,2,64,1,,, -4bit-awq-exllama-v2-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,huggyllama/llama-65b,huggyllama/llama-65b,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - self.load_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3904, in from_pretrained - dispatch_model(model, **device_map_kwargs) - File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 489, in dispatch_model - model.to(device) - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 2796, in to - return super().to(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1173, in to - return self._apply(convert) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 779, in _apply - module._apply(fn) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 779, in _apply - module._apply(fn) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 779, in _apply - module._apply(fn) - [Previous line repeated 2 more times] - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 853, in _apply - self._buffers[key] = fn(buf) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1159, in convert - return t.to( -torch.cuda.OutOfMemoryError: CUDA out of memory. Tried to allocate 86.00 MiB. 
GPU - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,2,64,1,,, -4bit-awq-exllama-v2-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,1,1,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 304, in hf_raise_for_status - response.raise_for_status() - File ""/usr/local/lib/python3.10/dist-packages/requests/models.py"", line 1024, in raise_for_status - raise HTTPError(http_error_msg, response=self) -requests.exceptions.HTTPError: 404 Client Error: Not Found for url: https://huggingface.co/1/resolve/main/config.json - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1221, in hf_hub_download - return _hf_hub_download_to_cache_dir( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1325, in _hf_hub_download_to_cache_dir - _raise_on_head_call_error(head_call_error, force_download, local_files_only) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1823, in _raise_on_head_call_error - raise head_call_error - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1722, in _get_metadata_or_catch_error - metadata = get_hf_file_metadata(url=url, proxies=proxies, timeout=etag_timeout, headers=headers) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1645, in get_hf_file_metadata - r = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 372, in _request_wrapper - response = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 396, in _request_wrapper - hf_raise_for_status(response) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status - raise RepositoryNotFoundError(message, 
response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-669495ca-186c53a7650bea1634f7ec7c;a8a285f6-f2de-4bf5-9973-6b32c89bcd29) - -Repository Not Found for url: https://huggingface.co/1/resolve/main/config.json. -Please make sure you specified the correct `repo_id` and `repo_type`. -If you are trying to access a private or gated repo, make sure you are authenticated. - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 425, in cached_file - raise EnvironmentError( -OSError: 1 is not a local folder and is not a valid model identifier listed on 'https://huggingface.co/models' -If this is a private repository, make sure to pass a token having permission to this repo either by logging in with `huggingface-cli login` or by passing `token=` - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,2,64,1,,, -4bit-awq-exllama-v2-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,internlm/internlm-20b,internlm/internlm-20b,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most 
recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 613, in resolve_trust_remote_code - answer = input( -EOFError: EOF when reading a line - -During handling of the above exception, another exception occurred: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 231, in load_model_with_no_weights - self.create_no_weights_model() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 213, in create_no_weights_model - meta_model = self.automodel_class.from_config(self.pretrained_config) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 419, in from_config - trust_remote_code = resolve_trust_remote_code( - File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 626, in resolve_trust_remote_code - raise ValueError( -ValueError: The repository for internlm/internlm-20b contains custom code which must be executed to correctly load the model. You can inspect the repository content at https://hf.co/internlm/internlm-20b. -Please pass the argument `trust_remote_code=True` to allow custom code to be run. - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,2,64,1,,, -4bit-awq-exllama-v2-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,internlm/internlm2-20b,internlm/internlm2-20b,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 613, in resolve_trust_remote_code - answer = input( -EOFError: EOF when reading a line - -During handling of the above exception, another exception occurred: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - 
self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 231, in load_model_with_no_weights - self.create_no_weights_model() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 213, in create_no_weights_model - meta_model = self.automodel_class.from_config(self.pretrained_config) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 419, in from_config - trust_remote_code = resolve_trust_remote_code( - File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 626, in resolve_trust_remote_code - raise ValueError( -ValueError: The repository for internlm/internlm2-20b contains custom code which must be executed to correctly load the model. You can inspect the repository content at https://hf.co/internlm/internlm2-20b. -Please pass the argument `trust_remote_code=True` to allow custom code to be run. - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,2,64,1,,, -4bit-awq-exllama-v2-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,facebook/opt-30b,facebook/opt-30b,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - self.load_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3704, in from_pretrained - config = cls._autoset_attn_implementation( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1490, in _autoset_attn_implementation - config = cls._check_and_enable_sdpa( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1656, in 
_check_and_enable_sdpa - raise ValueError( -ValueError: OPTForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,2,64,1,,, -4bit-awq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,Qwen/Qwen1.5-MoE-A2.7B,Qwen/Qwen1.5-MoE-A2.7B,cuda,0,42,,,True,,,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run - report = scenario.run(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run - self.run_model_loading_tracking(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking - backend.load() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load - self.load_transformers_model() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model - self.load_transformers_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights - self.load_transformers_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained - self.pretrained_model = self.automodel_loader.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4037, in from_pretrained - hf_quantizer.postprocess_model(model) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/base.py"", line 195, in postprocess_model - return self._process_model_after_weight_loading(model, **kwargs) - File 
""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/quantizer_awq.py"", line 107, in _process_model_after_weight_loading - model = post_init_awq_exllama_modules(model, self.quantization_config.exllama_config) - File ""/usr/local/lib/python3.10/dist-packages/transformers/integrations/awq.py"", line 466, in post_init_awq_exllama_modules - model = exllamav2_post_init( - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllamav2.py"", line 200, in exllamav2_post_init - submodule.post_init(scratch_space=model.scratch_spaces[device]) - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllamav2.py"", line 83, in post_init - self.q_handle = exlv2_ext.make_q_matrix( -RuntimeError: q_weight and gptq_scales have incompatible shapes - -",qwen2_moe,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,2,64,1,,, -4bit-awq-exllama-v2-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,mistralai/Mixtral-8x7B-v0.1,mistralai/Mixtral-8x7B-v0.1,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - self.load_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3904, in from_pretrained - dispatch_model(model, **device_map_kwargs) - File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 489, in dispatch_model - model.to(device) - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 2796, in to - return super().to(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1173, in to - return self._apply(convert) - File 
""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 779, in _apply - module._apply(fn) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 779, in _apply - module._apply(fn) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 779, in _apply - module._apply(fn) - [Previous line repeated 2 more times] - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 853, in _apply - self._buffers[key] = fn(buf) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1159, in convert - return t.to( -torch.cuda.OutOfMemoryError: CUDA out of memory. Tried to allocate 20.00 MiB. GPU - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,2,64,1,,, -4bit-awq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,EleutherAI/pythia-410m,EleutherAI/pythia-410m,cuda,0,42,,,True,,,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,gpt_neox,MB,1074.520064,1200.095232,0.0,570.425344,536.326656,s,1,7.728947265625,7.728947265625,0.0,7.728947265625,7.728947265625,7.728947265625,7.728947265625,[7.728947265625],,kWh,1.1266836274986267e-05,6.159280645193524e-06,1.4955567520014146e-05,3.2381684440193934e-05,,MB,1546.399744,1273.495552,0.0,624.951296,594.377728,s,10,0.19308963203430177,0.019308963203430174,0.0007097530057981757,0.018976927757263184,0.020464694023132324,0.02072154664993286,0.02092702875137329,"[0.019001888275146483, 0.0209783992767334, 0.018903167724609374, 0.020407615661621095, 0.01888038444519043, 0.018819904327392577, 0.01909600067138672, 0.018951967239379884, 0.018907424926757812, 0.019142879486083984]",tokens/s,13258.09145229105,kWh,2.2556671376694137e-07,1.235772148194602e-07,5.339842058130905e-07,8.831281343994921e-07,tokens/kWh,289878659.76444566,MB,1573.187584,1290.272768,0.0,639.63136,607.71072,s,10,10.469299072265624,1.0469299072265625,0.004562610566354146,1.0468347778320313,1.0516177001953124,1.0544896362304688,1.056787185058594,"[1.0509794921875, 1.044266845703125, 1.0403541259765625, 1.04698291015625, 1.04489404296875, 1.0467540283203125, 1.04691552734375, 1.041950439453125, 1.048840087890625, 1.057361572265625]",tokens/s,60.175948327710145,kWh,1.2330444580329265e-05,6.755659308209145e-06,2.0021504460782866e-05,3.910760834932127e-05,tokens/kWh,1610939.7290998846,,s,630,10.464471012115492,0.016610271447802347,0.0002875812136867957,0.016532464027404783,0.016943820571899415,0.017189631366729734,0.01775917121887207,"[0.01643519973754883, 0.016507904052734376, 0.016471040725708007, 0.016648191452026367, 0.01652227210998535, 0.01650787162780762, 0.01662566375732422, 0.016483327865600587, 0.01658470344543457, 0.016519168853759765, 0.016534528732299804, 0.016507904052734376, 0.016487424850463867, 0.016553983688354493, 0.016471040725708007, 0.016441343307495117, 0.016471040725708007, 0.01666662406921387, 0.016498687744140626, 0.016533504486083983, 0.016500736236572267, 0.01644441604614258, 0.016541696548461913, 0.016522239685058594, 
0.016497663497924805, 0.01637171173095703, 0.016357376098632814, 0.0165980167388916, 0.016744447708129884, 0.01657241630554199, 0.016570367813110352, 0.016525312423706053, 0.01658163261413574, 0.016547840118408205, 0.01661747169494629, 0.016578559875488282, 0.016498687744140626, 0.016694271087646484, 0.01697996711730957, 0.016850944519042968, 0.01680281639099121, 0.01658572769165039, 0.016518144607543944, 0.016545791625976563, 0.01722777557373047, 0.01658470344543457, 0.016442367553710938, 0.016500736236572267, 0.016655359268188476, 0.017560575485229494, 0.016753664016723634, 0.016541696548461913, 0.016533504486083983, 0.01665023994445801, 0.01653555107116699, 0.016516096115112306, 0.017330175399780275, 0.01721036720275879, 0.017748992919921876, 0.01744076728820801, 0.01701785659790039, 0.016675840377807616, 0.017763328552246094, 0.017366016387939453, 0.016793600082397463, 0.016649215698242188, 0.016487424850463867, 0.01644441604614258, 0.016630783081054687, 0.016561151504516602, 0.016579584121704103, 0.016542720794677734, 0.016480255126953124, 0.01660825538635254, 0.016903167724609376, 0.01697689628601074, 0.016849920272827147, 0.016694271087646484, 0.016664575576782227, 0.01660927963256836, 0.016508928298950197, 0.016481279373168945, 0.016564224243164064, 0.016499711990356446, 0.016540672302246092, 0.016494592666625976, 0.016474111557006836, 0.01654374313354492, 0.016489471435546875, 0.016502784729003905, 0.01638400077819824, 0.016511999130249023, 0.016540672302246092, 0.016514047622680664, 0.016472063064575194, 0.016485376358032225, 0.016545791625976563, 0.016499711990356446, 0.016473087310791015, 0.01642393684387207, 0.016449535369873047, 0.016463872909545898, 0.01686835289001465, 0.016472063064575194, 0.016526336669921874, 0.016289791107177733, 0.016498687744140626, 0.016492544174194337, 0.016570367813110352, 0.016540672302246092, 0.016491519927978517, 0.01646899223327637, 0.016467967987060548, 0.016525312423706053, 0.0164454402923584, 0.017160192489624023, 0.016665599822998048, 0.016516096115112306, 0.016552959442138672, 0.016545791625976563, 0.016468000411987305, 0.016568288803100586, 0.016447488784790038, 0.016496639251708984, 0.016528383255004882, 0.016511999130249023, 0.016327680587768553, 0.016459775924682618, 0.01660723114013672, 0.016510976791381835, 0.0165928955078125, 0.016734207153320312, 0.01681817626953125, 0.01643212890625, 0.01646899223327637, 0.01656012725830078, 0.016485376358032225, 0.016256000518798826, 0.016497663497924805, 0.016780288696289062, 0.01724825668334961, 0.016525312423706053, 0.016318464279174806, 0.01624166488647461, 0.01637785530090332, 0.01624575996398926, 0.016322559356689453, 0.016295936584472655, 0.0164136962890625, 0.01627136039733887, 0.01640959930419922, 0.016267263412475585, 0.016514047622680664, 0.016491519927978517, 0.016493568420410155, 0.016289791107177733, 0.016742399215698242, 0.016525312423706053, 0.01657651138305664, 0.016290815353393554, 0.016499711990356446, 0.01681100845336914, 0.016579584121704103, 0.016313343048095702, 0.016349184036254884, 0.016167936325073243, 0.016474111557006836, 0.01683865547180176, 0.01681817626953125, 0.016364543914794923, 0.01658470344543457, 0.016542720794677734, 0.016524288177490236, 0.016514047622680664, 0.01681817626953125, 0.016732160568237304, 0.01644339179992676, 0.016562175750732423, 0.016466943740844727, 0.016528383255004882, 0.01645260810852051, 0.016526336669921874, 0.016497663497924805, 0.016539648056030275, 0.016523263931274415, 0.01657344055175781, 0.016536575317382812, 0.016481279373168945, 
0.016457727432250976, 0.01741107177734375, 0.01723084831237793, 0.017082368850708008, 0.01705369567871094, 0.016464895248413085, 0.016463872909545898, 0.01656934356689453, 0.016491519927978517, 0.01641983985900879, 0.016528383255004882, 0.01638809585571289, 0.016475135803222657, 0.016400415420532225, 0.016593887329101564, 0.016416767120361327, 0.016485376358032225, 0.01641574478149414, 0.01658060836791992, 0.016430080413818358, 0.016483327865600587, 0.016355327606201172, 0.016500736236572267, 0.01643110466003418, 0.016515071868896485, 0.01642393684387207, 0.016528383255004882, 0.016886783599853517, 0.016850944519042968, 0.0164270076751709, 0.016492544174194337, 0.016837631225585938, 0.016532480239868166, 0.01642393684387207, 0.01662156867980957, 0.016454656600952147, 0.016318464279174806, 0.016315391540527344, 0.016538623809814454, 0.016464895248413085, 0.016547872543334962, 0.016493535995483397, 0.016525312423706053, 0.016450559616088867, 0.016498687744140626, 0.01699839973449707, 0.017552383422851564, 0.018496511459350586, 0.01742233657836914, 0.016943103790283204, 0.016668672561645507, 0.01655196762084961, 0.01662870407104492, 0.0165980167388916, 0.016540672302246092, 0.016525312423706053, 0.016489471435546875, 0.01640755271911621, 0.016310272216796876, 0.016305152893066405, 0.016331775665283203, 0.016333824157714845, 0.016326656341552736, 0.016290815353393554, 0.016356351852416993, 0.016433151245117187, 0.016514047622680664, 0.016475135803222657, 0.016523263931274415, 0.016455680847167968, 0.016476160049438478, 0.016513023376464844, 0.016501760482788085, 0.016742399215698242, 0.016517120361328123, 0.01641881561279297, 0.016465919494628906, 0.016555007934570314, 0.016493568420410155, 0.016549888610839843, 0.016777215957641603, 0.01641472053527832, 0.016548896789550783, 0.01646588706970215, 0.016537599563598633, 0.016461824417114256, 0.01698508834838867, 0.017063936233520507, 0.016731136322021483, 0.01660825538635254, 0.016471040725708007, 0.016571392059326173, 0.017475584030151366, 0.016744447708129884, 0.016728063583374024, 0.016508928298950197, 0.016686080932617187, 0.01658367919921875, 0.016661504745483398, 0.016547840118408205, 0.016742399215698242, 0.016524288177490236, 0.01660518455505371, 0.016559104919433593, 0.016854015350341797, 0.01679155158996582, 0.016553983688354493, 0.016509952545166014, 0.01656524848937988, 0.016586751937866212, 0.01657753562927246, 0.016395263671875, 0.016496639251708984, 0.016514047622680664, 0.01653558349609375, 0.01668911933898926, 0.016505855560302735, 0.016502784729003905, 0.016534528732299804, 0.016471040725708007, 0.016525312423706053, 0.016482303619384766, 0.016517120361328123, 0.01643622398376465, 0.016526336669921874, 0.016464895248413085, 0.01641574478149414, 0.01635327911376953, 0.016528383255004882, 0.016457727432250976, 0.016509023666381836, 0.01645254325866699, 0.01641468811035156, 0.016472063064575194, 0.016519168853759765, 0.016558080673217773, 0.016438272476196288, 0.016488447189331054, 0.01646899223327637, 0.016513023376464844, 0.01636249542236328, 0.01645260810852051, 0.016464895248413085, 0.016564224243164064, 0.016465919494628906, 0.01665023994445801, 0.016552959442138672, 0.016513023376464844, 0.016446464538574217, 0.016522239685058594, 0.016950271606445313, 0.016502784729003905, 0.016472063064575194, 0.016530431747436524, 0.016511999130249023, 0.01640550422668457, 0.016485376358032225, 0.016780288696289062, 0.016546815872192384, 0.016474143981933594, 0.01627235221862793, 0.01653555107116699, 0.01643622398376465, 
0.01655193519592285, 0.017473535537719728, 0.016840736389160157, 0.016532447814941405, 0.016505855560302735, 0.01654377555847168, 0.016550880432128906, 0.01640243148803711, 0.01656012725830078, 0.01641164779663086, 0.016526336669921874, 0.016533504486083983, 0.016540735244750977, 0.01649452781677246, 0.016450559616088867, 0.016434175491333008, 0.016558080673217773, 0.016455680847167968, 0.016574464797973632, 0.016511999130249023, 0.016531455993652345, 0.016541696548461913, 0.01796403121948242, 0.01844223976135254, 0.017501184463500977, 0.016889856338500975, 0.016898048400878905, 0.01641062355041504, 0.01657548713684082, 0.016496639251708984, 0.01656729507446289, 0.016537599563598633, 0.01655705642700195, 0.016511999130249023, 0.01660518455505371, 0.0164771842956543, 0.016380928039550782, 0.01656012725830078, 0.01658572769165039, 0.016522239685058594, 0.016532480239868166, 0.016934911727905275, 0.017153024673461914, 0.017031167984008787, 0.01696051216125488, 0.016475135803222657, 0.01686425590515137, 0.01654374313354492, 0.016511999130249023, 0.01659903907775879, 0.01663283157348633, 0.01641062355041504, 0.01656729507446289, 0.017090560913085938, 0.01664204788208008, 0.01658470344543457, 0.01658367919921875, 0.01661440086364746, 0.016365568161010743, 0.016697343826293946, 0.01675164794921875, 0.01666966438293457, 0.01661644744873047, 0.016492544174194337, 0.016913408279418944, 0.01656012725830078, 0.01640959930419922, 0.016540672302246092, 0.016536575317382812, 0.016458751678466797, 0.016515071868896485, 0.016539648056030275, 0.01655193519592285, 0.016508928298950197, 0.016518144607543944, 0.016466943740844727, 0.016328704833984374, 0.01632972717285156, 0.01636249542236328, 0.01656831932067871, 0.016548864364624022, 0.016481279373168945, 0.01643622398376465, 0.016297983169555663, 0.016277503967285157, 0.018135040283203126, 0.01723494338989258, 0.016719871520996094, 0.01655193519592285, 0.016566272735595702, 0.01638400077819824, 0.01653660774230957, 0.016480224609375, 0.016547840118408205, 0.01643519973754883, 0.016122880935668944, 0.016303104400634767, 0.016519168853759765, 0.016482303619384766, 0.016524288177490236, 0.016711679458618164, 0.017073152542114257, 0.016656383514404297, 0.0161976318359375, 0.016433151245117187, 0.01679462432861328, 0.016506879806518555, 0.0162795524597168, 0.0164270076751709, 0.016534528732299804, 0.01643212890625, 0.01643519973754883, 0.01684377670288086, 0.016667648315429686, 0.016474111557006836, 0.016536575317382812, 0.01641983985900879, 0.01660518455505371, 0.016291839599609375, 0.017520639419555666, 0.017245183944702147, 0.016684032440185546, 0.016536575317382812, 0.016671743392944336, 0.016561151504516602, 0.01626316833496094, 0.016256000518798826, 0.016313343048095702, 0.016555007934570314, 0.01656729507446289, 0.016559104919433593, 0.016530431747436524, 0.016514047622680664, 0.016296960830688476, 0.016532480239868166, 0.01680998420715332, 0.01657548713684082, 0.016465919494628906, 0.016546815872192384, 0.016530431747436524, 0.016537599563598633, 0.016340991973876954, 0.016395263671875, 0.01640451240539551, 0.01687958335876465, 0.016562175750732423, 0.016481279373168945, 0.016513023376464844, 0.01663385581970215, 0.01638400077819824, 0.016662527084350585, 0.016283647537231445, 0.01623756790161133, 0.01616896057128906, 0.016344127655029298, 0.01658464050292969, 0.016495616912841796, 0.01640447998046875, 0.016462848663330077, 0.017115135192871094, 0.017022975921630858, 0.01655193519592285, 0.016260095596313476, 0.016315391540527344, 0.016257024765014647, 
0.017040384292602538, 0.016498687744140626, 0.016523263931274415, 0.016312320709228514, 0.016587776184082033, 0.016688127517700196, 0.0170199031829834, 0.016693311691284178, 0.016491455078125, 0.016498687744140626, 0.016544767379760742, 0.01652124786376953, 0.016625631332397462, 0.018134016036987305, 0.01701888084411621, 0.01686425590515137, 0.01657548713684082, 0.016555007934570314, 0.01679871940612793, 0.016681983947753908, 0.016323583602905273, 0.01641267204284668, 0.016458751678466797, 0.016496639251708984, 0.016639999389648438, 0.016324607849121094, 0.016639999389648438, 0.016695295333862305, 0.016368640899658202, 0.016308223724365235, 0.016536575317382812, 0.016539648056030275, 0.01660825538635254, 0.017763328552246094, 0.016531455993652345, 0.016478208541870116, 0.016571392059326173, 0.0166246395111084, 0.01663385581970215, 0.016491519927978517, 0.01663385581970215, 0.0166246395111084, 0.01761996841430664, 0.016785408020019533, 0.016563232421875, 0.0169072322845459, 0.016586751937866212, 0.016532480239868166, 0.016863231658935548, 0.016733184814453125, 0.016441343307495117, 0.01642291259765625, 0.016517120361328123, 0.016829439163208008, 0.01721548843383789, 0.0169881591796875, 0.016978944778442383, 0.01704550361633301, 0.017268735885620116, 0.01704960060119629, 0.01700556755065918, 0.017054719924926756, 0.01676595115661621, 0.016523263931274415, 0.016627712249755858, 0.01744076728820801, 0.01680486488342285, 0.017048576354980468, 0.016923648834228516, 0.016587776184082033, 0.016548864364624022, 0.017246208190917968, 0.017490943908691405, 0.017164287567138673, 0.016961536407470702, 0.01677107238769531, 0.016925695419311524, 0.01706188774108887, 0.016679935455322266, 0.01680998420715332, 0.01685606384277344, 0.01646899223327637, 0.016514047622680664, 0.016887807846069337, 0.016521215438842773, 0.01696460723876953, 0.01661235237121582, 0.016476160049438478, 0.01657344055175781, 0.016937984466552734, 0.016502784729003905, 0.016724992752075195, 0.016741376876831054, 0.01678643226623535, 0.016532480239868166, 0.016855039596557618, 0.016240640640258788, 0.016491519927978517, 0.016723968505859374, 0.016548864364624022, 0.016461824417114256, 0.016865280151367186, 0.016590848922729492, 0.016518144607543944, 0.016502784729003905, 0.016499711990356446, 0.016518144607543944, 0.016559104919433593, 0.016894975662231446, 0.016480255126953124, 0.016518144607543944, 0.016479232788085937, 0.016943103790283204, 0.017309696197509765]",tokens/s,60.20371209118963,,,2,64,1,,, -4bit-awq-exllama-v2-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,databricks/dbrx-base,databricks/dbrx-base,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File 
""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 304, in hf_raise_for_status - response.raise_for_status() - File ""/usr/local/lib/python3.10/dist-packages/requests/models.py"", line 1024, in raise_for_status - raise HTTPError(http_error_msg, response=self) -requests.exceptions.HTTPError: 403 Client Error: Forbidden for url: https://huggingface.co/databricks/dbrx-base/resolve/main/config.json - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1722, in _get_metadata_or_catch_error - metadata = get_hf_file_metadata(url=url, proxies=proxies, timeout=etag_timeout, headers=headers) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1645, in get_hf_file_metadata - r = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 372, in _request_wrapper - response = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 396, in _request_wrapper - hf_raise_for_status(response) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 367, in hf_raise_for_status - raise HfHubHTTPError(message, response=response) from e -huggingface_hub.utils._errors.HfHubHTTPError: (Request ID: Root=1-66947b4c-7972c43449da34fd19dbae7e;2050c297-b44e-4f80-b138-e260934b225e) - -403 Forbidden: Please enable access to public gated repositories in your fine-grained token settings to view this repository.. -Cannot access content at: https://huggingface.co/databricks/dbrx-base/resolve/main/config.json. -If you are trying to create or update content,make sure you have a token with the `write` role. - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1221, in hf_hub_download - return _hf_hub_download_to_cache_dir( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1325, in _hf_hub_download_to_cache_dir - _raise_on_head_call_error(head_call_error, force_download, local_files_only) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1826, in _raise_on_head_call_error - raise LocalEntryNotFoundError( -huggingface_hub.utils._errors.LocalEntryNotFoundError: An error happened while trying to locate the file on the Hub and we cannot find the requested files in the local cache. Please check your connection and try again or make sure your Internet connection is on. 
- -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 445, in cached_file - raise EnvironmentError( -OSError: We couldn't connect to 'https://huggingface.co' to load this file, couldn't find it in the cached files and it looks like databricks/dbrx-base is not the path to a directory containing a file named config.json. -Checkout your internet connection or see how to run the library in offline mode at 'https://huggingface.co/docs/transformers/installation#offline-mode'. 
- -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,2,64,1,,, -4bit-awq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,Qwen/Qwen1.5-7B,Qwen/Qwen1.5-7B,cuda,0,42,,,True,,,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,qwen2,MB,4947.038208,8236.040192,0.0,7606.370304,6988.678144,s,1,11.6076181640625,11.6076181640625,0.0,11.6076181640625,11.6076181640625,11.6076181640625,11.6076181640625,[11.6076181640625],,kWh,5.57196921972137e-05,3.0511752755013304e-05,0.00010050230262403925,0.00018673374757626626,,MB,2839.61344,8257.011712,0.0,7608.467456,6915.138048,s,10,1.128373596191406,0.11283735961914063,3.5602464445836766e-05,0.11283316802978516,0.1128817024230957,0.1129000186920166,0.11291467170715333,"[0.1127890853881836, 0.11283821105957031, 0.11284690856933594, 0.11282182312011718, 0.11280995178222657, 0.11283628845214844, 0.11283004760742188, 0.11287763214111328, 0.11280531311035157, 0.1129183349609375]",tokens/s,2268.752130181666,kWh,1.3343084340355216e-06,7.311363170328065e-07,5.9688437263806625e-06,8.03428847744899e-06,tokens/kWh,31863431.43124031,MB,2843.938816,8259.108864,0.0,7610.564608,6915.140608,s,10,17.163080444335936,1.7163080444335936,0.015459506701924707,1.7128804931640624,1.735369970703125,1.7388975708007812,1.7417196508789063,"[1.7179234619140624, 1.7309359130859374, 1.6992987060546876, 1.6943123779296876, 1.7041148681640625, 1.706790771484375, 1.7078375244140624, 1.7248555908203125, 1.7424251708984375, 1.7345860595703124]",tokens/s,36.70669738123316,kWh,2.0269057241034993e-05,1.1107783124620087e-05,5.3465592710020127e-05,8.484243307567519e-05,tokens/kWh,742552.9621929532,,s,630,17.161152503967283,0.027239924609471885,0.0005368746559596173,0.026990079879760744,0.027944037818908692,0.028110848140716554,0.028795156764984134,"[0.027256832122802735, 0.026827775955200195, 0.027030527114868166, 0.026801151275634767, 0.027599872589111327, 0.027586559295654296, 0.027898880004882814, 0.027922431945800782, 0.02795724868774414, 0.027249664306640626, 0.02792448043823242, 0.026862592697143556, 0.02693529510498047, 0.02700492858886719, 0.02731622314453125, 0.026994688034057617, 0.026926080703735353, 0.026952703475952147, 0.02698956871032715, 0.026968063354492186, 0.027017215728759765, 0.027015167236328123, 0.027420671463012695, 0.02790399932861328, 0.02672332763671875, 0.02696499252319336, 0.027793407440185547, 0.02717184066772461, 0.02700595283508301, 0.026900480270385742, 0.026594303131103517, 0.026796031951904296, 0.02691276741027832, 0.02692095947265625, 0.027082752227783204, 0.026689535140991212, 0.027181055068969725, 0.027021312713623048, 0.02691379165649414, 0.02694655990600586, 0.026826751708984374, 0.02695884895324707, 0.027015167236328123, 0.026933248519897462, 0.02688204765319824, 0.026970111846923828, 0.027098112106323242, 0.028619775772094725, 0.02792959976196289, 0.02675916862487793, 0.027844608306884764, 0.0279418888092041, 0.02791219139099121, 0.027864063262939453, 0.027006975173950197, 0.027665407180786132, 0.030448640823364258, 
0.027823104858398437, 0.026969087600708007, 0.02713497543334961, 0.027461631774902344, 0.02671513557434082, 0.0269752311706543, 0.026985471725463867, 0.027049983978271484, 0.026986495971679687, 0.02697318458557129, 0.02693120002746582, 0.027502592086791993, 0.027068416595458986, 0.0269803524017334, 0.026976255416870116, 0.02690662384033203, 0.026933248519897462, 0.027278335571289062, 0.027320320129394532, 0.026685440063476562, 0.026616832733154298, 0.027784191131591796, 0.02818662452697754, 0.028161024093627928, 0.02833919906616211, 0.02697420883178711, 0.02691584014892578, 0.026909696578979493, 0.0269803524017334, 0.026925056457519532, 0.026942464828491212, 0.027025407791137695, 0.028055551528930665, 0.02714419174194336, 0.026851327896118164, 0.02698137664794922, 0.0267775993347168, 0.02690559959411621, 0.02696396827697754, 0.027205631256103514, 0.02777907180786133, 0.02729267120361328, 0.03122483253479004, 0.028035072326660155, 0.02695577621459961, 0.02694963264465332, 0.026952703475952147, 0.026977279663085937, 0.028099584579467773, 0.027022335052490236, 0.027666431427001953, 0.027865087509155274, 0.028252159118652344, 0.02816204833984375, 0.02832793617248535, 0.02794086456298828, 0.028020736694335937, 0.028078079223632812, 0.02858700752258301, 0.028453887939453124, 0.02792448043823242, 0.02711142349243164, 0.026622976303100586, 0.027854848861694335, 0.027995136260986327, 0.0279736328125, 0.02831257629394531, 0.027165695190429686, 0.02791628837585449, 0.02690662384033203, 0.02691584014892578, 0.026846208572387696, 0.026951679229736326, 0.026942464828491212, 0.02694041633605957, 0.02696703910827637, 0.026670080184936523, 0.026687488555908204, 0.02712063980102539, 0.026950656890869142, 0.026866687774658202, 0.026763263702392577, 0.02690559959411621, 0.0268984317779541, 0.026934272766113283, 0.026754047393798826, 0.02698956871032715, 0.02697216033935547, 0.026987520217895508, 0.026968063354492186, 0.026926080703735353, 0.02736844825744629, 0.027091968536376954, 0.02697932815551758, 0.026812416076660156, 0.0269486083984375, 0.027016191482543944, 0.02691481590270996, 0.026983423233032225, 0.02695680046081543, 0.02692095947265625, 0.026805248260498047, 0.02676121520996094, 0.026685440063476562, 0.027066368103027344, 0.028536832809448243, 0.027416576385498048, 0.027002880096435547, 0.027019264221191407, 0.026993663787841796, 0.026990591049194337, 0.02700595283508301, 0.027044864654541017, 0.026812416076660156, 0.02696601676940918, 0.02695475196838379, 0.026926080703735353, 0.026875904083251953, 0.026976255416870116, 0.027012096405029298, 0.02715340805053711, 0.02696294403076172, 0.026860544204711914, 0.02691276741027832, 0.026945535659790038, 0.02691379165649414, 0.02687385559082031, 0.026878976821899415, 0.02697113609313965, 0.027001855850219726, 0.027044864654541017, 0.02687385559082031, 0.02707967948913574, 0.02697420883178711, 0.026977279663085937, 0.02692095947265625, 0.02679910469055176, 0.026856447219848634, 0.02695884895324707, 0.02691584014892578, 0.02693222427368164, 0.026689535140991212, 0.02680012893676758, 0.026950656890869142, 0.02695680046081543, 0.026762239456176756, 0.02660147285461426, 0.026867712020874023, 0.026847232818603517, 0.026869760513305665, 0.027385856628417967, 0.026908672332763672, 0.027022335052490236, 0.02710937690734863, 0.02693017578125, 0.02669977569580078, 0.02679193687438965, 0.026878976821899415, 0.026864639282226564, 0.02695782470703125, 0.026822656631469727, 0.02711961555480957, 0.027062271118164064, 0.02691379165649414, 0.02682368087768555, 
0.02687385559082031, 0.026894336700439454, 0.026848255157470705, 0.026830848693847657, 0.026877952575683595, 0.026835968017578125, 0.026893312454223633, 0.026885120391845704, 0.026679296493530274, 0.026416128158569335, 0.026858495712280273, 0.026874879837036132, 0.026814464569091798, 0.0269486083984375, 0.027464704513549806, 0.026861568450927735, 0.02691584014892578, 0.026925056457519532, 0.02679091262817383, 0.026848255157470705, 0.026887168884277345, 0.026848255157470705, 0.026909696578979493, 0.026916864395141602, 0.026851327896118164, 0.026917888641357423, 0.026854400634765626, 0.02673971176147461, 0.026917888641357423, 0.02688921546936035, 0.026993663787841796, 0.026887168884277345, 0.026870784759521486, 0.026657791137695314, 0.02688921546936035, 0.02694144058227539, 0.026999807357788085, 0.0269486083984375, 0.02692300796508789, 0.027002880096435547, 0.026961919784545898, 0.026933248519897462, 0.026808319091796876, 0.027073535919189453, 0.02694144058227539, 0.026987520217895508, 0.027001855850219726, 0.027040767669677734, 0.026987520217895508, 0.027044864654541017, 0.02696703910827637, 0.026887168884277345, 0.026985471725463867, 0.02698956871032715, 0.02695577621459961, 0.026835968017578125, 0.026959871292114256, 0.027041791915893554, 0.026928127288818358, 0.02754457664489746, 0.02710220718383789, 0.026917888641357423, 0.026888191223144533, 0.02698240089416504, 0.02695782470703125, 0.026976255416870116, 0.02732339286804199, 0.02793267250061035, 0.027014144897460936, 0.028271615982055662, 0.02690457534790039, 0.026952703475952147, 0.02699673652648926, 0.02695782470703125, 0.026953727722167968, 0.02688102340698242, 0.02775449562072754, 0.02779955291748047, 0.026976255416870116, 0.02695680046081543, 0.02693222427368164, 0.027258880615234377, 0.026945535659790038, 0.02710425567626953, 0.027268096923828124, 0.027280384063720704, 0.026910720825195314, 0.02691379165649414, 0.026892288208007813, 0.026864639282226564, 0.027001855850219726, 0.02679910469055176, 0.027255807876586914, 0.026879999160766603, 0.02693529510498047, 0.02715238380432129, 0.026861568450927735, 0.02652774429321289, 0.02676121520996094, 0.029030399322509767, 0.02691379165649414, 0.02692095947265625, 0.027386880874633788, 0.026855424880981447, 0.026878976821899415, 0.026886144638061524, 0.026860544204711914, 0.026847232818603517, 0.026868736267089844, 0.026817535400390623, 0.026927104949951174, 0.027280384063720704, 0.02816819190979004, 0.02730291175842285, 0.027936767578125, 0.02691584014892578, 0.026934272766113283, 0.026885120391845704, 0.026521600723266602, 0.02711756706237793, 0.026991615295410155, 0.026839040756225587, 0.026895360946655275, 0.026933248519897462, 0.026879999160766603, 0.02713907241821289, 0.026887168884277345, 0.02690764808654785, 0.0275599365234375, 0.027019264221191407, 0.026908672332763672, 0.027627519607543945, 0.02717900848388672, 0.02676838493347168, 0.026557439804077147, 0.026810367584228514, 0.027288576126098633, 0.027469823837280274, 0.027030527114868166, 0.026995712280273438, 0.026778623580932616, 0.02693222427368164, 0.027467775344848632, 0.02714419174194336, 0.027009023666381835, 0.027014144897460936, 0.027027456283569336, 0.026928127288818358, 0.02729267120361328, 0.027602943420410156, 0.026653696060180664, 0.026653696060180664, 0.0283504638671875, 0.027007999420166014, 0.02751692771911621, 0.027159551620483398, 0.02748313522338867, 0.02711347198486328, 0.027023359298706053, 0.026953727722167968, 0.026925056457519532, 0.02696601676940918, 0.02697318458557129, 0.026856447219848634, 
0.027040767669677734, 0.02695884895324707, 0.02694041633605957, 0.027245567321777343, 0.028057600021362306, 0.02830745506286621, 0.027806720733642577, 0.027031551361083983, 0.026959871292114256, 0.027674623489379883, 0.027273216247558595, 0.027267072677612306, 0.02700492858886719, 0.026976255416870116, 0.02714726448059082, 0.026776575088500978, 0.026814464569091798, 0.02691276741027832, 0.0267458553314209, 0.026993663787841796, 0.02732646369934082, 0.02694041633605957, 0.02694041633605957, 0.026895360946655275, 0.02690355110168457, 0.026869760513305665, 0.027245567321777343, 0.02673766326904297, 0.02690764808654785, 0.02687385559082031, 0.02690764808654785, 0.0268984317779541, 0.027381759643554687, 0.02768998336791992, 0.027374591827392578, 0.026867712020874023, 0.02674278450012207, 0.026833919525146483, 0.027263999938964844, 0.02676121520996094, 0.026572799682617186, 0.02688102340698242, 0.028689407348632814, 0.028391424179077147, 0.02710425567626953, 0.026987520217895508, 0.026796031951904296, 0.026977279663085937, 0.02690559959411621, 0.027191295623779296, 0.026928127288818358, 0.02691276741027832, 0.026908672332763672, 0.02692198371887207, 0.026888191223144533, 0.02707865524291992, 0.02746675109863281, 0.027620351791381836, 0.030055423736572266, 0.027588607788085938, 0.026874879837036132, 0.02693529510498047, 0.02693836784362793, 0.027028480529785157, 0.02695577621459961, 0.026991615295410155, 0.026992639541625976, 0.027847679138183593, 0.027875328063964845, 0.027701248168945314, 0.027830272674560546, 0.027882495880126954, 0.027923456192016603, 0.02788761520385742, 0.02838015937805176, 0.028120063781738282, 0.028000255584716797, 0.028017663955688478, 0.027853824615478515, 0.028006399154663086, 0.027364351272583007, 0.027044864654541017, 0.02880614471435547, 0.02794803237915039, 0.027793407440185547, 0.027837440490722655, 0.027886592864990234, 0.027839487075805663, 0.026999807357788085, 0.027051008224487305, 0.026968063354492186, 0.026994688034057617, 0.027011072158813477, 0.027003904342651368, 0.027088895797729492, 0.026968063354492186, 0.026933248519897462, 0.027029504776000978, 0.027099136352539063, 0.027027456283569336, 0.026986495971679687, 0.027034624099731445, 0.027034624099731445, 0.027030527114868166, 0.027289600372314454, 0.027022335052490236, 0.026868736267089844, 0.027017215728759765, 0.026832895278930666, 0.026612735748291014, 0.027007999420166014, 0.027632640838623046, 0.027074560165405274, 0.026976255416870116, 0.02693836784362793, 0.02677555274963379, 0.02691993522644043, 0.027038719177246092, 0.027085823059082033, 0.02876825523376465, 0.029171712875366212, 0.028267520904541016, 0.028047359466552735, 0.027802623748779298, 0.02773708724975586, 0.026936319351196288, 0.02696294403076172, 0.02795929527282715, 0.02778828811645508, 0.027790336608886718, 0.027923456192016603, 0.028051456451416015, 0.02812620735168457, 0.02792857551574707, 0.02790297508239746, 0.02789990425109863, 0.027856895446777344, 0.02787942314147949, 0.027938816070556642, 0.027591680526733397, 0.02669977569580078, 0.02674995231628418, 0.027325439453125, 0.02814566421508789, 0.02793164825439453, 0.027891712188720705, 0.027876352310180662, 0.02711039924621582, 0.026580991744995116, 0.02736639976501465, 0.026862592697143556, 0.026927104949951174, 0.02693120002746582, 0.026888191223144533, 0.02695475196838379, 0.027520000457763674, 0.027862016677856444, 0.027850751876831056, 0.027769855499267578, 0.02696499252319336, 0.02695577621459961, 0.02789580726623535, 0.0277391357421875, 0.027868160247802733, 
0.027841535568237305, 0.027918336868286132, 0.02793779182434082, 0.02796441650390625, 0.02794393539428711, 0.027636735916137696, 0.026613759994506835, 0.02699776077270508, 0.02760601615905762, 0.02791219139099121, 0.027836416244506838, 0.027789312362670897, 0.027820032119750978, 0.027926528930664062, 0.02797772789001465, 0.028094463348388672, 0.02832896041870117, 0.029240320205688477, 0.028717056274414062, 0.028051456451416015, 0.02794495964050293, 0.02790399932861328, 0.027979776382446288, 0.02793471908569336, 0.027805696487426756, 0.026984447479248046, 0.02696703910827637, 0.02727628707885742, 0.027987968444824218, 0.02795929527282715, 0.02789580726623535, 0.028029951095581054, 0.027997184753417968, 0.027894784927368164, 0.027782144546508788, 0.02794905662536621, 0.02791219139099121, 0.02791116714477539, 0.027813888549804686, 0.027915264129638673, 0.02795929527282715, 0.02795110321044922, 0.027936767578125, 0.027551744461059572, 0.026845184326171875, 0.02697318458557129, 0.027029504776000978, 0.027390975952148438, 0.027429887771606445, 0.027651071548461914, 0.02797056007385254, 0.026901504516601563, 0.026712064743041993, 0.026901504516601563, 0.027031551361083983, 0.02696396827697754, 0.02690559959411621, 0.02689945602416992, 0.027092992782592775, 0.02688204765319824, 0.02776371192932129, 0.02776268768310547, 0.02788761520385742, 0.027825151443481445, 0.027858943939208985, 0.027820032119750978, 0.02733670425415039, 0.026771455764770507, 0.026866687774658202, 0.026831872940063478, 0.026853376388549805, 0.026945535659790038, 0.026984447479248046, 0.02694758415222168, 0.02689740753173828, 0.026879999160766603, 0.02775142478942871, 0.028063743591308594, 0.027716608047485353, 0.027801599502563477]",tokens/s,36.71082113245935,,,2,64,1,,, -4bit-awq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,Qwen/Qwen2-beta-14B,Qwen/Qwen2-beta-14B,cuda,0,42,,,True,,,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,,qwen2,MB,8237.52704,12519.473152,0.0,11882.463232,11315.947008,s,1,13.7421201171875,13.7421201171875,0.0,13.7421201171875,13.7421201171875,13.7421201171875,13.7421201171875,[13.7421201171875],,kWh,8.119423472083505e-05,4.4482210900759874e-05,0.00015841429339800983,0.00028409073901960476,,MB,4086.083584,12540.444672,0.0,11884.560384,11070.3104,s,10,2.0251124725341794,0.20251124725341793,6.321437661053479e-05,0.2024995803833008,0.20255399475097657,0.20261507873535156,0.20266394592285156,"[0.20253334045410157, 0.20248684692382812, 0.20246176147460937, 0.20267616271972655, 0.20244364929199218, 0.2025188751220703, 0.20254042053222657, 0.2024998016357422, 0.20245225524902344, 0.20249935913085937]",tokens/s,1264.1273187145378,kWh,2.395439416763818e-06,1.3124898395385556e-06,1.0535919539839956e-05,1.4243848796142329e-05,tokens/kWh,17972670.425238762,MB,4090.34752,12542.541824,0.0,11886.657536,11070.31296,s,10,21.795938476562497,2.17959384765625,0.012200670255361432,2.1802392578124996,2.1940314941406247,2.1954212646484375,2.1965330810546875,"[2.181044677734375, 2.162013427734375, 2.158500244140625, 2.179433837890625, 
2.18443505859375, 2.176833984375, 2.171634765625, 2.19372265625, 2.19681103515625, 2.1915087890625]",tokens/s,28.904467714361022,kWh,2.605209428059704e-05,1.4277322915035139e-05,8.049884773236093e-05,0.00012082826492799308,tokens/kWh,521401.18073816993,,s,630,21.79385139846805,0.034593414918203204,0.0005890323533471328,0.03434598541259766,0.03546726379394532,0.035776357460021974,0.03670381679534912,"[0.034514942169189454, 0.03425177764892578, 0.034179073333740234, 0.03417702484130859, 0.03418009567260742, 0.034239486694335936, 0.03497574234008789, 0.035064830780029296, 0.0347883529663086, 0.03427635192871094, 0.034290687561035156, 0.03439718246459961, 0.034305023193359374, 0.03462963104248047, 0.03425894546508789, 0.03420569610595703, 0.03422719955444336, 0.0341401596069336, 0.03433881759643555, 0.034667518615722655, 0.034890750885009765, 0.034305023193359374, 0.03427840042114258, 0.03431219100952149, 0.03418214416503906, 0.034255870819091795, 0.03420159912109375, 0.034328575134277346, 0.034928638458251955, 0.03417190551757812, 0.034492416381835936, 0.03436544036865234, 0.03465011215209961, 0.03523993682861328, 0.034702335357666016, 0.035124225616455076, 0.03478015899658203, 0.03506892776489258, 0.03432550430297852, 0.034269184112548826, 0.034385921478271485, 0.03467366409301758, 0.034277374267578126, 0.03436032104492188, 0.034321407318115234, 0.03428147125244141, 0.03427328109741211, 0.035146751403808595, 0.034840576171875, 0.0346951675415039, 0.03576627349853516, 0.03431628799438476, 0.03425894546508789, 0.034318336486816405, 0.034694145202636716, 0.03545907211303711, 0.034909183502197266, 0.03712819290161133, 0.034923519134521484, 0.03437363052368164, 0.03426611328125, 0.03649331283569336, 0.035688449859619144, 0.03437055969238281, 0.03427532958984375, 0.03449753570556641, 0.034348033905029295, 0.034293758392333985, 0.03428966522216797, 0.03424870300292969, 0.0342476806640625, 0.03390566253662109, 0.03379814529418945, 0.033903617858886716, 0.034477054595947264, 0.03370393753051758, 0.03422003173828125, 0.03448627090454102, 0.034356224060058595, 0.034353153228759765, 0.03429683303833008, 0.03415039825439453, 0.03419340896606445, 0.03418316650390625, 0.0342108154296875, 0.03426406478881836, 0.03416166305541992, 0.034113536834716796, 0.03426713562011719, 0.03435724639892578, 0.03418624114990235, 0.034154495239257815, 0.03413708877563477, 0.03426508712768555, 0.03412684631347656, 0.03376537704467773, 0.03403673553466797, 0.034179073333740234, 0.034165760040283204, 0.03397017669677734, 0.03370598220825195, 0.03366809463500976, 0.033923072814941405, 0.034203647613525394, 0.03561881637573242, 0.035642368316650394, 0.03463065719604492, 0.03530035018920898, 0.034813953399658204, 0.03425689697265625, 0.034288639068603514, 0.034351104736328124, 0.03423539352416992, 0.03429171371459961, 0.034203647613525394, 0.034339839935302735, 0.03418214416503906, 0.03415859222412109, 0.03417292785644531, 0.03417599868774414, 0.034354209899902344, 0.03517744064331055, 0.03523481750488281, 0.03521023941040039, 0.03441356658935547, 0.03429580688476563, 0.03448320007324219, 0.03426508712768555, 0.034923519134521484, 0.03471155166625976, 0.035533824920654294, 0.03530547332763672, 0.034618366241455076, 0.03423129653930664, 0.034328575134277346, 0.0342927360534668, 0.034315265655517575, 0.034305023193359374, 0.033995777130126956, 0.03419443130493164, 0.034206718444824216, 0.0342108154296875, 0.03416166305541992, 0.03417190551757812, 0.03368960189819336, 0.03404083251953125, 0.03426713562011719, 0.03422412872314453, 
0.03419443130493164, 0.034157569885253904, 0.03422412872314453, 0.03422515106201172, 0.034274303436279296, 0.03402956771850586, 0.033799198150634764, 0.034218975067138675, 0.03420876693725586, 0.03423539352416992, 0.034595840454101565, 0.034628608703613284, 0.03421184158325195, 0.03414425659179687, 0.03426816177368164, 0.03418828964233398, 0.03427532958984375, 0.034135040283203126, 0.033895423889160156, 0.034098175048828124, 0.03415552139282227, 0.034086910247802735, 0.03425075149536133, 0.03415552139282227, 0.034181121826171876, 0.03387289428710937, 0.03415859222412109, 0.03416166305541992, 0.034592769622802735, 0.03429171371459961, 0.034165760040283204, 0.03409408187866211, 0.0341104621887207, 0.03413708877563477, 0.034111488342285154, 0.034149375915527344, 0.03406438446044922, 0.03408998489379883, 0.034124801635742184, 0.034141185760498044, 0.03468492889404297, 0.03429171371459961, 0.034132991790771484, 0.03414527893066406, 0.03406028747558594, 0.03411558532714844, 0.03407462310791016, 0.034124801635742184, 0.03411455917358398, 0.034187263488769534, 0.03421491241455078, 0.034293758392333985, 0.03416985702514649, 0.03423129653930664, 0.03411558532714844, 0.03410432052612305, 0.03415039825439453, 0.03413094329833984, 0.034095104217529294, 0.03412582397460937, 0.03409408187866211, 0.03419750213623047, 0.034136062622070314, 0.03448524856567383, 0.03507814407348633, 0.03528499221801758, 0.03435520172119141, 0.03420979309082031, 0.03426201629638672, 0.034326526641845705, 0.03438694381713867, 0.03765657424926758, 0.035659774780273434, 0.03504844665527344, 0.03536383819580078, 0.03553177642822265, 0.03577958297729492, 0.03602534484863281, 0.03468902587890625, 0.034359294891357424, 0.03561574554443359, 0.034385921478271485, 0.035783679962158206, 0.03535769653320313, 0.03543142318725586, 0.03518054580688477, 0.03503104019165039, 0.034315265655517575, 0.03434905624389648, 0.03391795349121094, 0.034138111114501955, 0.03426303863525391, 0.03515596771240234, 0.03535564804077149, 0.03486003112792969, 0.03462451171875, 0.03425791931152344, 0.03401728057861328, 0.034098175048828124, 0.034277374267578126, 0.034135040283203126, 0.03436646270751953, 0.034233345031738284, 0.03427123260498047, 0.03501055908203125, 0.03430809783935547, 0.03426201629638672, 0.034219009399414066, 0.034229248046875, 0.03415039825439453, 0.03424870300292969, 0.03426816177368164, 0.034184192657470705, 0.03428966522216797, 0.034618366241455076, 0.03549593734741211, 0.03529011154174805, 0.035356670379638674, 0.03581235122680664, 0.035699710845947266, 0.03451903915405274, 0.034680831909179685, 0.034351104736328124, 0.034367488861083983, 0.03489279937744141, 0.034938880920410156, 0.03487027359008789, 0.03469619369506836, 0.03485696029663086, 0.03544985580444336, 0.034367488861083983, 0.034917377471923826, 0.03497369766235352, 0.034614273071289066, 0.03418624114990235, 0.034305023193359374, 0.03719782257080078, 0.03553996658325195, 0.03458355331420898, 0.03439820861816406, 0.03423436737060547, 0.03417190551757812, 0.03420159912109375, 0.03412377548217774, 0.03443097686767578, 0.03427840042114258, 0.03395686340332031, 0.03440127944946289, 0.03530342483520508, 0.034492416381835936, 0.034369537353515625, 0.03438284683227539, 0.03428044891357422, 0.03422822570800781, 0.03440639877319336, 0.03443199920654297, 0.0343900146484375, 0.034511871337890625, 0.03532185745239258, 0.03525939178466797, 0.03665510559082031, 0.034830337524414064, 0.0342845458984375, 0.03452620697021484, 0.03426611328125, 0.03395891189575195, 0.034385921478271485, 
0.03497062301635742, 0.03438079833984375, 0.03430092620849609, 0.03546623992919922, 0.03436441421508789, 0.0365404167175293, 0.034566143035888675, 0.03435007858276367, 0.03697971343994141, 0.03580928039550781, 0.034298881530761716, 0.034184192657470705, 0.03428044891357422, 0.034290687561035156, 0.034375679016113284, 0.034253822326660154, 0.03518463897705078, 0.035813377380371096, 0.03438694381713867, 0.03471974563598633, 0.034285568237304685, 0.03426713562011719, 0.03478015899658203, 0.034683902740478514, 0.034321407318115234, 0.034800640106201174, 0.03448627090454102, 0.03422822570800781, 0.03424153518676758, 0.03423846435546875, 0.035348480224609374, 0.03518668746948242, 0.034726913452148435, 0.03426201629638672, 0.034181121826171876, 0.03506073760986328, 0.03444838333129883, 0.03415244674682617, 0.0346798095703125, 0.03539251327514648, 0.03496857452392578, 0.03414323043823242, 0.03416166305541992, 0.03417702484130859, 0.0341401596069336, 0.03452620697021484, 0.034181121826171876, 0.03419033432006836, 0.03424358367919922, 0.034148353576660156, 0.03429171371459961, 0.034305023193359374, 0.034223102569580076, 0.03424460983276367, 0.03417190551757812, 0.034116607666015625, 0.03420569610595703, 0.033982463836669925, 0.03421286392211914, 0.03420159912109375, 0.03414425659179687, 0.03411763381958008, 0.0342476806640625, 0.03471462249755859, 0.03431219100952149, 0.03424460983276367, 0.03428761672973633, 0.03418521499633789, 0.03450265502929688, 0.03548672103881836, 0.03429683303833008, 0.03425075149536133, 0.034298881530761716, 0.034305023193359374, 0.034285568237304685, 0.03428761672973633, 0.0342476806640625, 0.034269184112548826, 0.03417292785644531, 0.0342927360534668, 0.03424870300292969, 0.03455078506469727, 0.03429683303833008, 0.034310142517089845, 0.03422515106201172, 0.03426303863525391, 0.03460505676269531, 0.035369983673095705, 0.0351836166381836, 0.035105792999267575, 0.03472281646728516, 0.034285568237304685, 0.03423231887817383, 0.034274303436279296, 0.034277374267578126, 0.03434393692016602, 0.03424051284790039, 0.034359294891357424, 0.034277374267578126, 0.03425484848022461, 0.03419750213623047, 0.03424256134033203, 0.034141185760498044, 0.03423129653930664, 0.03420467376708984, 0.03422412872314453, 0.03426201629638672, 0.03437875366210937, 0.0342108154296875, 0.034310142517089845, 0.03506380844116211, 0.034511871337890625, 0.03426406478881836, 0.03435417556762695, 0.03420159912109375, 0.03420159912109375, 0.03423846435546875, 0.03425894546508789, 0.03423846435546875, 0.0361338882446289, 0.03475251388549805, 0.03549187088012695, 0.03544879913330078, 0.03439616012573242, 0.035312641143798826, 0.034282497406005856, 0.034748416900634765, 0.03522355270385742, 0.03588198471069336, 0.034754558563232424, 0.03592806243896484, 0.03488051223754883, 0.03476172637939453, 0.035716094970703126, 0.03448627090454102, 0.03435212707519531, 0.034272254943847655, 0.034331649780273435, 0.03388620758056641, 0.03425075149536133, 0.03437977600097656, 0.03421286392211914, 0.03439206314086914, 0.034977790832519534, 0.034356224060058595, 0.03422003173828125, 0.03423436737060547, 0.03427328109741211, 0.03426816177368164, 0.03443404769897461, 0.03433574295043945, 0.03424051284790039, 0.03420467376708984, 0.03421388626098633, 0.03413913726806641, 0.03409100723266602, 0.03424460983276367, 0.0352174072265625, 0.03518668746948242, 0.03508633422851563, 0.03529011154174805, 0.03511500930786133, 0.035817470550537106, 0.03427532958984375, 0.034305023193359374, 0.035138561248779294, 0.03428761672973633, 
0.034206718444824216, 0.03550822448730469, 0.03680255889892578, 0.03672371292114258, 0.03569356918334961, 0.03599257659912109, 0.03500339126586914, 0.034680831909179685, 0.03446988677978516, 0.03433369445800781, 0.03436544036865234, 0.03508531188964844, 0.03584511947631836, 0.03487027359008789, 0.03436544036865234, 0.035133438110351564, 0.034367488861083983, 0.03522355270385742, 0.03577241516113281, 0.03527065658569336, 0.034310142517089845, 0.03507712173461914, 0.03474124908447265, 0.03429580688476563, 0.03529523086547852, 0.03488972854614258, 0.03461734390258789, 0.03492966461181641, 0.034315265655517575, 0.03542732620239258, 0.034764801025390625, 0.03645951843261719, 0.03597824096679687, 0.03587276840209961, 0.03544063949584961, 0.03522662353515625, 0.03510988616943359, 0.03501055908203125, 0.03522150421142578, 0.035198974609375, 0.03435417556762695, 0.035506175994873046, 0.03524607849121094, 0.03425075149536133, 0.035141632080078124, 0.03452108764648437, 0.03526860809326172, 0.03535871887207031, 0.03690188980102539, 0.03580928039550781, 0.035110912322998046, 0.034184192657470705, 0.034956287384033204, 0.0344637451171875, 0.03499520111083984, 0.035244033813476565, 0.03425996780395508, 0.0341473274230957, 0.03595673751831055, 0.03572633743286133, 0.035237918853759764, 0.034850784301757816, 0.03537715148925781, 0.03552665710449219, 0.03465523147583008, 0.034305023193359374, 0.03439206314086914, 0.03435417556762695, 0.03435007858276367, 0.03445145416259766, 0.035476478576660156, 0.03437055969238281, 0.034223102569580076, 0.03429580688476563, 0.03439308929443359, 0.03423846435546875, 0.03427942276000977, 0.034165760040283204, 0.03429683303833008, 0.03414425659179687, 0.0341473274230957, 0.03416678237915039, 0.034219009399414066, 0.034203647613525394, 0.03427840042114258, 0.035373054504394534, 0.034479103088378905, 0.03517030334472656, 0.03440332794189453, 0.03415654373168946, 0.03440127944946289, 0.03415552139282227, 0.03426508712768555, 0.03437875366210937, 0.034408447265625, 0.03429580688476563, 0.034237438201904294, 0.03438796615600586, 0.035724288940429685, 0.035745792388916016, 0.034477054595947264, 0.03443609619140625, 0.03437363052368164, 0.03444940948486328, 0.03445657730102539, 0.0345272331237793, 0.03439513778686523, 0.03524505615234375, 0.035476478576660156, 0.035811328887939455, 0.03563827133178711, 0.03566694259643555, 0.03619123077392578, 0.03617587280273438, 0.03600896072387695, 0.03464396667480469, 0.034490367889404294, 0.034374656677246096, 0.03444736099243164, 0.03435007858276367, 0.03438284683227539, 0.03440332794189453, 0.03445145416259766, 0.03443916702270508, 0.034307071685791016, 0.03437158584594727, 0.0344002571105957, 0.034307071685791016, 0.03565260696411133, 0.03577139282226562, 0.03568537521362305, 0.0355491828918457, 0.0356577262878418, 0.03589324951171875, 0.03458867263793945, 0.03437670516967774, 0.03436851119995117, 0.034342910766601564, 0.034429950714111326, 0.034181121826171876, 0.03438796615600586, 0.0343900146484375, 0.035095550537109374, 0.03464601516723633, 0.034457599639892575, 0.03445043182373047, 0.03455692672729492, 0.0345425910949707]",tokens/s,28.907235737336695,,,2,64,1,,, 
-4bit-awq-exllama-v2-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,a,a,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 304, in hf_raise_for_status - response.raise_for_status() - File ""/usr/local/lib/python3.10/dist-packages/requests/models.py"", line 1024, in raise_for_status - raise HTTPError(http_error_msg, response=self) -requests.exceptions.HTTPError: 404 Client Error: Not Found for url: https://huggingface.co/a/resolve/main/config.json - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1221, in hf_hub_download - return _hf_hub_download_to_cache_dir( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1325, in _hf_hub_download_to_cache_dir - _raise_on_head_call_error(head_call_error, force_download, local_files_only) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1823, in _raise_on_head_call_error - raise head_call_error - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1722, in _get_metadata_or_catch_error - metadata = get_hf_file_metadata(url=url, proxies=proxies, timeout=etag_timeout, headers=headers) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1645, in get_hf_file_metadata - r = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 372, in _request_wrapper - response = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 396, in _request_wrapper - hf_raise_for_status(response) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status - raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. 
(Request ID: Root=1-66949179-43251b7e1a565f046d6e7a76;eae8dee5-7f7f-446e-ac3a-56af95cbedd5) - -Repository Not Found for url: https://huggingface.co/a/resolve/main/config.json. -Please make sure you specified the correct `repo_id` and `repo_type`. -If you are trying to access a private or gated repo, make sure you are authenticated. - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 425, in cached_file - raise EnvironmentError( -OSError: a is not a local folder and is not a valid model identifier listed on 'https://huggingface.co/models' -If this is a private repository, make sure to pass a token having permission to this repo either by logging in with `huggingface-cli login` or by passing `token=` - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,2,64,1,,, -4bit-awq-exllama-v2-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,-,-,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - 
resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 106, in _inner_fn - validate_repo_id(arg_value) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 160, in validate_repo_id - raise HFValidationError( -huggingface_hub.errors.HFValidationError: Repo id must use alphanumeric chars or '-', '_', '.', '--' and '..' are forbidden, '-' and '.' cannot start or end the name, max length is 96: '-'. - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 466, in cached_file - raise EnvironmentError( -OSError: Incorrect path_or_model_id: '-'. Please provide either the path to a local folder or the repo_id of a model on the Hub. 
- -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,2,64,1,,, -4bit-awq-exllama-v2-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,facebook/opt-350m,facebook/opt-350m,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - self.load_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3704, in from_pretrained - config = cls._autoset_attn_implementation( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1490, in _autoset_attn_implementation - config = cls._check_and_enable_sdpa( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1656, in _check_and_enable_sdpa - raise ValueError( -ValueError: OPTForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. 
Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,2,64,1,,, -4bit-awq-exllama-v2-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,m,m,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 304, in hf_raise_for_status - response.raise_for_status() - File ""/usr/local/lib/python3.10/dist-packages/requests/models.py"", line 1024, in raise_for_status - raise HTTPError(http_error_msg, response=self) -requests.exceptions.HTTPError: 404 Client Error: Not Found for url: https://huggingface.co/m/resolve/main/config.json - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1221, in hf_hub_download - return _hf_hub_download_to_cache_dir( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1325, in _hf_hub_download_to_cache_dir - _raise_on_head_call_error(head_call_error, force_download, local_files_only) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1823, in _raise_on_head_call_error - raise head_call_error - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1722, in _get_metadata_or_catch_error - metadata = get_hf_file_metadata(url=url, proxies=proxies, timeout=etag_timeout, headers=headers) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1645, in get_hf_file_metadata - r = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 372, in _request_wrapper - response = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 396, in _request_wrapper - hf_raise_for_status(response) - File 
""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status - raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-66948c59-0a9f5388510a358f7b3585c7;588c1abf-eccc-43d0-ae22-69da7dabd488) - -Repository Not Found for url: https://huggingface.co/m/resolve/main/config.json. -Please make sure you specified the correct `repo_id` and `repo_type`. -If you are trying to access a private or gated repo, make sure you are authenticated. - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 425, in cached_file - raise EnvironmentError( -OSError: m is not a local folder and is not a valid model identifier listed on 'https://huggingface.co/models' -If this is a private repository, make sure to pass a token having permission to this repo either by logging in with `huggingface-cli login` or by passing `token=` - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,2,64,1,,, -4bit-awq-exllama-v2-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,stabilityai/stablelm-base-alpha-7b,stabilityai/stablelm-base-alpha-7b,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - 
File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - self.load_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3907, in from_pretrained - hf_quantizer.postprocess_model(model) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/base.py"", line 195, in postprocess_model - return self._process_model_after_weight_loading(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/quantizer_awq.py"", line 107, in _process_model_after_weight_loading - model = post_init_awq_exllama_modules(model, self.quantization_config.exllama_config) - File ""/usr/local/lib/python3.10/dist-packages/transformers/integrations/awq.py"", line 466, in post_init_awq_exllama_modules - model = exllamav2_post_init( - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllamav2.py"", line 198, in exllamav2_post_init - submodule.post_init(scratch_space=model.scratch_spaces[device]) - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllamav2.py"", line 81, in post_init - self.q_handle = exlv2_ext.make_q_matrix( -NameError: name 'exlv2_ext' is not defined - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,2,64,1,,, -4bit-awq-exllama-v2-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,M,M,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 304, in hf_raise_for_status - 
response.raise_for_status() - File ""/usr/local/lib/python3.10/dist-packages/requests/models.py"", line 1024, in raise_for_status - raise HTTPError(http_error_msg, response=self) -requests.exceptions.HTTPError: 404 Client Error: Not Found for url: https://huggingface.co/M/resolve/main/config.json - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1221, in hf_hub_download - return _hf_hub_download_to_cache_dir( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1325, in _hf_hub_download_to_cache_dir - _raise_on_head_call_error(head_call_error, force_download, local_files_only) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1823, in _raise_on_head_call_error - raise head_call_error - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1722, in _get_metadata_or_catch_error - metadata = get_hf_file_metadata(url=url, proxies=proxies, timeout=etag_timeout, headers=headers) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1645, in get_hf_file_metadata - r = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 372, in _request_wrapper - response = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 396, in _request_wrapper - hf_raise_for_status(response) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status - raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-66948fd1-68537388089658724ed940ac;a1dc2e5c-a7e6-4175-a53e-84310d994b43) - -Repository Not Found for url: https://huggingface.co/M/resolve/main/config.json. -Please make sure you specified the correct `repo_id` and `repo_type`. -If you are trying to access a private or gated repo, make sure you are authenticated. 
- -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 425, in cached_file - raise EnvironmentError( -OSError: M is not a local folder and is not a valid model identifier listed on 'https://huggingface.co/models' -If this is a private repository, make sure to pass a token having permission to this repo either by logging in with `huggingface-cli login` or by passing `token=` - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,2,64,1,,, -4bit-awq-exllama-v2-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,8,8,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 304, in hf_raise_for_status - response.raise_for_status() - File ""/usr/local/lib/python3.10/dist-packages/requests/models.py"", line 1024, in raise_for_status - raise HTTPError(http_error_msg, response=self) -requests.exceptions.HTTPError: 404 Client Error: Not Found for url: https://huggingface.co/8/resolve/main/config.json - -The above exception was the 
direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1221, in hf_hub_download - return _hf_hub_download_to_cache_dir( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1325, in _hf_hub_download_to_cache_dir - _raise_on_head_call_error(head_call_error, force_download, local_files_only) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1823, in _raise_on_head_call_error - raise head_call_error - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1722, in _get_metadata_or_catch_error - metadata = get_hf_file_metadata(url=url, proxies=proxies, timeout=etag_timeout, headers=headers) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1645, in get_hf_file_metadata - r = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 372, in _request_wrapper - response = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 396, in _request_wrapper - hf_raise_for_status(response) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status - raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-66949280-774e9a271f4e3f6d30d489f8;468d8aa5-9113-437d-b245-36a7c38275d8) - -Repository Not Found for url: https://huggingface.co/8/resolve/main/config.json. -Please make sure you specified the correct `repo_id` and `repo_type`. -If you are trying to access a private or gated repo, make sure you are authenticated. 
- -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 425, in cached_file - raise EnvironmentError( -OSError: 8 is not a local folder and is not a valid model identifier listed on 'https://huggingface.co/models' -If this is a private repository, make sure to pass a token having permission to this repo either by logging in with `huggingface-cli login` or by passing `token=` - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,2,64,1,,, -4bit-awq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,EleutherAI/gpt-neox-20b,EleutherAI/gpt-neox-20b,cuda,0,42,,,True,,,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,gpt_neox,MB,11060.891648,15131.475968,0.0,14501.80608,13634.065408,s,1,14.528208984375,14.528208984375,0.0,14.528208984375,14.528208984375,14.528208984375,14.528208984375,[14.528208984375],,kWh,9.105845972846966e-05,4.989215037492579e-05,0.0001959782123380216,0.00033692882244141706,,MB,2040.66816,15150.350336,0.0,14501.80608,12898.830848,s,10,3.3699922485351563,0.33699922485351563,0.00018909451286557388,0.33694303894042965,0.3372801025390625,0.3373659881591797,0.3374346966552734,"[0.3368555908203125, 0.33726101684570314, 0.33745187377929686, 0.33696197509765624, 0.3369111938476563, 0.33682406616210936, 0.3369366455078125, 0.3368711547851562, 0.33696929931640623, 
0.33694943237304686]",tokens/s,759.6456642037566,kWh,3.983156069698979e-06,2.1825671890576228e-06,1.581034598160021e-05,2.1976069240356813e-05,tokens/kWh,11649035.01167907,MB,2044.993536,15152.447488,0.0,14501.80608,13243.63264,s,10,21.77368823242188,2.1773688232421877,0.011770728203939847,2.1810206298828128,2.189590576171875,2.189647705078125,2.189693408203125,"[2.174266845703125, 2.1864375, 2.175689453125, 2.18654443359375, 2.168908447265625, 2.189577880859375, 2.155778564453125, 2.189704833984375, 2.160428466796875, 2.186351806640625]",tokens/s,28.93400480778012,kWh,2.572399231599539e-05,1.4097925089849807e-05,9.419174201999817e-05,0.00013401365942584335,tokens/kWh,470101.33347534726,,s,630,21.76123902130126,0.034541649240160746,0.0007485230087456863,0.034116607666015625,0.03552174224853515,0.03586923561096191,0.03682809898376465,"[0.034151424407958986, 0.03400601577758789, 0.03457126235961914, 0.03431423950195313, 0.034103294372558594, 0.034050048828125, 0.033983486175537106, 0.03400089645385742, 0.033865726470947266, 0.033909759521484374, 0.033942527770996093, 0.033928192138671875, 0.033860607147216795, 0.036162559509277346, 0.03629363250732422, 0.035418113708496096, 0.035536895751953124, 0.03554099273681641, 0.03546623992919922, 0.036326400756835936, 0.035535873413085936, 0.03532799911499023, 0.03536793518066406, 0.03409100723266602, 0.034034687042236327, 0.03403776168823242, 0.033963008880615236, 0.033903617858886716, 0.034105342864990236, 0.03395072174072265, 0.03401523208618164, 0.03422719955444336, 0.033949695587158206, 0.03406028747558594, 0.03416883087158203, 0.034141185760498044, 0.03387289428710937, 0.034195457458496094, 0.034012161254882815, 0.03402547073364258, 0.033983486175537106, 0.03398553466796875, 0.03507814407348633, 0.03552358245849609, 0.03536383819580078, 0.035404800415039066, 0.0353546257019043, 0.033955841064453124, 0.035146751403808595, 0.03613695907592773, 0.03540787124633789, 0.03540889739990234, 0.0341104621887207, 0.03402342224121094, 0.03390873718261719, 0.03405926513671875, 0.033972225189208984, 0.0340766716003418, 0.03412070465087891, 0.03404288101196289, 0.03436441421508789, 0.03420569610595703, 0.03395276641845703, 0.03475558471679688, 0.033960960388183595, 0.034016254425048825, 0.03373158264160156, 0.034377761840820316, 0.034140129089355466, 0.03585740661621094, 0.03639091110229492, 0.03552153778076172, 0.03538227081298828, 0.03540991973876953, 0.0340398063659668, 0.034119678497314454, 0.03400089645385742, 0.03406335830688476, 0.03391795349121094, 0.034344959259033206, 0.03405619049072266, 0.03382476806640625, 0.03393740844726562, 0.03403366470336914, 0.03407462310791016, 0.03398553466796875, 0.03397119903564453, 0.03395379257202148, 0.03387289428710937, 0.03404083251953125, 0.033857536315917966, 0.03406643295288086, 0.03520409774780273, 0.03394355010986328, 0.03400396728515625, 0.03414527893066406, 0.0340582389831543, 0.03415039825439453, 0.03634483337402344, 0.03600384140014649, 0.03569868850708008, 0.03557580947875977, 0.03535257720947266, 0.03529420852661133, 0.03543961715698242, 0.03526553726196289, 0.03553792190551758, 0.03546623992919922, 0.03545600128173828, 0.0355860481262207, 0.03398553466796875, 0.03402956771850586, 0.034062335968017575, 0.03468492889404297, 0.03539865493774414, 0.03527782440185547, 0.03512934494018555, 0.03520512008666992, 0.03547545623779297, 0.03548262405395508, 0.03545087814331055, 0.03543756866455078, 0.03593318557739258, 0.03413094329833984, 0.03410943984985351, 0.03414323043823242, 0.034462718963623046, 
0.034113536834716796, 0.03403059387207031, 0.033858558654785154, 0.03402137756347656, 0.033844223022460936, 0.03399679946899414, 0.034065406799316404, 0.03418624114990235, 0.03392204666137695, 0.03501465606689453, 0.03679129409790039, 0.03573350524902344, 0.03416166305541992, 0.0339865608215332, 0.03395174407958984, 0.03410636901855469, 0.03396915054321289, 0.03387187194824219, 0.03402137756347656, 0.03391385650634766, 0.033888256072998044, 0.03446886444091797, 0.034541568756103515, 0.033974273681640625, 0.03424460983276367, 0.03390771102905273, 0.033931262969970705, 0.03397836685180664, 0.03394867324829102, 0.03408588790893555, 0.03553177642822265, 0.03540377426147461, 0.0339681282043457, 0.0341473274230957, 0.035125247955322264, 0.035438591003417966, 0.03556556701660156, 0.035102718353271486, 0.034138111114501955, 0.034010112762451174, 0.035335166931152344, 0.03564646530151367, 0.03552460861206055, 0.03400806427001953, 0.0349409294128418, 0.03542015838623047, 0.03548672103881836, 0.03395276641845703, 0.03401932907104492, 0.033888256072998044, 0.03400089645385742, 0.03453235244750977, 0.03445555114746094, 0.03393024063110352, 0.03413094329833984, 0.03396710586547851, 0.035833854675292966, 0.037048320770263675, 0.03628339385986328, 0.03555737686157227, 0.03399679946899414, 0.034028545379638675, 0.03443199920654297, 0.034010112762451174, 0.03388108825683594, 0.03371724700927734, 0.0339947509765625, 0.03393228912353516, 0.03388415908813477, 0.033983486175537106, 0.03379404830932617, 0.034239486694335936, 0.034477054595947264, 0.033977344512939454, 0.033890304565429685, 0.03405619049072266, 0.03387187194824219, 0.03377151870727539, 0.03705036926269531, 0.035775489807128906, 0.03510681533813476, 0.03529318237304688, 0.035269630432128905, 0.03525734329223633, 0.03532799911499023, 0.03522150421142578, 0.03530342483520508, 0.033858558654785154, 0.03404288101196289, 0.03385343933105469, 0.03538431930541992, 0.035195903778076174, 0.03523993682861328, 0.03574169540405273, 0.035296257019042966, 0.03531468963623047, 0.036057086944580076, 0.037410816192626956, 0.03419033432006836, 0.03451801681518555, 0.03550003051757813, 0.034513919830322266, 0.03436236953735351, 0.03397017669677734, 0.03396915054321289, 0.03406950378417969, 0.03425996780395508, 0.03408486557006836, 0.034065406799316404, 0.03382476806640625, 0.03500851058959961, 0.03540377426147461, 0.03522969436645508, 0.0353177604675293, 0.03491430282592774, 0.035334144592285156, 0.03524300765991211, 0.035369983673095705, 0.03530752182006836, 0.033931262969970705, 0.034086910247802735, 0.03395379257202148, 0.03398553466796875, 0.034032638549804685, 0.03529830551147461, 0.034991104125976565, 0.03406950378417969, 0.033873920440673826, 0.033825790405273434, 0.03496550369262695, 0.03393024063110352, 0.033923072814941405, 0.033980415344238284, 0.03390566253662109, 0.033972225189208984, 0.033999870300292966, 0.03653836822509766, 0.03436032104492188, 0.03399884796142578, 0.033964031219482424, 0.03402137756347656, 0.03399679946899414, 0.033931262969970705, 0.03381248092651367, 0.03551129531860352, 0.035465217590332034, 0.03530035018920898, 0.03538227081298828, 0.03398860931396484, 0.03388927841186523, 0.033805313110351565, 0.03444940948486328, 0.03436441421508789, 0.03399270248413086, 0.034065406799316404, 0.03395276641845703, 0.034045951843261715, 0.033702911376953124, 0.03388518524169922, 0.03385548782348633, 0.033960960388183595, 0.03386880111694336, 0.03392102432250976, 0.033955841064453124, 0.03397017669677734, 0.03380428695678711, 0.03386265563964844, 
0.03377766418457031, 0.0339865608215332, 0.03384832000732422, 0.033808383941650394, 0.034252799987792966, 0.03543040084838867, 0.035410945892333984, 0.03539251327514648, 0.03524915313720703, 0.035340286254882815, 0.033963008880615236, 0.0347064323425293, 0.035093505859375, 0.03416371154785156, 0.03445145416259766, 0.03423129653930664, 0.035454975128173825, 0.03542323303222656, 0.035272705078125, 0.03533107376098633, 0.03695513534545898, 0.03399679946899414, 0.0339681282043457, 0.03426816177368164, 0.035364864349365234, 0.03539148712158203, 0.03517440032958984, 0.035383296966552735, 0.03536793518066406, 0.03527884674072266, 0.03394867324829102, 0.033931262969970705, 0.035970046997070314, 0.03565465545654297, 0.03614720153808594, 0.03587891387939453, 0.0352911376953125, 0.03551334381103516, 0.03510681533813476, 0.035236862182617186, 0.03528908920288086, 0.035211265563964846, 0.035315711975097655, 0.035348480224609374, 0.03521331024169922, 0.035332096099853515, 0.03529830551147461, 0.034560001373291016, 0.03532185745239258, 0.03549491119384766, 0.035364864349365234, 0.035227649688720705, 0.03407360076904297, 0.034012161254882815, 0.03395481491088867, 0.034095104217529294, 0.033925121307373046, 0.033941505432128906, 0.034146305084228515, 0.034098175048828124, 0.033742847442626955, 0.03423231887817383, 0.03407257461547852, 0.03406028747558594, 0.035888126373291016, 0.035604480743408204, 0.03395174407958984, 0.03399884796142578, 0.03511808013916016, 0.03528192138671875, 0.03536383819580078, 0.03530137634277344, 0.035244033813476565, 0.03404288101196289, 0.03401420974731445, 0.03375820922851563, 0.03405721664428711, 0.033995777130126956, 0.033858558654785154, 0.03401113510131836, 0.033963008880615236, 0.03395379257202148, 0.03407360076904297, 0.03397017669677734, 0.03473612976074219, 0.03425996780395508, 0.03393843078613281, 0.03381452941894531, 0.0339128303527832, 0.03383500671386719, 0.03388415908813477, 0.033976318359375, 0.03430403137207031, 0.03671036911010742, 0.03575500869750976, 0.034151424407958986, 0.03419955062866211, 0.034083839416503905, 0.03401728057861328, 0.03405209732055664, 0.03429580688476563, 0.03405721664428711, 0.03384320068359375, 0.0340582389831543, 0.03392204666137695, 0.0339128303527832, 0.03402342224121094, 0.03401523208618164, 0.03401830291748047, 0.033858558654785154, 0.033974273681640625, 0.03405209732055664, 0.034307071685791016, 0.03565363311767578, 0.03450777435302734, 0.03439411163330078, 0.033926143646240234, 0.03390054321289063, 0.033993728637695314, 0.033993728637695314, 0.03394047927856445, 0.03404288101196289, 0.03393228912353516, 0.03401318359375, 0.033909759521484374, 0.03412070465087891, 0.03405619049072266, 0.03412582397460937, 0.03398963165283203, 0.03386265563964844, 0.034216960906982424, 0.0340684814453125, 0.03395174407958984, 0.033977344512939454, 0.03390156936645508, 0.03435520172119141, 0.034440193176269535, 0.034097152709960936, 0.03395891189575195, 0.03403984069824219, 0.033995742797851565, 0.03399782562255859, 0.03563520050048828, 0.034149375915527344, 0.03440947341918945, 0.034533374786376955, 0.03384320068359375, 0.03404800033569336, 0.0340582389831543, 0.03393228912353516, 0.033983486175537106, 0.033942527770996093, 0.033999870300292966, 0.03398553466796875, 0.03398553466796875, 0.033857536315917966, 0.03488256072998047, 0.03590860748291016, 0.03526758575439453, 0.03527065658569336, 0.03410739135742188, 0.03707699203491211, 0.03626496124267578, 0.03572633743286133, 0.03456921768188476, 0.034113536834716796, 0.034012161254882815, 
0.033887233734130856, 0.03402035140991211, 0.03398860931396484, 0.03387289428710937, 0.03390259170532227, 0.03392409515380859, 0.03392822265625, 0.03384316635131836, 0.03397836685180664, 0.03416371154785156, 0.034034687042236327, 0.034028545379638675, 0.0339865608215332, 0.03366604614257813, 0.03474124908447265, 0.03681792068481445, 0.03578265762329102, 0.03556556701660156, 0.03565670394897461, 0.035282943725585936, 0.035181568145751956, 0.03539459228515625, 0.03522351837158203, 0.03591372680664062, 0.035315711975097655, 0.035176448822021485, 0.034616382598876956, 0.03567200088500976, 0.0348221435546875, 0.034799617767333986, 0.033993728637695314, 0.03395993423461914, 0.03488665771484375, 0.03536588668823242, 0.03395993423461914, 0.03518668746948242, 0.035590145111083986, 0.0353331184387207, 0.0350013427734375, 0.03439820861816406, 0.035460094451904296, 0.035419136047363284, 0.03513651275634765, 0.03536588668823242, 0.03410943984985351, 0.03381043243408203, 0.033926143646240234, 0.033949695587158206, 0.03395993423461914, 0.03403878402709961, 0.033957889556884766, 0.034492416381835936, 0.03717529678344727, 0.036327423095703124, 0.03506687927246094, 0.03413913726806641, 0.033995777130126956, 0.03402444839477539, 0.03397017669677734, 0.03488972854614258, 0.034408447265625, 0.0337367057800293, 0.034164737701416016, 0.03399884796142578, 0.03544268798828125, 0.03455385589599609, 0.033923072814941405, 0.03395993423461914, 0.03385343933105469, 0.033906688690185545, 0.03409100723266602, 0.034253822326660154, 0.033974273681640625, 0.03384729766845703, 0.03402342224121094, 0.03391692733764649, 0.03377151870727539, 0.03401318359375, 0.03394867324829102, 0.033960960388183595, 0.034010112762451174, 0.03398553466796875, 0.034184192657470705, 0.033865726470947266, 0.033999870300292966, 0.033857536315917966, 0.034167808532714845, 0.033939456939697264, 0.03407155227661133, 0.03404288101196289, 0.033825790405273434, 0.034305023193359374, 0.03408486557006836, 0.033833984375, 0.03387187194824219, 0.0339128303527832, 0.033909759521484374, 0.033723392486572266, 0.033737728118896484, 0.03390259170532227, 0.03425689697265625, 0.035471359252929685, 0.035369983673095705, 0.03514777755737305, 0.03523481750488281, 0.03489177703857422, 0.034533374786376955, 0.03392409515380859, 0.03423539352416992, 0.03385958480834961, 0.03434598541259765, 0.035514366149902346, 0.033999870300292966, 0.035955711364746096, 0.03527884674072266, 0.03523993682861328, 0.03528806304931641, 0.03499724960327148, 0.03522972869873047, 0.03571503829956055, 0.03593830490112305, 0.03530035018920898, 0.03532799911499023, 0.03506892776489258, 0.03483340835571289, 0.03513241577148438, 0.03508633422851563, 0.03529830551147461, 0.035138561248779294, 0.03589734268188476, 0.03683225631713867, 0.035154945373535154, 0.03524300765991211, 0.03574784088134766, 0.034051071166992186, 0.033955841064453124, 0.03413094329833984, 0.03425689697265625, 0.03409305572509766, 0.03443711853027344, 0.03405209732055664, 0.034293758392333985, 0.03402239990234375, 0.03405721664428711, 0.03455590438842773, 0.03393638229370117, 0.033949695587158206, 0.03491123199462891, 0.03545087814331055, 0.03426303863525391, 0.03408588790893555, 0.03395993423461914, 0.034070526123046875, 0.03406643295288086, 0.03390566253662109, 0.034427902221679685, 0.034841598510742186, 0.034479103088378905, 0.03448934555053711, 0.03473305511474609, 0.03410124969482422, 0.03400294494628906, 0.03437158584594727, 0.03421798324584961, 0.03408281707763672, 0.03417190551757812, 0.03423846435546875, 
0.03416371154785156, 0.035454975128173825]",tokens/s,28.950557428431182,,,2,64,1,,, -4bit-awq-exllama-v2-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,s,s,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 304, in hf_raise_for_status - response.raise_for_status() - File ""/usr/local/lib/python3.10/dist-packages/requests/models.py"", line 1024, in raise_for_status - raise HTTPError(http_error_msg, response=self) -requests.exceptions.HTTPError: 404 Client Error: Not Found for url: https://huggingface.co/s/resolve/main/config.json - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1221, in hf_hub_download - return _hf_hub_download_to_cache_dir( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1325, in _hf_hub_download_to_cache_dir - _raise_on_head_call_error(head_call_error, force_download, local_files_only) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1823, in _raise_on_head_call_error - raise head_call_error - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1722, in _get_metadata_or_catch_error - metadata = get_hf_file_metadata(url=url, proxies=proxies, timeout=etag_timeout, headers=headers) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1645, in get_hf_file_metadata - r = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 372, in _request_wrapper - response = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 396, in _request_wrapper - hf_raise_for_status(response) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status - raise RepositoryNotFoundError(message, response) from e 
-huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-66948cfb-0e00b796566ae4f404f69367;5b15094d-af6b-4969-9708-4249eadbf561) - -Repository Not Found for url: https://huggingface.co/s/resolve/main/config.json. -Please make sure you specified the correct `repo_id` and `repo_type`. -If you are trying to access a private or gated repo, make sure you are authenticated. - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 425, in cached_file - raise EnvironmentError( -OSError: s is not a local folder and is not a valid model identifier listed on 'https://huggingface.co/models' -If this is a private repository, make sure to pass a token having permission to this repo either by logging in with `huggingface-cli login` or by passing `token=` - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,2,64,1,,, -4bit-awq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,EleutherAI/pythia-12b,EleutherAI/pythia-12b,cuda,0,42,,,True,,,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,gpt_neox,MB,6677.123072,9622.257664,0.0,8992.587776,8404.320768,s,1,12.019306640625,12.019306640625,0.0,12.019306640625,12.019306640625,12.019306640625,12.019306640625,[12.019306640625],,kWh,5.93966705881927e-05,3.253476981113887e-05,0.0001136420353580192,0.00020557347575735077,,MB,1621.200896,9641.132032,0.0,8992.587776,7880.275968,s,10,1.7119584655761715,0.1711958465576172,6.473915974927822e-05,0.17117905426025393,0.17130065002441405,0.17130514068603517,0.17130873321533205,"[0.17116368103027343, 0.17129965209960937, 
0.17108822631835938, 0.17120620727539063, 0.17130963134765625, 0.17122735595703126, 0.1711890869140625, 0.17116902160644532, 0.1711535339355469, 0.17115206909179687]",tokens/s,1495.363381458214,kWh,2.024154383439174e-06,1.1087761808852581e-06,7.97309489071121e-06,1.1106025455035642e-05,tokens/kWh,23050550.445472434,MB,1642.12736,9643.229184,0.0,8992.587776,8125.469184,s,10,16.463754760742187,1.6463754760742186,0.020935552247779227,1.6437764892578124,1.6711944091796875,1.6717110961914061,1.6721244458007811,"[1.672227783203125, 1.6662783203125, 1.6373736572265625, 1.6224852294921874, 1.6418719482421875, 1.6661480712890624, 1.67107958984375, 1.6322489013671875, 1.6456810302734375, 1.6083602294921875]",tokens/s,38.26587611121581,kWh,1.9101958057533472e-05,1.0468535267537632e-05,5.990895941489569e-05,8.94794527399668e-05,tokens/kWh,704072.254253523,,s,630,16.46136627006532,0.02612915280962747,0.0008348816977602347,0.026599424362182617,0.026831053352355957,0.026944256210327148,0.027554918270111084,"[0.025404415130615234, 0.02492006492614746, 0.025108480453491212, 0.02652569580078125, 0.026619903564453123, 0.026643455505371092, 0.026678272247314453, 0.02654412841796875, 0.02692095947265625, 0.026867712020874023, 0.026687488555908204, 0.0267325439453125, 0.026566656112670898, 0.02652364730834961, 0.026678272247314453, 0.026549247741699217, 0.02667622375488281, 0.026755071640014647, 0.026643455505371092, 0.026635263442993166, 0.026626047134399415, 0.027218944549560548, 0.029068288803100587, 0.027131904602050783, 0.02708684730529785, 0.026645503997802734, 0.026711040496826172, 0.026634239196777345, 0.026650623321533205, 0.02671820831298828, 0.026650623321533205, 0.02656972885131836, 0.026735616683959962, 0.026656768798828126, 0.027265024185180665, 0.026795007705688476, 0.026860544204711914, 0.02655232048034668, 0.024977407455444335, 0.02495078468322754, 0.025448448181152345, 0.02679091262817383, 0.02673766326904297, 0.026424320220947265, 0.02647756767272949, 0.026056703567504884, 0.026764287948608398, 0.02653900718688965, 0.026473472595214844, 0.0265799674987793, 0.026250240325927734, 0.0263874568939209, 0.026268672943115235, 0.026622976303100586, 0.02676940727233887, 0.026786815643310546, 0.026137599945068358, 0.02613043212890625, 0.0263505916595459, 0.026307584762573243, 0.026730495452880858, 0.026779647827148437, 0.026991615295410155, 0.024922111511230468, 0.026458112716674805, 0.026219520568847656, 0.02697420883178711, 0.026669055938720702, 0.02674995231628418, 0.025021440505981447, 0.02634547233581543, 0.02677350425720215, 0.02657689666748047, 0.02655129623413086, 0.02669260787963867, 0.02673356819152832, 0.02674278450012207, 0.025561088562011718, 0.026901504516601563, 0.026218496322631835, 0.026550271987915038, 0.026615808486938477, 0.026651647567749022, 0.02676019287109375, 0.026599424362182617, 0.026651647567749022, 0.026654720306396484, 0.026398719787597655, 0.02653593635559082, 0.026622976303100586, 0.026592256546020508, 0.026612735748291014, 0.026669055938720702, 0.02689945602416992, 0.02668339157104492, 0.026689535140991212, 0.02691379165649414, 0.026682367324829103, 0.026763263702392577, 0.026705919265747072, 0.026637311935424804, 0.026689535140991212, 0.026673152923583986, 0.026879999160766603, 0.026674175262451173, 0.026788864135742187, 0.02490675163269043, 0.024954879760742187, 0.024929279327392577, 0.02490675163269043, 0.024887296676635744, 0.02557542419433594, 0.026587135314941408, 0.026602495193481446, 0.02692095947265625, 0.026648576736450196, 0.026621952056884765, 
0.02676019287109375, 0.026719232559204102, 0.02666803169250488, 0.02672127914428711, 0.026976255416870116, 0.026820608139038086, 0.026714111328125, 0.026702848434448243, 0.026714111328125, 0.024614912033081054, 0.024846336364746095, 0.025024511337280272, 0.02495078468322754, 0.024978431701660156, 0.024878080368041993, 0.02490572738647461, 0.024846336364746095, 0.02495692825317383, 0.025624576568603515, 0.02669260787963867, 0.02488934326171875, 0.024952831268310546, 0.0248668155670166, 0.024864767074584963, 0.025536512374877928, 0.026596351623535155, 0.026714111328125, 0.02689023971557617, 0.02633625602722168, 0.026266624450683593, 0.026661888122558593, 0.026689535140991212, 0.02671001625061035, 0.026686464309692383, 0.026652671813964843, 0.026714111328125, 0.026383359909057616, 0.026061824798583984, 0.026629119873046874, 0.02669260787963867, 0.02674380874633789, 0.026678272247314453, 0.026602495193481446, 0.026850303649902343, 0.026795007705688476, 0.026679296493530274, 0.026643455505371092, 0.02668441581726074, 0.026611711502075194, 0.026629119873046874, 0.026664960861206056, 0.026705919265747072, 0.026599424362182617, 0.026703872680664063, 0.026594303131103517, 0.02673971176147461, 0.026795007705688476, 0.026598400115966796, 0.026644479751586913, 0.02679091262817383, 0.026610687255859376, 0.025021440505981447, 0.025035776138305665, 0.024979455947875977, 0.024937471389770507, 0.024977407455444335, 0.025033727645874023, 0.024980480194091798, 0.025604095458984375, 0.02510438346862793, 0.024947711944580078, 0.026040319442749024, 0.025862144470214843, 0.025183231353759765, 0.025150463104248046, 0.027497472763061522, 0.02698137664794922, 0.026821632385253907, 0.02672230339050293, 0.026635263442993166, 0.026641408920288087, 0.02655436706542969, 0.026564607620239256, 0.026687488555908204, 0.026638336181640625, 0.024969215393066405, 0.02512281608581543, 0.024983552932739257, 0.025801727294921875, 0.026650623321533205, 0.025007104873657225, 0.025010175704956054, 0.02497433662414551, 0.025168895721435547, 0.026847232818603517, 0.02679091262817383, 0.026933248519897462, 0.026643455505371092, 0.026682367324829103, 0.026637311935424804, 0.027263999938964844, 0.02510438346862793, 0.025022464752197264, 0.02493235206604004, 0.024964096069335938, 0.024995840072631836, 0.02506342315673828, 0.024994815826416016, 0.02511769676208496, 0.02510950469970703, 0.025119743347167968, 0.025018367767333984, 0.025009151458740234, 0.025062400817871092, 0.025020416259765626, 0.025028608322143556, 0.02494976043701172, 0.025024511337280272, 0.02513920021057129, 0.024997888565063478, 0.025001983642578125, 0.0249036808013916, 0.02488832092285156, 0.024585216522216798, 0.024921087265014647, 0.02493440055847168, 0.025070592880249022, 0.025450496673583983, 0.02694144058227539, 0.026650623321533205, 0.02669977569580078, 0.02676019287109375, 0.02666700744628906, 0.02674278450012207, 0.02693120002746582, 0.025894912719726562, 0.026763263702392577, 0.026688512802124024, 0.026582015991210937, 0.02671308708190918, 0.0267775993347168, 0.02681548881530762, 0.026694656372070313, 0.026558464050292968, 0.026244096755981446, 0.025016319274902343, 0.025204736709594725, 0.02506342315673828, 0.024988672256469727, 0.02494054412841797, 0.025037824630737306, 0.024919040679931642, 0.024985599517822265, 0.024885248184204102, 0.025038848876953124, 0.02490880012512207, 0.024895488739013674, 0.02488115119934082, 0.025036800384521486, 0.025022464752197264, 0.024963071823120117, 0.024886272430419923, 0.024987648010253907, 0.02490572738647461, 
0.024635391235351564, 0.024607744216918945, 0.026006528854370117, 0.02670899200439453, 0.02657689666748047, 0.02671001625061035, 0.026850303649902343, 0.027072511672973632, 0.027471872329711915, 0.02737766456604004, 0.026818559646606444, 0.026613759994506835, 0.024738815307617186, 0.024621055603027343, 0.025164800643920897, 0.026514432907104493, 0.026637311935424804, 0.02669875144958496, 0.026795007705688476, 0.02798591995239258, 0.026827775955200195, 0.02691481590270996, 0.026702848434448243, 0.02692915153503418, 0.02675712013244629, 0.026789888381958008, 0.026714111328125, 0.02680012893676758, 0.026656768798828126, 0.026694656372070313, 0.026681343078613282, 0.026719232559204102, 0.02667519950866699, 0.026850303649902343, 0.025057279586791992, 0.024938495635986328, 0.02494054412841797, 0.025021440505981447, 0.025016319274902343, 0.027099136352539063, 0.026814464569091798, 0.02671820831298828, 0.026754047393798826, 0.026685440063476562, 0.02691481590270996, 0.026825727462768553, 0.024936447143554686, 0.02696499252319336, 0.02674278450012207, 0.026835968017578125, 0.026820608139038086, 0.026658815383911134, 0.02676121520996094, 0.026642431259155275, 0.02674073600769043, 0.0269803524017334, 0.026842111587524413, 0.026801151275634767, 0.025166847229003905, 0.02498150444030762, 0.025060352325439454, 0.025001983642578125, 0.025043968200683595, 0.0267509765625, 0.02649395179748535, 0.02696703910827637, 0.02694041633605957, 0.02515660858154297, 0.02528563117980957, 0.026630144119262695, 0.02672640037536621, 0.0267775993347168, 0.026694656372070313, 0.026641408920288087, 0.02666700744628906, 0.026669055938720702, 0.026653696060180664, 0.027382783889770508, 0.026816511154174806, 0.026693632125854492, 0.026788864135742187, 0.026671104431152344, 0.026637311935424804, 0.026637311935424804, 0.026636287689208983, 0.026604543685913085, 0.026976255416870116, 0.026934272766113283, 0.026694656372070313, 0.026784767150878908, 0.028012544631958007, 0.02716160011291504, 0.026830848693847657, 0.026786815643310546, 0.026674175262451173, 0.026654720306396484, 0.02671001625061035, 0.026619903564453123, 0.02675712013244629, 0.026704896926879884, 0.026627071380615236, 0.026680320739746095, 0.026557439804077147, 0.0265799674987793, 0.026612735748291014, 0.02653696060180664, 0.026550271987915038, 0.02657177543640137, 0.02657587242126465, 0.024952831268310546, 0.024871936798095705, 0.026779647827148437, 0.02669158363342285, 0.026832895278930666, 0.026600448608398438, 0.02656870460510254, 0.026612735748291014, 0.02676531219482422, 0.026645503997802734, 0.026603519439697267, 0.026681343078613282, 0.026645503997802734, 0.026236928939819337, 0.02657177543640137, 0.02655232048034668, 0.026635263442993166, 0.02654719924926758, 0.026641408920288087, 0.02656972885131836, 0.026704896926879884, 0.02488115119934082, 0.024975360870361327, 0.02494259262084961, 0.024929279327392577, 0.02650726318359375, 0.026802175521850585, 0.026570751190185548, 0.026572799682617186, 0.026586111068725587, 0.026583040237426758, 0.026838016510009766, 0.02672947120666504, 0.0265482234954834, 0.026643455505371092, 0.026621952056884765, 0.026599424362182617, 0.02707865524291992, 0.02671820831298828, 0.026570751190185548, 0.026630144119262695, 0.026657791137695314, 0.026653696060180664, 0.026652671813964843, 0.026583040237426758, 0.026583040237426758, 0.02759782409667969, 0.02754764747619629, 0.027585535049438475, 0.026860544204711914, 0.026688512802124024, 0.02490880012512207, 0.02489241600036621, 0.02490982437133789, 0.024975360870361327, 
0.024922111511230468, 0.02527027130126953, 0.02671001625061035, 0.0265799674987793, 0.026842111587524413, 0.026590208053588867, 0.026582015991210937, 0.026624000549316407, 0.026550271987915038, 0.026693632125854492, 0.026565631866455077, 0.0265482234954834, 0.02691584014892578, 0.026612735748291014, 0.026630144119262695, 0.026629119873046874, 0.026702848434448243, 0.026588159561157225, 0.026658815383911134, 0.02656358337402344, 0.02656358337402344, 0.02676531219482422, 0.026627071380615236, 0.026590208053588867, 0.026608640670776368, 0.026641408920288087, 0.0265533447265625, 0.026720256805419923, 0.026659839630126952, 0.024937471389770507, 0.0249169921875, 0.02494156837463379, 0.025000959396362304, 0.024885248184204102, 0.024896511077880858, 0.025037824630737306, 0.024945663452148437, 0.024964096069335938, 0.02493235206604004, 0.02494156837463379, 0.026638336181640625, 0.02675302314758301, 0.02492416000366211, 0.02511052894592285, 0.024989696502685548, 0.025006080627441408, 0.026583040237426758, 0.026611711502075194, 0.026645503997802734, 0.026649599075317384, 0.026665983200073243, 0.026846208572387696, 0.02511052894592285, 0.02509721565246582, 0.024910848617553712, 0.024988672256469727, 0.024921087265014647, 0.024995840072631836, 0.024976383209228514, 0.025001983642578125, 0.025044992446899415, 0.02498150444030762, 0.024962047576904296, 0.024964096069335938, 0.024990720748901366, 0.024929279327392577, 0.024928255081176756, 0.02489036750793457, 0.02634854316711426, 0.02670182418823242, 0.02490880012512207, 0.024868864059448242, 0.025582592010498048, 0.02676019287109375, 0.026617855072021485, 0.026558464050292968, 0.026665983200073243, 0.026648576736450196, 0.026653696060180664, 0.02670182418823242, 0.02656768035888672, 0.026694656372070313, 0.026594303131103517, 0.02654412841796875, 0.026677248001098632, 0.026672128677368165, 0.02660147285461426, 0.026629119873046874, 0.02674892807006836, 0.02651136016845703, 0.026598400115966796, 0.02674380874633789, 0.027062271118164064, 0.028107776641845703, 0.02693734359741211, 0.024972288131713868, 0.02553036880493164, 0.02755788803100586, 0.026770431518554686, 0.026638336181640625, 0.02677555274963379, 0.02656870460510254, 0.02655539131164551, 0.026599424362182617, 0.026589183807373046, 0.026513408660888672, 0.026608640670776368, 0.026583040237426758, 0.024893440246582032, 0.025012224197387696, 0.025986047744750978, 0.024995840072631836, 0.02493132781982422, 0.02495692825317383, 0.02495078468322754, 0.02490163230895996, 0.02587238311767578, 0.026628095626831053, 0.026627071380615236, 0.026658815383911134, 0.026673152923583986, 0.026693632125854492, 0.025042943954467774, 0.024951808929443358, 0.02490675163269043, 0.024968191146850584, 0.0248985595703125, 0.02494054412841797, 0.024808448791503908, 0.026268672943115235, 0.0248668155670166, 0.024946687698364257, 0.025033727645874023, 0.025007104873657225, 0.024963071823120117, 0.024927232742309572, 0.0249169921875, 0.02493951988220215, 0.024947711944580078, 0.025082880020141602, 0.024785919189453123, 0.024830976486206056, 0.025322496414184572, 0.026844160079956055, 0.026755071640014647, 0.0265850887298584, 0.026696704864501954, 0.026190847396850587, 0.026557439804077147, 0.024945663452148437, 0.025043968200683595, 0.02492313575744629, 0.024815616607666017, 0.024972288131713868, 0.025066495895385742, 0.02530816078186035, 0.024983552932739257, 0.024958976745605467, 0.026564607620239256, 0.026620927810668944, 0.02672640037536621, 0.026682367324829103, 0.026654720306396484, 0.026660863876342773, 
0.026690559387207033, 0.026665983200073243, 0.026711040496826172, 0.026658815383911134, 0.02496512031555176, 0.024961023330688475, 0.02511564826965332, 0.024996864318847657, 0.025013248443603517, 0.025044992446899415, 0.025100288391113282, 0.025176063537597656, 0.025195520401000978, 0.025013248443603517, 0.025033727645874023, 0.02502348709106445, 0.024986623764038086, 0.025012224197387696, 0.02694655990600586, 0.026730495452880858, 0.027157503128051756]",tokens/s,38.27142836531397,,,2,64,1,,, -4bit-awq-exllama-v2-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,2,2,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 304, in hf_raise_for_status - response.raise_for_status() - File ""/usr/local/lib/python3.10/dist-packages/requests/models.py"", line 1024, in raise_for_status - raise HTTPError(http_error_msg, response=self) -requests.exceptions.HTTPError: 404 Client Error: Not Found for url: https://huggingface.co/2/resolve/main/config.json - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1221, in hf_hub_download - return _hf_hub_download_to_cache_dir( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1325, in _hf_hub_download_to_cache_dir - _raise_on_head_call_error(head_call_error, force_download, local_files_only) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1823, in _raise_on_head_call_error - raise head_call_error - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1722, in _get_metadata_or_catch_error - metadata = get_hf_file_metadata(url=url, proxies=proxies, timeout=etag_timeout, headers=headers) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1645, in get_hf_file_metadata - r = _request_wrapper( - File 
""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 372, in _request_wrapper - response = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 396, in _request_wrapper - hf_raise_for_status(response) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status - raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-6694938c-72b9537548fba59f1bbe3a78;43e7f0b7-8cc4-4cf5-ad69-d17d58561d43) - -Repository Not Found for url: https://huggingface.co/2/resolve/main/config.json. -Please make sure you specified the correct `repo_id` and `repo_type`. -If you are trying to access a private or gated repo, make sure you are authenticated. - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 425, in cached_file - raise EnvironmentError( -OSError: 2 is not a local folder and is not a valid model identifier listed on 'https://huggingface.co/models' -If this is a private repository, make sure to pass a token having permission to this repo either by logging in with `huggingface-cli login` or by passing `token=` - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,2,64,1,,, -4bit-awq-exllama-v2-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,tiiuae/falcon-40b,tiiuae/falcon-40b,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File 
""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 613, in resolve_trust_remote_code - answer = input( -EOFError: EOF when reading a line - -During handling of the above exception, another exception occurred: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 231, in load_model_with_no_weights - self.create_no_weights_model() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 213, in create_no_weights_model - meta_model = self.automodel_class.from_config(self.pretrained_config) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 419, in from_config - trust_remote_code = resolve_trust_remote_code( - File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 626, in resolve_trust_remote_code - raise ValueError( -ValueError: The repository for tiiuae/falcon-40b contains custom code which must be executed to correctly load the model. You can inspect the repository content at https://hf.co/tiiuae/falcon-40b. -Please pass the argument `trust_remote_code=True` to allow custom code to be run. 
- -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,2,64,1,,, -4bit-awq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,EleutherAI/pythia-160m,EleutherAI/pythia-160m,cuda,0,42,,,True,,,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,gpt_neox,MB,918.245376,931.659776,0.0,301.989888,282.769408,s,1,7.3646982421875,7.3646982421875,0.0,7.3646982421875,7.3646982421875,7.3646982421875,7.3646982421875,[7.3646982421875],,kWh,7.047150143752232e-06,3.846593308687562e-06,9.884730130083064e-06,2.0778473582522858e-05,,MB,1504.39936,990.380032,0.0,341.835776,318.94528,s,17,0.1832941770553589,0.010782010415021113,0.00023133171940729585,0.010903615951538086,0.010979174613952637,0.010980947303771972,0.01098152042388916,"[0.01088304042816162, 0.010426303863525391, 0.01097811222076416, 0.010957152366638183, 0.010902655601501466, 0.01084553623199463, 0.010906815528869629, 0.010937824249267578, 0.010423968315124512, 0.010980768203735352, 0.01039731216430664, 0.010927616119384765, 0.010903615951538086, 0.010981663703918457, 0.010445664405822754, 0.01094972801208496, 0.010446399688720702]",tokens/s,23743.252894965666,kWh,1.255516523239244e-07,6.879574686001006e-08,2.50697744541998e-07,4.450451437259324e-07,tokens/kWh,575222544.519326,MB,1530.978304,1019.74016,0.0,369.098752,318.94784,s,17,10.064810119628905,0.592047654095818,0.00402771983942974,0.5927523803710938,0.5955052246093749,0.5965024780273438,0.5980191967773437,"[0.5920621948242187, 0.592616455078125, 0.5917846069335938, 0.593538818359375, 0.592769287109375, 0.5913892822265625, 0.5938687133789062, 0.5960285034179688, 0.5951563720703125, 0.5927523803710938, 0.591885009765625, 0.5937962036132812, 0.5983983764648437, 0.5911713256835938, 0.5812403564453125, 0.5932716674804688, 0.58308056640625]",tokens/s,106.4103532277555,kWh,6.875762107397876e-06,3.767571939282971e-06,1.0934458262399186e-05,2.1577792309080032e-05,tokens/kWh,2919668.4766258183,,s,1071,10.057448390960692,0.009390708114809238,0.0001767887031997023,0.009393216133117676,0.009469951629638672,0.00952833604812622,0.010086911869049071,"[0.009297887802124023, 0.009460736274719238, 0.009432064056396485, 0.00941055965423584, 0.009370623588562011, 0.009385984420776367, 0.009401344299316406, 0.009373696327209472, 0.009408512115478516, 0.009393216133117676, 0.009370559692382813, 0.00943513584136963, 0.00939724826812744, 0.00941055965423584, 0.009450495719909668, 0.009483263969421387, 0.009483263969421387, 0.009463808059692384, 0.00941875171661377, 0.009396224021911622, 0.00943616008758545, 0.00941158390045166, 0.009401344299316406, 0.009377792358398437, 0.009377792358398437, 0.009396224021911622, 0.009289728164672852, 0.009360383987426758, 0.009453568458557129, 0.009387007713317871, 0.009454591751098633, 0.009480192184448242, 0.009337856292724609, 0.009380864143371583, 0.00942796802520752, 0.009422911643981933, 0.009371583938598632, 0.0094269437789917, 0.009409536361694336, 0.009437184333801269, 0.009455615997314454, 0.009388031959533692, 0.009442303657531738, 
0.009417728424072265, 0.00941977596282959, 0.009404416084289552, 0.00923852825164795, 0.009183232307434081, 0.009256959915161133, 0.009263104438781738, 0.009235456466674804, 0.009389056205749511, 0.009480192184448242, 0.00927948760986328, 0.009388031959533692, 0.009399295806884766, 0.009359359741210938, 0.009434111595153808, 0.009392127990722657, 0.009358336448669433, 0.009414655685424805, 0.009375743865966797, 0.009372672080993653, 0.00941875171661377, 0.0094136323928833, 0.009369600296020507, 0.009389056205749511, 0.009424896240234374, 0.009370623588562011, 0.009381888389587402, 0.009385984420776367, 0.009379839897155762, 0.009392127990722657, 0.009388031959533692, 0.009358336448669433, 0.00941260814666748, 0.009398271560668945, 0.009375743865966797, 0.009398271560668945, 0.009280511856079102, 0.009379839897155762, 0.00941055965423584, 0.00940236759185791, 0.009453568458557129, 0.009422847747802734, 0.009378815650939941, 0.009406463623046875, 0.00941260814666748, 0.009384960174560546, 0.009346048355102539, 0.00941260814666748, 0.009350144386291503, 0.00932044792175293, 0.00940339183807373, 0.009355263710021973, 0.009391103744506836, 0.009768959999084472, 0.009522175788879395, 0.009475071907043458, 0.009387007713317871, 0.009382911682128906, 0.009406463623046875, 0.009387007713317871, 0.009395199775695801, 0.009387071609497071, 0.009294783592224121, 0.009376768112182618, 0.009458687782287598, 0.009379839897155762, 0.009409536361694336, 0.009409536361694336, 0.009385984420776367, 0.009384960174560546, 0.00941158390045166, 0.009326592445373535, 0.009425919532775879, 0.009390080451965332, 0.00941260814666748, 0.009444352149963378, 0.009381888389587402, 0.009383935928344727, 0.009395199775695801, 0.009406463623046875, 0.009391103744506836, 0.009459712028503419, 0.009376768112182618, 0.009350144386291503, 0.009438207626342773, 0.009383935928344727, 0.009370623588562011, 0.009375743865966797, 0.009312288284301758, 0.009373663902282715, 0.0094136323928833, 0.009473024368286133, 0.00940544033050537, 0.0094269437789917, 0.009381888389587402, 0.009370623588562011, 0.009383935928344727, 0.009389056205749511, 0.00940339183807373, 0.0094136323928833, 0.009360383987426758, 0.009361408233642577, 0.00941055965423584, 0.009404416084289552, 0.009452544212341308, 0.009395199775695801, 0.009383935928344727, 0.009447423934936524, 0.009341952323913574, 0.009377792358398437, 0.009457663536071777, 0.009327615737915039, 0.009374719619750976, 0.00942899227142334, 0.009329664230346679, 0.009308159828186035, 0.009376768112182618, 0.009370623588562011, 0.009378815650939941, 0.009395199775695801, 0.009370623588562011, 0.009359359741210938, 0.009408512115478516, 0.009348095893859864, 0.00942080020904541, 0.00939417552947998, 0.009391103744506836, 0.009401344299316406, 0.009365504264831542, 0.009360383987426758, 0.009408512115478516, 0.009375743865966797, 0.009366527557373047, 0.00942899227142334, 0.009343999862670899, 0.009368576049804688, 0.009437184333801269, 0.009333760261535644, 0.009374719619750976, 0.009444352149963378, 0.009376768112182618, 0.009346048355102539, 0.009415679931640625, 0.009373696327209472, 0.009395199775695801, 0.009400320053100587, 0.009391167640686034, 0.009382847785949708, 0.009487360000610352, 0.009391103744506836, 0.009387007713317871, 0.00942796802520752, 0.009396224021911622, 0.009371647834777832, 0.00942899227142334, 0.009365504264831542, 0.009372672080993653, 0.009590784072875976, 0.00942182445526123, 0.009463808059692384, 0.009414655685424805, 0.009407487869262696, 0.00943513584136963, 
0.00939417552947998, 0.009396224021911622, 0.00944438362121582, 0.009355232238769532, 0.00934502410888672, 0.009465855598449707, 0.009391103744506836, 0.009407487869262696, 0.00943513584136963, 0.009409536361694336, 0.009461759567260742, 0.00942080020904541, 0.009415679931640625, 0.009469951629638672, 0.009406463623046875, 0.00940339183807373, 0.00949350357055664, 0.00940339183807373, 0.00941055965423584, 0.00939417552947998, 0.009358336448669433, 0.009342975616455078, 0.009409536361694336, 0.009384960174560546, 0.009361408233642577, 0.009431039810180664, 0.009448448181152343, 0.00942080020904541, 0.00940339183807373, 0.009369600296020507, 0.009326592445373535, 0.009329664230346679, 0.009358336448669433, 0.009714688301086426, 0.009469951629638672, 0.009407487869262696, 0.00934502410888672, 0.009361408233642577, 0.009349120140075684, 0.009510911941528321, 0.009409536361694336, 0.009388031959533692, 0.009461759567260742, 0.009422911643981933, 0.009392064094543457, 0.009469951629638672, 0.00936240005493164, 0.009329664230346679, 0.009385984420776367, 0.009492480278015136, 0.009360383987426758, 0.009444352149963378, 0.00940544033050537, 0.009382911682128906, 0.009377792358398437, 0.009364480018615723, 0.00930611228942871, 0.009365504264831542, 0.009368576049804688, 0.009422847747802734, 0.009352191925048828, 0.009356287956237793, 0.00941158390045166, 0.009339903831481934, 0.009302016258239745, 0.00940339183807373, 0.009499648094177245, 0.009375743865966797, 0.009414655685424805, 0.009375743865966797, 0.009352191925048828, 0.009400320053100587, 0.00939724826812744, 0.009352191925048828, 0.009380864143371583, 0.009326592445373535, 0.009350144386291503, 0.009361408233642577, 0.00940236759185791, 0.009577471733093262, 0.009392160415649414, 0.009400287628173827, 0.009441344261169434, 0.009292736053466796, 0.009391200065612794, 0.009404319763183594, 0.00937168025970459, 0.009443296432495116, 0.009415679931640625, 0.009374719619750976, 0.009379839897155762, 0.009409536361694336, 0.009376768112182618, 0.009430015563964844, 0.009466879844665528, 0.009439231872558594, 0.009430015563964844, 0.009356287956237793, 0.00935321617126465, 0.009848832130432129, 0.009553919792175293, 0.009450495719909668, 0.009460736274719238, 0.009378815650939941, 0.009445376396179199, 0.009391103744506836, 0.009393152236938476, 0.009376768112182618, 0.009367551803588867, 0.009317376136779786, 0.009337856292724609, 0.009416704177856445, 0.009346048355102539, 0.009331711769104004, 0.009515007972717286, 0.00940544033050537, 0.009365504264831542, 0.00942182445526123, 0.009366527557373047, 0.009384960174560546, 0.0094269437789917, 0.00943513584136963, 0.009415679931640625, 0.009400320053100587, 0.009342975616455078, 0.009417728424072265, 0.00932147216796875, 0.009315327644348144, 0.009407487869262696, 0.009358336448669433, 0.009350144386291503, 0.009475071907043458, 0.009409536361694336, 0.009383935928344727, 0.00942182445526123, 0.009226240158081055, 0.009378815650939941, 0.009614336013793945, 0.009498623847961426, 0.009409536361694336, 0.009404416084289552, 0.009385984420776367, 0.009416704177856445, 0.00919961643218994, 0.00927948760986328, 0.009340928077697755, 0.00940447998046875, 0.009347007751464844, 0.009432064056396485, 0.009355263710021973, 0.009390080451965332, 0.009371647834777832, 0.009363455772399902, 0.009369600296020507, 0.009392127990722657, 0.009356287956237793, 0.009368576049804688, 0.009407487869262696, 0.009395199775695801, 0.009350144386291503, 0.009384960174560546, 0.009288703918457031, 0.009296895980834961, 
0.009383935928344727, 0.009377792358398437, 0.009356287956237793, 0.00942080020904541, 0.009359359741210938, 0.009373696327209472, 0.009390080451965332, 0.0094136323928833, 0.009342975616455078, 0.009366527557373047, 0.009364480018615723, 0.009400320053100587, 0.009376768112182618, 0.009339903831481934, 0.00941260814666748, 0.009476096153259277, 0.009475071907043458, 0.009507840156555175, 0.009463808059692384, 0.009398271560668945, 0.00942899227142334, 0.009445376396179199, 0.009380864143371583, 0.009522175788879395, 0.00932249641418457, 0.009461759567260742, 0.009399295806884766, 0.009367551803588867, 0.009377792358398437, 0.009393152236938476, 0.009395199775695801, 0.009363455772399902, 0.00940236759185791, 0.009376768112182618, 0.009408512115478516, 0.00942899227142334, 0.00940339183807373, 0.009393152236938476, 0.009508864402770996, 0.009416704177856445, 0.009783295631408692, 0.009448448181152343, 0.009432095527648926, 0.00946070384979248, 0.009448448181152343, 0.009407487869262696, 0.009415679931640625, 0.009398271560668945, 0.0094269437789917, 0.009478143692016602, 0.009377920150756835, 0.009396096229553223, 0.00941260814666748, 0.00941977596282959, 0.009473024368286133, 0.00942182445526123, 0.009401344299316406, 0.009477120399475097, 0.009417728424072265, 0.009358336448669433, 0.009473024368286133, 0.009363455772399902, 0.009329664230346679, 0.00940339183807373, 0.009362431526184082, 0.009335807800292969, 0.009399295806884766, 0.009424896240234374, 0.009401344299316406, 0.00942899227142334, 0.00942899227142334, 0.009453568458557129, 0.009802751541137696, 0.009537568092346191, 0.00967574405670166, 0.009760767936706542, 0.010080256462097169, 0.009526304244995117, 0.00959280014038086, 0.009494527816772461, 0.009393152236938476, 0.009440256118774413, 0.009472000122070312, 0.009404416084289552, 0.009588735580444336, 0.009424896240234374, 0.009383935928344727, 0.009446399688720703, 0.0094136323928833, 0.009360383987426758, 0.009415679931640625, 0.00939724826812744, 0.009458720207214355, 0.00947606372833252, 0.009382911682128906, 0.009401344299316406, 0.009445376396179199, 0.009406463623046875, 0.009572352409362793, 0.00942796802520752, 0.009504768371582031, 0.009385984420776367, 0.009346176147460937, 0.009297792434692382, 0.009424896240234374, 0.009376768112182618, 0.009401344299316406, 0.00942899227142334, 0.009346048355102539, 0.009395199775695801, 0.00941875171661377, 0.009364480018615723, 0.00941055965423584, 0.009431039810180664, 0.00941260814666748, 0.009530367851257325, 0.009401344299316406, 0.00943616008758545, 0.009488384246826171, 0.00941977596282959, 0.00942899227142334, 0.009401344299316406, 0.009437184333801269, 0.009389056205749511, 0.009496576309204101, 0.009414655685424805, 0.009407487869262696, 0.009408512115478516, 0.009377792358398437, 0.009348223686218262, 0.009389951705932617, 0.009461759567260742, 0.00942899227142334, 0.009424896240234374, 0.009373696327209472, 0.008838144302368164, 0.009138175964355469, 0.009407487869262696, 0.009361408233642577, 0.00939724826812744, 0.009374719619750976, 0.009390080451965332, 0.00940339183807373, 0.009409536361694336, 0.009790464401245117, 0.00961638355255127, 0.009448448181152343, 0.009572352409362793, 0.009429023742675782, 0.009431008338928223, 0.00942182445526123, 0.00942796802520752, 0.009404416084289552, 0.00942899227142334, 0.009374719619750976, 0.009356287956237793, 0.009430015563964844, 0.009362431526184082, 0.009363455772399902, 0.009400320053100587, 0.009388031959533692, 0.009401344299316406, 0.009393152236938476, 
0.009432064056396485, 0.009434111595153808, 0.009408512115478516, 0.009393152236938476, 0.009423871994018555, 0.009400320053100587, 0.009378815650939941, 0.009430015563964844, 0.009377792358398437, 0.0094136323928833, 0.009445376396179199, 0.009349120140075684, 0.009351167678833008, 0.009396224021911622, 0.00939417552947998, 0.009380864143371583, 0.009445376396179199, 0.009480192184448242, 0.00939724826812744, 0.00931942367553711, 0.009629695892333985, 0.009508864402770996, 0.009454591751098633, 0.009459744453430176, 0.009428959846496582, 0.009404416084289552, 0.010298368453979492, 0.010181632041931152, 0.00961843204498291, 0.00942182445526123, 0.009462783813476563, 0.00941875171661377, 0.009389056205749511, 0.00941875171661377, 0.00941875171661377, 0.009358336448669433, 0.009392127990722657, 0.009452544212341308, 0.009400320053100587, 0.009395199775695801, 0.009404416084289552, 0.00940236759185791, 0.009395199775695801, 0.009455615997314454, 0.009313280105590821, 0.009362431526184082, 0.009442303657531738, 0.00941055965423584, 0.009396224021911622, 0.009255935668945312, 0.00925596809387207, 0.009412575721740722, 0.00940236759185791, 0.00942905616760254, 0.009469887733459473, 0.009401344299316406, 0.009252863883972168, 0.009232383728027344, 0.009260031700134277, 0.009254912376403808, 0.009281536102294922, 0.009417728424072265, 0.009437184333801269, 0.009449472427368164, 0.009395199775695801, 0.009381888389587402, 0.009439231872558594, 0.009414655685424805, 0.009552895545959473, 0.00950169563293457, 0.009393152236938476, 0.009401344299316406, 0.009572352409362793, 0.009355263710021973, 0.009404416084289552, 0.009414655685424805, 0.00935321617126465, 0.009424896240234374, 0.009374719619750976, 0.009378815650939941, 0.009449472427368164, 0.009423871994018555, 0.009390080451965332, 0.009432064056396485, 0.009384960174560546, 0.009423871994018555, 0.009568256378173828, 0.009462783813476563, 0.009416704177856445, 0.009399295806884766, 0.009415679931640625, 0.00943513584136963, 0.009395199775695801, 0.00935321617126465, 0.009459712028503419, 0.00961638355255127, 0.00941977596282959, 0.009323519706726074, 0.008862719535827637, 0.008820735931396484, 0.008864768028259277, 0.008868864059448242, 0.008880127906799316, 0.0088340482711792, 0.0088340482711792, 0.009668607711791993, 0.010497023582458496, 0.009687040328979492, 0.009662464141845703, 0.0103372802734375, 0.009515007972717286, 0.00941055965423584, 0.009370623588562011, 0.009432064056396485, 0.00940339183807373, 0.009656352043151855, 0.009440223693847656, 0.009393152236938476, 0.009432064056396485, 0.009370623588562011, 0.009362431526184082, 0.009414655685424805, 0.009317376136779786, 0.009245696067810059, 0.009481247901916503, 0.009454560279846192, 0.009378815650939941, 0.00942182445526123, 0.009450495719909668, 0.009367551803588867, 0.009485312461853027, 0.00943513584136963, 0.009350144386291503, 0.00935321617126465, 0.009347071647644043, 0.009370623588562011, 0.009375743865966797, 0.009357312202453612, 0.009406463623046875, 0.009414655685424805, 0.009375743865966797, 0.009416704177856445, 0.009387007713317871, 0.009360383987426758, 0.009393152236938476, 0.009372672080993653, 0.0094136323928833, 0.009392127990722657, 0.009443327903747559, 0.009312255859375, 0.009379839897155762, 0.009374719619750976, 0.009380864143371583, 0.009434144020080566, 0.009466848373413085, 0.00941260814666748, 0.009484288215637206, 0.00939724826812744, 0.00942080020904541, 0.009414655685424805, 0.009393152236938476, 0.009350144386291503, 0.009358336448669433, 
0.009408512115478516, 0.009385984420776367, 0.009367551803588867, 0.009404416084289552, 0.009446399688720703, 0.009379839897155762, 0.009433088302612304, 0.00928767967224121, 0.009243647575378417, 0.009195520401000976, 0.009473024368286133, 0.009443327903747559, 0.00929587173461914, 0.009425919532775879, 0.009415679931640625, 0.009374719619750976, 0.009445376396179199, 0.009406463623046875, 0.009404416084289552, 0.009446399688720703, 0.009254912376403808, 0.00921497631072998, 0.00923750400543213, 0.009194496154785157, 0.00921292781829834, 0.00942080020904541, 0.00937782382965088, 0.010189791679382325, 0.009652223587036133, 0.009543680191040039, 0.010663935661315918, 0.009745408058166503, 0.009564160346984863, 0.009455615997314454, 0.009395263671875, 0.009422783851623534, 0.009369600296020507, 0.00941977596282959, 0.009340928077697755, 0.009337856292724609, 0.009510911941528321, 0.009380864143371583, 0.009370623588562011, 0.009396224021911622, 0.009357312202453612, 0.009361408233642577, 0.009367551803588867, 0.009325568199157714, 0.009347071647644043, 0.009396224021911622, 0.009362431526184082, 0.00939417552947998, 0.009396224021911622, 0.009391103744506836, 0.009354240417480468, 0.009430015563964844, 0.009355263710021973, 0.009406463623046875, 0.009382911682128906, 0.00932147216796875, 0.00932044792175293, 0.009409536361694336, 0.009362431526184082, 0.009363455772399902, 0.009264127731323242, 0.00934502410888672, 0.009438207626342773, 0.009416704177856445, 0.009373696327209472, 0.010818559646606446, 0.009459712028503419, 0.009367551803588867, 0.00941875171661377, 0.009416704177856445, 0.009451519966125489, 0.009439231872558594, 0.009408512115478516, 0.009459712028503419, 0.009425919532775879, 0.009463808059692384, 0.009450528144836426, 0.009359328269958497, 0.009401344299316406, 0.009430015563964844, 0.009351167678833008, 0.00939417552947998, 0.009469951629638672, 0.009347071647644043, 0.009414655685424805, 0.009442303657531738, 0.009354240417480468, 0.00935321617126465, 0.009440256118774413, 0.00941977596282959, 0.00942796802520752, 0.009384960174560546, 0.00991436767578125, 0.011016192436218262, 0.010722304344177246, 0.00960307216644287, 0.009641983985900878, 0.009441280364990234, 0.009395199775695801, 0.009449472427368164, 0.00939724826812744, 0.009388031959533692, 0.009376768112182618, 0.009489439964294433, 0.009438176155090332, 0.00943616008758545, 0.009389056205749511, 0.009478143692016602, 0.009424896240234374, 0.00941260814666748, 0.009414655685424805, 0.009388031959533692, 0.009384960174560546, 0.009372672080993653, 0.009382911682128906, 0.009442303657531738, 0.009415679931640625, 0.009399295806884766, 0.009388031959533692, 0.00941875171661377, 0.009362431526184082, 0.009359359741210938, 0.00940339183807373, 0.009395199775695801, 0.009354240417480468, 0.009445376396179199, 0.00943513584136963, 0.009350144386291503, 0.009276415824890137, 0.00942899227142334, 0.009410592079162597, 0.009445343971252441, 0.009407487869262696, 0.009401344299316406, 0.009378815650939941, 0.00940544033050537, 0.009407487869262696, 0.009371647834777832, 0.009370623588562011, 0.00942182445526123, 0.009388031959533692, 0.009358336448669433, 0.009461759567260742, 0.009374719619750976, 0.00934502410888672, 0.009399295806884766, 0.009362496376037598, 0.009375679969787597, 0.009382911682128906, 0.009373696327209472, 0.009377792358398437, 0.009365504264831542, 0.009457663536071777, 0.00931942367553711, 0.009352191925048828, 0.009333760261535644, 0.009309184074401856, 0.009365504264831542, 0.009349120140075684, 
0.009337856292724609, 0.009384960174560546, 0.009365535736083985, 0.009415648460388183, 0.009377792358398437, 0.00932147216796875, 0.00936963176727295, 0.00943612766265869, 0.00942182445526123, 0.009383935928344727, 0.009331711769104004, 0.009379839897155762, 0.009341952323913574, 0.009370623588562011, 0.009377792358398437, 0.00941875171661377, 0.009357312202453612, 0.009349120140075684, 0.009408512115478516, 0.009252863883972168, 0.009334783554077148, 0.009375743865966797, 0.009367551803588867, 0.009375743865966797, 0.008858624458312989, 0.008840191841125488, 0.008816639900207519, 0.008740863800048827, 0.008803327560424805, 0.008840191841125488, 0.008880127906799316, 0.008844287872314453, 0.008739839553833008, 0.008730624198913574, 0.008722432136535644, 0.008699904441833496, 0.008871935844421386, 0.008854559898376465, 0.008738783836364747, 0.008725503921508788, 0.008756223678588868, 0.00882585620880127, 0.00939417552947998, 0.009391103744506836, 0.009422847747802734, 0.009377792358398437, 0.009387007713317871, 0.009484288215637206, 0.009282560348510742, 0.009210880279541016, 0.009217023849487305, 0.00920473575592041, 0.009206784248352052, 0.009243647575378417, 0.009310208320617675, 0.009309184074401856, 0.009465855598449707, 0.009355263710021973, 0.009350144386291503, 0.00941875171661377, 0.009396224021911622, 0.009378815650939941, 0.009425919532775879, 0.009416704177856445, 0.00935321617126465, 0.009401344299316406, 0.009361408233642577, 0.009400320053100587, 0.009437184333801269, 0.009378815650939941, 0.009390080451965332, 0.009401344299316406, 0.009378815650939941, 0.00941977596282959, 0.010085375785827636, 0.009565183639526367, 0.009467904090881347, 0.009350144386291503, 0.009380864143371583, 0.00942796802520752, 0.00941158390045166, 0.00929798412322998, 0.009433024406433106, 0.009377792358398437, 0.00936143970489502, 0.009414624214172364, 0.009373696327209472, 0.009362431526184082, 0.009385984420776367, 0.009372672080993653, 0.009365504264831542, 0.00934502410888672, 0.009363455772399902, 0.00937171173095703, 0.00935929584503174, 0.009356287956237793, 0.009395199775695801, 0.009351167678833008, 0.009378815650939941, 0.009409536361694336, 0.00935321617126465, 0.009361408233642577, 0.009383935928344727, 0.009361408233642577, 0.009351167678833008, 0.009417728424072265, 0.00939724826812744, 0.009233407974243164, 0.009437184333801269, 0.009374719619750976, 0.009634816169738769, 0.009643008232116699, 0.01030246353149414, 0.010090496063232422, 0.009474111557006835, 0.009500608444213868, 0.009369600296020507, 0.00940339183807373, 0.009499711990356445, 0.00932038402557373, 0.009373696327209472, 0.009395199775695801, 0.009342975616455078, 0.009373696327209472, 0.009382911682128906, 0.009333760261535644, 0.00932863998413086, 0.009498623847961426, 0.009331711769104004, 0.009364480018615723, 0.009367551803588867, 0.009369600296020507, 0.00952012825012207, 0.009262080192565919, 0.009325568199157714, 0.009381888389587402, 0.009358336448669433, 0.009309184074401856, 0.009396224021911622, 0.009360383987426758, 0.009313280105590821, 0.009370623588562011, 0.009366592407226563, 0.009388992309570313, 0.009400320053100587, 0.00933683204650879, 0.009368576049804688, 0.009401344299316406, 0.009367551803588867, 0.009423871994018555, 0.008831999778747558, 0.008838144302368164, 0.008878080368041993, 0.008837151527404784, 0.00885142421722412, 0.008819711685180665, 0.008904704093933105, 0.008853504180908203, 0.008863743782043456, 0.00881766414642334, 0.008790016174316406, 0.00890060806274414, 
0.008841216087341308, 0.008864768028259277, 0.008840191841125488, 0.009182208061218262, 0.009380864143371583, 0.009350144386291503, 0.009384991645812989, 0.009364447593688964, 0.009369600296020507, 0.00939724826812744, 0.009361408233642577, 0.00933683204650879, 0.009335807800292969, 0.009325599670410157, 0.009327584266662598, 0.009385984420776367, 0.009695232391357422, 0.009400320053100587, 0.00940236759185791, 0.009362431526184082, 0.009597951889038087, 0.009487360000610352, 0.00935321617126465, 0.009382911682128906, 0.009275391578674316, 0.009225215911865235, 0.009359359741210938, 0.009335807800292969, 0.009352191925048828, 0.009360383987426758, 0.009340928077697755, 0.009365504264831542, 0.009355263710021973, 0.009369600296020507, 0.009364480018615723, 0.009423871994018555, 0.009393152236938476, 0.00941875171661377, 0.009400320053100587, 0.009338879585266113, 0.009355263710021973, 0.009360383987426758, 0.009332736015319825, 0.00932147216796875, 0.00935321617126465, 0.00932044792175293, 0.009383968353271484, 0.009384927749633789, 0.009356287956237793, 0.009474047660827637, 0.00941260814666748]",tokens/s,106.4882421830352,,,2,64,1,,, -4bit-awq-exllama-v2-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,meta-llama/Meta-Llama-3-70B,meta-llama/Meta-Llama-3-70B,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - self.load_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3904, in from_pretrained - dispatch_model(model, **device_map_kwargs) - File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 489, in dispatch_model - model.to(device) - File 
""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 2796, in to - return super().to(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1173, in to - return self._apply(convert) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 779, in _apply - module._apply(fn) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 779, in _apply - module._apply(fn) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 779, in _apply - module._apply(fn) - [Previous line repeated 2 more times] - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 853, in _apply - self._buffers[key] = fn(buf) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1159, in convert - return t.to( -torch.cuda.OutOfMemoryError: CUDA out of memory. Tried to allocate 20.00 MiB. GPU - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,2,64,1,,, -4bit-awq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,Qwen/Qwen1.5-14B,Qwen/Qwen1.5-14B,cuda,0,42,,,True,,,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,qwen2,MB,8219.881472,12512.13312,0.0,11882.463232,11315.947008,s,1,13.7016572265625,13.7016572265625,0.0,13.7016572265625,13.7016572265625,13.7016572265625,13.7016572265625,[13.7016572265625],,kWh,8.145343268193857e-05,4.462736359623538e-05,0.0001601123503121027,0.00028619314659027666,,MB,3941.15072,12533.10464,0.0,11884.560384,11070.3104,s,10,2.0259765777587893,0.20259765777587893,9.166497495298775e-05,0.2025743408203125,0.20263211669921874,0.2027450973510742,0.2028354818725586,"[0.20285807800292968, 0.20259625244140625, 0.20250956726074218, 0.20260426330566406, 0.20258163452148437, 0.2026070098876953, 0.2025617218017578, 0.20255941772460936, 0.20253158569335938, 0.20256704711914061]",tokens/s,1263.5881520565097,kWh,2.39577265005543e-06,1.3125509364551301e-06,1.0573230680797074e-05,1.4281554267307635e-05,tokens/kWh,17925219.84711551,MB,3945.414656,12535.201792,0.0,11886.657536,11070.31296,s,10,21.333620361328126,2.1333620361328127,0.004120794425977714,2.132803100585938,2.138297314453125,2.1403075927734374,2.1419158154296873,"[2.13287548828125, 2.128518310546875, 2.132730712890625, 2.13516650390625, 2.13110595703125, 2.1378505859375, 2.14231787109375, 2.13446826171875, 2.130450927734375, 2.1281357421875]",tokens/s,29.530852678995515,kWh,2.516488298508415e-05,1.379151686017829e-05,7.789781231820836e-05,0.0001168542121634708,tokens/kWh,539133.3254796793,,s,630,21.331531826019287,0.03385957432701474,0.0003996478315780102,0.03376844787597656,0.03433687133789062,0.03468595218658447,0.03538822032928467,"[0.033955841064453124, 0.033446910858154294, 0.03352166366577149, 0.033713153839111325, 0.033737728118896484, 0.03374899291992187, 0.034336769104003906, 0.03374387359619141, 0.03403366470336914, 0.03399884796142578, 0.03379814529418945, 0.0336445426940918, 0.0338155517578125, 
0.033670143127441404, 0.03373056030273437, 0.0341104621887207, 0.033857536315917966, 0.03346124649047852, 0.033854465484619144, 0.03376947021484375, 0.03382374572753906, 0.033783809661865234, 0.03402547073364258, 0.033807361602783206, 0.033827838897705076, 0.03380121612548828, 0.03403571319580078, 0.03368755340576172, 0.033734657287597655, 0.0336445426940918, 0.034116607666015625, 0.03377971267700195, 0.03377356719970703, 0.03391795349121094, 0.03384627151489258, 0.033995777130126956, 0.03443302536010742, 0.03386265563964844, 0.03461939239501953, 0.03472281646728516, 0.03426508712768555, 0.03367116928100586, 0.03381862258911133, 0.03396710586547851, 0.03367628860473633, 0.03391897583007813, 0.033734657287597655, 0.03379814529418945, 0.033783809661865234, 0.033928192138671875, 0.03393535995483398, 0.03375001525878906, 0.033718273162841796, 0.03367424011230469, 0.03381657409667969, 0.03394559860229492, 0.033620990753173825, 0.033285118103027346, 0.033723392486572266, 0.034170879364013675, 0.03406643295288086, 0.03347455978393555, 0.0337367057800293, 0.03417497634887695, 0.033653759002685545, 0.03360768127441406, 0.03367424011230469, 0.033732608795166014, 0.033842174530029294, 0.03358003234863281, 0.033567745208740236, 0.033710079193115236, 0.03378790283203125, 0.03352371215820313, 0.03362201690673828, 0.03362918472290039, 0.033870849609375, 0.03332505416870117, 0.03365785598754883, 0.033708030700683594, 0.03363532638549805, 0.03363840103149414, 0.03326668930053711, 0.033716224670410154, 0.03347967910766601, 0.03366400146484375, 0.03445555114746094, 0.03511500930786133, 0.03420876693725586, 0.033888256072998044, 0.033909759521484374, 0.03433779144287109, 0.033670143127441404, 0.033898494720458985, 0.033808383941650394, 0.03382988739013672, 0.03375820922851563, 0.03372032165527344, 0.03423846435546875, 0.03361280059814453, 0.033805313110351565, 0.03377766418457031, 0.033791999816894534, 0.033584129333496096, 0.03383705520629883, 0.03380223846435547, 0.03367116928100586, 0.033797119140625, 0.034016254425048825, 0.03370086288452148, 0.03376128005981445, 0.03387596893310547, 0.033304576873779294, 0.03372236633300781, 0.03363020706176758, 0.03368243026733399, 0.03377766418457031, 0.0337448959350586, 0.0339046401977539, 0.03376639938354492, 0.03398451232910156, 0.034336769104003906, 0.033807361602783206, 0.03371212768554688, 0.03363840103149414, 0.03335270309448242, 0.033895423889160156, 0.03363532638549805, 0.03379404830932617, 0.03367628860473633, 0.03406131362915039, 0.03442483139038086, 0.03453235244750977, 0.034010112762451174, 0.03378483200073242, 0.03379097747802735, 0.034976768493652347, 0.03346944046020508, 0.033688575744628906, 0.03367731094360352, 0.03375820922851563, 0.03416064071655273, 0.033807361602783206, 0.033732608795166014, 0.034528255462646484, 0.03364044952392578, 0.0339046401977539, 0.03344384002685547, 0.033568767547607424, 0.03399884796142578, 0.03379814529418945, 0.034219009399414066, 0.033805313110351565, 0.03376435089111328, 0.03390156936645508, 0.03380633544921875, 0.03384012985229492, 0.033658878326416015, 0.033364990234375, 0.033704959869384765, 0.03356671905517578, 0.03418214416503906, 0.03374899291992187, 0.03376844787597656, 0.03410124969482422, 0.03361587142944336, 0.033772544860839845, 0.03303014373779297, 0.033721343994140625, 0.03388313674926758, 0.03335782241821289, 0.033939456939697264, 0.03474739074707031, 0.03395993423461914, 0.034184192657470705, 0.03363430404663086, 0.033860607147216795, 0.03376230239868164, 0.03377664184570312, 0.033686527252197264, 
0.03407974243164062, 0.03398963165283203, 0.03360153579711914, 0.03367628860473633, 0.03376639938354492, 0.03395686340332031, 0.033740798950195314, 0.03377664184570312, 0.033808383941650394, 0.03377356719970703, 0.03365683364868164, 0.03355750274658203, 0.03364761734008789, 0.033588222503662106, 0.033926143646240234, 0.03382476806640625, 0.03379097747802735, 0.034735103607177735, 0.033508350372314456, 0.0335912971496582, 0.03382374572753906, 0.033972225189208984, 0.03364556884765625, 0.033811454772949216, 0.034830337524414064, 0.03530854415893555, 0.033939456939697264, 0.03337830352783203, 0.033751041412353515, 0.0338155517578125, 0.033704959869384765, 0.03367116928100586, 0.03376639938354492, 0.0335728645324707, 0.0339159049987793, 0.03380633544921875, 0.03367116928100586, 0.033691646575927735, 0.03382067108154297, 0.033737728118896484, 0.03369472122192383, 0.03428659057617187, 0.03402751922607422, 0.03372851181030274, 0.03463884735107422, 0.034358272552490236, 0.03374182510375977, 0.034492416381835936, 0.033770496368408204, 0.0338155517578125, 0.033888256072998044, 0.03427328109741211, 0.03403059387207031, 0.03385548782348633, 0.034146305084228515, 0.03364761734008789, 0.03377459335327149, 0.033770496368408204, 0.033834014892578125, 0.03382985687255859, 0.03376537704467773, 0.03371724700927734, 0.03381657409667969, 0.033617919921875, 0.03506175994873047, 0.03385651016235352, 0.03374899291992187, 0.03371929550170898, 0.0337151985168457, 0.033783809661865234, 0.03363840103149414, 0.03367935943603516, 0.033772544860839845, 0.03376639938354492, 0.033721343994140625, 0.033879039764404296, 0.033544193267822264, 0.03376537704467773, 0.03482419204711914, 0.03386368179321289, 0.03379507064819336, 0.03398860931396484, 0.03395993423461914, 0.033811454772949216, 0.034111488342285154, 0.033841152191162106, 0.033672191619873046, 0.033944576263427735, 0.03535871887207031, 0.034253822326660154, 0.03400294494628906, 0.03379507064819336, 0.0335912971496582, 0.033772544860839845, 0.03351039886474609, 0.03370393753051758, 0.03372748947143555, 0.03397119903564453, 0.03418521499633789, 0.03367628860473633, 0.034049022674560545, 0.03343667221069336, 0.03385139083862305, 0.03431628799438476, 0.03400806427001953, 0.03373158264160156, 0.03410432052612305, 0.03402444839477539, 0.0337151985168457, 0.033446910858154294, 0.03367833709716797, 0.03378176116943359, 0.03362611389160156, 0.03363840103149414, 0.033723392486572266, 0.033587200164794925, 0.03360768127441406, 0.03378995132446289, 0.0338524169921875, 0.03411455917358398, 0.03396710586547851, 0.033576961517333984, 0.033186817169189455, 0.03315814590454102, 0.03353702545166016, 0.03359436798095703, 0.0336629753112793, 0.034321407318115234, 0.03361075210571289, 0.03402956771850586, 0.03365683364868164, 0.03345510482788086, 0.03376537704467773, 0.03358924865722656, 0.03390156936645508, 0.03539148712158203, 0.03516723251342774, 0.03401420974731445, 0.03377971267700195, 0.03380121612548828, 0.03361075210571289, 0.03359334564208984, 0.03376844787597656, 0.03382374572753906, 0.0333568000793457, 0.03342131042480469, 0.033821697235107424, 0.03363430404663086, 0.03382070541381836, 0.03371209716796875, 0.03372236633300781, 0.033865726470947266, 0.033693695068359376, 0.03408588790893555, 0.033723392486572266, 0.03367424011230469, 0.033732608795166014, 0.0339128303527832, 0.03379404830932617, 0.03454873657226563, 0.035418113708496096, 0.034484222412109376, 0.03407974243164062, 0.0339865608215332, 0.03365785598754883, 0.03375001525878906, 0.033721343994140625, 
0.03386777496337891, 0.03379097747802735, 0.03355136108398438, 0.033775615692138675, 0.03369881439208984, 0.034285568237304685, 0.03369779205322266, 0.033642494201660156, 0.033702911376953124, 0.034108417510986325, 0.03362201690673828, 0.033734657287597655, 0.03607961654663086, 0.034492416381835936, 0.03365478515625, 0.03476582336425781, 0.03378585433959961, 0.033544193267822264, 0.033576961517333984, 0.03366400146484375, 0.033775615692138675, 0.03380121612548828, 0.033637374877929685, 0.034050048828125, 0.033949695587158206, 0.03369676971435547, 0.03366809463500976, 0.03392102432250976, 0.03361177444458008, 0.033587200164794925, 0.03433267211914062, 0.03351244735717773, 0.03352371215820313, 0.034462718963623046, 0.03402239990234375, 0.033688575744628906, 0.03530035018920898, 0.034988033294677735, 0.03365683364868164, 0.03453235244750977, 0.03363430404663086, 0.033481727600097655, 0.0336629753112793, 0.033309696197509765, 0.0336814079284668, 0.03433062362670898, 0.03449856185913086, 0.03420876693725586, 0.03349094390869141, 0.03377766418457031, 0.03383295822143555, 0.033739776611328126, 0.033716224670410154, 0.03376844787597656, 0.034988033294677735, 0.034740222930908206, 0.03558911895751953, 0.034141185760498044, 0.03425177764892578, 0.03412070465087891, 0.03475558471679688, 0.03368960189819336, 0.03423231887817383, 0.03369779205322266, 0.03378278350830078, 0.033974273681640625, 0.0342999038696289, 0.03386265563964844, 0.03457843017578125, 0.03377664184570312, 0.033691646575927735, 0.03495935821533203, 0.03376844787597656, 0.03383500671386719, 0.034277374267578126, 0.034351104736328124, 0.03412684631347656, 0.03379097747802735, 0.03367116928100586, 0.03406643295288086, 0.03371417617797851, 0.03379507064819336, 0.03366604614257813, 0.03383091354370117, 0.033723392486572266, 0.033598464965820314, 0.033775615692138675, 0.03371110534667969, 0.033942527770996093, 0.033732608795166014, 0.033649665832519535, 0.03376025772094727, 0.03364352035522461, 0.033718273162841796, 0.033797119140625, 0.033740798950195314, 0.03370598220825195, 0.03464089584350586, 0.035519489288330076, 0.034495487213134765, 0.03366912078857422, 0.03460812759399414, 0.03452928161621094, 0.03380940628051758, 0.03377151870727539, 0.03365683364868164, 0.03332198333740234, 0.03314585494995117, 0.033189888000488284, 0.03362303924560547, 0.03371417617797851, 0.03363225555419922, 0.03366502380371094, 0.03357798385620117, 0.03327283096313476, 0.03317862319946289, 0.03317452621459961, 0.03371929550170898, 0.03407360076904297, 0.034920448303222655, 0.03358310317993164, 0.034097152709960936, 0.033716224670410154, 0.03328409576416016, 0.03393740844726562, 0.033726463317871096, 0.033710079193115236, 0.033732608795166014, 0.03444326400756836, 0.03557273483276367, 0.03400089645385742, 0.03355852890014648, 0.03384832000732422, 0.033805313110351565, 0.03375820922851563, 0.03364147186279297, 0.03337011337280273, 0.034108417510986325, 0.03538022232055664, 0.034945022583007815, 0.03375718307495117, 0.03365478515625, 0.03344486236572266, 0.03366604614257813, 0.03361587142944336, 0.03327897644042969, 0.03348275375366211, 0.03427942276000977, 0.033873920440673826, 0.033791999816894534, 0.03430710220336914, 0.03451798248291016, 0.03356159973144531, 0.033560577392578124, 0.03358310317993164, 0.03383705520629883, 0.03366502380371094, 0.03392102432250976, 0.0337520637512207, 0.033797119140625, 0.03376639938354492, 0.03378073501586914, 0.034148353576660156, 0.03409612655639648, 0.033742847442626955, 0.033699840545654294, 0.03381964874267578, 
0.03326464080810547, 0.033189888000488284, 0.03313356781005859, 0.033205249786376956, 0.03376537704467773, 0.033770496368408204, 0.03393228912353516, 0.03439411163330078, 0.033259521484375, 0.03373875045776367, 0.03338444900512695, 0.033154048919677735, 0.033642494201660156, 0.03335273742675781, 0.03375817489624024, 0.03364147186279297, 0.03381248092651367, 0.033391616821289063, 0.033742847442626955, 0.033979393005371096, 0.033745918273925785, 0.03365990447998047, 0.0335810546875, 0.03370905685424805, 0.03347251129150391, 0.03504844665527344, 0.03500032043457031, 0.033974273681640625, 0.03354521560668945, 0.0343900146484375, 0.03376639938354492, 0.03344998550415039, 0.0332410888671875, 0.03327078247070313, 0.033739776611328126, 0.03544268798828125, 0.03528908920288086, 0.03529523086547852, 0.034367488861083983, 0.03439206314086914, 0.03405926513671875, 0.033430526733398434, 0.033587200164794925, 0.033649665832519535, 0.03381350326538086, 0.03360870361328125, 0.0339046401977539, 0.0340049934387207, 0.03389440155029297, 0.033582080841064454, 0.03342540740966797, 0.033547264099121094, 0.03332403182983398, 0.033685504913330076, 0.03362815856933594, 0.03364556884765625, 0.03379404830932617, 0.03391692733764649, 0.0335912971496582, 0.033306625366210936, 0.03359436798095703, 0.03439513778686523, 0.0336363525390625, 0.033503231048583985, 0.03343769454956055, 0.03364044952392578, 0.0337336311340332, 0.034044929504394535, 0.033827838897705076, 0.03357388687133789, 0.03386777496337891, 0.03366604614257813, 0.034460670471191404, 0.033710079193115236, 0.033734657287597655, 0.033740798950195314, 0.03369062423706055, 0.03382681655883789, 0.033767425537109375, 0.03369779205322266, 0.033653759002685545, 0.033329151153564454, 0.03390771102905273, 0.033667072296142575, 0.03364863967895508, 0.03362713623046875, 0.03368755340576172, 0.03369267272949219, 0.033759231567382815, 0.033854465484619144, 0.034427902221679685, 0.0337367057800293, 0.03378995132446289, 0.03361996841430664, 0.03381452941894531, 0.033702911376953124, 0.033675262451171875, 0.03413199996948242, 0.033982433319091794, 0.03404288101196289, 0.03375001525878906, 0.03390156936645508, 0.03401728057861328, 0.03379302215576172, 0.033655807495117186, 0.03360358428955078, 0.033633281707763675, 0.03415244674682617, 0.03374694442749023, 0.03379302215576172, 0.03382886505126953, 0.03399168014526367, 0.033911808013916016, 0.03381862258911133, 0.03383091354370117, 0.033620990753173825]",tokens/s,29.533743996366596,,,2,64,1,,, -4bit-awq-exllama-v2-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,mistralai/Mistral-7B-v0.1,mistralai/Mistral-7B-v0.1,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File 
""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - self.load_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3907, in from_pretrained - hf_quantizer.postprocess_model(model) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/base.py"", line 195, in postprocess_model - return self._process_model_after_weight_loading(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/quantizer_awq.py"", line 107, in _process_model_after_weight_loading - model = post_init_awq_exllama_modules(model, self.quantization_config.exllama_config) - File ""/usr/local/lib/python3.10/dist-packages/transformers/integrations/awq.py"", line 466, in post_init_awq_exllama_modules - model = exllamav2_post_init( - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllamav2.py"", line 198, in exllamav2_post_init - submodule.post_init(scratch_space=model.scratch_spaces[device]) - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllamav2.py"", line 81, in post_init - self.q_handle = exlv2_ext.make_q_matrix( -NameError: name 'exlv2_ext' is not defined - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,2,64,1,,, -4bit-awq-exllama-v2-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,google/gemma-7b,google/gemma-7b,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 304, in 
hf_raise_for_status - response.raise_for_status() - File ""/usr/local/lib/python3.10/dist-packages/requests/models.py"", line 1024, in raise_for_status - raise HTTPError(http_error_msg, response=self) -requests.exceptions.HTTPError: 403 Client Error: Forbidden for url: https://huggingface.co/google/gemma-7b/resolve/main/config.json - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1722, in _get_metadata_or_catch_error - metadata = get_hf_file_metadata(url=url, proxies=proxies, timeout=etag_timeout, headers=headers) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1645, in get_hf_file_metadata - r = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 372, in _request_wrapper - response = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 396, in _request_wrapper - hf_raise_for_status(response) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 367, in hf_raise_for_status - raise HfHubHTTPError(message, response=response) from e -huggingface_hub.utils._errors.HfHubHTTPError: (Request ID: Root=1-66948181-6f9c36c47c1df2f34dc9a2d1;3d7672f1-94f8-4971-9c08-c3dc2a9e9ce6) - -403 Forbidden: Please enable access to public gated repositories in your fine-grained token settings to view this repository.. -Cannot access content at: https://huggingface.co/google/gemma-7b/resolve/main/config.json. -If you are trying to create or update content,make sure you have a token with the `write` role. - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1221, in hf_hub_download - return _hf_hub_download_to_cache_dir( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1325, in _hf_hub_download_to_cache_dir - _raise_on_head_call_error(head_call_error, force_download, local_files_only) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1826, in _raise_on_head_call_error - raise LocalEntryNotFoundError( -huggingface_hub.utils._errors.LocalEntryNotFoundError: An error happened while trying to locate the file on the Hub and we cannot find the requested files in the local cache. Please check your connection and try again or make sure your Internet connection is on. 
- -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 445, in cached_file - raise EnvironmentError( -OSError: We couldn't connect to 'https://huggingface.co' to load this file, couldn't find it in the cached files and it looks like google/gemma-7b is not the path to a directory containing a file named config.json. -Checkout your internet connection or see how to run the library in offline mode at 'https://huggingface.co/docs/transformers/installation#offline-mode'. 
- -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,2,64,1,,, -4bit-awq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,Qwen/Qwen1.5-1.8B,Qwen/Qwen1.5-1.8B,cuda,0,42,,,True,,,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,qwen2,MB,1999.470592,3121.086464,0.0,2491.416576,2425.650176,s,1,9.3760556640625,9.3760556640625,0.0,9.3760556640625,9.3760556640625,9.3760556640625,9.3760556640625,[9.3760556640625],,kWh,2.963436740489619e-05,1.6225514028797122e-05,4.406975747806019e-05,8.99296389117535e-05,,MB,1809.485824,3353.870336,0.0,2705.32608,2606.129664,s,10,0.2949645137786865,0.02949645137786865,0.00025065324781213327,0.02942078399658203,0.029548410415649415,0.029894556999206543,0.030171474266052246,"[0.029334016799926758, 0.029384031295776367, 0.02947148895263672, 0.029407615661621093, 0.029398080825805663, 0.02943721580505371, 0.029449792861938478, 0.029418880462646485, 0.029422687530517577, 0.03024070358276367]",tokens/s,8679.010119572493,kWh,3.4764426631934643e-07,1.9049175751520864e-07,1.359684094282652e-06,1.8978201181172071e-06,tokens/kWh,134891604.0862571,MB,1815.003136,3353.870336,0.0,2705.32608,2606.132224,s,10,12.508057739257811,1.2508057739257814,0.008068238044635407,1.2530220336914062,1.2586537841796874,1.2613936645507813,1.2635855688476563,"[1.250889404296875, 1.2552403564453125, 1.240000244140625, 1.2374183349609376, 1.25621484375, 1.2459228515625, 1.2551546630859376, 1.258044921875, 1.24503857421875, 1.264133544921875]",tokens/s,50.36753212472635,kWh,1.4906383765832693e-05,8.168401253870574e-06,2.894747904030816e-05,5.202226406001142e-05,tokens/kWh,1211019.9572883828,,s,630,12.506252311706552,0.01985119414556594,0.0003997657155284799,0.019697664260864257,0.020418560028076172,0.020526796627044677,0.021185167064666752,"[0.019680255889892577, 0.019679231643676756, 0.019679231643676756, 0.019780607223510743, 0.020372480392456056, 0.020444160461425782, 0.020412416458129884, 0.02039193534851074, 0.02042265510559082, 0.02027212715148926, 0.019787776947021486, 0.019714048385620117, 0.019793920516967774, 0.019887104034423828, 0.01988096046447754, 0.01969152069091797, 0.01970278358459473, 0.0196628475189209, 0.019685375213623048, 0.019703807830810546, 0.019717119216918946, 0.019795967102050782, 0.019401727676391603, 0.020319232940673827, 0.01998028755187988, 0.019681280136108398, 0.019742719650268553, 0.019896320343017578, 0.019771392822265626, 0.019737600326538086, 0.019708927154541016, 0.02023526382446289, 0.02047590446472168, 0.019811328887939454, 0.020357120513916017, 0.02046976089477539, 0.02043289566040039, 0.019742719650268553, 0.019752960205078125, 0.019762176513671875, 0.01969049644470215, 0.01968435287475586, 0.019767295837402343, 0.01970278358459473, 0.019736576080322265, 0.01970790481567383, 0.019681280136108398, 0.019577856063842772, 0.01983897590637207, 0.019697664260864257, 0.019795967102050782, 0.019725311279296876, 0.019767295837402343, 0.019752960205078125, 0.019808256149291992, 0.019748863220214845, 0.019778560638427735, 
0.019715072631835938, 0.019697664260864257, 0.0196495361328125, 0.019501056671142578, 0.01963212776184082, 0.01962393569946289, 0.019765247344970704, 0.019708927154541016, 0.019743743896484374, 0.0196945915222168, 0.020205568313598633, 0.019506175994873046, 0.019326976776123047, 0.020777984619140624, 0.02050764846801758, 0.020555776596069338, 0.020595712661743162, 0.02085068893432617, 0.020487167358398437, 0.020380672454833985, 0.020265983581542968, 0.0202926082611084, 0.020264959335327147, 0.02025062370300293, 0.020365312576293947, 0.02029465675354004, 0.020060159683227538, 0.019715072631835938, 0.01971609687805176, 0.019655679702758787, 0.01984409523010254, 0.019697664260864257, 0.019677183151245118, 0.01967616081237793, 0.019688447952270507, 0.019719167709350584, 0.019664896011352538, 0.019589120864868165, 0.019695615768432616, 0.01957683181762695, 0.019817472457885742, 0.019739648818969727, 0.019725311279296876, 0.01967001533508301, 0.01964851188659668, 0.019681280136108398, 0.01966592025756836, 0.01966592025756836, 0.019711999893188475, 0.01968230438232422, 0.01968435287475586, 0.019688447952270507, 0.01965772819519043, 0.019679231643676756, 0.01969152069091797, 0.01968639945983887, 0.0196945915222168, 0.019732479095458985, 0.019752960205078125, 0.02017791938781738, 0.019775487899780272, 0.01959219169616699, 0.019523584365844726, 0.01965260887145996, 0.01963929557800293, 0.01963929557800293, 0.020931583404541015, 0.021395456314086913, 0.02062745666503906, 0.019743743896484374, 0.01970790481567383, 0.019628032684326172, 0.01967820739746094, 0.019636224746704102, 0.019611648559570313, 0.019567615509033204, 0.01959219169616699, 0.019673088073730468, 0.01965363121032715, 0.01942630386352539, 0.019372032165527343, 0.019499008178710937, 0.019619840621948242, 0.01962188720703125, 0.01964646339416504, 0.019599359512329103, 0.01963007926940918, 0.01964236831665039, 0.019501056671142578, 0.01964646339416504, 0.01964646339416504, 0.01965977668762207, 0.019597312927246095, 0.01960960006713867, 0.019594240188598632, 0.01967411231994629, 0.019582975387573243, 0.019628032684326172, 0.01965772819519043, 0.01963929557800293, 0.019620864868164063, 0.01947750473022461, 0.019677183151245118, 0.01960960006713867, 0.021004287719726563, 0.021840896606445313, 0.020487167358398437, 0.019738624572753907, 0.019681280136108398, 0.01962700843811035, 0.019767295837402343, 0.01940275192260742, 0.019326976776123047, 0.019341312408447265, 0.019224576950073242, 0.019313663482666017, 0.019350528717041016, 0.01963007926940918, 0.01965260887145996, 0.019983360290527344, 0.019936256408691407, 0.01963724708557129, 0.019679231643676756, 0.019619840621948242, 0.01965875244140625, 0.019586048126220702, 0.019535871505737306, 0.01963315200805664, 0.01965977668762207, 0.019717119216918946, 0.019734527587890623, 0.01968435287475586, 0.01979084777832031, 0.019695615768432616, 0.019697664260864257, 0.01970790481567383, 0.019745792388916016, 0.019687423706054686, 0.01964339256286621, 0.019731456756591798, 0.019768320083618163, 0.01967616081237793, 0.019628032684326172, 0.0196945915222168, 0.01961881637573242, 0.01964851188659668, 0.01962495994567871, 0.019500032424926757, 0.01945702362060547, 0.019590143203735352, 0.019330047607421876, 0.019340288162231444, 0.01961676788330078, 0.01962598419189453, 0.019941375732421874, 0.01966694450378418, 0.01963417625427246, 0.01965977668762207, 0.019663871765136717, 0.020007936477661133, 0.01972428894042969, 0.019663871765136717, 0.01965875244140625, 0.01924095916748047, 0.019309568405151366, 
0.019302400588989257, 0.0194334716796875, 0.01963724708557129, 0.01964134407043457, 0.019725311279296876, 0.01984614372253418, 0.02064896011352539, 0.020328447341918944, 0.019668991088867188, 0.01946009635925293, 0.01941094398498535, 0.01919385528564453, 0.019268608093261717, 0.019325952529907226, 0.01943961524963379, 0.01930342483520508, 0.01963007926940918, 0.01962495994567871, 0.019655679702758787, 0.0196177921295166, 0.019602432250976562, 0.020271104812622072, 0.01985228729248047, 0.01964236831665039, 0.019569664001464843, 0.01964851188659668, 0.019578880310058593, 0.01963724708557129, 0.01964543914794922, 0.01963520050048828, 0.019778560638427735, 0.019685375213623048, 0.01971609687805176, 0.01969254493713379, 0.019705856323242187, 0.019509248733520508, 0.01965260887145996, 0.019568639755249022, 0.02080460739135742, 0.020478975296020507, 0.020281343460083007, 0.02028339195251465, 0.020291584014892578, 0.02046156883239746, 0.02040729522705078, 0.019418111801147463, 0.019501056671142578, 0.019636224746704102, 0.019619840621948242, 0.01961676788330078, 0.01960960006713867, 0.019290111541748048, 0.019570688247680663, 0.019771392822265626, 0.02085171127319336, 0.02128179168701172, 0.020884479522705078, 0.02005504035949707, 0.019780607223510743, 0.01979903984069824, 0.019703807830810546, 0.01988198471069336, 0.020333568572998048, 0.01983180809020996, 0.019778560638427735, 0.019711999893188475, 0.019776512145996093, 0.01967513656616211, 0.01964543914794922, 0.019950592041015625, 0.019687423706054686, 0.019687423706054686, 0.019770368576049805, 0.019527679443359376, 0.0198922233581543, 0.02025267219543457, 0.020386816024780274, 0.020884479522705078, 0.020338687896728515, 0.019555328369140625, 0.01941196823120117, 0.019602432250976562, 0.01964031982421875, 0.019704832077026366, 0.019726335525512697, 0.019940351486206053, 0.020238336563110353, 0.020542463302612304, 0.020815872192382814, 0.02025369644165039, 0.01965772819519043, 0.019615743637084963, 0.01960857582092285, 0.019796991348266603, 0.01969254493713379, 0.019560447692871095, 0.019558399200439454, 0.01958502388000488, 0.019606527328491212, 0.019344383239746094, 0.01928294372558594, 0.01964543914794922, 0.019583999633789064, 0.019340288162231444, 0.01963212776184082, 0.01964236831665039, 0.019564544677734375, 0.0200898551940918, 0.019750911712646483, 0.019590143203735352, 0.01962700843811035, 0.019559423446655275, 0.01967513656616211, 0.01957683181762695, 0.020338687896728515, 0.02025984001159668, 0.01982156753540039, 0.019531776428222656, 0.019620864868164063, 0.020313087463378905, 0.020246528625488282, 0.019934207916259765, 0.019555328369140625, 0.019677183151245118, 0.020361215591430663, 0.019376127243041993, 0.019561471939086913, 0.020418560028076172, 0.020299776077270508, 0.02045030403137207, 0.01966080093383789, 0.01964134407043457, 0.01968332862854004, 0.019670047760009766, 0.019677152633666994, 0.020090879440307616, 0.021740543365478517, 0.020059135437011717, 0.019767295837402343, 0.01966592025756836, 0.019571712493896484, 0.019398656845092774, 0.019582975387573243, 0.0198287353515625, 0.019380224227905272, 0.019438592910766602, 0.019549184799194336, 0.020611072540283205, 0.020370431900024414, 0.02023628807067871, 0.019409919738769533, 0.019321855545043946, 0.019551231384277345, 0.019508224487304687, 0.019357696533203125, 0.019504127502441407, 0.020199424743652345, 0.02003046417236328, 0.01965670394897461, 0.0196997127532959, 0.019719167709350584, 0.019691551208496094, 0.01968227195739746, 0.01965260887145996, 
0.019739648818969727, 0.020001792907714845, 0.019743743896484374, 0.01971609687805176, 0.01963929557800293, 0.01963724708557129, 0.019679231643676756, 0.019524608612060547, 0.019708927154541016, 0.019705856323242187, 0.019655679702758787, 0.019708927154541016, 0.019704832077026366, 0.01967820739746094, 0.019704832077026366, 0.019668991088867188, 0.01965977668762207, 0.01964031982421875, 0.019647487640380858, 0.019533824920654298, 0.019544063568115236, 0.019664896011352538, 0.02043391990661621, 0.020724735260009765, 0.020722688674926756, 0.020371456146240235, 0.0200263671875, 0.019518463134765626, 0.019793920516967774, 0.019704832077026366, 0.01964134407043457, 0.01966182327270508, 0.020486143112182616, 0.020358144760131838, 0.02025369644165039, 0.02030899238586426, 0.020372480392456056, 0.020330495834350586, 0.020264959335327147, 0.020711423873901368, 0.020351999282836913, 0.020342784881591795, 0.02026188850402832, 0.020348928451538087, 0.020570112228393556, 0.02050048065185547, 0.020501504898071288, 0.019802112579345704, 0.01967513656616211, 0.01965056037902832, 0.019619840621948242, 0.01983078384399414, 0.019537919998168944, 0.019396608352661132, 0.019668991088867188, 0.020117504119873047, 0.01964236831665039, 0.019696640014648437, 0.01970790481567383, 0.019612672805786133, 0.01967103958129883, 0.019689472198486328, 0.019791872024536132, 0.019700735092163087, 0.0196628475189209, 0.019696640014648437, 0.019610624313354492, 0.019562496185302734, 0.01927475166320801, 0.019343360900878907, 0.019512319564819337, 0.01982054328918457, 0.01968639945983887, 0.021275648117065428, 0.01963827133178711, 0.019932159423828123, 0.020065280914306642, 0.020594688415527345, 0.020396032333374024, 0.020502527236938475, 0.01985945510864258, 0.020305919647216796, 0.02025984001159668, 0.02042982482910156, 0.020428800582885744, 0.020420608520507814, 0.020460544586181642, 0.020420608520507814, 0.020358144760131838, 0.020418560028076172, 0.020395008087158203, 0.020366336822509764, 0.020340736389160157, 0.02045952033996582, 0.02082099151611328, 0.020892671585083008, 0.020486143112182616, 0.02040115165710449, 0.020343807220458983, 0.01972428894042969, 0.019563520431518554, 0.019768320083618163, 0.01978265571594238, 0.019742719650268553, 0.019574783325195313, 0.019503103256225587, 0.0198154239654541, 0.0197673282623291, 0.019778528213500977, 0.019843072891235353, 0.01977446365356445, 0.01971609687805176, 0.019722240447998047, 0.019687423706054686, 0.019737600326538086, 0.019513343811035155, 0.01945088005065918, 0.019337215423583985, 0.019811328887939454, 0.020122623443603514, 0.02046156883239746, 0.020347904205322266, 0.020386816024780274, 0.020336639404296874, 0.01965670394897461, 0.019655679702758787, 0.019786752700805665, 0.019720191955566405, 0.019167232513427734, 0.01965158462524414, 0.019730432510375977, 0.019719167709350584, 0.019727359771728514, 0.019760128021240234, 0.019719167709350584, 0.019517440795898438, 0.01940275192260742, 0.019620864868164063, 0.0214783992767334, 0.020460544586181642, 0.01969152069091797, 0.019281919479370118, 0.01947340774536133, 0.01942118453979492, 0.019681280136108398, 0.02025369644165039, 0.019779584884643556, 0.01965465545654297, 0.019685375213623048, 0.019575807571411134, 0.019523584365844726, 0.019711999893188475, 0.01961881637573242, 0.019511295318603517, 0.019532800674438477, 0.019747840881347657, 0.01969152069091797, 0.019718143463134767, 0.019755008697509766, 0.019720191955566405, 0.01968332862854004, 0.019704832077026366, 0.019499008178710937, 0.01964031982421875, 
0.019739648818969727, 0.01945599937438965, 0.019606527328491212, 0.019375104904174805, 0.0194703369140625, 0.01968230438232422, 0.01965260887145996, 0.019792896270751953, 0.019747840881347657, 0.01964851188659668, 0.019729408264160156, 0.0196997127532959, 0.01967411231994629, 0.019615743637084963, 0.01949286460876465, 0.019507200241088866, 0.021124095916748048, 0.021210111618041993, 0.02036735916137695, 0.02032537651062012, 0.020323328018188477, 0.020384767532348632, 0.02043084716796875, 0.020674560546875, 0.020364288330078126, 0.020389888763427736, 0.019941375732421874, 0.020415519714355467, 0.02036425590515137, 0.020333568572998048, 0.019725311279296876, 0.019970048904418947, 0.020192256927490236, 0.020353023529052734, 0.02047283172607422, 0.020342784881591795, 0.02028441619873047, 0.020179967880249023, 0.019686431884765626, 0.020026336669921874, 0.02032537651062012, 0.02024345588684082, 0.020370431900024414, 0.02038374328613281, 0.02045644760131836, 0.019773439407348634, 0.019553279876708983, 0.019793920516967774, 0.020434944152832032, 0.02039910316467285, 0.020349952697753908, 0.020321279525756835, 0.020280319213867186, 0.01942835235595703, 0.02106777572631836, 0.02042982482910156, 0.020353023529052734, 0.020273151397705077, 0.02008576011657715, 0.02020147132873535, 0.020247552871704103, 0.019714048385620117, 0.019679231643676756, 0.0196495361328125, 0.0196628475189209, 0.01965977668762207, 0.019587072372436523, 0.01964236831665039, 0.019599359512329103, 0.01961881637573242, 0.019544063568115236, 0.019717119216918946, 0.019696640014648437, 0.019679231643676756, 0.01964031982421875, 0.019687423706054686, 0.019400703430175782, 0.019344383239746094, 0.019340288162231444, 0.019565568923950196]",tokens/s,50.374803282217904,,,2,64,1,,, -4bit-awq-exllama-v2-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,meta-llama/Meta-Llama-3-8B,meta-llama/Meta-Llama-3-8B,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - self.load_model_from_pretrained() - File 
""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3907, in from_pretrained - hf_quantizer.postprocess_model(model) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/base.py"", line 195, in postprocess_model - return self._process_model_after_weight_loading(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/quantizer_awq.py"", line 107, in _process_model_after_weight_loading - model = post_init_awq_exllama_modules(model, self.quantization_config.exllama_config) - File ""/usr/local/lib/python3.10/dist-packages/transformers/integrations/awq.py"", line 466, in post_init_awq_exllama_modules - model = exllamav2_post_init( - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllamav2.py"", line 198, in exllamav2_post_init - submodule.post_init(scratch_space=model.scratch_spaces[device]) - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllamav2.py"", line 81, in post_init - self.q_handle = exlv2_ext.make_q_matrix( -NameError: name 'exlv2_ext' is not defined - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,2,64,1,,, -4bit-awq-exllama-v2-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,microsoft/phi-1_5,microsoft/phi-1_5,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - self.load_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return 
model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3907, in from_pretrained - hf_quantizer.postprocess_model(model) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/base.py"", line 195, in postprocess_model - return self._process_model_after_weight_loading(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/quantizer_awq.py"", line 107, in _process_model_after_weight_loading - model = post_init_awq_exllama_modules(model, self.quantization_config.exllama_config) - File ""/usr/local/lib/python3.10/dist-packages/transformers/integrations/awq.py"", line 466, in post_init_awq_exllama_modules - model = exllamav2_post_init( - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllamav2.py"", line 198, in exllamav2_post_init - submodule.post_init(scratch_space=model.scratch_spaces[device]) - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllamav2.py"", line 81, in post_init - self.q_handle = exlv2_ext.make_q_matrix( -NameError: name 'exlv2_ext' is not defined - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,2,64,1,,, -4bit-awq-exllama-v2-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,togethercomputer/RedPajama-INCITE-Base-3B-v1,togethercomputer/RedPajama-INCITE-Base-3B-v1,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - self.load_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3907, in from_pretrained - hf_quantizer.postprocess_model(model) - File 
""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/base.py"", line 195, in postprocess_model - return self._process_model_after_weight_loading(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/quantizer_awq.py"", line 107, in _process_model_after_weight_loading - model = post_init_awq_exllama_modules(model, self.quantization_config.exllama_config) - File ""/usr/local/lib/python3.10/dist-packages/transformers/integrations/awq.py"", line 466, in post_init_awq_exllama_modules - model = exllamav2_post_init( - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllamav2.py"", line 198, in exllamav2_post_init - submodule.post_init(scratch_space=model.scratch_spaces[device]) - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllamav2.py"", line 81, in post_init - self.q_handle = exlv2_ext.make_q_matrix( -NameError: name 'exlv2_ext' is not defined - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,2,64,1,,, -4bit-awq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,Qwen/Qwen2-beta-72B,Qwen/Qwen2-beta-72B,cuda,0,42,,,True,,,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run - report = scenario.run(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run - self.run_model_loading_tracking(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking - backend.load() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load - self.load_transformers_model() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model - self.load_transformers_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights - self.load_transformers_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained - self.pretrained_model = self.automodel_loader.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File 
""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4034, in from_pretrained - dispatch_model(model, **device_map_kwargs) - File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 494, in dispatch_model - model.to(device) - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 2905, in to - return super().to(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1174, in to - return self._apply(convert) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply - module._apply(fn) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply - module._apply(fn) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply - module._apply(fn) - [Previous line repeated 2 more times] - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 854, in _apply - self._buffers[key] = fn(buf) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1160, in convert - return t.to( -torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 96.00 MiB. GPU 0 has a total capacity of 22.18 GiB of which 68.50 MiB is free. Process 126808 has 22.11 GiB memory in use. Of the allocated memory 21.86 GiB is allocated by PyTorch, and 10.74 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) - -",qwen2,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,2,64,1,,, -4bit-awq-exllama-v2-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,meta-llama/Llama-2-70b-hf,meta-llama/Llama-2-70b-hf,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - 
self.load_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3904, in from_pretrained - dispatch_model(model, **device_map_kwargs) - File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 489, in dispatch_model - model.to(device) - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 2796, in to - return super().to(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1173, in to - return self._apply(convert) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 779, in _apply - module._apply(fn) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 779, in _apply - module._apply(fn) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 779, in _apply - module._apply(fn) - [Previous line repeated 2 more times] - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 853, in _apply - self._buffers[key] = fn(buf) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1159, in convert - return t.to( -torch.cuda.OutOfMemoryError: CUDA out of memory. Tried to allocate 112.00 MiB. GPU - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,2,64,1,,, -4bit-awq-exllama-v2-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,meta-llama/Llama-2-7b-hf,meta-llama/Llama-2-7b-hf,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - self.load_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 
172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3907, in from_pretrained - hf_quantizer.postprocess_model(model) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/base.py"", line 195, in postprocess_model - return self._process_model_after_weight_loading(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/quantizer_awq.py"", line 107, in _process_model_after_weight_loading - model = post_init_awq_exllama_modules(model, self.quantization_config.exllama_config) - File ""/usr/local/lib/python3.10/dist-packages/transformers/integrations/awq.py"", line 466, in post_init_awq_exllama_modules - model = exllamav2_post_init( - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllamav2.py"", line 198, in exllamav2_post_init - submodule.post_init(scratch_space=model.scratch_spaces[device]) - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllamav2.py"", line 81, in post_init - self.q_handle = exlv2_ext.make_q_matrix( -NameError: name 'exlv2_ext' is not defined - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,2,64,1,,, -4bit-awq-exllama-v2-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,r,r,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 304, in hf_raise_for_status - response.raise_for_status() - File ""/usr/local/lib/python3.10/dist-packages/requests/models.py"", line 1024, in raise_for_status - raise HTTPError(http_error_msg, response=self) -requests.exceptions.HTTPError: 404 Client Error: Not Found for url: https://huggingface.co/r/resolve/main/config.json - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1221, in hf_hub_download - 
return _hf_hub_download_to_cache_dir( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1325, in _hf_hub_download_to_cache_dir - _raise_on_head_call_error(head_call_error, force_download, local_files_only) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1823, in _raise_on_head_call_error - raise head_call_error - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1722, in _get_metadata_or_catch_error - metadata = get_hf_file_metadata(url=url, proxies=proxies, timeout=etag_timeout, headers=headers) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1645, in get_hf_file_metadata - r = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 372, in _request_wrapper - response = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 396, in _request_wrapper - hf_raise_for_status(response) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status - raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-66949125-4b50de4f413955673a827664;93fa1441-9ae0-4a2c-8b5c-8aec9da2e84b) - -Repository Not Found for url: https://huggingface.co/r/resolve/main/config.json. -Please make sure you specified the correct `repo_id` and `repo_type`. -If you are trying to access a private or gated repo, make sure you are authenticated. 
- -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 425, in cached_file - raise EnvironmentError( -OSError: r is not a local folder and is not a valid model identifier listed on 'https://huggingface.co/models' -If this is a private repository, make sure to pass a token having permission to this repo either by logging in with `huggingface-cli login` or by passing `token=` - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,2,64,1,,, -4bit-awq-exllama-v2-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,google/recurrentgemma-7b,google/recurrentgemma-7b,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 304, in hf_raise_for_status - response.raise_for_status() - File ""/usr/local/lib/python3.10/dist-packages/requests/models.py"", line 1024, in raise_for_status - raise HTTPError(http_error_msg, response=self) -requests.exceptions.HTTPError: 404 Client Error: Not Found for url: 
https://huggingface.co/google/recurrentgemma-7b/resolve/main/config.json - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1221, in hf_hub_download - return _hf_hub_download_to_cache_dir( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1325, in _hf_hub_download_to_cache_dir - _raise_on_head_call_error(head_call_error, force_download, local_files_only) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1823, in _raise_on_head_call_error - raise head_call_error - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1722, in _get_metadata_or_catch_error - metadata = get_hf_file_metadata(url=url, proxies=proxies, timeout=etag_timeout, headers=headers) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1645, in get_hf_file_metadata - r = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 372, in _request_wrapper - response = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 396, in _request_wrapper - hf_raise_for_status(response) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status - raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-6694825f-0c6c6b4663ef0bde734dbdaf;189cb5f0-98fe-4484-abb0-d243990bbd10) - -Repository Not Found for url: https://huggingface.co/google/recurrentgemma-7b/resolve/main/config.json. -Please make sure you specified the correct `repo_id` and `repo_type`. -If you are trying to access a private or gated repo, make sure you are authenticated. 
- -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 425, in cached_file - raise EnvironmentError( -OSError: google/recurrentgemma-7b is not a local folder and is not a valid model identifier listed on 'https://huggingface.co/models' -If this is a private repository, make sure to pass a token having permission to this repo either by logging in with `huggingface-cli login` or by passing `token=` - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,2,64,1,,, -4bit-awq-exllama-v2-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,huggyllama/llama-13b,huggyllama/llama-13b,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File 
""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - self.load_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3907, in from_pretrained - hf_quantizer.postprocess_model(model) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/base.py"", line 195, in postprocess_model - return self._process_model_after_weight_loading(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/quantizer_awq.py"", line 107, in _process_model_after_weight_loading - model = post_init_awq_exllama_modules(model, self.quantization_config.exllama_config) - File ""/usr/local/lib/python3.10/dist-packages/transformers/integrations/awq.py"", line 466, in post_init_awq_exllama_modules - model = exllamav2_post_init( - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllamav2.py"", line 198, in exllamav2_post_init - submodule.post_init(scratch_space=model.scratch_spaces[device]) - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllamav2.py"", line 81, in post_init - self.q_handle = exlv2_ext.make_q_matrix( -NameError: name 'exlv2_ext' is not defined - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,2,64,1,,, -4bit-awq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,Deci/DeciCoder-1b,Deci/DeciCoder-1b,cuda,0,42,,,True,,,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 613, in resolve_trust_remote_code - answer = input( -EOFError: EOF when reading a line - -During handling of the above exception, another exception occurred: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run - report = scenario.run(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run - self.run_model_loading_tracking(backend) - File 
""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking - backend.load() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load - self.load_transformers_model() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 149, in load_transformers_model - self.create_no_weights_model() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 269, in create_no_weights_model - meta_model = self.automodel_loader.from_config(self.pretrained_config) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 419, in from_config - trust_remote_code = resolve_trust_remote_code( - File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 626, in resolve_trust_remote_code - raise ValueError( -ValueError: The repository for Deci/DeciCoder-1b contains custom code which must be executed to correctly load the model. You can inspect the repository content at https://hf.co/Deci/DeciCoder-1b. -Please pass the argument `trust_remote_code=True` to allow custom code to be run. - -",llama,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,2,64,1,,, -4bit-awq-exllama-v2-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,google/recurrentgemma-2b,google/recurrentgemma-2b,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 304, in hf_raise_for_status - response.raise_for_status() - File ""/usr/local/lib/python3.10/dist-packages/requests/models.py"", line 1024, in raise_for_status - raise HTTPError(http_error_msg, response=self) -requests.exceptions.HTTPError: 403 Client Error: Forbidden for url: https://huggingface.co/google/recurrentgemma-2b/resolve/main/config.json - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1722, in _get_metadata_or_catch_error - metadata = get_hf_file_metadata(url=url, proxies=proxies, timeout=etag_timeout, headers=headers) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1645, in get_hf_file_metadata - r = _request_wrapper( - File 
""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 372, in _request_wrapper - response = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 396, in _request_wrapper - hf_raise_for_status(response) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 367, in hf_raise_for_status - raise HfHubHTTPError(message, response=response) from e -huggingface_hub.utils._errors.HfHubHTTPError: (Request ID: Root=1-669481e6-52fddd9b1de31b0076876073;8de1d97e-984b-47eb-9b56-8622c2efb6a7) - -403 Forbidden: Please enable access to public gated repositories in your fine-grained token settings to view this repository.. -Cannot access content at: https://huggingface.co/google/recurrentgemma-2b/resolve/main/config.json. -If you are trying to create or update content,make sure you have a token with the `write` role. - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1221, in hf_hub_download - return _hf_hub_download_to_cache_dir( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1325, in _hf_hub_download_to_cache_dir - _raise_on_head_call_error(head_call_error, force_download, local_files_only) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1826, in _raise_on_head_call_error - raise LocalEntryNotFoundError( -huggingface_hub.utils._errors.LocalEntryNotFoundError: An error happened while trying to locate the file on the Hub and we cannot find the requested files in the local cache. Please check your connection and try again or make sure your Internet connection is on. 
- -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 445, in cached_file - raise EnvironmentError( -OSError: We couldn't connect to 'https://huggingface.co' to load this file, couldn't find it in the cached files and it looks like google/recurrentgemma-2b is not the path to a directory containing a file named config.json. -Checkout your internet connection or see how to run the library in offline mode at 'https://huggingface.co/docs/transformers/installation#offline-mode'. 
- -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,2,64,1,,, -4bit-awq-exllama-v2-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,v,v,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 304, in hf_raise_for_status - response.raise_for_status() - File ""/usr/local/lib/python3.10/dist-packages/requests/models.py"", line 1024, in raise_for_status - raise HTTPError(http_error_msg, response=self) -requests.exceptions.HTTPError: 404 Client Error: Not Found for url: https://huggingface.co/v/resolve/main/config.json - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1221, in hf_hub_download - return _hf_hub_download_to_cache_dir( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1325, in _hf_hub_download_to_cache_dir - _raise_on_head_call_error(head_call_error, force_download, local_files_only) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1823, in _raise_on_head_call_error - raise head_call_error - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1722, in _get_metadata_or_catch_error - metadata = get_hf_file_metadata(url=url, proxies=proxies, timeout=etag_timeout, headers=headers) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1645, in get_hf_file_metadata - r = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 372, in _request_wrapper - response = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 396, in _request_wrapper - hf_raise_for_status(response) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status - raise RepositoryNotFoundError(message, 
response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-6694949e-4f06a932749666442417e9eb;ea2fdd02-b854-44fe-87b5-e2dce99c9577) - -Repository Not Found for url: https://huggingface.co/v/resolve/main/config.json. -Please make sure you specified the correct `repo_id` and `repo_type`. -If you are trying to access a private or gated repo, make sure you are authenticated. - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 425, in cached_file - raise EnvironmentError( -OSError: v is not a local folder and is not a valid model identifier listed on 'https://huggingface.co/models' -If this is a private repository, make sure to pass a token having permission to this repo either by logging in with `huggingface-cli login` or by passing `token=` - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,2,64,1,,, -4bit-awq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,EleutherAI/pythia-2.7b,EleutherAI/pythia-2.7b,cuda,0,42,,,True,,,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,gpt_neox,MB,2216.189952,3198.681088,0.0,2569.0112,2295.745536,s,1,8.5427314453125,8.5427314453125,0.0,8.5427314453125,8.5427314453125,8.5427314453125,8.5427314453125,[8.5427314453125],,kWh,2.012565567637871e-05,1.1014215793768773e-05,3.3685582504050515e-05,6.4825453974198e-05,,MB,2335.617024,3219.652608,0.0,2571.108352,2282.381824,s,10,0.4610866279602051,0.04610866279602051,0.00013542506938741722,0.04617478370666504,0.04620269927978516,0.046203461456298826,0.046204071197509765,"[0.04581622314453125, 
0.045873313903808596, 0.0462042236328125, 0.046171520233154295, 0.04609916687011719, 0.0461756477355957, 0.046173919677734376, 0.046202529907226564, 0.046191841125488284, 0.04617824172973633]",tokens/s,5552.102023268707,kWh,5.421001302657202e-07,2.970450797745613e-07,2.1287580740551783e-06,2.96790328409546e-06,tokens/kWh,86256180.03520024,MB,2340.335616,3221.74976,0.0,2571.108352,2391.693824,s,10,14.022559814453125,1.4022559814453124,0.016255415085442565,1.3976600952148437,1.4295463134765625,1.4317564331054686,1.4335245288085938,"[1.386033447265625, 1.4027366943359374, 1.390958984375, 1.388246337890625, 1.397672119140625, 1.3976480712890624, 1.42905517578125, 1.3868319091796875, 1.4094105224609375, 1.433966552734375]",tokens/s,44.9276029723657,kWh,1.69422410484139e-05,9.284283726129694e-06,3.419729876454295e-05,6.042382353908655e-05,tokens/kWh,1042635.1116169766,,s,630,14.019792900085454,0.022253639523945157,0.0006030189276878926,0.022000128746032716,0.02283950119018555,0.023003815841674807,0.023900364437103274,"[0.02208358383178711, 0.02208665657043457, 0.021783552169799804, 0.021971967697143553, 0.021936128616333008, 0.021962751388549806, 0.022107135772705077, 0.02206003189086914, 0.021953535079956055, 0.022017023086547852, 0.021958656311035156, 0.022012928009033202, 0.021990400314331054, 0.021922815322875978, 0.0220948486328125, 0.021992448806762696, 0.021943296432495117, 0.021961727142333985, 0.021947391510009767, 0.021997568130493163, 0.021970943450927736, 0.021974016189575195, 0.021998592376708984, 0.021964799880981444, 0.021948415756225585, 0.0218920955657959, 0.02168524742126465, 0.022006784439086914, 0.021984256744384766, 0.022023168563842774, 0.0219289608001709, 0.021967872619628907, 0.022007808685302735, 0.022199296951293947, 0.022687744140625, 0.022254592895507814, 0.02188390350341797, 0.021979135513305666, 0.021988351821899413, 0.021957632064819335, 0.021936128616333008, 0.022000640869140626, 0.021931007385253908, 0.021958656311035156, 0.021971967697143553, 0.022245376586914063, 0.02202009582519531, 0.02185215950012207, 0.0219238395690918, 0.021963775634765623, 0.021974016189575195, 0.02205388832092285, 0.02205900764465332, 0.022033407211303712, 0.021991424560546875, 0.021988351821899413, 0.021984256744384766, 0.02196790313720703, 0.0219770565032959, 0.021894144058227538, 0.021974016189575195, 0.022004735946655272, 0.021997568130493163, 0.021943296432495117, 0.02203545570373535, 0.022586368560791017, 0.023045120239257814, 0.022833152770996092, 0.022354944229125977, 0.021944320678710938, 0.022141952514648438, 0.022790143966674805, 0.022557695388793944, 0.022032384872436524, 0.021979135513305666, 0.021922815322875978, 0.021941247940063476, 0.022016000747680665, 0.02242355155944824, 0.02246348762512207, 0.022003744125366213, 0.022984672546386718, 0.02390630340576172, 0.02289664077758789, 0.02287001609802246, 0.022759424209594727, 0.021977088928222657, 0.021957632064819335, 0.021932031631469725, 0.021979135513305666, 0.02185113525390625, 0.022801408767700194, 0.022775808334350587, 0.022129663467407225, 0.022853631973266602, 0.022798336029052735, 0.02292736053466797, 0.02291916847229004, 0.02270310401916504, 0.022775808334350587, 0.021955583572387697, 0.0218787841796875, 0.021916671752929686, 0.021948415756225585, 0.021901311874389647, 0.02190438461303711, 0.02193715286254883, 0.021957632064819335, 0.021906431198120118, 0.02187980842590332, 0.022055936813354493, 0.021976064682006836, 0.02190438461303711, 0.02186342430114746, 0.021956607818603514, 0.021934080123901366, 
0.02201190376281738, 0.02192793655395508, 0.021991424560546875, 0.021934080123901366, 0.02201190376281738, 0.021772287368774415, 0.022001663208007814, 0.022009855270385743, 0.022170623779296874, 0.022755327224731444, 0.022023168563842774, 0.021976064682006836, 0.021933055877685546, 0.021960704803466798, 0.021916671752929686, 0.021950464248657226, 0.021950464248657226, 0.021968896865844727, 0.021818368911743165, 0.022006784439086914, 0.021909503936767577, 0.022039552688598633, 0.021995519638061522, 0.02207539176940918, 0.021926912307739257, 0.02193715286254883, 0.021931007385253908, 0.021953535079956055, 0.021933055877685546, 0.021833728790283204, 0.021949440002441405, 0.021987327575683592, 0.02189619255065918, 0.021938175201416017, 0.02246553611755371, 0.02427187156677246, 0.02289151954650879, 0.022536191940307617, 0.0225218563079834, 0.022039552688598633, 0.021787647247314454, 0.021984256744384766, 0.021970943450927736, 0.021992448806762696, 0.0219289608001709, 0.021964799880981444, 0.021948415756225585, 0.022022144317626953, 0.021953535079956055, 0.021939199447631837, 0.02187468719482422, 0.021988351821899413, 0.021946367263793946, 0.02245427131652832, 0.022534143447875975, 0.021997568130493163, 0.021968896865844727, 0.021998592376708984, 0.0219289608001709, 0.022146047592163084, 0.02210406494140625, 0.021935136795043945, 0.02195145606994629, 0.021824512481689453, 0.02192076873779297, 0.021933055877685546, 0.02191155242919922, 0.021991424560546875, 0.021938175201416017, 0.021931007385253908, 0.02191974449157715, 0.02266726493835449, 0.022813695907592774, 0.021936128616333008, 0.02189926338195801, 0.021974016189575195, 0.021910528182983398, 0.021921791076660157, 0.021933055877685546, 0.021909503936767577, 0.021958656311035156, 0.022115327835083007, 0.021948415756225585, 0.02191974449157715, 0.021798912048339843, 0.02200371170043945, 0.021902368545532225, 0.02271331214904785, 0.022803455352783202, 0.022821887969970703, 0.02204364776611328, 0.021943296432495117, 0.021965824127197265, 0.02194534492492676, 0.02213580894470215, 0.021938175201416017, 0.02262015914916992, 0.022984703063964843, 0.022983680725097655, 0.022354944229125977, 0.022023168563842774, 0.022002687454223634, 0.021950464248657226, 0.021965824127197265, 0.02166886329650879, 0.02193715286254883, 0.02182143974304199, 0.021959680557250977, 0.02190438461303711, 0.021932031631469725, 0.021982208251953125, 0.022001663208007814, 0.02192076873779297, 0.021942272186279296, 0.021981184005737304, 0.021976064682006836, 0.021935104370117187, 0.022012928009033202, 0.021785600662231445, 0.021986303329467775, 0.02189619255065918, 0.02188800048828125, 0.02190745544433594, 0.021956607818603514, 0.021918720245361328, 0.021953535079956055, 0.022169599533081053, 0.022013952255249023, 0.021922815322875978, 0.021753856658935547, 0.021908479690551756, 0.02192076873779297, 0.021943328857421875, 0.02193097686767578, 0.02192793655395508, 0.02191974449157715, 0.02191564750671387, 0.021965824127197265, 0.021833728790283204, 0.02187059211730957, 0.02186444854736328, 0.021998592376708984, 0.02202726364135742, 0.022743040084838868, 0.022350847244262697, 0.021884927749633788, 0.021893119812011717, 0.021865472793579102, 0.021936128616333008, 0.021908479690551756, 0.021790719985961913, 0.021986303329467775, 0.02194534492492676, 0.021955583572387697, 0.021983232498168945, 0.022013952255249023, 0.021949440002441405, 0.02204879951477051, 0.0218767032623291, 0.021914623260498048, 0.021934080123901366, 0.02225971221923828, 0.022211584091186523, 
0.021941247940063476, 0.021924863815307616, 0.021922815322875978, 0.021921791076660157, 0.021970943450927736, 0.02191564750671387, 0.02200371170043945, 0.02205081558227539, 0.022046720504760742, 0.022033407211303712, 0.021771263122558594, 0.021916671752929686, 0.022017023086547852, 0.021924863815307616, 0.021950464248657226, 0.021929983139038087, 0.021981184005737304, 0.021974016189575195, 0.021943296432495117, 0.021921791076660157, 0.022353919982910156, 0.022155263900756835, 0.023859199523925782, 0.0230328311920166, 0.02299087905883789, 0.023024608612060547, 0.02304819107055664, 0.023603200912475586, 0.023133184432983397, 0.022794240951538085, 0.0228351993560791, 0.022797311782836914, 0.022724607467651366, 0.02205183982849121, 0.022041599273681642, 0.021948415756225585, 0.021933055877685546, 0.02208768081665039, 0.02189004707336426, 0.021932031631469725, 0.02188902473449707, 0.021922815322875978, 0.02186342430114746, 0.021940223693847655, 0.021950464248657226, 0.0219238395690918, 0.02188800048828125, 0.02192076873779297, 0.02186854362487793, 0.02204979133605957, 0.021953535079956055, 0.02204364776611328, 0.021884927749633788, 0.022167552947998048, 0.021959680557250977, 0.021994495391845705, 0.022862848281860353, 0.022837247848510742, 0.022718463897705078, 0.022717439651489257, 0.022780927658081054, 0.022725664138793945, 0.02275119972229004, 0.02253209686279297, 0.022614015579223632, 0.022759424209594727, 0.022760448455810548, 0.022726655960083008, 0.022914047241210937, 0.02230067253112793, 0.022222848892211915, 0.022715391159057616, 0.022321151733398437, 0.022120447158813478, 0.022426624298095704, 0.02209791946411133, 0.021976064682006836, 0.022041599273681642, 0.021959680557250977, 0.022037504196166992, 0.02207846450805664, 0.021983232498168945, 0.021983232498168945, 0.021934080123901366, 0.021946367263793946, 0.021985279083251954, 0.02190438461303711, 0.02206105613708496, 0.02221670341491699, 0.02211942481994629, 0.021947391510009767, 0.02205388832092285, 0.021963775634765623, 0.02200371170043945, 0.022235136032104492, 0.02213580894470215, 0.022037504196166992, 0.022014976501464844, 0.02183884811401367, 0.021955583572387697, 0.023399423599243165, 0.03214950561523437, 0.023635967254638672, 0.022847488403320314, 0.022750207901000977, 0.022701055526733398, 0.022232063293457033, 0.022676479339599608, 0.02269900894165039, 0.022709247589111328, 0.022687744140625, 0.02267852783203125, 0.02201907157897949, 0.021999616622924805, 0.02220134353637695, 0.02271232032775879, 0.02222591972351074, 0.021898239135742188, 0.021931007385253908, 0.02188390350341797, 0.021967872619628907, 0.02188287925720215, 0.021933055877685546, 0.0218787841796875, 0.021990400314331054, 0.022647808074951172, 0.022740991592407226, 0.02270412826538086, 0.022757375717163086, 0.022583295822143554, 0.02269388771057129, 0.022687744140625, 0.022675455093383787, 0.022754304885864256, 0.022421503067016603, 0.021908479690551756, 0.022808576583862306, 0.024190975189208985, 0.023196672439575194, 0.02285772705078125, 0.02272870445251465, 0.022768640518188478, 0.022730752944946288, 0.022350847244262697, 0.02190540885925293, 0.0219238395690918, 0.0219289608001709, 0.021996543884277343, 0.021950464248657226, 0.021983232498168945, 0.021944320678710938, 0.0223191032409668, 0.022772735595703125, 0.02274508857727051, 0.022779903411865234, 0.022806528091430665, 0.02271334457397461, 0.022759424209594727, 0.02391449546813965, 0.02310860824584961, 0.022780927658081054, 0.022631423950195313, 0.022030336380004883, 0.022175743103027345, 
0.021962751388549806, 0.021942272186279296, 0.021910528182983398, 0.022411264419555665, 0.02187161636352539, 0.021934080123901366, 0.02206211280822754, 0.022175712585449217, 0.021922815322875978, 0.021977088928222657, 0.021941247940063476, 0.021970943450927736, 0.022008832931518556, 0.021985279083251954, 0.02211942481994629, 0.02221670341491699, 0.02201190376281738, 0.021991424560546875, 0.022047744750976563, 0.02186240005493164, 0.021946367263793946, 0.022223871231079103, 0.022179840087890625, 0.021996543884277343, 0.021957632064819335, 0.021979135513305666, 0.021966848373413086, 0.021992448806762696, 0.021957632064819335, 0.021916671752929686, 0.02185625648498535, 0.021929983139038087, 0.0218787841796875, 0.021942272186279296, 0.02190540885925293, 0.022016000747680665, 0.021917695999145507, 0.021921791076660157, 0.022001663208007814, 0.022082559585571288, 0.021949440002441405, 0.022381568908691408, 0.022389759063720704, 0.02206515121459961, 0.022169599533081053, 0.02202828788757324, 0.02189004707336426, 0.02190745544433594, 0.021960704803466798, 0.021968896865844727, 0.021917695999145507, 0.021954559326171876, 0.021934080123901366, 0.02185113525390625, 0.021914623260498048, 0.021966848373413086, 0.022018047332763673, 0.022040576934814454, 0.02189926338195801, 0.021952512741088868, 0.02222489547729492, 0.021972991943359374, 0.02185523223876953, 0.02190745544433594, 0.021963775634765623, 0.02188083267211914, 0.021976064682006836, 0.021962751388549806, 0.021964799880981444, 0.021974016189575195, 0.021951488494873047, 0.02189516830444336, 0.021979135513305666, 0.021948415756225585, 0.02388582420349121, 0.02428313636779785, 0.023215103149414062, 0.02291302490234375, 0.02272870445251465, 0.02207539176940918, 0.021953535079956055, 0.022397951126098634, 0.022777856826782225, 0.02186240005493164, 0.022034431457519533, 0.022002687454223634, 0.022714368820190428, 0.022697984695434572, 0.023018495559692383, 0.022804479598999023, 0.022026239395141603, 0.02203545570373535, 0.021966848373413086, 0.021914623260498048, 0.021978111267089845, 0.022334463119506837, 0.022798336029052735, 0.02272972869873047, 0.022725631713867187, 0.022797311782836914, 0.022787071228027343, 0.02274406433105469, 0.02309529685974121, 0.022922239303588866, 0.022796287536621093, 0.02229452705383301, 0.02209587287902832, 0.021831680297851562, 0.021971967697143553, 0.02244607925415039, 0.022000640869140626, 0.02187775993347168, 0.02169753646850586, 0.022017023086547852, 0.022709247589111328, 0.022841344833374022, 0.02291814422607422, 0.02224844741821289, 0.021953535079956055, 0.022033407211303712, 0.021991424560546875, 0.022026239395141603, 0.022963199615478515, 0.023017471313476562, 0.021764095306396485, 0.02206208038330078, 0.0218920955657959, 0.02186444854736328, 0.021956607818603514, 0.022114303588867186, 0.023345151901245118, 0.023163904190063478, 0.02286796760559082, 0.02281065559387207, 0.02276655960083008, 0.022762496948242186, 0.02273587226867676, 0.022018047332763673, 0.02271232032775879, 0.02308406448364258, 0.02234979248046875, 0.02190438461303711, 0.02187059211730957, 0.02244607925415039, 0.02330624008178711, 0.025416704177856447, 0.023181312561035155, 0.022831104278564454, 0.022631423950195313, 0.022846464157104493, 0.022773759841918945, 0.02289664077758789, 0.022806528091430665, 0.02286591911315918, 0.022800384521484376, 0.022724607467651366, 0.022815744400024415, 0.02275225639343262, 0.022800384521484376, 0.022845439910888672, 0.022832128524780275, 0.022872064590454103, 0.022915071487426757, 0.02284851264953613, 
0.02307072067260742, 0.022795263290405272, 0.022714368820190428, 0.022841344833374022, 0.02282803153991699, 0.022819839477539062, 0.02272870445251465, 0.02288755226135254, 0.022626176834106445, 0.023014400482177736, 0.02285670471191406, 0.023385087966918947, 0.023176191329956054, 0.022770687103271483, 0.022862848281860353, 0.022684671401977538, 0.02275328063964844, 0.022821887969970703, 0.022799392700195313, 0.022655967712402345, 0.022796287536621093, 0.022839296340942384, 0.02272153663635254]",tokens/s,44.936469781672756,,,2,64,1,,, -4bit-awq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,Qwen/Qwen-7B,Qwen/Qwen-7B,cuda,0,42,,,True,,,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 613, in resolve_trust_remote_code - answer = input( -EOFError: EOF when reading a line - -During handling of the above exception, another exception occurred: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run - report = scenario.run(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run - self.run_model_loading_tracking(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking - backend.load() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load - self.load_transformers_model() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 149, in load_transformers_model - self.create_no_weights_model() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 269, in create_no_weights_model - meta_model = self.automodel_loader.from_config(self.pretrained_config) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 419, in from_config - trust_remote_code = resolve_trust_remote_code( - File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 626, in resolve_trust_remote_code - raise ValueError( -ValueError: The repository for Qwen/Qwen-7B contains custom code which must be executed to correctly load the model. You can inspect the repository content at https://hf.co/Qwen/Qwen-7B. -Please pass the argument `trust_remote_code=True` to allow custom code to be run. 
- -",qwen,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,2,64,1,,, -4bit-awq-exllama-v2-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,microsoft/rho-math-1b-v0.1,microsoft/rho-math-1b-v0.1,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - self.load_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3907, in from_pretrained - hf_quantizer.postprocess_model(model) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/base.py"", line 195, in postprocess_model - return self._process_model_after_weight_loading(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/quantizer_awq.py"", line 107, in _process_model_after_weight_loading - model = post_init_awq_exllama_modules(model, self.quantization_config.exllama_config) - File ""/usr/local/lib/python3.10/dist-packages/transformers/integrations/awq.py"", line 466, in post_init_awq_exllama_modules - model = exllamav2_post_init( - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllamav2.py"", line 198, in exllamav2_post_init - submodule.post_init(scratch_space=model.scratch_spaces[device]) - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllamav2.py"", line 81, in post_init - self.q_handle = exlv2_ext.make_q_matrix( -NameError: name 'exlv2_ext' is not defined - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,2,64,1,,, 
-4bit-awq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,01-ai/Yi-6B,01-ai/Yi-6B,cuda,0,42,,,True,,,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,llama,MB,3573.501952,5499.256832,0.0,4869.586944,4520.068608,s,1,10.4419091796875,10.4419091796875,0.0,10.4419091796875,10.4419091796875,10.4419091796875,10.4419091796875,[10.4419091796875],,kWh,4.2636554400694376e-05,2.335240279266151e-05,7.538867142199701e-05,0.0001413776286153529,,MB,1724.502016,5537.005568,0.0,4888.461312,4194.018304,s,10,0.9525265960693358,0.0952526596069336,8.798408593560628e-05,0.0952265625,0.09536480407714844,0.09541608276367188,0.09545710571289062,"[0.09535340881347656, 0.09526483154296875, 0.09523577880859375, 0.09517488098144532, 0.09546736145019531, 0.0951739501953125, 0.09521734619140625, 0.0952510757446289, 0.09520722961425782, 0.09518073272705078]",tokens/s,2687.5889981067294,kWh,1.1262699579100565e-06,6.171366118359252e-07,4.47430516674288e-06,6.217711736488862e-06,tokens/kWh,41172703.2145371,MB,1736.851456,5547.491328,0.0,4896.84992,4194.020864,s,10,16.2210234375,1.6221023437499997,0.01753764417475878,1.629876159667969,1.639729211425781,1.6416736267089844,1.643229158935547,"[1.6298048095703126, 1.6436180419921875, 1.6374757080078124, 1.61048046875, 1.639297119140625, 1.6057181396484375, 1.59601025390625, 1.629947509765625, 1.5949071044921874, 1.6337642822265626]",tokens/s,38.838486512728714,kWh,1.8955747955701048e-05,1.0385888048823698e-05,4.400773361885724e-05,7.3349369623382e-05,tokens/kWh,858903.0870132677,,s,630,16.219025409698485,0.02574448477729918,0.0006174557672070495,0.025893375396728514,0.026419507026672364,0.026679910659790036,0.027386654739379884,"[0.026646528244018555, 0.02610585594177246, 0.025247743606567383, 0.02509516716003418, 0.024805376052856445, 0.02551910400390625, 0.02632601547241211, 0.02627686309814453, 0.02614784049987793, 0.02612019157409668, 0.025233407974243165, 0.02529177665710449, 0.026263551712036134, 0.026475519180297852, 0.02611507225036621, 0.026255359649658205, 0.026053632736206055, 0.026068992614746093, 0.02616524887084961, 0.026373119354248048, 0.02614067268371582, 0.025247743606567383, 0.025661439895629884, 0.02550579261779785, 0.02612531280517578, 0.026460159301757814, 0.026262527465820314, 0.02615091133117676, 0.025993215560913087, 0.02614067268371582, 0.025765888214111327, 0.026223615646362306, 0.025789440155029295, 0.02486783981323242, 0.026557439804077147, 0.026372095108032227, 0.026179584503173828, 0.02653593635559082, 0.02617651176452637, 0.025193471908569336, 0.025233407974243165, 0.025127935409545898, 0.02518016052246094, 0.02589798355102539, 0.02612735939025879, 0.026281984329223632, 0.02631475257873535, 0.026239999771118162, 0.02614681625366211, 0.026246143341064454, 0.026263551712036134, 0.026242048263549804, 0.025358335494995117, 0.025282560348510744, 0.02517094421386719, 0.025839616775512695, 0.02574028778076172, 0.024862720489501954, 0.025205759048461913, 0.025186304092407227, 0.025232383728027344, 0.026200063705444337, 0.026287103652954103, 0.025776128768920898, 
0.026153984069824218, 0.026186784744262694, 0.026131423950195312, 0.02511769676208496, 0.025186304092407227, 0.026064895629882814, 0.026202112197875976, 0.026224639892578124, 0.02613657569885254, 0.026241024017333983, 0.026220544815063477, 0.026315776824951172, 0.0261345272064209, 0.026096639633178712, 0.026231807708740236, 0.026210304260253905, 0.02631782341003418, 0.02632089614868164, 0.027373567581176757, 0.027488256454467775, 0.026696704864501954, 0.02632806396484375, 0.026266624450683593, 0.027212799072265623, 0.029041664123535156, 0.026631168365478516, 0.026250240325927734, 0.02613043212890625, 0.02547711944580078, 0.025939968109130858, 0.0255150089263916, 0.02553856086730957, 0.027020288467407227, 0.026234880447387695, 0.02627276802062988, 0.026588159561157225, 0.026578943252563478, 0.02612735939025879, 0.02617344093322754, 0.026638336181640625, 0.02633318328857422, 0.026058752059936522, 0.026283008575439453, 0.027242496490478517, 0.026057727813720705, 0.026694656372070313, 0.025802751541137696, 0.025204736709594725, 0.02527539253234863, 0.025133056640625, 0.02514022445678711, 0.024812543869018554, 0.025235456466674806, 0.025753599166870117, 0.025141248703002928, 0.025060352325439454, 0.024953855514526366, 0.02490572738647461, 0.024771583557128905, 0.02616831970214844, 0.026236928939819337, 0.026351615905761717, 0.025100288391113282, 0.024963071823120117, 0.026137599945068358, 0.025991167068481445, 0.02636390495300293, 0.025420799255371093, 0.025999359130859375, 0.02596659278869629, 0.025959423065185547, 0.025643007278442383, 0.026558464050292968, 0.026027008056640624, 0.0251013126373291, 0.025211904525756838, 0.02593791961669922, 0.025976831436157227, 0.02655129623413086, 0.026413055419921876, 0.026206207275390626, 0.0261079044342041, 0.02593484878540039, 0.0270162239074707, 0.026124256134033203, 0.026381311416625978, 0.028206079483032227, 0.0265482234954834, 0.026100736618041992, 0.026307584762573243, 0.02612019157409668, 0.02634137535095215, 0.02636185646057129, 0.026720256805419923, 0.02652569580078125, 0.026145824432373045, 0.02641097640991211, 0.02636595153808594, 0.026184703826904295, 0.026204160690307617, 0.02610688018798828, 0.02613248062133789, 0.02615910339355469, 0.02632294464111328, 0.025823232650756835, 0.026661888122558593, 0.026595327377319337, 0.02573209571838379, 0.025119743347167968, 0.02513408088684082, 0.02529689598083496, 0.026771455764770507, 0.02656768035888672, 0.026372095108032227, 0.026225664138793944, 0.02535219192504883, 0.026770431518554686, 0.025273344039916993, 0.025141248703002928, 0.025158655166625975, 0.02513920021057129, 0.025069568634033205, 0.025193471908569336, 0.024808448791503908, 0.024716287612915038, 0.025082880020141602, 0.024964096069335938, 0.024967168807983397, 0.02504806327819824, 0.02508697509765625, 0.02503167915344238, 0.025012224197387696, 0.025167871475219726, 0.025082880020141602, 0.025042943954467774, 0.025024511337280272, 0.025057279586791992, 0.025009151458740234, 0.024990720748901366, 0.025037824630737306, 0.025247743606567383, 0.025019392013549805, 0.025032703399658202, 0.02503987121582031, 0.025195520401000978, 0.025066495895385742, 0.025069568634033205, 0.025036800384521486, 0.02546892738342285, 0.025357311248779296, 0.025111551284790038, 0.02505523109436035, 0.02505625534057617, 0.025074687957763672, 0.02507366371154785, 0.025067520141601563, 0.025176063537597656, 0.025044992446899415, 0.026792959213256837, 0.026177536010742186, 0.026089471817016603, 0.02594099235534668, 0.02629324722290039, 0.02615500831604004, 
0.026039295196533203, 0.026027008056640624, 0.02616422462463379, 0.026035200119018553, 0.026104831695556642, 0.026404863357543946, 0.0261212158203125, 0.026065919876098635, 0.025986047744750978, 0.026025983810424806, 0.026049535751342775, 0.025996288299560546, 0.02594611167907715, 0.02570444869995117, 0.026070016860961914, 0.02510950469970703, 0.026294271469116212, 0.02616422462463379, 0.026129407882690428, 0.026147872924804687, 0.02599830436706543, 0.026253311157226563, 0.026061824798583984, 0.02613248062133789, 0.025640960693359374, 0.025944063186645508, 0.026113023757934572, 0.025968639373779297, 0.026076160430908202, 0.026212352752685547, 0.02613862419128418, 0.025176063537597656, 0.025225215911865235, 0.02592665672302246, 0.026191871643066408, 0.026471424102783202, 0.026846208572387696, 0.026364927291870118, 0.026192895889282225, 0.02612735939025879, 0.026183679580688478, 0.026171392440795898, 0.026199039459228517, 0.026059776306152343, 0.02614886474609375, 0.026078208923339844, 0.025851903915405275, 0.026004480361938476, 0.026201087951660155, 0.02611712074279785, 0.02719436836242676, 0.02627174377441406, 0.02613657569885254, 0.02608639907836914, 0.026011648178100585, 0.026071039199829102, 0.026187776565551758, 0.026184703826904295, 0.026208255767822267, 0.026231807708740236, 0.02652569580078125, 0.025828351974487306, 0.02612224006652832, 0.026294271469116212, 0.026223615646362306, 0.026051584243774413, 0.026255359649658205, 0.026073087692260744, 0.026265600204467773, 0.026038272857666016, 0.02609766387939453, 0.02595430374145508, 0.026204160690307617, 0.026040319442749024, 0.02510643196105957, 0.025059328079223633, 0.02588876724243164, 0.026015743255615235, 0.026300416946411134, 0.026080255508422853, 0.02612224006652832, 0.025649152755737304, 0.026096639633178712, 0.025193471908569336, 0.025150463104248046, 0.0251146240234375, 0.02512998390197754, 0.02531532859802246, 0.025116672515869142, 0.025070592880249022, 0.025057279586791992, 0.02509721565246582, 0.025103359222412108, 0.025116672515869142, 0.025013248443603517, 0.0251463680267334, 0.02528767967224121, 0.026806272506713868, 0.026065919876098635, 0.026246143341064454, 0.02613350486755371, 0.025985023498535157, 0.026036224365234374, 0.02594099235534668, 0.02553548812866211, 0.025001983642578125, 0.024805376052856445, 0.02475212860107422, 0.02488422393798828, 0.024979455947875977, 0.02514022445678711, 0.025078784942626952, 0.025288703918457032, 0.026023935317993165, 0.02510745620727539, 0.02551603126525879, 0.02533171272277832, 0.02513817596435547, 0.025225215911865235, 0.025215999603271484, 0.025174016952514647, 0.025224191665649414, 0.02533580780029297, 0.025189376831054686, 0.025565183639526368, 0.025267232894897462, 0.02520675277709961, 0.025204736709594725, 0.025149440765380858, 0.025175039291381835, 0.024839168548583986, 0.024980480194091798, 0.025175039291381835, 0.02514739227294922, 0.025194496154785157, 0.02509823989868164, 0.02531123161315918, 0.027235328674316408, 0.026475519180297852, 0.025219072341918947, 0.025627647399902344, 0.02569932746887207, 0.026063871383666993, 0.026178560256958007, 0.0269434871673584, 0.025854976654052734, 0.025939968109130858, 0.026070016860961914, 0.026046464920043946, 0.02636595153808594, 0.025202688217163087, 0.02512588882446289, 0.025201663970947266, 0.025150463104248046, 0.025009151458740234, 0.02508185577392578, 0.025016319274902343, 0.024980480194091798, 0.025037824630737306, 0.024986623764038086, 0.026067968368530273, 0.02595123291015625, 0.02569830322265625, 0.02511564826965332, 
0.025093120574951173, 0.02506547164916992, 0.025067520141601563, 0.0251146240234375, 0.025091072082519532, 0.025169919967651368, 0.025058303833007813, 0.02510745620727539, 0.02507263946533203, 0.0251463680267334, 0.025464832305908205, 0.02531635284423828, 0.025126911163330077, 0.02508902359008789, 0.02502348709106445, 0.0250644474029541, 0.025054208755493163, 0.02506547164916992, 0.025103359222412108, 0.02508697509765625, 0.025181184768676756, 0.025208831787109375, 0.0247193603515625, 0.024800256729125978, 0.02508902359008789, 0.02511257553100586, 0.02532147216796875, 0.025047040939331053, 0.026054655075073242, 0.025673728942871094, 0.025701375961303712, 0.02594304084777832, 0.025991167068481445, 0.026034175872802736, 0.025297920227050782, 0.025590784072875978, 0.026218496322631835, 0.026073087692260744, 0.026046464920043946, 0.026035200119018553, 0.025800703048706054, 0.025999359130859375, 0.02591436767578125, 0.02514227294921875, 0.025091072082519532, 0.025157632827758788, 0.02511052894592285, 0.025203712463378908, 0.025247743606567383, 0.02596249580383301, 0.025145343780517578, 0.02507980728149414, 0.02514022445678711, 0.025167871475219726, 0.02615910339355469, 0.025952255249023438, 0.02608742332458496, 0.026622976303100586, 0.02739200019836426, 0.026505216598510743, 0.026137599945068358, 0.026062847137451172, 0.026167295455932618, 0.026390527725219725, 0.026884096145629883, 0.02655948829650879, 0.026167295455932618, 0.026059776306152343, 0.026634239196777345, 0.026415103912353514, 0.027535360336303712, 0.026459135055541993, 0.02631884765625, 0.02629734420776367, 0.02691379165649414, 0.025387008666992186, 0.025284608840942382, 0.025025535583496093, 0.02503987121582031, 0.025260032653808592, 0.025227264404296876, 0.025548799514770508, 0.025465856552124022, 0.025487360000610353, 0.026002431869506838, 0.02529280090332031, 0.025638912200927736, 0.026482688903808595, 0.025422847747802735, 0.02513715171813965, 0.02680012893676758, 0.026186752319335937, 0.02612735939025879, 0.026081279754638673, 0.025965568542480468, 0.0261079044342041, 0.026065919876098635, 0.025082880020141602, 0.02513715171813965, 0.02510540771484375, 0.02533478355407715, 0.025066495895385742, 0.02628096008300781, 0.025203712463378908, 0.025483264923095703, 0.026023935317993165, 0.025133056640625, 0.02594508743286133, 0.026236928939819337, 0.026233856201171874, 0.025127935409545898, 0.02509414482116699, 0.02609049606323242, 0.025145343780517578, 0.025116672515869142, 0.024991743087768553, 0.025103359222412108, 0.02592767906188965, 0.025197599411010744, 0.025047008514404296, 0.02512384033203125, 0.025007104873657225, 0.025145343780517578, 0.025012224197387696, 0.026225664138793944, 0.026332160949707032, 0.0261079044342041, 0.026051584243774413, 0.02591948890686035, 0.025145343780517578, 0.02513715171813965, 0.02514841651916504, 0.025078784942626952, 0.025050111770629883, 0.02508697509765625, 0.0251146240234375, 0.02591436767578125, 0.02615603256225586, 0.025606143951416017, 0.025189376831054686, 0.025082880020141602, 0.025116672515869142, 0.025014272689819338, 0.02505625534057617, 0.025200639724731445, 0.02566655921936035, 0.025255935668945313, 0.025340927124023437, 0.025049087524414062, 0.025187328338623048, 0.025135103225708007, 0.025127935409545898, 0.025259008407592775, 0.02514739227294922, 0.02512588882446289, 0.025209856033325196, 0.025387008666992186, 0.025351167678833008, 0.02517094421386719, 0.025175039291381835, 0.025169919967651368, 0.025174016952514647, 0.025225215911865235, 0.025257984161376954, 
0.02547302436828613, 0.025597951889038087, 0.02518016052246094, 0.02513203239440918, 0.02514841651916504, 0.025184255599975586, 0.025213951110839843, 0.025224191665649414, 0.025169919967651368, 0.025181184768676756, 0.025143295288085937, 0.027080703735351562, 0.026030080795288086, 0.02647248077392578, 0.02720252799987793, 0.026977279663085937, 0.026933248519897462, 0.027645952224731447, 0.02693120002746582, 0.027039743423461913, 0.027678720474243163, 0.02656768035888672, 0.026047487258911133, 0.026061824798583984, 0.0259368953704834, 0.02611097526550293, 0.026053632736206055, 0.026044416427612304, 0.02627276802062988, 0.026113023757934572, 0.026030080795288086, 0.02609152030944824, 0.02614067268371582, 0.026196992874145508, 0.025668607711791993, 0.025403392791748046, 0.026461183547973634, 0.02648575973510742, 0.026065919876098635, 0.026084352493286132, 0.026166271209716797, 0.025211904525756838, 0.025373695373535156, 0.02513715171813965, 0.02526310348510742, 0.02527539253234863, 0.02532863998413086, 0.02531839942932129, 0.025152511596679687, 0.025189376831054686, 0.02513920021057129, 0.025158655166625975, 0.02508902359008789, 0.025136127471923828, 0.02512588882446289, 0.025935871124267578, 0.02614886474609375, 0.02612428855895996, 0.02614374351501465, 0.026063871383666993, 0.025802751541137696, 0.02517094421386719, 0.025126911163330077, 0.025118719100952147, 0.02515660858154297, 0.025151487350463866, 0.025191423416137695, 0.025161727905273438, 0.02508799934387207, 0.026556415557861326, 0.02655948829650879, 0.026031103134155274, 0.02595840072631836, 0.02617241668701172]",tokens/s,38.843271040396736,,,2,64,1,,, -4bit-awq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,EleutherAI/gpt-neo-2.7B,EleutherAI/gpt-neo-2.7B,cuda,0,42,,,True,,,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run - report = scenario.run(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run - self.run_model_loading_tracking(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking - backend.load() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load - self.load_transformers_model() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model 
- self.load_transformers_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights - self.load_transformers_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained - self.pretrained_model = self.automodel_loader.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3826, in from_pretrained - config = cls._autoset_attn_implementation( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1565, in _autoset_attn_implementation - config = cls._check_and_enable_sdpa( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1731, in _check_and_enable_sdpa - raise ValueError( -ValueError: GPTNeoForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` - -",gpt_neo,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,2,64,1,,, -4bit-awq-exllama-v2-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,facebook/opt-66b,facebook/opt-66b,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - self.load_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3704, in from_pretrained - config = cls._autoset_attn_implementation( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1490, in _autoset_attn_implementation - config = cls._check_and_enable_sdpa( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1656, in _check_and_enable_sdpa - raise ValueError( -ValueError: OPTForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,2,64,1,,, -4bit-awq-exllama-v2-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,0,0,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 304, in hf_raise_for_status - response.raise_for_status() - File ""/usr/local/lib/python3.10/dist-packages/requests/models.py"", line 1024, in raise_for_status - raise HTTPError(http_error_msg, response=self) -requests.exceptions.HTTPError: 404 Client Error: Not Found for url: https://huggingface.co/0/resolve/main/config.json - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1221, in hf_hub_download - return _hf_hub_download_to_cache_dir( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1325, in _hf_hub_download_to_cache_dir - _raise_on_head_call_error(head_call_error, force_download, 
local_files_only) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1823, in _raise_on_head_call_error - raise head_call_error - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1722, in _get_metadata_or_catch_error - metadata = get_hf_file_metadata(url=url, proxies=proxies, timeout=etag_timeout, headers=headers) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1645, in get_hf_file_metadata - r = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 372, in _request_wrapper - response = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 396, in _request_wrapper - hf_raise_for_status(response) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status - raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-669494f9-3448c0fa69d6c72831ab4d47;7a87d3d3-884b-45de-b104-74a2f51fe37a) - -Repository Not Found for url: https://huggingface.co/0/resolve/main/config.json. -Please make sure you specified the correct `repo_id` and `repo_type`. -If you are trying to access a private or gated repo, make sure you are authenticated. - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 425, in cached_file - raise EnvironmentError( -OSError: 0 is not a local folder and is not a valid model identifier listed on 'https://huggingface.co/models' -If this is a private repository, make sure to pass a token having permission to this repo either by logging in with `huggingface-cli login` or by passing `token=` - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,2,64,1,,, 
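Editor's note (not part of the recorded data): the failure rows in this file fall into a few recurring classes. The rows around this point record (a) RepositoryNotFoundError / 404 for placeholder model IDs such as "0", "B", "t", "l", or "." that are not valid Hub repositories, (b) ValueError for architectures (GPTNeoForCausalLM, OPTForCausalLM, XGLMForCausalLM, CodeGenForCausalLM) that the pinned transformers releases (4.42.4 / 4.44.2) cannot load with attn_implementation="sdpa", (c) torch.OutOfMemoryError for checkpoints too large for the single 24 GB A10G (the log itself suggests PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True for fragmentation, though that cannot rescue a genuinely oversized model), and (d) a NameError for the missing exlv2_ext AWQ kernel extension. A minimal sketch of the workaround that the ValueError message itself recommends for class (b), using facebook/opt-125m (one of the failing checkpoints in this file) purely as a stand-in:

from transformers import AutoModelForCausalLM

# Architectures without SDPA support in these transformers versions must fall
# back to the eager attention implementation, as the error message suggests.
model = AutoModelForCausalLM.from_pretrained(
    "facebook/opt-125m",          # stand-in for any of the failing checkpoints
    attn_implementation="eager",  # instead of the benchmark's "sdpa" setting
)

This is a sketch of the library's documented fallback, not a change to the benchmark harness; the rows below continue the recorded sweep unmodified.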
-4bit-awq-exllama-v2-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,facebook/xglm-4.5B,facebook/xglm-4.5B,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - self.load_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3704, in from_pretrained - config = cls._autoset_attn_implementation( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1490, in _autoset_attn_implementation - config = cls._check_and_enable_sdpa( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1656, in _check_and_enable_sdpa - raise ValueError( -ValueError: XGLMForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. 
Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,2,64,1,,, -4bit-awq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,TencentARC/Mistral_Pro_8B_v0.1,TencentARC/Mistral_Pro_8B_v0.1,cuda,0,42,,,True,,,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,,mistral,MB,5176.6272,6590.824448,0.0,5953.814528,5766.738432,s,1,12.227966796875,12.227966796875,0.0,12.227966796875,12.227966796875,12.227966796875,12.227966796875,[12.227966796875],,kWh,6.327412010972467e-05,3.462126717411182e-05,0.00011923759539000933,0.0002171329826738458,,MB,1801.220096,6651.641856,0.0,5995.757568,5281.196032,s,10,1.431302993774414,0.1431302993774414,4.752378308414272e-05,0.1431241912841797,0.14314753417968748,0.14320506134033203,0.14325108306884765,"[0.14326258850097656, 0.1431117706298828, 0.14313475036621093, 0.1431239013671875, 0.1431309814453125, 0.1431125183105469, 0.14312797546386719, 0.14312448120117188, 0.14310400390625, 0.1430700225830078]",tokens/s,1788.5800638543753,kWh,1.6936438396428479e-06,9.275899663597485e-07,6.411183700371399e-06,9.032417506373995e-06,tokens/kWh,28342356.829646762,MB,1801.490432,6672.613376,0.0,6014.631936,5281.198592,s,10,23.515557861328126,2.3515557861328125,0.008188626942172098,2.349184326171875,2.3613659667968747,2.3622174072265625,2.3628985595703123,"[2.3611767578125, 2.36306884765625, 2.3428154296875, 2.3449462890625, 2.357818359375, 2.3482841796875, 2.348498046875, 2.34987060546875, 2.338245849609375, 2.36083349609375]",tokens/s,26.79077416385896,kWh,2.839989029035738e-05,1.5563858379789716e-05,6.601820757642731e-05,0.0001099819562465744,tokens/kWh,572821.2349556407,,s,630,23.513039886474626,0.03732228553408668,0.0006209124345479809,0.037045759201049806,0.03824138221740723,0.03854832630157471,0.03929548702239991,"[0.03719680023193359, 0.036857856750488284, 0.03687936019897461, 0.03681894302368164, 0.036587520599365236, 0.03671039962768555, 0.03674521636962891, 0.03699609756469727, 0.03692748641967773, 0.03691110229492187, 0.03787980651855469, 0.03832012939453125, 0.03783475112915039, 0.036999168395996096, 0.03676364898681641, 0.03686195373535156, 0.038042625427246096, 0.03707392120361328, 0.03682918548583984, 0.036853759765625, 0.037407745361328126, 0.0380313606262207, 0.03715071868896484, 0.037634048461914066, 0.038056961059570314, 0.03799244689941406, 0.03715584182739258, 0.0374917106628418, 0.03821363067626953, 0.03827097702026367, 0.03894988632202148, 0.03856281661987305, 0.038324222564697266, 0.038242305755615234, 0.03844095993041992, 0.03811532974243164, 0.038329345703125, 0.038580223083496096, 0.0380313606262207, 0.0367534065246582, 0.036653057098388675, 0.037146625518798826, 0.0368455696105957, 0.03696025466918945, 0.036951038360595705, 0.03688243103027344, 0.037381118774414065, 0.037032958984375, 0.03685068893432617, 0.03685171127319336, 0.037353473663330077, 0.03918643188476562, 0.03839078521728516, 0.03768115234375, 0.038265857696533206, 
0.038114303588867186, 0.03680767822265625, 0.03745792007446289, 0.037884929656982425, 0.036918270111083985, 0.036853759765625, 0.03711795043945312, 0.03753779220581055, 0.03754086303710937, 0.03744768142700195, 0.0368455696105957, 0.037338111877441404, 0.03677596664428711, 0.03675235366821289, 0.03751628875732422, 0.037610496520996094, 0.03701760101318359, 0.036988929748535154, 0.036961280822753906, 0.037449726104736326, 0.039449600219726565, 0.03828531265258789, 0.03791974258422852, 0.03854438400268555, 0.038042625427246096, 0.03675545501708984, 0.03738828659057617, 0.03724390411376953, 0.0374917106628418, 0.038084606170654296, 0.0384420166015625, 0.03688547134399414, 0.03785830307006836, 0.03715686416625977, 0.03679948806762695, 0.036772865295410156, 0.03667763137817383, 0.036819969177246094, 0.036926464080810545, 0.036980735778808595, 0.03684454345703125, 0.037978111267089845, 0.038414337158203124, 0.03721932983398438, 0.03844607925415039, 0.03845529556274414, 0.03806515121459961, 0.03784601593017578, 0.03767295837402344, 0.038242305755615234, 0.03807436752319336, 0.03803033447265625, 0.03706675338745117, 0.036934654235839845, 0.0369172477722168, 0.036915199279785156, 0.036531200408935545, 0.037308414459228514, 0.03818188858032227, 0.03737497711181641, 0.036928512573242187, 0.036890625, 0.03686195373535156, 0.036951038360595705, 0.03678822326660156, 0.03676364898681641, 0.03911065673828125, 0.03934003067016602, 0.03890073776245117, 0.03790335845947266, 0.03710464096069336, 0.038029312133789066, 0.03709030532836914, 0.03686297607421875, 0.03715686416625977, 0.03684659194946289, 0.03686297607421875, 0.03684864044189453, 0.036760574340820314, 0.03767295837402344, 0.03718041610717773, 0.03783168029785156, 0.03749478530883789, 0.03688345718383789, 0.036980735778808595, 0.036983806610107424, 0.03685068893432617, 0.036803585052490234, 0.03676364898681641, 0.037389312744140625, 0.03764940643310547, 0.03739340972900391, 0.036956161499023435, 0.036803585052490234, 0.03699097442626953, 0.03742822265625, 0.03711695861816406, 0.036979679107666016, 0.03812351989746094, 0.038117408752441406, 0.03780707168579102, 0.037410816192626956, 0.03747430419921875, 0.036647937774658204, 0.03647180938720703, 0.03682611083984375, 0.03759308624267578, 0.037921791076660154, 0.036945919036865234, 0.0365588493347168, 0.037324798583984374, 0.03760639953613281, 0.03755724716186523, 0.036790271759033204, 0.03704012680053711, 0.03745075225830078, 0.03676671981811523, 0.03679641723632812, 0.036972545623779295, 0.03679334259033203, 0.037477375030517575, 0.03796377563476563, 0.037855232238769534, 0.03710566329956055, 0.03702272033691406, 0.03727360153198242, 0.037501953125, 0.0374917106628418, 0.03687833786010742, 0.036928512573242187, 0.03688345718383789, 0.03687936019897461, 0.03694899368286133, 0.03679334259033203, 0.03692748641967773, 0.036944896697998046, 0.03686195373535156, 0.03690086364746094, 0.03700940704345703, 0.03662847900390625, 0.03688755035400391, 0.036939777374267575, 0.03666022491455078, 0.03709542465209961, 0.0373196792602539, 0.037141502380371096, 0.037335041046142575, 0.036994049072265625, 0.03695513534545898, 0.0369879035949707, 0.03747430419921875, 0.03751833724975586, 0.036939777374267575, 0.036896766662597655, 0.03686707305908203, 0.036863998413085936, 0.0369879035949707, 0.03707596969604492, 0.03788185501098633, 0.037163009643554686, 0.03704422378540039, 0.03691212844848633, 0.03683737564086914, 0.03712819290161133, 0.038757377624511716, 0.0391649284362793, 0.03750502395629883, 0.03726847839355469, 
0.037381118774414065, 0.038225921630859375, 0.0380497932434082, 0.03776204681396484, 0.03836108779907227, 0.03770163345336914, 0.03709952163696289, 0.036913150787353514, 0.03688243103027344, 0.03741491317749023, 0.03797196960449219, 0.037368862152099606, 0.03669398498535156, 0.036370433807373044, 0.03690086364746094, 0.03694182586669922, 0.037084159851074217, 0.03755110549926758, 0.03722444915771484, 0.03700838470458984, 0.03712819290161133, 0.03688345718383789, 0.03693670272827149, 0.037814273834228515, 0.036819969177246094, 0.036931583404541016, 0.037594112396240234, 0.036724735260009765, 0.03703910446166992, 0.03747635269165039, 0.03705344009399414, 0.036915199279785156, 0.03689164733886719, 0.03693260955810547, 0.037700607299804685, 0.03728384017944336, 0.03700735855102539, 0.03687014389038086, 0.0369356803894043, 0.0369090576171875, 0.03681689453125, 0.03772825622558594, 0.03734220886230469, 0.03762995147705078, 0.03707904052734375, 0.036759552001953126, 0.03671756744384766, 0.03672678375244141, 0.036828193664550785, 0.03693360137939453, 0.03687321472167969, 0.03696640014648438, 0.037804031372070314, 0.03831808090209961, 0.037792766571044925, 0.03801804733276367, 0.03809075164794922, 0.03788390350341797, 0.037634048461914066, 0.03676774215698242, 0.03949977493286133, 0.039008289337158206, 0.037742561340332034, 0.03782144165039063, 0.037736446380615234, 0.03679129409790039, 0.038204414367675785, 0.037408767700195314, 0.03730124664306641, 0.03809280014038086, 0.03811328125, 0.03806617736816406, 0.03698995208740234, 0.03649331283569336, 0.03676364898681641, 0.0366929931640625, 0.03722956848144531, 0.036432895660400394, 0.03682204818725586, 0.036906974792480465, 0.03687936019897461, 0.038836223602294925, 0.03723468780517578, 0.03689471817016601, 0.036857856750488284, 0.03757363128662109, 0.03859763336181641, 0.038145057678222655, 0.03826889419555664, 0.039093246459960936, 0.03746303939819336, 0.0369356803894043, 0.03712614440917969, 0.038566913604736325, 0.03803443145751953, 0.038434814453125, 0.03826892852783203, 0.03720499038696289, 0.03757670211791992, 0.037070846557617186, 0.039569408416748046, 0.0374917106628418, 0.037152767181396484, 0.0383631362915039, 0.03813478469848633, 0.03726540756225586, 0.03674521636962891, 0.03691929626464844, 0.036994049072265625, 0.03703091049194336, 0.03702579116821289, 0.036907009124755856, 0.037154815673828126, 0.0377262077331543, 0.0370964469909668, 0.0369244155883789, 0.03705548858642578, 0.03693260955810547, 0.037029888153076174, 0.036923393249511716, 0.03685273742675781, 0.03687833786010742, 0.03690393447875977, 0.03694899368286133, 0.03684966278076172, 0.036874240875244144, 0.03696537780761719, 0.03686604690551758, 0.0372305908203125, 0.03675545501708984, 0.0368721923828125, 0.037765121459960936, 0.036795391082763675, 0.03684454345703125, 0.03682099151611328, 0.03683430480957031, 0.03686912155151367, 0.03685273742675781, 0.036893695831298826, 0.037395454406738284, 0.037800960540771485, 0.038935550689697264, 0.03798732757568359, 0.03753574371337891, 0.0366929931640625, 0.036773887634277344, 0.03689267349243164, 0.03678822326660156, 0.03676364898681641, 0.0367718391418457, 0.036738048553466796, 0.03659161758422851, 0.038351871490478515, 0.03852288055419922, 0.037136383056640625, 0.03761663818359375, 0.03703910446166992, 0.03680767822265625, 0.03681587219238281, 0.03747635269165039, 0.03695513534545898, 0.036929534912109374, 0.036847614288330076, 0.037700607299804685, 0.037631999969482424, 0.03892019271850586, 0.03838566589355469, 
0.038117374420166016, 0.036959232330322264, 0.036999168395996096, 0.03706265640258789, 0.037498878479003905, 0.037819393157958986, 0.03657625579833984, 0.03677695846557617, 0.03748863983154297, 0.036792320251464845, 0.03672063827514648, 0.03679436874389649, 0.03710464096069336, 0.03779481506347656, 0.03793817520141601, 0.03689471817016601, 0.036874240875244144, 0.03735551834106445, 0.036732929229736325, 0.03679948806762695, 0.03667967987060547, 0.03796480178833008, 0.036947967529296875, 0.03684864044189453, 0.03758899307250976, 0.0386693115234375, 0.03800883102416992, 0.03702783966064453, 0.036961280822753906, 0.03781119918823242, 0.037904384613037106, 0.03700940704345703, 0.03697971343994141, 0.03750809478759766, 0.03701964950561523, 0.03697356796264648, 0.03697868728637695, 0.037087230682373046, 0.036959232330322264, 0.03696025466918945, 0.037628929138183595, 0.03858432006835937, 0.03721318435668945, 0.03693056106567383, 0.03690393447875977, 0.03717836761474609, 0.03764633560180664, 0.036907009124755856, 0.036956161499023435, 0.03684864044189453, 0.03736678314208984, 0.03792281723022461, 0.03704729461669922, 0.03700838470458984, 0.037275646209716795, 0.036898815155029296, 0.036827136993408206, 0.036772865295410156, 0.03706982421875, 0.036822017669677735, 0.03688447952270508, 0.03685273742675781, 0.03711078262329102, 0.03855155181884766, 0.037422080993652344, 0.03686195373535156, 0.037100543975830076, 0.03805593490600586, 0.037116928100585936, 0.03690496063232422, 0.03681689453125, 0.03802521514892578, 0.036706302642822264, 0.03680051040649414, 0.03753472137451172, 0.03830988693237305, 0.037005313873291014, 0.03860684967041016, 0.038152191162109376, 0.03686502456665039, 0.03676774215698242, 0.03736678314208984, 0.03969740676879883, 0.03959807968139648, 0.03822079849243164, 0.03708620834350586, 0.0374917106628418, 0.037103614807128905, 0.036860927581787106, 0.03681484985351562, 0.03794124984741211, 0.03780710220336914, 0.03704934310913086, 0.03727360153198242, 0.036874240875244144, 0.036853759765625, 0.03686195373535156, 0.036838401794433595, 0.036790271759033204, 0.03683430480957031, 0.03676979064941406, 0.03679743957519531, 0.03676774215698242, 0.03697971343994141, 0.037005313873291014, 0.03781836700439453, 0.038555648803710936, 0.03724800109863281, 0.03678515243530273, 0.037154815673828126, 0.03788390350341797, 0.03683327865600586, 0.03681792068481445, 0.03676671981811523, 0.03752345657348633, 0.03691929626464844, 0.03681382369995117, 0.03761356735229492, 0.037154815673828126, 0.03682099151611328, 0.03683635330200195, 0.03680460739135742, 0.036853759765625, 0.03688652801513672, 0.03690086364746094, 0.037029888153076174, 0.036877311706542966, 0.03727052688598633, 0.03718348693847656, 0.03786649703979492, 0.03709132766723633, 0.03684966278076172, 0.03697663879394531, 0.036972545623779295, 0.03688959884643555, 0.036787200927734375, 0.036790271759033204, 0.03697356796264648, 0.03772927856445313, 0.03685580825805664, 0.03779174423217774, 0.03861401748657227, 0.03728076934814453, 0.0369356803894043, 0.03809791946411133, 0.038365184783935545, 0.03756032180786133, 0.03691929626464844, 0.036853759765625, 0.036942848205566405, 0.036890625, 0.036828159332275394, 0.03759308624267578, 0.03700428771972656, 0.036896766662597655, 0.036953086853027346, 0.037272575378417966, 0.03751321411132812, 0.036969470977783206, 0.036462593078613284, 0.03693670272827149, 0.03705036926269531, 0.0369541130065918, 0.03688345718383789, 0.0369244155883789, 0.03683225631713867, 0.03657318496704102, 0.03861913681030273, 
0.03724595260620117, 0.03678310394287109, 0.03688755035400391, 0.03694387054443359, 0.03690086364746094, 0.036939777374267575, 0.03685990524291992, 0.03681075286865235, 0.03687526321411133, 0.03827814483642578, 0.03668172836303711, 0.03648614501953125, 0.03743027114868164, 0.03761356735229492, 0.03688755035400391, 0.037730304718017575, 0.03708927917480469, 0.03679743957519531, 0.03824127960205078, 0.03770163345336914, 0.037422080993652344, 0.03699814224243164, 0.038825984954833984, 0.038351871490478515, 0.037850112915039064, 0.03817062377929688, 0.038269950866699216, 0.03760332870483398, 0.03688857650756836, 0.03675545501708984, 0.03729715347290039, 0.03729100799560547, 0.037370880126953124, 0.03809791946411133, 0.038010879516601564, 0.038250495910644534, 0.038024192810058595, 0.03704422378540039, 0.03693260955810547, 0.03782963180541992, 0.03916185760498047, 0.0382740478515625, 0.036893695831298826, 0.036923393249511716, 0.036910079956054685, 0.03682406234741211, 0.03680665588378906, 0.036860927581787106, 0.03683327865600586, 0.0378787841796875, 0.036964351654052735, 0.03678412628173828, 0.0381952018737793, 0.038785022735595705, 0.03702169418334961, 0.03697663879394531, 0.03694079971313476, 0.03688345718383789, 0.03684454345703125, 0.037269504547119144, 0.03702067184448242, 0.037512191772460936, 0.03689267349243164, 0.03723571014404297, 0.037048320770263675, 0.03711897659301758, 0.036853759765625, 0.03729817581176758, 0.037795841217041014, 0.03722444915771484, 0.037645313262939455, 0.04045926284790039]",tokens/s,26.79364314617586,,,2,64,1,,, -4bit-awq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,Salesforce/codegen-6B-nl,Salesforce/codegen-6B-nl,cuda,0,42,,,True,,,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run - report = scenario.run(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run - self.run_model_loading_tracking(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking - backend.load() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load - self.load_transformers_model() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model - self.load_transformers_model_with_no_weights() - File 
""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights - self.load_transformers_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained - self.pretrained_model = self.automodel_loader.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3826, in from_pretrained - config = cls._autoset_attn_implementation( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1565, in _autoset_attn_implementation - config = cls._check_and_enable_sdpa( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1731, in _check_and_enable_sdpa - raise ValueError( -ValueError: CodeGenForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` - -",codegen,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,2,64,1,,, -4bit-awq-exllama-v2-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,facebook/opt-125m,facebook/opt-125m,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - self.load_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3704, in from_pretrained - config = cls._autoset_attn_implementation( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1490, in _autoset_attn_implementation - config = cls._check_and_enable_sdpa( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1656, in _check_and_enable_sdpa - raise ValueError( -ValueError: OPTForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,2,64,1,,, -4bit-awq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,Qwen/Qwen1.5-110B,Qwen/Qwen1.5-110B,cuda,0,42,,,True,,,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run - report = scenario.run(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run - self.run_model_loading_tracking(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking - backend.load() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load - self.load_transformers_model() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model - self.load_transformers_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights - self.load_transformers_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained - self.pretrained_model = self.automodel_loader.from_pretrained( - File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4034, in from_pretrained - dispatch_model(model, **device_map_kwargs) - File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 494, in dispatch_model - model.to(device) - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 2905, in to - return super().to(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1174, in to - return self._apply(convert) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply - module._apply(fn) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply - module._apply(fn) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply - module._apply(fn) - [Previous line repeated 2 more times] - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 854, in _apply - self._buffers[key] = fn(buf) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1160, in convert - return t.to( -torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 32.00 MiB. GPU 0 has a total capacity of 22.18 GiB of which 8.50 MiB is free. Process 89110 has 22.17 GiB memory in use. Of the allocated memory 21.91 GiB is allocated by PyTorch, and 17.87 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) - -",qwen2,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,2,64,1,,, -4bit-awq-exllama-v2-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,B,B,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 304, in hf_raise_for_status - response.raise_for_status() - File ""/usr/local/lib/python3.10/dist-packages/requests/models.py"", line 1024, in raise_for_status - raise HTTPError(http_error_msg, response=self) -requests.exceptions.HTTPError: 404 Client Error: Not Found for url: 
https://huggingface.co/B/resolve/main/config.json - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1221, in hf_hub_download - return _hf_hub_download_to_cache_dir( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1325, in _hf_hub_download_to_cache_dir - _raise_on_head_call_error(head_call_error, force_download, local_files_only) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1823, in _raise_on_head_call_error - raise head_call_error - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1722, in _get_metadata_or_catch_error - metadata = get_hf_file_metadata(url=url, proxies=proxies, timeout=etag_timeout, headers=headers) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1645, in get_hf_file_metadata - r = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 372, in _request_wrapper - response = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 396, in _request_wrapper - hf_raise_for_status(response) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status - raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-669493e8-51bfdab147ee9dbc7c3e7f0b;cf706216-fc71-4510-b640-badeeeb2c377) - -Repository Not Found for url: https://huggingface.co/B/resolve/main/config.json. -Please make sure you specified the correct `repo_id` and `repo_type`. -If you are trying to access a private or gated repo, make sure you are authenticated. 
- -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 425, in cached_file - raise EnvironmentError( -OSError: B is not a local folder and is not a valid model identifier listed on 'https://huggingface.co/models' -If this is a private repository, make sure to pass a token having permission to this repo either by logging in with `huggingface-cli login` or by passing `token=` - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,2,64,1,,, -4bit-awq-exllama-v2-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,t,t,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 304, in hf_raise_for_status - response.raise_for_status() - File ""/usr/local/lib/python3.10/dist-packages/requests/models.py"", line 1024, in raise_for_status - raise HTTPError(http_error_msg, response=self) -requests.exceptions.HTTPError: 404 Client Error: Not Found for url: https://huggingface.co/t/resolve/main/config.json - -The above exception was the 
direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1221, in hf_hub_download - return _hf_hub_download_to_cache_dir( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1325, in _hf_hub_download_to_cache_dir - _raise_on_head_call_error(head_call_error, force_download, local_files_only) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1823, in _raise_on_head_call_error - raise head_call_error - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1722, in _get_metadata_or_catch_error - metadata = get_hf_file_metadata(url=url, proxies=proxies, timeout=etag_timeout, headers=headers) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1645, in get_hf_file_metadata - r = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 372, in _request_wrapper - response = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 396, in _request_wrapper - hf_raise_for_status(response) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status - raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-669490d4-561b28403b9cb6f97c1f4a99;6be8f682-7bbe-4217-ab1f-18d9dcb77923) - -Repository Not Found for url: https://huggingface.co/t/resolve/main/config.json. -Please make sure you specified the correct `repo_id` and `repo_type`. -If you are trying to access a private or gated repo, make sure you are authenticated. 
- -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 425, in cached_file - raise EnvironmentError( -OSError: t is not a local folder and is not a valid model identifier listed on 'https://huggingface.co/models' -If this is a private repository, make sure to pass a token having permission to this repo either by logging in with `huggingface-cli login` or by passing `token=` - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,2,64,1,,, -4bit-awq-exllama-v2-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,stabilityai/stablelm-base-alpha-3b,stabilityai/stablelm-base-alpha-3b,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File 
""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - self.load_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3907, in from_pretrained - hf_quantizer.postprocess_model(model) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/base.py"", line 195, in postprocess_model - return self._process_model_after_weight_loading(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/quantizer_awq.py"", line 107, in _process_model_after_weight_loading - model = post_init_awq_exllama_modules(model, self.quantization_config.exllama_config) - File ""/usr/local/lib/python3.10/dist-packages/transformers/integrations/awq.py"", line 466, in post_init_awq_exllama_modules - model = exllamav2_post_init( - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllamav2.py"", line 198, in exllamav2_post_init - submodule.post_init(scratch_space=model.scratch_spaces[device]) - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllamav2.py"", line 81, in post_init - self.q_handle = exlv2_ext.make_q_matrix( -NameError: name 'exlv2_ext' is not defined - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,2,64,1,,, -4bit-awq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,Salesforce/codegen-16B-nl,Salesforce/codegen-16B-nl,cuda,0,42,,,True,,,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run - report = scenario.run(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run - self.run_model_loading_tracking(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking - backend.load() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load - self.load_transformers_model() - File 
""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model - self.load_transformers_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights - self.load_transformers_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained - self.pretrained_model = self.automodel_loader.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3826, in from_pretrained - config = cls._autoset_attn_implementation( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1565, in _autoset_attn_implementation - config = cls._check_and_enable_sdpa( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1731, in _check_and_enable_sdpa - raise ValueError( -ValueError: CodeGenForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` - -",codegen,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,2,64,1,,, -4bit-awq-exllama-v2-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,facebook/opt-6.7b,facebook/opt-6.7b,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - self.load_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 
172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3704, in from_pretrained - config = cls._autoset_attn_implementation( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1490, in _autoset_attn_implementation - config = cls._check_and_enable_sdpa( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1656, in _check_and_enable_sdpa - raise ValueError( -ValueError: OPTForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,2,64,1,,, -4bit-awq-exllama-v1-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,.,.,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File 
""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 373, in cached_file - raise EnvironmentError( -OSError: . does not appear to have a file named config.json. Checkout 'https://huggingface.co/./tree/None' for available files. - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,1,64,1,,, -4bit-awq-exllama-v1-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,l,l,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 304, in hf_raise_for_status - response.raise_for_status() - File ""/usr/local/lib/python3.10/dist-packages/requests/models.py"", line 1024, in raise_for_status - raise HTTPError(http_error_msg, response=self) -requests.exceptions.HTTPError: 404 Client Error: Not Found for url: https://huggingface.co/l/resolve/main/config.json - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1221, in hf_hub_download - return _hf_hub_download_to_cache_dir( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1325, in _hf_hub_download_to_cache_dir - _raise_on_head_call_error(head_call_error, force_download, local_files_only) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1823, in _raise_on_head_call_error - raise head_call_error - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1722, in _get_metadata_or_catch_error - metadata = get_hf_file_metadata(url=url, proxies=proxies, timeout=etag_timeout, headers=headers) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return 
fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1645, in get_hf_file_metadata - r = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 372, in _request_wrapper - response = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 396, in _request_wrapper - hf_raise_for_status(response) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status - raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-669491a9-55c60cf979dcb5c619ce3f4d;4df3d572-31f2-4b1f-9406-8d3163b1d5a4) - -Repository Not Found for url: https://huggingface.co/l/resolve/main/config.json. -Please make sure you specified the correct `repo_id` and `repo_type`. -If you are trying to access a private or gated repo, make sure you are authenticated. - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 425, in cached_file - raise EnvironmentError( -OSError: l is not a local folder and is not a valid model identifier listed on 'https://huggingface.co/models' -If this is a private repository, make sure to pass a token having permission to this repo either by logging in with `huggingface-cli login` or by passing `token=` - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,1,64,1,,, -4bit-awq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,Qwen/Qwen1.5-32B,Qwen/Qwen1.5-32B,cuda,0,42,,,True,,,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 
7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,qwen2,MB,17836.412928,24050.663424,0.0,23420.993536,21732.465152,s,1,17.0617578125,17.0617578125,0.0,17.0617578125,17.0617578125,17.0617578125,17.0617578125,[17.0617578125],,kWh,0.00012062266173473089,6.609497063666135e-05,0.0002665399354540021,0.0004532575678253944,,MB,4444.143616,24138.743808,0.0,23492.296704,20639.95904,s,10,56.68390039062499,5.6683900390624995,0.000140469070451222,5.6683974609375,5.66857421875,5.6685764160156245,5.6685781738281245,"[5.66857373046875, 5.6682353515625, 5.66817236328125, 5.6685283203125, 5.66825146484375, 5.66829443359375, 5.66843310546875, 5.66847119140625, 5.66836181640625, 5.66857861328125]",tokens/s,45.16273549205871,kWh,6.69296238645845e-05,3.668171793180542e-05,0.00041170427380758933,0.0005153156156039793,tokens/kWh,496782.9272938943,MB,4448.407552,24145.035264,0.0,23498.58816,20639.9616,s,10,39.42234497070312,3.942234497070312,0.03513883521435536,3.94442138671875,3.9964390869140627,4.002376647949219,4.007126696777344,"[3.901472412109375, 3.899844970703125, 3.9459912109375, 3.9428515625, 3.95123974609375, 3.99511962890625, 3.90573876953125, 3.925458740234375, 4.008314208984375, 3.946313720703125]",tokens/s,15.980784513660645,kWh,4.662566085249967e-05,2.5554963770934485e-05,0.00020103352193781007,0.0002732141465612442,tokens/kWh,230588.35273699052,,s,630,39.41977597427371,0.0625710729750376,0.001368491897895592,0.06251673698425293,0.06424883422851563,0.06454210433959962,0.06547970291137695,"[0.0627619857788086, 0.06296678543090821, 0.061557792663574216, 0.06168675231933594, 0.06161100769042969, 0.06109593582153321, 0.06202675247192383, 0.06112255859375, 0.06098636627197265, 0.06076416015625, 0.060865535736083984, 0.06273126220703125, 0.06324428939819336, 0.06163558578491211, 0.061797374725341796, 0.06134374237060547, 0.06173388671875, 0.061178878784179686, 0.06116659164428711, 0.06143078231811523, 0.061077503204345705, 0.06164275360107422, 0.0634337272644043, 0.06199398422241211, 0.06284492874145507, 0.06238310241699219, 0.061879295349121094, 0.06201446533203125, 0.06346547317504883, 0.0640030746459961, 0.061489151000976565, 0.06198169708251953, 0.06256435012817382, 0.061948928833007816, 0.06211686325073242, 0.06133964920043945, 0.06098944091796875, 0.06108262252807617, 0.06116556930541992, 0.06268108749389649, 0.061794303894042966, 0.06152908706665039, 0.06238105773925781, 0.061243392944335937, 0.06237388610839844, 0.06301593780517578, 0.06185881423950195, 0.0619233283996582, 0.061143039703369144, 0.06224895858764649, 0.061072383880615234, 0.06147174453735352, 0.061437950134277344, 0.06143078231811523, 0.0625080337524414, 0.06328729629516601, 0.06122086334228516, 0.06250291061401367, 0.06117171096801758, 0.06281932830810547, 0.06333337783813477, 0.062105598449707033, 0.06155059051513672, 0.06260428619384766, 0.06104576110839844, 0.06411571502685547, 0.06438912200927735, 0.06130278396606445, 0.06112972640991211, 0.06127718353271484, 0.062134273529052736, 0.06338969421386718, 0.06423859405517578, 0.06343270492553711, 0.061006847381591796, 0.06091263961791992, 0.06352588653564453, 0.06398566436767578, 0.06389862442016601, 0.06424781036376953, 0.061265918731689455, 0.060862464904785155, 0.06223052978515625, 0.06378291320800782, 0.060440574645996094, 0.06085836791992188, 0.06369894409179687, 0.0637122573852539, 0.06120550537109375, 0.061058048248291016, 0.061034496307373044, 
0.06088499069213867, 0.06112255859375, 0.06099148941040039, 0.06095974349975586, 0.060832767486572265, 0.06081126403808594, 0.06363647842407226, 0.06137343978881836, 0.06071807861328125, 0.06086348724365234, 0.061183998107910156, 0.06096486282348633, 0.06254694366455078, 0.06142566299438477, 0.06114508819580078, 0.06112460708618164, 0.06128844833374023, 0.06227251052856445, 0.06129459381103516, 0.06092902374267578, 0.06352384185791016, 0.06107852935791016, 0.06399795150756836, 0.06316236877441406, 0.06320947265625, 0.06031564712524414, 0.060523521423339846, 0.06093721771240234, 0.061846527099609375, 0.06257561492919922, 0.061192192077636716, 0.061061119079589846, 0.060922878265380856, 0.06105497741699219, 0.0610334701538086, 0.06261043167114258, 0.061682689666748045, 0.0617891845703125, 0.0627619857788086, 0.06148198318481445, 0.061088768005371094, 0.061026302337646485, 0.06364569473266601, 0.06263296127319336, 0.06278041458129882, 0.0634972152709961, 0.0637470703125, 0.06051839828491211, 0.06080409622192383, 0.06140108871459961, 0.06121779251098633, 0.06291968154907226, 0.06350336074829102, 0.0636231689453125, 0.06191308975219727, 0.06334873580932618, 0.062053375244140625, 0.06409932708740235, 0.06560665893554687, 0.06367641448974609, 0.06394367980957032, 0.06407373046875, 0.06300979232788086, 0.061158401489257816, 0.06108467102050781, 0.06106623840332031, 0.0612853775024414, 0.061158401489257816, 0.06072217559814453, 0.06076927947998047, 0.06150656127929687, 0.06519705963134766, 0.06182092666625977, 0.06379315185546874, 0.06387814331054688, 0.06385254287719727, 0.06390169525146484, 0.06461746978759765, 0.06444953918457032, 0.06393241500854492, 0.06483353424072266, 0.06111539077758789, 0.06329651260375976, 0.06453759765625, 0.06372249603271485, 0.06405017852783203, 0.06454579162597657, 0.062091262817382815, 0.061123584747314455, 0.06106009674072266, 0.06113894271850586, 0.06123110580444336, 0.061061119079589846, 0.06346239852905274, 0.06378188705444336, 0.061489151000976565, 0.06357708740234375, 0.06097510528564453, 0.06509056091308593, 0.06166016006469727, 0.06097305679321289, 0.0627066879272461, 0.0637655029296875, 0.06360166549682617, 0.06398361587524413, 0.06416690826416016, 0.0617625617980957, 0.06217830276489258, 0.062153728485107425, 0.06353305435180664, 0.06312243270874024, 0.06422118377685547, 0.06424473571777344, 0.06409523010253906, 0.06431027221679687, 0.06427852630615234, 0.06114406585693359, 0.06127004623413086, 0.06141129684448242, 0.06423654174804687, 0.06375731277465821, 0.061328384399414064, 0.06193971252441406, 0.061656063079833984, 0.06388121414184571, 0.06399283218383789, 0.06102937698364258, 0.061416446685791014, 0.06451097869873047, 0.06500556945800781, 0.06132940673828125, 0.06415974426269531, 0.06125568008422851, 0.062091262817382815, 0.06261964797973633, 0.06340403366088868, 0.06336102294921875, 0.06304051208496093, 0.06101196670532227, 0.061295616149902345, 0.06403379058837891, 0.061178878784179686, 0.061118465423583984, 0.06109695816040039, 0.06207692718505859, 0.06469324493408203, 0.0642713623046875, 0.0634511375427246, 0.060951553344726565, 0.06022348785400391, 0.06081024169921875, 0.06068841552734375, 0.060846046447753904, 0.06098636627197265, 0.060723201751708984, 0.06113382339477539, 0.06413005065917969, 0.06425804901123047, 0.06389247894287109, 0.06092390441894531, 0.0611143684387207, 0.06542848205566407, 0.06420787048339843, 0.06437171173095703, 0.061695999145507815, 0.06116864013671875, 0.06129459381103516, 0.06595993804931641, 0.06400511932373047, 
0.06439218902587891, 0.06444441223144531, 0.061379585266113285, 0.06131916809082031, 0.06117990493774414, 0.06125158309936524, 0.06124236679077148, 0.060980224609375, 0.06098944091796875, 0.06086656188964844, 0.06244454574584961, 0.06379622268676757, 0.06371839904785156, 0.06368255996704102, 0.06396416091918945, 0.06306406402587891, 0.061001728057861325, 0.06350643157958985, 0.06415974426269531, 0.06338662338256836, 0.061178878784179686, 0.06125363159179688, 0.060818431854248046, 0.06077951812744141, 0.06312345504760743, 0.061224960327148435, 0.06298214340209961, 0.06354534530639648, 0.06355456161499023, 0.06115430450439453, 0.06306816101074218, 0.06139289474487305, 0.06004121780395508, 0.06208204650878906, 0.06453759765625, 0.06350131225585938, 0.06355763244628906, 0.06390886306762696, 0.06361600112915039, 0.06378291320800782, 0.06359347152709961, 0.06389657592773437, 0.06375116729736328, 0.0639662094116211, 0.06392115020751953, 0.06139801788330078, 0.061195262908935545, 0.06132326507568359, 0.06145024108886719, 0.061036544799804686, 0.06371942520141602, 0.06404402923583985, 0.06433792114257812, 0.06419660949707032, 0.061156352996826174, 0.06365491104125977, 0.06406861114501954, 0.06433177947998046, 0.06459494018554687, 0.06467993927001953, 0.061879295349121094, 0.06288383865356445, 0.06209535980224609, 0.06348185729980468, 0.06417305755615234, 0.06362623977661133, 0.06416998291015626, 0.06573875427246094, 0.06311526489257813, 0.06379417419433593, 0.06463590240478516, 0.0639283218383789, 0.06347878265380859, 0.06376857757568359, 0.06350643157958985, 0.06300569534301757, 0.06234624099731445, 0.0632360954284668, 0.0636948471069336, 0.06282547378540039, 0.06456729888916016, 0.06401945495605468, 0.06207692718505859, 0.061502464294433595, 0.0626165771484375, 0.06399692916870117, 0.06404096221923829, 0.06379216003417969, 0.0642119369506836, 0.06151270294189453, 0.06226124954223633, 0.062483455657958986, 0.06255513763427735, 0.06416588592529297, 0.061483009338378906, 0.06470655822753907, 0.06616575622558593, 0.06340915298461915, 0.061290496826171874, 0.06154342269897461, 0.0643276824951172, 0.06315929412841798, 0.06442598724365234, 0.06413721466064454, 0.0644485092163086, 0.064289794921875, 0.0638023681640625, 0.06326784133911133, 0.06324326324462891, 0.06344089508056641, 0.0637132797241211, 0.06319206237792968, 0.061213695526123046, 0.06189056015014648, 0.062036991119384766, 0.06390067291259766, 0.06380339050292969, 0.06344192123413087, 0.06219161605834961, 0.061052928924560546, 0.06095667266845703, 0.061338623046875, 0.06153113555908203, 0.06097612762451172, 0.06071603012084961, 0.061241344451904295, 0.06112051010131836, 0.06156083297729492, 0.06078566360473633, 0.06077542495727539, 0.06253772735595703, 0.060933120727539064, 0.06145024108886719, 0.06150860977172851, 0.06099456024169922, 0.0607723503112793, 0.0639283218383789, 0.06174310302734375, 0.061774848937988285, 0.06144204711914063, 0.06369075012207032, 0.061175807952880856, 0.06178815841674805, 0.06138060760498047, 0.06074982452392578, 0.060388351440429686, 0.06236774444580078, 0.06131097412109375, 0.06252544021606446, 0.06226534271240235, 0.06182809448242187, 0.06129151916503906, 0.062080001831054686, 0.06074982452392578, 0.06398976135253906, 0.0639743995666504, 0.06372659301757813, 0.06253772735595703, 0.0636948471069336, 0.06400614166259766, 0.06197452926635742, 0.06109183883666992, 0.06313062286376953, 0.06117478561401367, 0.06100377655029297, 0.06148198318481445, 0.06164787292480469, 0.06123724746704102, 0.061257728576660155, 
0.06131097412109375, 0.06356787109375, 0.06437171173095703, 0.06166835021972656, 0.061075454711914064, 0.06157926559448242, 0.06345830535888672, 0.06543462371826173, 0.06397030258178711, 0.06256230545043945, 0.061192192077636716, 0.06443724822998047, 0.06240256118774414, 0.06137651062011719, 0.061093887329101565, 0.06071603012084961, 0.06109183883666992, 0.06100070571899414, 0.060777473449707034, 0.060278785705566405, 0.061346817016601565, 0.06162227249145508, 0.06204723358154297, 0.061385726928710936, 0.06372147369384766, 0.06372249603271485, 0.0637583351135254, 0.06332928085327148, 0.06094745635986328, 0.060761089324951174, 0.060510208129882816, 0.06366924667358398, 0.061123584747314455, 0.06101913452148437, 0.06092697525024414, 0.061020160675048826, 0.06096281433105469, 0.06501888275146485, 0.06469120025634766, 0.06394982528686523, 0.06342451095581055, 0.06374604797363281, 0.06340198516845703, 0.06363955307006836, 0.06363750457763671, 0.06359552001953125, 0.06140723037719727, 0.0609617919921875, 0.06081740951538086, 0.060851200103759766, 0.060902400970458986, 0.06100582504272461, 0.06099353790283203, 0.060972030639648435, 0.06102732849121094, 0.06520320129394531, 0.06201139068603516, 0.06152396774291992, 0.06154342269897461, 0.06316748809814453, 0.06467481231689454, 0.06475878143310547, 0.06390374374389649, 0.06356275177001953, 0.06359449768066407, 0.0637132797241211, 0.06295347213745117, 0.06068326568603516, 0.06075392150878906, 0.06325964736938476, 0.06069247817993164, 0.06272819137573242, 0.0639365119934082, 0.06362931060791016, 0.06423859405517578, 0.06474240112304687, 0.06431846618652344, 0.06424371337890625, 0.06394982528686523, 0.06382284927368163, 0.06476595306396485, 0.06432665252685547, 0.06402047729492187, 0.06394572830200196, 0.06240563201904297, 0.06305279922485352, 0.06388940811157226, 0.06372351837158204, 0.06382899093627929, 0.0639477767944336, 0.06400511932373047, 0.06581145477294922, 0.06401433563232421, 0.061192192077636716, 0.062361598968505856, 0.0637757453918457, 0.06472089385986328, 0.06436249542236328, 0.06373068618774413, 0.06395289611816406, 0.06441165161132813, 0.06431948852539063, 0.06288691329956055, 0.0639283218383789, 0.06317977523803711, 0.06686617279052734, 0.06364364624023437, 0.062404609680175784, 0.0641075210571289, 0.06297395324707031, 0.06276710510253906, 0.06480178833007813, 0.06477823638916015, 0.06369894409179687, 0.06377267074584961, 0.06492671966552735, 0.06417100524902344, 0.06339788818359375, 0.06396006393432617, 0.06380748748779297, 0.06392217636108398, 0.06287667083740234, 0.06382284927368163, 0.06316339111328124, 0.06360166549682617, 0.0610252799987793, 0.0638218231201172, 0.06221619033813477, 0.06301696014404297, 0.06415360260009766, 0.06123622512817383, 0.06395904159545898, 0.06116659164428711, 0.061072383880615234, 0.0639109115600586, 0.06441471862792969, 0.06085017776489258, 0.06409523010253906, 0.062225406646728515, 0.06155059051513672, 0.061026302337646485, 0.06137855911254883, 0.06156595230102539, 0.060851200103759766, 0.06086348724365234, 0.06116454315185547, 0.06087577438354492, 0.0612044792175293, 0.06278860855102539, 0.0638914566040039, 0.06121574401855469, 0.06110105514526367, 0.06110310363769531, 0.061862911224365234, 0.06147481536865235, 0.06354329681396484, 0.06311833572387696, 0.06344192123413087, 0.06358118438720703, 0.06368563079833985, 0.06549811553955077, 0.06325657653808593, 0.06379110336303712, 0.06374911880493164, 0.06104985427856445, 0.061265918731689455, 0.06076620864868164, 0.06391705703735352, 
0.06343884658813477, 0.06391705703735352, 0.061521919250488284, 0.06116966247558594, 0.061028350830078126, 0.06096588897705078, 0.06430105590820312, 0.06427545928955078, 0.06398054504394532, 0.06344089508056641, 0.06359654235839844, 0.06368972778320313, 0.06390784072875977, 0.0641976318359375, 0.06362828826904297, 0.06219161605834961, 0.0633620491027832, 0.06427852630615234, 0.06178815841674805, 0.06166527938842774, 0.06126387023925781, 0.061810688018798826, 0.06093619155883789, 0.061142017364501956, 0.06290636825561523, 0.06365593719482422, 0.06242611312866211, 0.06436659240722656, 0.06405324554443359, 0.06450176239013672, 0.0638279685974121, 0.06393241500854492, 0.06407577514648438]",tokens/s,15.981825985291074,,,1,64,1,,, -4bit-awq-exllama-v1-eager,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,openai-community/gpt2,openai-community/gpt2,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.0,,0.30.1,,,,1.19.2,,,,0.11.1,,,,,,,,,,,,,,,,,,,,,,,,,,,MB,1231.843328,1005.060096,0.0,358.612992,318.913024,s,21,0.18355030345916748,0.008740490640912738,0.0003350787955471485,0.00862604808807373,0.008949695587158203,0.00923475170135498,0.009869042778015138,"[0.010027615547180176, 0.008949695587158203, 0.008625503540039062, 0.00923475170135498, 0.008610015869140624, 0.00866915225982666, 0.00862604808807373, 0.008871392250061035, 0.008561504364013673, 0.008556192398071288, 0.008633919715881348, 0.00886348819732666, 0.008537247657775878, 0.008714079856872559, 0.008701151847839355, 0.008674336433410644, 0.008554752349853516, 0.008511584281921386, 0.00850607967376709, 0.008580191612243653, 0.008541600227355957]",tokens/s,29288.973642018205,kWh,1.0310658924635563e-07,5.648848538782815e-08,2.2294035675046953e-07,3.825354313846533e-07,tokens/kWh,669219055.2738177,MB,1232.13824,1005.060096,0.0,358.612992,328.809472,s,21,10.26999664306641,0.48904745919363846,0.00403887718511448,0.487286376953125,0.4949382019042969,0.4976250915527344,0.4998394714355469,"[0.50039306640625, 0.4949382019042969, 0.491740478515625, 0.4892876586914062, 0.4909471740722656, 0.4976250915527344, 0.487286376953125, 0.48865457153320313, 0.48538818359375, 0.48481610107421874, 0.48979806518554686, 0.48635006713867185, 0.48722393798828123, 0.485962646484375, 0.4894192504882813, 0.48873077392578124, 0.4857748413085938, 0.4860442810058594, 0.48649954223632813, 0.4870877685546875, 0.486028564453125]",tokens/s,128.821853208024,kWh,5.764060866563101e-06,3.1584372687529265e-06,9.864422210484918e-06,1.878692034580095e-05,tokens/kWh,3353396.8761453275,,s,1322,10.43460300683975,0.007893043121663959,0.0011110576671242386,0.007707647800445557,0.007921459436416627,0.008081356382369995,0.016560291538238524,"[0.009092096328735352, 0.009058367729187012, 0.008918975830078125, 0.008963071823120117, 0.008934399604797364, 0.008929280281066895, 0.00934502410888672, 0.008104960441589355, 0.008223744392395019, 0.007952415943145752, 0.007911392211914062, 0.007724063873291016, 0.007740384101867676, 0.007718912124633789, 0.007732223987579345, 0.007771135807037354, 0.007724031925201416, 
0.007733248233795166, 0.007701568126678467, 0.00769324779510498, 0.007707647800445557, 0.007727104187011719, 0.007730175971984863, 0.007672832012176513, 0.007747583866119385, 0.007762944221496582, 0.007890944004058837, 0.007702527999877929, 0.007723008155822754, 0.007799808025360107, 0.007707647800445557, 0.007696383953094482, 0.007733248233795166, 0.0076902399063110355, 0.007792640209197998, 0.007727104187011719, 0.007721983909606934, 0.007738368034362793, 0.007664639949798584, 0.0076902399063110355, 0.007693312168121338, 0.0076902399063110355, 0.0076912641525268555, 0.007764992237091065, 0.007812096118927002, 0.0076984319686889645, 0.007737343788146973, 0.007910399913787843, 0.007769087791442871, 0.007794688224792481, 0.007959551811218261, 0.007833600044250488, 0.007940095901489258, 0.007715839862823487, 0.007907328128814697, 0.007768064022064209, 0.0076984319686889645, 0.007676928043365478, 0.007727104187011719, 0.007711743831634522, 0.00790937614440918, 0.007811071872711181, 0.01726470375061035, 0.007868351936340333, 0.007919616222381591, 0.008102911949157715, 0.008324095726013184, 0.008201215744018555, 0.008301568031311036, 0.008308735847473145, 0.008286208152770995, 0.00830463981628418, 0.008102911949157715, 0.00808140754699707, 0.007987199783325195, 0.007973919868469239, 0.007829472064971924, 0.00774348783493042, 0.007905280113220215, 0.007950335979461669, 0.008045568466186523, 0.007724031925201416, 0.00809779167175293, 0.007895040035247802, 0.007924736022949219, 0.007701504230499268, 0.007714816093444824, 0.007802879810333252, 0.007689216136932373, 0.007731200218200684, 0.0076902399063110355, 0.007704576015472412, 0.0077209601402282715, 0.0077036161422729494, 0.00770963191986084, 0.00785100793838501, 0.007706624031066894, 0.00785920000076294, 0.007693312168121338, 0.00774348783493042, 0.007714816093444824, 0.007696383953094482, 0.007706624031066894, 0.007671807765960693, 0.008090656280517578, 0.00802505588531494, 0.007776256084442139, 0.007709695816040039, 0.007713791847229004, 0.007703551769256592, 0.00783462381362915, 0.007703551769256592, 0.0076984319686889645, 0.007723008155822754, 0.007885824203491211, 0.007710720062255859, 0.007711743831634522, 0.007705599784851074, 0.007715839862823487, 0.007755775928497314, 0.007683072090148926, 0.007688191890716553, 0.007709695816040039, 0.007748608112335205, 0.007685120105743408, 0.01663488006591797, 0.007726079940795898, 0.007717887878417969, 0.0077281279563903805, 0.007688191890716553, 0.007729152202606201, 0.0076943359375, 0.007702527999877929, 0.007713888168334961, 0.007737247943878174, 0.0077209601402282715, 0.007712768077850342, 0.0077608962059021, 0.007717887878417969, 0.007710720062255859, 0.008239104270935058, 0.008116224288940429, 0.008475647926330567, 0.008458239555358887, 0.008246272087097169, 0.007942143917083741, 0.00795961618423462, 0.007945151805877685, 0.007906303882598878, 0.007910399913787843, 0.007938047885894776, 0.007723008155822754, 0.007715839862823487, 0.0076902399063110355, 0.0076902399063110355, 0.0076902399063110355, 0.007681024074554443, 0.007697408199310303, 0.007683104038238526, 0.0076943359375, 0.0077506241798400875, 0.00775270414352417, 0.007787519931793213, 0.007673855781555176, 0.007689216136932373, 0.007712768077850342, 0.007634943962097168, 0.007689216136932373, 0.0076902399063110355, 0.0076912641525268555, 0.007678976058959961, 0.0076902399063110355, 0.007709695816040039, 0.007890944004058837, 0.007795711994171142, 0.007898111820220948, 0.007674880027770996, 0.007759871959686279, 0.007707647800445557, 
0.007746560096740723, 0.007889920234680176, 0.007853055953979492, 0.007882751941680909, 0.007867392063140868, 0.0077547521591186525, 0.007782400131225586, 0.007676928043365478, 0.007889920234680176, 0.01763020706176758, 0.008062975883483887, 0.007933951854705811, 0.00788479995727539, 0.007878655910491944, 0.007974912166595459, 0.007945216178894043, 0.007906303882598878, 0.007907328128814697, 0.007918591976165772, 0.007847936153411865, 0.007732223987579345, 0.00775270414352417, 0.007715839862823487, 0.007693312168121338, 0.007790592193603516, 0.007701504230499268, 0.00767084789276123, 0.007698368072509765, 0.007670783996582031, 0.007679999828338623, 0.007670783996582031, 0.007672832012176513, 0.0076871681213378906, 0.007737343788146973, 0.007924736022949219, 0.007703551769256592, 0.007711743831634522, 0.007705599784851074, 0.007706624031066894, 0.007683072090148926, 0.007726079940795898, 0.007684095859527588, 0.007717887878417969, 0.007829504013061523, 0.007885824203491211, 0.007727104187011719, 0.007808000087738037, 0.007792640209197998, 0.007704576015472412, 0.007664639949798584, 0.007731200218200684, 0.007749631881713868, 0.007715839862823487, 0.007766016006469726, 0.007790592193603516, 0.007709695816040039, 0.007795711994171142, 0.007740416049957275, 0.007689216136932373, 0.007677951812744141, 0.007688191890716553, 0.007706624031066894, 0.007693312168121338, 0.007697440147399902, 0.007667679786682129, 0.0076912641525268555, 0.007759871959686279, 0.007836671829223632, 0.007707647800445557, 0.007697408199310303, 0.007692287921905518, 0.007705599784851074, 0.01658367919921875, 0.007725056171417236, 0.007701568126678467, 0.007698368072509765, 0.007697408199310303, 0.007677984237670898, 0.007667744159698486, 0.00769324779510498, 0.007798816204071045, 0.007762944221496582, 0.00780079984664917, 0.008656895637512207, 0.00830361557006836, 0.008055839538574219, 0.008196063995361328, 0.007998464107513427, 0.00795136022567749, 0.008061951637268066, 0.007879712104797364, 0.00796668815612793, 0.007711808204650879, 0.0077086081504821775, 0.007677951812744141, 0.007714816093444824, 0.007709695816040039, 0.007789567947387695, 0.007707647800445557, 0.0076912641525268555, 0.007716864109039307, 0.007702527999877929, 0.007709695816040039, 0.007679999828338623, 0.007709695816040039, 0.007703551769256592, 0.007715839862823487, 0.007775231838226319, 0.007758848190307617, 0.007897088050842285, 0.007768064022064209, 0.007769087791442871, 0.007915520191192627, 0.007915520191192627, 0.007895040035247802, 0.0076943359375, 0.007679999828338623, 0.007676928043365478, 0.00769536018371582, 0.007711743831634522, 0.007781375885009765, 0.007708735942840576, 0.00771065616607666, 0.0076984319686889645, 0.007763967990875244, 0.007647232055664062, 0.007775231838226319, 0.007683072090148926, 0.007717887878417969, 0.007763967990875244, 0.0076943359375, 0.007715839862823487, 0.007766016006469726, 0.007777279853820801, 0.007678976058959961, 0.016675840377807616, 0.007716864109039307, 0.0076984319686889645, 0.007701504230499268, 0.00780185604095459, 0.007713791847229004, 0.007725056171417236, 0.007714816093444824, 0.007688191890716553, 0.008960000038146973, 0.008425472259521484, 0.00793497610092163, 0.007820288181304931, 0.007699456214904785, 0.007699456214904785, 0.007682047843933106, 0.007803904056549072, 0.007661568164825439, 0.007709695816040039, 0.007979008197784423, 0.007948287963867188, 0.008050687789916992, 0.007895040035247802, 0.007890944004058837, 0.007904255867004394, 0.00800051212310791, 0.007781375885009765, 
0.007700479984283447, 0.0076902399063110355, 0.0076902399063110355, 0.007723008155822754, 0.007730175971984863, 0.007696383953094482, 0.007681024074554443, 0.008360960006713868, 0.009117695808410644, 0.009527296066284179, 0.00808243179321289, 0.00799955177307129, 0.00785094404220581, 0.007670783996582031, 0.007714816093444824, 0.007681024074554443, 0.007689216136932373, 0.007984127998352051, 0.007931903839111328, 0.007922688007354736, 0.00800870418548584, 0.007964672088623047, 0.008010751724243164, 0.007980031967163086, 0.00773529577255249, 0.007676928043365478, 0.0076871681213378906, 0.00769536018371582, 0.007947264194488525, 0.008145919799804687, 0.007927807807922363, 0.007897088050842285, 0.007776256084442139, 0.007714816093444824, 0.007714816093444824, 0.007712768077850342, 0.016688127517700196, 0.007721983909606934, 0.007673855781555176, 0.00773529577255249, 0.008170495986938477, 0.008371199607849121, 0.0077209601402282715, 0.0077281599044799806, 0.007708640098571778, 0.007672832012176513, 0.0076943359375, 0.0076943359375, 0.007738368034362793, 0.00773529577255249, 0.007715839862823487, 0.007730175971984863, 0.0076871681213378906, 0.007707647800445557, 0.007707647800445557, 0.007686143875122071, 0.007689216136932373, 0.0076984319686889645, 0.007703551769256592, 0.007768064022064209, 0.00774348783493042, 0.007772160053253174, 0.0076871681213378906, 0.007663616180419922, 0.007666687965393066, 0.007672832012176513, 0.0076656961441040035, 0.007675871849060058, 0.0076871681213378906, 0.00764518404006958, 0.007671807765960693, 0.007745535850524903, 0.007677951812744141, 0.0076902399063110355, 0.007734272003173828, 0.007685120105743408, 0.0076943359375, 0.007675903797149658, 0.007677951812744141, 0.007675903797149658, 0.007730175971984863, 0.007740416049957275, 0.007658495903015137, 0.007704576015472412, 0.007727104187011719, 0.007727104187011719, 0.007701504230499268, 0.007684095859527588, 0.0077578239440917966, 0.007705599784851074, 0.007681024074554443, 0.007710720062255859, 0.007685184001922607, 0.007728064060211181, 0.007676959991455078, 0.007675871849060058, 0.007856128215789794, 0.00790118408203125, 0.0079267840385437, 0.017124351501464845, 0.007890944004058837, 0.007896063804626464, 0.007844863891601562, 0.007715839862823487, 0.007679999828338623, 0.007675903797149658, 0.0076912641525268555, 0.007697408199310303, 0.007703551769256592, 0.007701504230499268, 0.007693312168121338, 0.00774348783493042, 0.0076984319686889645, 0.007705599784851074, 0.007809023857116699, 0.007742464065551758, 0.007864319801330566, 0.007906303882598878, 0.007886847972869874, 0.007703551769256592, 0.007671807765960693, 0.007692287921905518, 0.007684095859527588, 0.007683072090148926, 0.007726079940795898, 0.007716864109039307, 0.007705599784851074, 0.007701504230499268, 0.007738368034362793, 0.007684095859527588, 0.007758848190307617, 0.007689216136932373, 0.007742464065551758, 0.007646207809448242, 0.007659520149230957, 0.007811071872711181, 0.007968768119812012, 0.007933951854705811, 0.007763967990875244, 0.007693312168121338, 0.0076943359375, 0.007675903797149658, 0.007677951812744141, 0.007677951812744141, 0.007675936222076416, 0.007691232204437256, 0.0076687359809875484, 0.007788544178009033, 0.007912447929382324, 0.0077209601402282715, 0.007674880027770996, 0.007666687965393066, 0.007675903797149658, 0.007684095859527588, 0.007665664196014404, 0.007678976058959961, 0.008128512382507324, 0.008069120407104492, 0.007811071872711181, 0.007938047885894776, 0.007679999828338623, 0.007688223838806152, 
0.016586719512939455, 0.007676928043365478, 0.007712768077850342, 0.007682047843933106, 0.007674880027770996, 0.007696383953094482, 0.007659520149230957, 0.007680031776428222, 0.007677919864654541, 0.0076912641525268555, 0.0076912641525268555, 0.007704576015472412, 0.007708672046661377, 0.007681024074554443, 0.007847936153411865, 0.007811071872711181, 0.00773632001876831, 0.007725056171417236, 0.0076574721336364745, 0.0076769919395446775, 0.007656383991241455, 0.007673855781555176, 0.007708672046661377, 0.00769536018371582, 0.007670783996582031, 0.007692287921905518, 0.007771135807037354, 0.007670783996582031, 0.007665664196014404, 0.007670783996582031, 0.007733248233795166, 0.007681024074554443, 0.007705599784851074, 0.007664639949798584, 0.0076605439186096195, 0.007678976058959961, 0.007718912124633789, 0.007679999828338623, 0.007672832012176513, 0.007723008155822754, 0.007666687965393066, 0.007671807765960693, 0.007719999790191651, 0.007715775966644287, 0.007756832122802734, 0.007769055843353271, 0.007670783996582031, 0.007681024074554443, 0.0076871681213378906, 0.007742464065551758, 0.007697408199310303, 0.007704576015472412, 0.007715839862823487, 0.0077199358940124516, 0.007715839862823487, 0.007699456214904785, 0.007782400131225586, 0.007672832012176513, 0.007705599784851074, 0.007671807765960693, 0.007681024074554443, 0.007676928043365478, 0.007688191890716553, 0.016537599563598633, 0.007702527999877929, 0.0076912641525268555, 0.00780185604095459, 0.007649280071258545, 0.0077199358940124516, 0.0076605439186096195, 0.007711743831634522, 0.0076360001564025875, 0.0076789441108703616, 0.007679999828338623, 0.007708672046661377, 0.007693376064300537, 0.0076594557762145994, 0.007683072090148926, 0.007707647800445557, 0.007715839862823487, 0.007693312168121338, 0.007656447887420655, 0.007662591934204102, 0.007688191890716553, 0.007667712211608887, 0.007652383804321289, 0.007774176120758057, 0.007679999828338623, 0.0076277761459350585, 0.0077199358940124516, 0.007705599784851074, 0.007661568164825439, 0.007655424118041992, 0.00765337610244751, 0.007646207809448242, 0.007651328086853027, 0.00765337610244751, 0.007684095859527588, 0.007672832012176513, 0.00765235185623169, 0.007797760009765625, 0.0076574721336364745, 0.007676928043365478, 0.007676928043365478, 0.0076605439186096195, 0.007675936222076416, 0.007666656017303467, 0.007671807765960693, 0.007673855781555176, 0.007711743831634522, 0.007681024074554443, 0.0076605439186096195, 0.007897088050842285, 0.007715839862823487, 0.007639039993286132, 0.007663616180419922, 0.007663616180419922, 0.007703551769256592, 0.007701504230499268, 0.007685120105743408, 0.007809023857116699, 0.007738368034362793, 0.007857151985168457, 0.007700479984283447, 0.007667744159698486, 0.007658463954925537, 0.01657142448425293, 0.007717855930328369, 0.007683072090148926, 0.007674880027770996, 0.007670783996582031, 0.007671807765960693, 0.0076984319686889645, 0.007650303840637207, 0.007666687965393066, 0.007692287921905518, 0.007678976058959961, 0.00769536018371582, 0.007678976058959961, 0.007858176231384278, 0.0076912641525268555, 0.007671807765960693, 0.007683072090148926, 0.007661568164825439, 0.007699456214904785, 0.00769536018371582, 0.007699456214904785, 0.007732223987579345, 0.0076902399063110355, 0.007854080200195313, 0.007869440078735352, 0.007822336196899414, 0.007726079940795898, 0.008191007614135742, 0.008648672103881837, 0.008141823768615723, 0.007966720104217529, 0.007977983951568603, 0.008117247581481933, 0.007708672046661377, 0.007697408199310303, 
0.007654399871826172, 0.007683072090148926, 0.007730175971984863, 0.007701504230499268, 0.008463359832763672, 0.008068096160888672, 0.007916543960571289, 0.007705599784851074, 0.007667712211608887, 0.007699456214904785, 0.007643136024475097, 0.007656447887420655, 0.007656447887420655, 0.007661568164825439, 0.0076605439186096195, 0.007755775928497314, 0.007671807765960693, 0.0076871681213378906, 0.007669760227203369, 0.0077619199752807615, 0.007662591934204102, 0.0076605439186096195, 0.007664639949798584, 0.007772160053253174, 0.007708672046661377, 0.007676928043365478, 0.007662591934204102, 0.00795750379562378, 0.017183744430541992, 0.007907328128814697, 0.007918591976165772, 0.00783462381362915, 0.007717887878417969, 0.0076943359375, 0.007677951812744141, 0.007763967990875244, 0.007702527999877929, 0.007685120105743408, 0.007701504230499268, 0.007684095859527588, 0.007709695816040039, 0.007688191890716553, 0.007730175971984863, 0.007692287921905518, 0.007733248233795166, 0.007673855781555176, 0.007678976058959961, 0.007662591934204102, 0.007718912124633789, 0.007674880027770996, 0.007672832012176513, 0.007670783996582031, 0.007658495903015137, 0.007688191890716553, 0.007734272003173828, 0.007763967990875244, 0.007769087791442871, 0.007686143875122071, 0.007650303840637207, 0.007665664196014404, 0.007681024074554443, 0.007721983909606934, 0.0077281279563903805, 0.007683072090148926, 0.00769536018371582, 0.0077916159629821775, 0.007670783996582031, 0.007696383953094482, 0.0076605439186096195, 0.007673920154571533, 0.0076819839477539065, 0.007651328086853027, 0.0076943359375, 0.007655424118041992, 0.0076574721336364745, 0.007684095859527588, 0.007669760227203369, 0.007726079940795898, 0.007689216136932373, 0.007771135807037354, 0.0076912641525268555, 0.007823359966278077, 0.007716864109039307, 0.007703551769256592, 0.007678976058959961, 0.007688191890716553, 0.0077844481468200685, 0.007733248233795166, 0.007706624031066894, 0.0077199358940124516, 0.007748608112335205, 0.016525312423706053, 0.007715839862823487, 0.0077209601402282715, 0.007697408199310303, 0.007702527999877929, 0.007712800025939942, 0.007707615852355957, 0.007683072090148926, 0.007696383953094482, 0.007737343788146973, 0.0076687359809875484, 0.007705599784851074, 0.007711743831634522, 0.007686143875122071, 0.007732223987579345, 0.00773632001876831, 0.00787660789489746, 0.00830361557006836, 0.007911424160003662, 0.007717887878417969, 0.007702527999877929, 0.007723008155822754, 0.007732223987579345, 0.007715839862823487, 0.007697408199310303, 0.007795711994171142, 0.007724031925201416, 0.007716864109039307, 0.007809023857116699, 0.007868415832519531, 0.0077608962059021, 0.00773632001876831, 0.007714816093444824, 0.007705599784851074, 0.007703551769256592, 0.007706624031066894, 0.007705599784851074, 0.007702527999877929, 0.007710720062255859, 0.007686143875122071, 0.007699456214904785, 0.007853055953979492, 0.007704576015472412, 0.007655424118041992, 0.007672832012176513, 0.007702527999877929, 0.007703551769256592, 0.007702527999877929, 0.007742464065551758, 0.0076871681213378906, 0.007737343788146973, 0.007697408199310303, 0.007700479984283447, 0.007683072090148926, 0.007702527999877929, 0.007733248233795166, 0.007663616180419922, 0.007678976058959961, 0.007686143875122071, 0.00769536018371582, 0.0077209601402282715, 0.007686143875122071, 0.007703551769256592, 0.01681612777709961, 0.007713791847229004, 0.007679999828338623, 0.007707712173461914, 0.007690176010131836, 0.007659520149230957, 0.007675903797149658, 
0.007814144134521485, 0.007681024074554443, 0.007721983909606934, 0.007671807765960693, 0.007718912124633789, 0.007726079940795898, 0.007714816093444824, 0.007703551769256592, 0.007715839862823487, 0.0076943359375, 0.00769536018371582, 0.007705599784851074, 0.007677951812744141, 0.007672832012176513, 0.007671807765960693, 0.007730175971984863, 0.007686143875122071, 0.007659520149230957, 0.0076912641525268555, 0.0076984319686889645, 0.007662591934204102, 0.007699456214904785, 0.0077578239440917966, 0.0076943359375, 0.0077404799461364745, 0.007823296070098877, 0.007710720062255859, 0.0076943359375, 0.007682047843933106, 0.00769536018371582, 0.007639039993286132, 0.007710720062255859, 0.007692287921905518, 0.007687200069427491, 0.007911392211914062, 0.00794316816329956, 0.007654399871826172, 0.0076943359375, 0.007661568164825439, 0.0076902399063110355, 0.007673855781555176, 0.007707647800445557, 0.007701504230499268, 0.007683072090148926, 0.007661568164825439, 0.00773632001876831, 0.007669760227203369, 0.007759871959686279, 0.007682047843933106, 0.007688191890716553, 0.007670783996582031, 0.00773638391494751, 0.007666624069213867, 0.007685120105743408, 0.007774208068847656, 0.007774208068847656, 0.016714752197265623, 0.007661568164825439, 0.007718912124633789, 0.007678976058959961, 0.0076902399063110355, 0.0076687359809875484, 0.007655424118041992, 0.007704576015472412, 0.007724031925201416, 0.007693312168121338, 0.007682047843933106, 0.007713856220245362, 0.007695295810699463, 0.007727104187011719, 0.007708672046661377, 0.0076912641525268555, 0.0076943359375, 0.007725056171417236, 0.007659520149230957, 0.008349696159362792, 0.008087552070617676, 0.00800153636932373, 0.007982079982757568, 0.007956480026245117, 0.007929855823516846, 0.008227840423583984, 0.008046591758728027, 0.008014847755432129, 0.007885824203491211, 0.007773183822631836, 0.0076871681213378906, 0.007777279853820801, 0.007727104187011719, 0.007721983909606934, 0.007692287921905518, 0.007705599784851074, 0.007746560096740723, 0.007696383953094482, 0.007721983909606934, 0.007792640209197998, 0.007708672046661377, 0.007921664237976075, 0.007707647800445557, 0.007706624031066894, 0.007712768077850342, 0.007642111778259277, 0.007730175971984863, 0.007689216136932373, 0.007717887878417969, 0.007708672046661377, 0.007692287921905518, 0.007714816093444824, 0.007763967990875244, 0.007715839862823487, 0.007689216136932373, 0.007714816093444824, 0.007768064022064209, 0.0076912641525268555, 0.007679999828338623, 0.007677951812744141, 0.007693312168121338, 0.007681024074554443, 0.007692287921905518, 0.01683967971801758, 0.007956480026245117, 0.00795136022567749, 0.007931968212127686, 0.007883711814880371, 0.0077209601402282715, 0.007726111888885498, 0.007844831943511963, 0.007725056171417236, 0.007685120105743408, 0.007966720104217529, 0.007915520191192627, 0.007905280113220215, 0.007930880069732665, 0.0077209601402282715, 0.0077199358940124516, 0.007727104187011719, 0.007699456214904785, 0.0076912641525268555, 0.0076912641525268555, 0.007654399871826172, 0.007792640209197998, 0.0076871681213378906, 0.007699456214904785, 0.0076902399063110355, 0.007671807765960693, 0.0077209601402282715, 0.0076984319686889645, 0.007840767860412597, 0.007961599826812745, 0.007816192150115966, 0.007692287921905518, 0.007686143875122071, 0.007718912124633789, 0.007701504230499268, 0.007663616180419922, 0.007713791847229004, 0.007682047843933106, 0.007669760227203369, 0.007689216136932373, 0.007676928043365478, 0.00800153636932373, 0.007803904056549072, 
0.007771135807037354, 0.0077322559356689455, 0.007685152053833008, 0.007731135845184326, 0.007712768077850342, 0.007674880027770996, 0.007709695816040039, 0.007684095859527588, 0.007681024074554443, 0.00773632001876831, 0.007786496162414551, 0.007685120105743408, 0.007683072090148926, 0.007667712211608887, 0.007677951812744141, 0.007724031925201416, 0.007700479984283447, 0.007700479984283447, 0.007817215919494629, 0.0077209601402282715, 0.01655705642700195, 0.007729152202606201, 0.007725056171417236, 0.007679999828338623, 0.00773529577255249, 0.007704576015472412, 0.007726079940795898, 0.007715839862823487, 0.007713791847229004, 0.00774454402923584, 0.007709663867950439, 0.007705599784851074, 0.007731200218200684, 0.007697408199310303, 0.007729152202606201, 0.007713791847229004, 0.00773632001876831, 0.007699456214904785, 0.007676928043365478, 0.007709695816040039, 0.007763967990875244, 0.007693312168121338, 0.007684095859527588, 0.007672832012176513, 0.007727104187011719, 0.007699456214904785, 0.007692287921905518, 0.007696383953094482, 0.007666687965393066, 0.007671807765960693, 0.007734272003173828, 0.007797760009765625, 0.007671807765960693, 0.007671807765960693, 0.007674880027770996, 0.007686143875122071, 0.007685120105743408, 0.007680031776428222, 0.007708640098571778, 0.007688191890716553, 0.007741439819335938, 0.007726079940795898, 0.007679999828338623, 0.007747583866119385, 0.007707647800445557, 0.007661568164825439, 0.007709695816040039, 0.007714816093444824, 0.007689216136932373, 0.007710720062255859, 0.007702527999877929, 0.00769536018371582, 0.007712768077850342, 0.007729152202606201, 0.007714816093444824, 0.0076984319686889645, 0.00769536018371582, 0.007725056171417236, 0.007663616180419922, 0.007682112216949463, 0.007765952110290527, 0.007704576015472412, 0.007675903797149658, 0.016482303619384766, 0.0077209601402282715, 0.007693312168121338, 0.007663616180419922, 0.007686143875122071, 0.007688191890716553, 0.007673855781555176, 0.007783423900604248, 0.007667712211608887, 0.007689216136932373, 0.007803904056549072, 0.00769536018371582, 0.007676928043365478, 0.007678976058959961, 0.007724031925201416, 0.007696383953094482, 0.007805952072143555, 0.007718912124633789, 0.007708672046661377, 0.0076912641525268555, 0.007655424118041992, 0.0076902399063110355, 0.007697408199310303, 0.007712768077850342, 0.007770112037658691, 0.007656447887420655, 0.00785920000076294, 0.007704576015472412, 0.007676928043365478, 0.007677951812744141, 0.007712768077850342, 0.0076943359375, 0.007666687965393066, 0.007725056171417236, 0.007830527782440186, 0.007674880027770996, 0.00769536018371582, 0.007692287921905518, 0.007716864109039307, 0.0076687359809875484, 0.007670783996582031, 0.007796735763549805, 0.007696383953094482, 0.0076943359375, 0.007697408199310303, 0.007675903797149658, 0.007701504230499268, 0.007655424118041992, 0.007669760227203369, 0.007663616180419922, 0.007686143875122071, 0.007696383953094482, 0.00764518404006958, 0.00769536018371582, 0.007786496162414551, 0.007718912124633789, 0.007711743831634522, 0.0077209601402282715, 0.007987199783325195, 0.0076871681213378906, 0.007702527999877929, 0.007797760009765625, 0.007693312168121338, 0.016484352111816408, 0.007674880027770996, 0.007697408199310303, 0.007717887878417969, 0.007707647800445557, 0.007704576015472412, 0.007643136024475097, 0.007710720062255859, 0.0077199358940124516, 0.0077199358940124516, 0.007701504230499268, 0.007710720062255859, 0.0076984319686889645, 0.0077619199752807615, 0.007701504230499268, 0.007733248233795166, 
0.007715871810913086, 0.007692255973815918, 0.007709695816040039, 0.0076943359375, 0.007684095859527588, 0.0076912641525268555, 0.007692287921905518, 0.007710720062255859, 0.007682047843933106, 0.007670783996582031, 0.007689216136932373, 0.007673855781555176, 0.007685120105743408, 0.007702527999877929, 0.007711743831634522, 0.007717887878417969, 0.007881728172302246, 0.007721983909606934, 0.007707647800445557, 0.007847936153411865, 0.007799808025360107, 0.0077281279563903805, 0.007715839862823487, 0.0076902399063110355, 0.007700479984283447, 0.007724031925201416, 0.0076943359375, 0.007865344047546387, 0.0076871681213378906, 0.007723008155822754, 0.007727104187011719, 0.0077199358940124516, 0.007688191890716553, 0.007844863891601562, 0.007663616180419922, 0.007730175971984863, 0.007700479984283447, 0.007699456214904785, 0.00773529577255249, 0.007684095859527588, 0.007678976058959961, 0.007696383953094482, 0.007714816093444824, 0.00782643222808838, 0.007778304100036621, 0.007774208068847656, 0.007678976058959961, 0.016561151504516602, 0.007704576015472412, 0.007812096118927002, 0.007910399913787843, 0.007922688007354736, 0.008080384254455567, 0.007913472175598145, 0.007903232097625732, 0.007676928043365478, 0.007712831974029541, 0.007683008193969726, 0.007683072090148926, 0.007721983909606934, 0.007693312168121338, 0.007700479984283447, 0.0077199358940124516, 0.007685120105743408, 0.007682047843933106, 0.007686143875122071, 0.007696383953094482, 0.007689216136932373, 0.007641088008880615, 0.007703551769256592, 0.0076912641525268555, 0.007705599784851074, 0.0077281599044799806, 0.007726047992706299, 0.007732223987579345, 0.007673855781555176, 0.0076687359809875484, 0.00778547191619873, 0.007723008155822754, 0.007731200218200684, 0.007683072090148926, 0.007699456214904785, 0.007643136024475097, 0.007693312168121338, 0.007725088119506836, 0.007687136173248291, 0.007711743831634522, 0.007689216136932373, 0.007809023857116699, 0.007697408199310303, 0.007674911975860596, 0.0076861119270324706, 0.007665664196014404, 0.007709695816040039, 0.007683072090148926, 0.007771135807037354, 0.007688191890716553, 0.007716864109039307, 0.007693312168121338, 0.007705599784851074, 0.007773183822631836, 0.007767039775848389, 0.0077578239440917966, 0.0077209601402282715, 0.007709695816040039, 0.007776256084442139, 0.007709695816040039, 0.007684095859527588, 0.007709695816040039, 0.00769536018371582, 0.016553983688354493, 0.007738368034362793, 0.007707647800445557, 0.0077199358940124516, 0.007712768077850342, 0.007697408199310303, 0.007688191890716553, 0.007713791847229004, 0.007730175971984863, 0.007711743831634522, 0.007738368034362793, 0.007681024074554443, 0.007712768077850342, 0.007685120105743408, 0.007709695816040039, 0.007734272003173828, 0.007716864109039307, 0.007678976058959961, 0.0076912641525268555, 0.007699456214904785, 0.0077209601402282715, 0.007701504230499268, 0.007718912124633789, 0.007665664196014404, 0.0076912641525268555, 0.007706624031066894, 0.007685120105743408, 0.007721983909606934, 0.007716864109039307, 0.007688191890716553, 0.007689216136932373, 0.007676928043365478, 0.007684095859527588, 0.007715839862823487, 0.007701504230499268, 0.007725056171417236, 0.0076912641525268555, 0.007726079940795898, 0.007692287921905518, 0.007766016006469726, 0.00800051212310791, 0.00773529577255249, 0.007711743831634522, 0.007738368034362793, 0.0076912641525268555, 0.0076574721336364745, 0.0077066879272460935, 0.007749567985534668, 0.007740416049957275, 0.007701504230499268, 0.007696383953094482, 
0.007714816093444824, 0.0076871681213378906, 0.007677951812744141, 0.0076902399063110355, 0.007684095859527588, 0.007696415901184082, 0.007714784145355225, 0.007678976058959961, 0.007717919826507568, 0.0077270717620849605, 0.007686143875122071, 0.007689216136932373]",tokens/s,126.69384730146851,,,1,64,1,,, -4bit-awq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,Qwen/Qwen1.5-4B,Qwen/Qwen1.5-4B,cuda,0,42,,,True,,,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,qwen2,MB,3151.736832,4836.5568,0.0,4206.886912,4087.771648,s,1,9.90873828125,9.90873828125,0.0,9.90873828125,9.90873828125,9.90873828125,9.90873828125,[9.90873828125],,kWh,3.6708339818089075e-05,2.010315288425745e-05,5.7333101421974675e-05,0.0001141445941243212,,MB,3100.844032,5012.717568,0.0,4366.270464,4273.050624,s,10,6.172314208984375,0.6172314208984375,0.0001806009815508707,0.6172477722167968,0.6173733032226562,0.6174991516113281,0.6175998303222656,"[0.6169696044921875, 0.6170508422851563, 0.6170869140625, 0.617206298828125, 0.6172940063476563, 0.6172892456054687, 0.61734228515625, 0.617625, 0.6173453369140625, 0.6171046752929688]",tokens/s,414.755294906031,kWh,7.299943601388906e-06,3.99834134799857e-06,4.406379342094274e-05,5.536207837033022e-05,tokens/kWh,4624103.854764169,MB,3105.169408,5014.81472,0.0,4368.367616,4273.053184,s,10,22.972166259765622,2.2972166259765627,0.03775634338344153,2.2894224853515626,2.3485938232421875,2.351175939941406,2.3532416333007813,"[2.24863671875, 2.261334716796875, 2.265004638671875, 2.26551953125, 2.297212890625, 2.34802001953125, 2.353758056640625, 2.347228515625, 2.303819091796875, 2.281632080078125]",tokens/s,27.424492443423034,kWh,2.697130160451429e-05,1.4782696079417337e-05,5.968237290925588e-05,0.0001014363705931875,tokens/kWh,621079.003828545,,s,630,22.970030014038112,0.03646036510164776,0.0008980116401025015,0.03644262504577637,0.0375123966217041,0.03770567626953125,0.03823319107055664,"[0.03626393508911133, 0.035389438629150394, 0.035386367797851564, 0.03528192138671875, 0.03521331024169922, 0.035160064697265625, 0.035492862701416016, 0.035675136566162106, 0.03521535873413086, 0.035023872375488284, 0.03652608108520508, 0.03719782257080078, 0.0355860481262207, 0.03523993682861328, 0.03540582275390625, 0.03554304122924805, 0.035622913360595705, 0.03525734329223633, 0.03552153778076172, 0.03576934432983398, 0.03557273483276367, 0.035558399200439454, 0.03605913543701172, 0.03515289688110351, 0.035691520690917966, 0.035369983673095705, 0.03527884674072266, 0.0353177604675293, 0.03565875244140625, 0.03572326278686523, 0.03552358245849609, 0.03580313491821289, 0.035631103515625, 0.03564851379394531, 0.03577036666870117, 0.03551641464233399, 0.035501056671142575, 0.03547955322265625, 0.03574476623535156, 0.03768524932861328, 0.037580799102783204, 0.0354150390625, 0.03522457504272461, 0.03544780731201172, 0.03537919998168945, 0.035490814208984374, 0.03547545623779297, 0.03578060913085938, 0.03575193786621094, 0.03640217590332031, 0.0373196792602539, 0.0365926399230957, 0.03517337417602539, 
0.03540787124633789, 0.03528908920288086, 0.03574476623535156, 0.03547955322265625, 0.035757057189941405, 0.0355676155090332, 0.035643390655517575, 0.03570483016967774, 0.03566694259643555, 0.03567718505859375, 0.03620761489868164, 0.03558707046508789, 0.03508633422851563, 0.03514777755737305, 0.035639297485351565, 0.03566694259643555, 0.03521945571899414, 0.03559423828125, 0.0354068489074707, 0.0383744010925293, 0.03780710220336914, 0.03743436813354492, 0.0367646713256836, 0.03707187271118164, 0.037182464599609374, 0.037212158203125, 0.035509246826171875, 0.03563622283935547, 0.03580928039550781, 0.035506175994873046, 0.035566593170166014, 0.035661823272705076, 0.035699710845947266, 0.035432449340820314, 0.035724288940429685, 0.03665203094482422, 0.03561779022216797, 0.035639297485351565, 0.037154815673828126, 0.03788083267211914, 0.03579391860961914, 0.03590655899047852, 0.03565670394897461, 0.03531161499023437, 0.03555942535400391, 0.03545087814331055, 0.03562700653076172, 0.0357212142944336, 0.03553177642822265, 0.03553484725952148, 0.03545395278930664, 0.03576627349853516, 0.03559423828125, 0.03525734329223633, 0.035659774780273434, 0.03568537521362305, 0.03602227020263672, 0.03527475357055664, 0.03508838272094727, 0.03548262405395508, 0.03566592025756836, 0.035931137084960936, 0.03584819030761719, 0.036122623443603515, 0.0357283821105957, 0.03589120101928711, 0.03565465545654297, 0.0357314567565918, 0.035768318176269534, 0.03560345458984375, 0.035760128021240234, 0.035568641662597655, 0.03558399963378906, 0.036040702819824216, 0.03596083068847656, 0.03609395217895508, 0.035558399200439454, 0.035937278747558594, 0.03590963363647461, 0.03561062240600586, 0.03632230377197266, 0.036310016632080076, 0.03550822448730469, 0.03622809600830078, 0.035214336395263675, 0.035625984191894534, 0.035558399200439454, 0.03554611206054688, 0.03577753448486328, 0.03584819030761719, 0.036675582885742186, 0.03590348815917969, 0.03575193786621094, 0.035664894104003905, 0.035639297485351565, 0.035642368316650394, 0.035775489807128906, 0.03575603103637695, 0.03560243225097656, 0.03567411041259766, 0.0356577262878418, 0.03564748764038086, 0.03682918548583984, 0.0358021125793457, 0.035746814727783204, 0.036089855194091795, 0.036431873321533206, 0.03549798583984375, 0.03627008056640625, 0.03744255828857422, 0.037084159851074217, 0.03712409591674805, 0.03729305648803711, 0.03712614440917969, 0.0370964469909668, 0.035716094970703126, 0.03701760101318359, 0.0355860481262207, 0.0356864013671875, 0.035506175994873046, 0.036241409301757815, 0.03578777694702148, 0.03570380783081055, 0.035631103515625, 0.03542937469482422, 0.03549593734741211, 0.03560755157470703, 0.03566284942626953, 0.035655681610107424, 0.035419136047363284, 0.036101119995117184, 0.035527679443359376, 0.035576831817626955, 0.0357283821105957, 0.03563827133178711, 0.03580723190307617, 0.03560550308227539, 0.03558399963378906, 0.03538739013671875, 0.0357283821105957, 0.036127742767333985, 0.03566080093383789, 0.035620864868164064, 0.03587481689453125, 0.03584511947631836, 0.036367359161376955, 0.03749990463256836, 0.03835289764404297, 0.03747840118408203, 0.03720601654052735, 0.03535871887207031, 0.03594137573242188, 0.03576934432983398, 0.035745792388916016, 0.03583078384399414, 0.035310592651367184, 0.035896320343017575, 0.035833854675292966, 0.0357386245727539, 0.036275199890136715, 0.03523379135131836, 0.0354150390625, 0.03568947219848633, 0.03553279876708984, 0.03571712112426758, 0.03573452758789063, 0.03567923355102539, 0.035544063568115236, 
0.03569356918334961, 0.036141056060791016, 0.03575296020507813, 0.0358809585571289, 0.0363694076538086, 0.037440513610839846, 0.035784702301025394, 0.03603558349609375, 0.035765247344970705, 0.03600281524658203, 0.03584000015258789, 0.03580108642578125, 0.035884033203125, 0.03569356918334961, 0.03566080093383789, 0.035740673065185545, 0.03546112060546875, 0.03536383819580078, 0.035130367279052735, 0.035211265563964846, 0.03544063949584961, 0.03679436874389649, 0.03719782257080078, 0.037294078826904296, 0.03720294570922852, 0.03727052688598633, 0.03550822448730469, 0.035535873413085936, 0.03535564804077149, 0.035332096099853515, 0.035138561248779294, 0.03659161758422851, 0.035639297485351565, 0.035168254852294925, 0.03541708755493164, 0.03566592025756836, 0.03568025588989258, 0.03586150360107422, 0.035740673065185545, 0.035732479095458985, 0.03566796875, 0.03570892715454101, 0.03576627349853516, 0.035707904815673826, 0.03715891265869141, 0.036762622833251955, 0.03719680023193359, 0.03756851196289063, 0.03765862274169922, 0.03760025787353516, 0.03749785614013672, 0.0356126708984375, 0.03553996658325195, 0.03563724899291992, 0.03554099273681641, 0.035714046478271484, 0.035664894104003905, 0.03570278549194336, 0.03551641464233399, 0.03532799911499023, 0.03527679824829102, 0.035416065216064455, 0.035264511108398434, 0.03513241577148438, 0.035683326721191407, 0.037152767181396484, 0.03704729461669922, 0.0357386245727539, 0.03551232147216797, 0.035334144592285156, 0.03647488021850586, 0.036708351135253905, 0.036994049072265625, 0.03778252792358398, 0.03778252792358398, 0.03793612670898437, 0.037700607299804685, 0.03746406555175781, 0.03774259185791016, 0.03757056045532227, 0.03667763137817383, 0.037501953125, 0.03743436813354492, 0.03699302291870117, 0.036828159332275394, 0.03692031860351563, 0.037392383575439454, 0.037454849243164064, 0.037103614807128905, 0.03671449661254883, 0.036708351135253905, 0.03686809539794922, 0.03712409591674805, 0.03752447891235351, 0.03671449661254883, 0.03675545501708984, 0.037667839050292966, 0.03751424026489258, 0.03753881454467774, 0.03721625518798828, 0.03700735855102539, 0.03744768142700195, 0.037043201446533204, 0.037013504028320314, 0.03726131057739258, 0.037397502899169925, 0.03712307357788086, 0.03734425735473633, 0.03731558227539063, 0.037473281860351565, 0.037528575897216795, 0.037618686676025394, 0.036860927581787106, 0.03733196640014649, 0.03716198348999023, 0.03671756744384766, 0.03713228988647461, 0.03698175811767578, 0.03702272033691406, 0.03718656158447266, 0.03730022430419922, 0.0372305908203125, 0.03681075286865235, 0.037160961151123044, 0.03719475173950195, 0.03742617416381836, 0.03681587219238281, 0.03708006286621094, 0.0373831672668457, 0.0373043212890625, 0.03684864044189453, 0.037323776245117186, 0.03726348876953125, 0.03787251281738281, 0.03749990463256836, 0.03746201705932617, 0.03749273681640625, 0.03765350341796875, 0.037116928100585936, 0.037269504547119144, 0.037405696868896485, 0.037563392639160156, 0.03788390350341797, 0.03749990463256836, 0.0374128646850586, 0.037923839569091795, 0.03783270263671875, 0.03772415924072266, 0.03757670211791992, 0.03731353759765625, 0.03681894302368164, 0.03681689453125, 0.037275646209716795, 0.037222400665283206, 0.03685887908935547, 0.036511745452880856, 0.03727872085571289, 0.03785625457763672, 0.03758489608764649, 0.03755417633056641, 0.037285888671875, 0.03738726425170898, 0.03717324829101563, 0.0371671028137207, 0.037512191772460936, 0.03746815872192383, 0.03688550567626953, 0.03719168090820312, 
0.03781631851196289, 0.037425151824951174, 0.03731660842895508, 0.03736576080322265, 0.03757056045532227, 0.03723263931274414, 0.037803009033203126, 0.0359024658203125, 0.036898815155029296, 0.03708927917480469, 0.03736371231079102, 0.0370247688293457, 0.040174591064453126, 0.03823616027832031, 0.03750707244873047, 0.037085182189941404, 0.03752447891235351, 0.03746201705932617, 0.03744460678100586, 0.03735859298706055, 0.037493759155273435, 0.037066783905029294, 0.03728278350830078, 0.03687936019897461, 0.03761459350585938, 0.03732992172241211, 0.037292030334472655, 0.0375470085144043, 0.03720601654052735, 0.03729612731933594, 0.03714355087280274, 0.03673702239990234, 0.03784806442260742, 0.03895296096801758, 0.03775795364379883, 0.037190654754638675, 0.037601280212402347, 0.03721932983398438, 0.03668787384033203, 0.03729305648803711, 0.03704422378540039, 0.03747840118408203, 0.03731763076782227, 0.03665510559082031, 0.037138431549072266, 0.03654553604125976, 0.03700121688842774, 0.0371701774597168, 0.03732275390625, 0.03724800109863281, 0.03727667236328125, 0.03717529678344727, 0.03666329574584961, 0.037321727752685545, 0.03709542465209961, 0.037108734130859376, 0.037397502899169925, 0.03739340972900391, 0.037305343627929685, 0.03726233673095703, 0.03738726425170898, 0.03749478530883789, 0.037222400665283206, 0.03723571014404297, 0.036992000579833983, 0.03715071868896484, 0.03751731109619141, 0.037580799102783204, 0.037222400665283206, 0.03668172836303711, 0.03770982360839844, 0.037223423004150394, 0.03739033508300781, 0.037288959503173826, 0.037303295135498044, 0.037348350524902346, 0.03726131057739258, 0.037190654754638675, 0.03717836761474609, 0.03881062316894531, 0.03806412887573242, 0.03744870376586914, 0.03707596969604492, 0.0373196792602539, 0.03723263931274414, 0.03737702560424805, 0.03738726425170898, 0.03728793716430664, 0.03772108840942383, 0.037367809295654295, 0.03732787322998047, 0.03783065414428711, 0.03702067184448242, 0.037032958984375, 0.036772865295410156, 0.03709439849853516, 0.03745587158203125, 0.03744768142700195, 0.036517887115478515, 0.03730739212036133, 0.03750707244873047, 0.037285888671875, 0.03671039962768555, 0.037381118774414065, 0.03746713638305664, 0.037149696350097655, 0.037106689453125, 0.036803585052490234, 0.03679334259033203, 0.03683942413330078, 0.03739955139160156, 0.037179393768310545, 0.03659775924682617, 0.03664179229736328, 0.03732070541381836, 0.03619123077392578, 0.03602431869506836, 0.03694387054443359, 0.037174270629882815, 0.0371486701965332, 0.037526527404785154, 0.03584921646118164, 0.03738623809814453, 0.037220352172851565, 0.03729919815063477, 0.03724185562133789, 0.03731660842895508, 0.037233665466308595, 0.03732992172241211, 0.03676876831054687, 0.03754393768310547, 0.0369356803894043, 0.038225921630859375, 0.03814297485351562, 0.03766067123413086, 0.03722956848144531, 0.037308414459228514, 0.037059585571289064, 0.037116928100585936, 0.03554304122924805, 0.03536896133422852, 0.03538227081298828, 0.03512115097045899, 0.03535257720947266, 0.03498905563354492, 0.03545292663574219, 0.03514470291137695, 0.03616460800170898, 0.0374835205078125, 0.03738828659057617, 0.03687526321411133, 0.03723775863647461, 0.03735756683349609, 0.03765964889526367, 0.037482494354248046, 0.03725107192993164, 0.03736064147949219, 0.037348350524902346, 0.03732275390625, 0.03738009643554688, 0.03679334259033203, 0.03688447952270508, 0.03707494354248047, 0.03530035018920898, 0.03511296081542969, 0.03545395278930664, 0.0355860481262207, 0.0357212142944336, 
0.03570483016967774, 0.03565465545654297, 0.03577753448486328, 0.03563008117675781, 0.03566080093383789, 0.035678207397460936, 0.03557068634033203, 0.03589836883544922, 0.035806209564208984, 0.03575807952880859, 0.036523006439208985, 0.03715379333496094, 0.03737190246582031, 0.038005760192871094, 0.037498878479003905, 0.037784576416015625, 0.03745587158203125, 0.0368455696105957, 0.037700607299804685, 0.03734630584716797, 0.037379070281982424, 0.03705241775512695, 0.03755212783813477, 0.03745382308959961, 0.03736166381835938, 0.03794636917114258, 0.03741491317749023, 0.036918270111083985, 0.035568641662597655, 0.035574783325195314, 0.035404800415039066, 0.035730430603027344, 0.035659774780273434, 0.0355491828918457, 0.03584204864501953, 0.03577958297729492, 0.03566592025756836, 0.03521843338012695, 0.03532799911499023, 0.036203521728515625, 0.03526758575439453, 0.03544678497314453, 0.03512831878662109, 0.03654348754882813, 0.036779006958007815, 0.03872051239013672, 0.03619123077392578, 0.03544166564941406, 0.036162559509277346, 0.03632537460327148, 0.035699710845947266, 0.035340286254882815, 0.03625062561035156, 0.03645337677001953, 0.03616460800170898, 0.035119102478027346, 0.035683326721191407, 0.03553996658325195, 0.03567001724243164, 0.03559936141967773, 0.03575091171264649, 0.03560550308227539, 0.035659774780273434, 0.03563827133178711, 0.03527884674072266, 0.03540991973876953, 0.0353361930847168, 0.035568641662597655, 0.03566796875, 0.03529216003417969, 0.03544985580444336, 0.03530854415893555, 0.03565055847167969]",tokens/s,27.427042960543663,,,1,64,1,,, -4bit-awq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,Qwen/Qwen1.5-0.5B,Qwen/Qwen1.5-0.5B,cuda,0,42,,,True,,,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,qwen2,MB,1329.414144,1361.575936,0.0,731.906048,703.86944,s,1,7.7310810546875,7.7310810546875,0.0,7.7310810546875,7.7310810546875,7.7310810546875,7.7310810546875,[7.7310810546875],,kWh,1.0835721882643965e-05,5.922728511964853e-06,1.5119734318003175e-05,3.1878184712611995e-05,,MB,1607.868416,1644.691456,0.0,998.244352,942.610432,s,10,0.675199592590332,0.0675199592590332,3.712158824847865e-05,0.06753414535522462,0.0675495979309082,0.06756240043640137,0.0675726424407959,"[0.06743389129638672, 0.06748992156982422, 0.06749842834472657, 0.0675335693359375, 0.06750943756103515, 0.06754112243652344, 0.0675467529296875, 0.06753654479980468, 0.06753472137451172, 0.06757520294189454]",tokens/s,3791.471482052929,kWh,7.981257335210751e-07,4.372718562627411e-07,4.6974756048244955e-06,5.932873194608312e-06,tokens/kWh,43149413.71621564,MB,1647.583232,1655.177216,0.0,1006.63296,942.612992,s,10,13.369005981445312,1.3369005981445312,0.002816137412146056,1.3369354248046874,1.3404452514648437,1.3408586120605468,1.3411893005371094,"[1.3366226806640624, 1.3396343994140625, 1.3338436279296875, 1.3322059326171876, 1.33493310546875, 1.34127197265625, 1.33487548828125, 1.3372481689453124, 1.3403533935546874, 
1.3380172119140625]",tokens/s,47.12392236747965,kWh,1.5741121580645414e-05,8.62604894860127e-06,2.8032939468376576e-05,5.2400109997623247e-05,tokens/kWh,1202287.5525043276,,s,630,13.367188484191885,0.021217759498717292,0.0003208631640703057,0.021187071800231935,0.02149488582611084,0.021744793796539305,0.02257415187835694,"[0.021319679260253906, 0.021154815673828126, 0.021282848358154298, 0.021033952713012696, 0.021008384704589843, 0.02125209617614746, 0.02124799919128418, 0.02149478340148926, 0.020998144149780275, 0.02162380790710449, 0.02186342430114746, 0.02148761558532715, 0.02122444725036621, 0.02105446434020996, 0.021181440353393553, 0.02143846321105957, 0.021433343887329103, 0.021218303680419923, 0.021171199798583985, 0.02148761558532715, 0.021363712310791014, 0.02142617607116699, 0.021200895309448242, 0.021164031982421876, 0.02129408073425293, 0.020970495223999023, 0.021016576766967773, 0.021186559677124024, 0.02127872085571289, 0.020945920944213867, 0.020952064514160155, 0.02088960075378418, 0.021201919555664063, 0.021139455795288087, 0.02128895950317383, 0.02109644889831543, 0.020959232330322267, 0.020997119903564454, 0.02126028823852539, 0.02125823974609375, 0.021185535430908203, 0.021251071929931642, 0.02124083137512207, 0.02128281593322754, 0.02128998374938965, 0.02119987106323242, 0.02125823974609375, 0.02120806312561035, 0.021187583923339845, 0.021194751739501954, 0.021147647857666017, 0.020971519470214844, 0.021308416366577147, 0.021163007736206055, 0.021105663299560547, 0.021251071929931642, 0.02168115234375, 0.02122956848144531, 0.021246976852416992, 0.020986879348754883, 0.020768768310546876, 0.021135360717773437, 0.021275648117065428, 0.02104934310913086, 0.020975616455078124, 0.02089369583129883, 0.021046272277832033, 0.021141504287719725, 0.021186559677124024, 0.021024768829345702, 0.021175296783447265, 0.02126540756225586, 0.02128895950317383, 0.021243904113769533, 0.021188608169555666, 0.020937728881835937, 0.021608448028564452, 0.02128691291809082, 0.021190656661987304, 0.021597183227539063, 0.021737472534179687, 0.022674432754516603, 0.021538816452026367, 0.021215232849121093, 0.021772287368774415, 0.02168832015991211, 0.02166681671142578, 0.02123366355895996, 0.02130534362792969, 0.021230592727661132, 0.021413888931274414, 0.020989952087402345, 0.021000192642211913, 0.021321727752685548, 0.02107494354248047, 0.02109644889831543, 0.021994495391845705, 0.02123161506652832, 0.02122137641906738, 0.02165452766418457, 0.021170175552368165, 0.021189632415771483, 0.021174303054809572, 0.021038047790527342, 0.020717567443847656, 0.02087424087524414, 0.02104012870788574, 0.021193727493286133, 0.02126540756225586, 0.021712896347045898, 0.021529600143432616, 0.02129305648803711, 0.021235712051391603, 0.021156864166259767, 0.021102592468261717, 0.020921344757080077, 0.021279743194580078, 0.020967424392700194, 0.020880384445190428, 0.020909055709838868, 0.021177343368530274, 0.021195775985717775, 0.021195775985717775, 0.021411840438842773, 0.02124799919128418, 0.021390335083007812, 0.02123980712890625, 0.021130239486694336, 0.020831232070922853, 0.02082918357849121, 0.021073919296264648, 0.021175296783447265, 0.021151744842529296, 0.021170175552368165, 0.020891647338867187, 0.0210831356048584, 0.021150720596313476, 0.020977664947509765, 0.020954111099243163, 0.021217279434204102, 0.021195775985717775, 0.021186559677124024, 0.020977664947509765, 0.021561344146728514, 0.021104639053344726, 0.021156864166259767, 0.021004287719726563, 0.021195775985717775, 0.021104639053344726, 
0.021126144409179686, 0.021111808776855468, 0.021196800231933592, 0.021184511184692383, 0.021163007736206055, 0.02122854423522949, 0.021251071929931642, 0.02124185562133789, 0.020979711532592774, 0.02110873603820801, 0.021154815673828126, 0.021204992294311522, 0.02124185562133789, 0.02130534362792969, 0.021153791427612305, 0.02091929626464844, 0.021191680908203125, 0.02091929626464844, 0.021146623611450196, 0.021251071929931642, 0.021200895309448242, 0.021147647857666017, 0.021046272277832033, 0.021003263473510742, 0.02092748832702637, 0.021021696090698243, 0.0212490234375, 0.020935680389404295, 0.02126233673095703, 0.021542911529541017, 0.020935680389404295, 0.020951040267944337, 0.021271551132202148, 0.02123263931274414, 0.0212490234375, 0.021005311965942384, 0.021203968048095705, 0.022487039566040038, 0.022136831283569337, 0.021411840438842773, 0.02123263931274414, 0.021178367614746094, 0.021098495483398438, 0.021161983489990235, 0.0210513916015625, 0.021174272537231444, 0.021155839920043946, 0.02100223922729492, 0.020937728881835937, 0.02127462387084961, 0.021189632415771483, 0.021149696350097655, 0.021128192901611328, 0.021028863906860353, 0.02084454345703125, 0.02083737564086914, 0.021121023178100586, 0.021081087112426757, 0.021380096435546874, 0.021163007736206055, 0.021158912658691405, 0.02109951972961426, 0.02123673629760742, 0.020924415588378906, 0.020772863388061523, 0.020925439834594727, 0.021145599365234375, 0.021094400405883788, 0.0210513916015625, 0.021180416107177736, 0.02121625518798828, 0.021105663299560547, 0.020918272018432618, 0.020961280822753905, 0.02120806312561035, 0.021203968048095705, 0.02088652801513672, 0.020955135345458984, 0.021178367614746094, 0.020995071411132812, 0.021137407302856445, 0.021337087631225587, 0.021501951217651367, 0.021206016540527343, 0.021211135864257814, 0.021016576766967773, 0.021529600143432616, 0.02145894432067871, 0.021234687805175782, 0.020951040267944337, 0.0210565128326416, 0.022158336639404298, 0.021215232849121093, 0.02091007995605469, 0.021167104721069335, 0.021160959243774414, 0.02125823974609375, 0.02123263931274414, 0.021212160110473634, 0.021214208602905273, 0.02125209617614746, 0.02122137641906738, 0.020967424392700194, 0.021169151306152344, 0.02091110420227051, 0.020914176940917968, 0.021154815673828126, 0.020989952087402345, 0.020932607650756836, 0.021098495483398438, 0.02123776054382324, 0.020970495223999023, 0.02106879997253418, 0.02106675148010254, 0.020940799713134766, 0.02107494354248047, 0.02102272033691406, 0.020959232330322267, 0.021186559677124024, 0.021203968048095705, 0.021204992294311522, 0.021210111618041993, 0.021159936904907226, 0.021009408950805664, 0.020776960372924806, 0.020880384445190428, 0.02106470489501953, 0.021327871322631836, 0.021363712310791014, 0.021364736557006835, 0.021553152084350585, 0.021126144409179686, 0.021172224044799806, 0.021143552780151367, 0.021177343368530274, 0.020933631896972657, 0.021004287719726563, 0.021178367614746094, 0.021140480041503908, 0.02085478401184082, 0.0212674560546875, 0.02129817581176758, 0.021147647857666017, 0.021176319122314453, 0.0210831356048584, 0.021008384704589843, 0.02086092758178711, 0.02104319953918457, 0.021172224044799806, 0.021186559677124024, 0.021183488845825195, 0.021175296783447265, 0.021165056228637694, 0.021210111618041993, 0.021242879867553712, 0.021587968826293946, 0.02168524742126465, 0.022401023864746093, 0.022064128875732423, 0.021411840438842773, 0.02187571144104004, 0.021201919555664063, 0.02122444725036621, 0.02125209617614746, 
0.021351423263549805, 0.020933631896972657, 0.020981760025024415, 0.020898815155029296, 0.021081087112426757, 0.021218303680419923, 0.021575679779052736, 0.0212807674407959, 0.021153791427612305, 0.021750783920288085, 0.021316608428955077, 0.02127462387084961, 0.02126028823852539, 0.02124083137512207, 0.021153791427612305, 0.021081087112426757, 0.02089369583129883, 0.02120806312561035, 0.021230592727661132, 0.021238784790039062, 0.021142528533935546, 0.020986879348754883, 0.02109644889831543, 0.021179391860961915, 0.021090303421020508, 0.021194751739501954, 0.021198848724365234, 0.02107596778869629, 0.021151744842529296, 0.02127257537841797, 0.021192703247070312, 0.021238784790039062, 0.021194751739501954, 0.021210111618041993, 0.02147737693786621, 0.021357568740844726, 0.021185535430908203, 0.021255168914794922, 0.021202943801879884, 0.021158912658691405, 0.02127667236328125, 0.021517311096191406, 0.021287935256958008, 0.021223424911499023, 0.02126131248474121, 0.024225791931152343, 0.022608896255493165, 0.021910528182983398, 0.021352447509765626, 0.02122956848144531, 0.020965375900268556, 0.021139455795288087, 0.021391359329223633, 0.021839872360229492, 0.021366783142089844, 0.02122854423522949, 0.02102374458312988, 0.02142310333251953, 0.020982784271240236, 0.020946943283081054, 0.020969472885131835, 0.02099404716491699, 0.020947967529296875, 0.020967424392700194, 0.021313535690307618, 0.021715967178344727, 0.02129408073425293, 0.021151744842529296, 0.021191680908203125, 0.020915199279785156, 0.021203968048095705, 0.021003263473510742, 0.021193727493286133, 0.021271551132202148, 0.021140480041503908, 0.020951040267944337, 0.02092748832702637, 0.021158912658691405, 0.021072895050048827, 0.021243904113769533, 0.020930559158325195, 0.021173248291015623, 0.021204992294311522, 0.020723712921142577, 0.02086911964416504, 0.020951072692871095, 0.021042144775390625, 0.021148672103881837, 0.021130239486694336, 0.02080460739135742, 0.02085068893432617, 0.02131046485900879, 0.021301248550415038, 0.021197824478149413, 0.021104639053344726, 0.020790271759033203, 0.02130534362792969, 0.021133312225341795, 0.021193727493286133, 0.021235712051391603, 0.021206016540527343, 0.021522432327270507, 0.021186559677124024, 0.021167104721069335, 0.02086092758178711, 0.02088243293762207, 0.021174272537231444, 0.020970495223999023, 0.020892671585083008, 0.02091110420227051, 0.020917247772216797, 0.021038080215454103, 0.022345727920532226, 0.022751232147216797, 0.021974016189575195, 0.021783552169799804, 0.021489664077758788, 0.021255168914794922, 0.02122547149658203, 0.021110784530639647, 0.021283840179443358, 0.02126438331604004, 0.0211507511138916, 0.021092319488525392, 0.0208855037689209, 0.02128691291809082, 0.021238784790039062, 0.020984832763671874, 0.020970495223999023, 0.021235712051391603, 0.021213184356689452, 0.02124185562133789, 0.021323776245117186, 0.021203968048095705, 0.021206016540527343, 0.020964351654052735, 0.021211135864257814, 0.021165056228637694, 0.021238784790039062, 0.020963327407836914, 0.020940799713134766, 0.021300224304199217, 0.021149696350097655, 0.020928512573242186, 0.02165657615661621, 0.02127769660949707, 0.02127257537841797, 0.021796863555908205, 0.021540864944458008, 0.021555200576782226, 0.02104832077026367, 0.023111679077148437, 0.022122495651245116, 0.021201919555664063, 0.021234687805175782, 0.021001216888427734, 0.022189056396484375, 0.02149580764770508, 0.021279743194580078, 0.021201919555664063, 0.02125823974609375, 0.02128998374938965, 0.02126131248474121, 
0.021144575119018554, 0.02123366355895996, 0.02123776054382324, 0.020921344757080077, 0.02106777572631836, 0.02125721549987793, 0.021187583923339845, 0.020981760025024415, 0.02091007995605469, 0.020918272018432618, 0.020964351654052735, 0.020920320510864256, 0.020921344757080077, 0.020937728881835937, 0.020929536819458007, 0.020938751220703124, 0.02122035217285156, 0.021320703506469727, 0.021218303680419923, 0.020930559158325195, 0.020974592208862306, 0.02091007995605469, 0.020966400146484376, 0.021212160110473634, 0.02119987106323242, 0.02166476821899414, 0.020953088760375976, 0.020974592208862306, 0.02126233673095703, 0.021175296783447265, 0.02124185562133789, 0.021223424911499023, 0.021122047424316406, 0.021238784790039062, 0.021206016540527343, 0.02125823974609375, 0.021234687805175782, 0.021235712051391603, 0.021235712051391603, 0.021279743194580078, 0.021215232849121093, 0.021215232849121093, 0.021203968048095705, 0.02122137641906738, 0.021127168655395507, 0.021355520248413085, 0.021157888412475585, 0.021194751739501954, 0.020918272018432618, 0.021172224044799806, 0.021171199798583985, 0.02123980712890625, 0.02127257537841797, 0.021569536209106444, 0.022821887969970703, 0.021818368911743165, 0.021243904113769533, 0.02124595260620117, 0.02122137641906738, 0.021143552780151367, 0.021223424911499023, 0.02124083137512207, 0.02124595260620117, 0.02124083137512207, 0.02124083137512207, 0.02127667236328125, 0.021246976852416992, 0.02091929626464844, 0.020953088760375976, 0.02208051109313965, 0.0214835205078125, 0.021251071929931642, 0.021344255447387696, 0.02167296028137207, 0.02126233673095703, 0.02125004768371582, 0.02128691291809082, 0.020992000579833983, 0.02106879997253418, 0.021127168655395507, 0.02128691291809082, 0.02126233673095703, 0.02127257537841797, 0.021360639572143555, 0.021356544494628905, 0.02123980712890625, 0.02127462387084961, 0.020936704635620116, 0.020999168395996092, 0.02109132766723633, 0.02144256019592285, 0.020989952087402345, 0.020912128448486327, 0.02103193664550781, 0.02126438331604004, 0.021117952346801756, 0.02122547149658203, 0.02227302360534668, 0.02248908805847168, 0.02186751937866211, 0.021292032241821288, 0.02123366355895996, 0.02122854423522949, 0.021155839920043946, 0.021155839920043946, 0.021111808776855468, 0.02123673629760742, 0.021211135864257814, 0.021143552780151367, 0.02108415985107422, 0.02090188789367676, 0.02091007995605469, 0.021127168655395507, 0.021770240783691407, 0.021218303680419923, 0.021112831115722656, 0.020922367095947265, 0.021537792205810546, 0.02123263931274414, 0.02105449676513672, 0.02096329689025879, 0.020898815155029296, 0.021005311965942384, 0.021009408950805664, 0.021185535430908203, 0.0209039363861084, 0.02088652801513672, 0.02084966468811035, 0.02086195182800293, 0.021154815673828126, 0.021132287979125978, 0.021011455535888672, 0.020992000579833983, 0.020891647338867187, 0.02106982421875, 0.021089279174804687, 0.021219327926635743, 0.023222272872924804, 0.022409215927124023, 0.02144767951965332, 0.021497856140136717, 0.02143436813354492, 0.021355520248413085, 0.021362688064575194, 0.020940799713134766, 0.021032960891723632, 0.020890623092651366, 0.02107187271118164, 0.021163007736206055, 0.021174272537231444, 0.02144972801208496, 0.021139455795288087, 0.021168127059936523, 0.021140480041503908]",tokens/s,47.13032966842962,,,1,64,1,,, 
-4bit-awq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,EleutherAI/pythia-1.3b,EleutherAI/pythia-1.3b,cuda,0,42,,,True,,,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,gpt_neox,MB,1595.445248,2057.8304,0.0,1428.160512,1322.516992,s,1,8.29711328125,8.29711328125,0.0,8.29711328125,8.29711328125,8.29711328125,8.29711328125,[8.29711328125],,kWh,1.5529674745825006e-05,8.495243807793052e-06,2.407085259004127e-05,4.8095771143659324e-05,,MB,1726.246912,2082.996224,0.0,1434.451968,1320.892416,s,10,2.4204870147705075,0.24204870147705076,0.0001286863365834227,0.2420351791381836,0.24218169860839844,0.24221629180908202,0.24224396636962892,"[0.24217401123046875, 0.24201657104492189, 0.24194975280761719, 0.24176278686523436, 0.24209309387207031, 0.2420130615234375, 0.2421583709716797, 0.24201469421386718, 0.24225088500976563, 0.2420537872314453]",tokens/s,1057.63839441325,kWh,2.8588931703533943e-06,1.5665358921256763e-06,1.6068465235712425e-05,2.0493894298191492e-05,tokens/kWh,12491525.342872048,MB,1737.453568,2082.996224,0.0,1434.451968,1373.031936,s,10,13.200702758789063,1.3200702758789062,0.01533914127262112,1.3195706176757813,1.3382166137695313,1.338532745361328,1.3387856506347657,"[1.292245849609375, 1.2970362548828125, 1.3177557373046875, 1.321385498046875, 1.3310888671875, 1.3153233642578126, 1.314959716796875, 1.3339122314453125, 1.338848876953125, 1.3381463623046874]",tokens/s,47.72473189584883,kWh,1.5197362225550854e-05,8.328203153395741e-06,3.000509940088819e-05,5.3530664779834795e-05,tokens/kWh,1176895.5281820514,,s,630,13.196368886947646,0.020946617280869256,0.0005020260688619478,0.021003263473510742,0.021376101875305177,0.021665382385253902,0.02256992218017578,"[0.02026905632019043, 0.020338687896728515, 0.02027622413635254, 0.020361215591430663, 0.020271104812622072, 0.02026393508911133, 0.021012479782104493, 0.021295103073120117, 0.020426752090454102, 0.02035916709899902, 0.02035916709899902, 0.021141504287719725, 0.020435968399047853, 0.020775936126708985, 0.0204400634765625, 0.020385791778564453, 0.020388864517211915, 0.020331520080566406, 0.020358144760131838, 0.02027827262878418, 0.02036735916137695, 0.020947967529296875, 0.020959232330322267, 0.021090303421020508, 0.020633600234985353, 0.020299776077270508, 0.020338687896728515, 0.020346879959106445, 0.020427776336669923, 0.020306943893432617, 0.02031001663208008, 0.020414464950561522, 0.020418560028076172, 0.020559871673583984, 0.020397056579589845, 0.02043084716796875, 0.020384767532348632, 0.02044313621520996, 0.020452352523803712, 0.021106687545776368, 0.02082918357849121, 0.020369407653808593, 0.020354047775268554, 0.02088140869140625, 0.021351423263549805, 0.020915199279785156, 0.02097670364379883, 0.02050758361816406, 0.020358144760131838, 0.020376575469970702, 0.020404224395751954, 0.02044927978515625, 0.020403200149536133, 0.020303871154785155, 0.020287488937377928, 0.02049228858947754, 0.02025062370300293, 0.02004377555847168, 0.02046976089477539, 0.020393983840942383, 0.020385791778564453, 0.020358144760131838, 
0.02031001663208008, 0.02044927978515625, 0.020374528884887694, 0.020288511276245116, 0.020298751831054687, 0.020262912750244142, 0.020343807220458983, 0.020388864517211915, 0.020445184707641603, 0.02038374328613281, 0.020319232940673827, 0.02040012741088867, 0.020329471588134765, 0.020397056579589845, 0.020516864776611327, 0.02050662422180176, 0.020470783233642577, 0.020347904205322266, 0.020358144760131838, 0.020739072799682616, 0.020358144760131838, 0.020347904205322266, 0.02042470359802246, 0.020569087982177735, 0.02042982482910156, 0.020381696701049806, 0.020303871154785155, 0.020345855712890625, 0.020338687896728515, 0.020303871154785155, 0.020574207305908202, 0.020754432678222655, 0.02047590446472168, 0.020330495834350586, 0.020397056579589845, 0.020378623962402344, 0.02033459281921387, 0.020344831466674804, 0.020330495834350586, 0.020304895401000975, 0.02041651153564453, 0.02045747184753418, 0.020262912750244142, 0.020338687896728515, 0.020229120254516602, 0.020617216110229493, 0.020684799194335936, 0.022198272705078126, 0.022162431716918944, 0.021311487197875977, 0.02108518409729004, 0.020762624740600585, 0.020336639404296874, 0.020505599975585938, 0.021138431549072266, 0.020966400146484376, 0.02101759910583496, 0.020351999282836913, 0.020596736907958983, 0.020395008087158203, 0.020971519470214844, 0.020840448379516603, 0.02168832015991211, 0.021621759414672852, 0.020141056060791016, 0.020366336822509764, 0.021024768829345702, 0.021013504028320314, 0.020633600234985353, 0.02043903923034668, 0.021148672103881837, 0.02103603172302246, 0.020960256576538085, 0.020975616455078124, 0.020611072540283205, 0.020377599716186523, 0.020633600234985353, 0.02063155174255371, 0.02085478401184082, 0.020121599197387697, 0.020487167358398437, 0.020974592208862306, 0.02087731170654297, 0.020945920944213867, 0.020358144760131838, 0.02110361671447754, 0.020928512573242186, 0.02110054397583008, 0.020899839401245117, 0.020976640701293944, 0.020979711532592774, 0.020932607650756836, 0.02104832077026367, 0.022054912567138672, 0.023983104705810547, 0.02437222480773926, 0.021202943801879884, 0.021164031982421876, 0.021279743194580078, 0.02149068832397461, 0.021243904113769533, 0.020990976333618162, 0.020462591171264647, 0.02046976089477539, 0.020788223266601562, 0.02107699203491211, 0.02106675148010254, 0.02084556770324707, 0.020360191345214843, 0.02040115165710449, 0.020535295486450195, 0.02064896011352539, 0.02046771240234375, 0.020549631118774413, 0.021189632415771483, 0.021411840438842773, 0.020658176422119142, 0.02085990333557129, 0.021173248291015623, 0.020414464950561522, 0.020602880477905275, 0.020370431900024414, 0.020370431900024414, 0.020384767532348632, 0.020405248641967775, 0.02084659194946289, 0.02063871955871582, 0.020135936737060548, 0.020344831466674804, 0.020304895401000975, 0.020280319213867186, 0.020264959335327147, 0.020323328018188477, 0.020264959335327147, 0.02038374328613281, 0.020979711532592774, 0.021823488235473632, 0.02105548858642578, 0.020938751220703124, 0.021005311965942384, 0.020964351654052735, 0.021327871322631836, 0.020373504638671876, 0.020290559768676757, 0.020316160202026368, 0.021607423782348634, 0.02040115165710449, 0.020556800842285155, 0.02041651153564453, 0.02030182456970215, 0.020379648208618165, 0.021234687805175782, 0.021579776763916016, 0.021825536727905274, 0.02231500816345215, 0.021372928619384765, 0.020969472885131835, 0.021045248031616212, 0.021012479782104493, 0.021012479782104493, 0.021007360458374022, 0.021130239486694336, 0.021222400665283202, 
0.02106060791015625, 0.02104934310913086, 0.021145599365234375, 0.021140480041503908, 0.02104832077026367, 0.021129215240478515, 0.021164031982421876, 0.021013504028320314, 0.02188287925720215, 0.021159936904907226, 0.020950016021728517, 0.02105753517150879, 0.020984832763671874, 0.021189632415771483, 0.020947967529296875, 0.020948991775512696, 0.021195775985717775, 0.021015552520751952, 0.02104729652404785, 0.021164031982421876, 0.02129817581176758, 0.021131263732910157, 0.021003263473510742, 0.020975616455078124, 0.02104832077026367, 0.021375999450683594, 0.02104422378540039, 0.02087936019897461, 0.02106265640258789, 0.02108518409729004, 0.020986879348754883, 0.0210831356048584, 0.02091929626464844, 0.020981760025024415, 0.02106777572631836, 0.02103603172302246, 0.021082111358642578, 0.020990976333618162, 0.020969472885131835, 0.021102592468261717, 0.02107084846496582, 0.02102272033691406, 0.021215232849121093, 0.02107904052734375, 0.021167104721069335, 0.021124095916748048, 0.02109132766723633, 0.021142528533935546, 0.02103708839416504, 0.021072864532470703, 0.02090188789367676, 0.02110873603820801, 0.02102579116821289, 0.021105663299560547, 0.02109337615966797, 0.021013504028320314, 0.021021696090698243, 0.021195775985717775, 0.021134336471557616, 0.020791296005249024, 0.021517311096191406, 0.021202943801879884, 0.021131263732910157, 0.020955135345458984, 0.021073919296264648, 0.021122047424316406, 0.021149696350097655, 0.020997119903564454, 0.021089279174804687, 0.021121023178100586, 0.02106879997253418, 0.021111808776855468, 0.02125619125366211, 0.021198848724365234, 0.021136383056640624, 0.021102592468261717, 0.021131263732910157, 0.021163007736206055, 0.021114879608154297, 0.02120195198059082, 0.02113942337036133, 0.0211015682220459, 0.021198848724365234, 0.021155839920043946, 0.021198848724365234, 0.021173248291015623, 0.02103091239929199, 0.021121023178100586, 0.02150912094116211, 0.022401023864746093, 0.020595712661743162, 0.02079641532897949, 0.020758527755737305, 0.021168127059936523, 0.020701183319091796, 0.021130239486694336, 0.020709375381469726, 0.02087321662902832, 0.020864000320434572, 0.02083839988708496, 0.021102592468261717, 0.020979711532592774, 0.020926464080810548, 0.021356544494628905, 0.02123161506652832, 0.02087116813659668, 0.020918272018432618, 0.021805055618286134, 0.021151744842529296, 0.021149696350097655, 0.021000192642211913, 0.02105446434020996, 0.02104115104675293, 0.020974592208862306, 0.021005311965942384, 0.02023526382446289, 0.020282367706298828, 0.020682752609252928, 0.020907007217407226, 0.02106572723388672, 0.021421056747436523, 0.020750335693359375, 0.02085478401184082, 0.020880384445190428, 0.02109337615966797, 0.020684799194335936, 0.020342784881591795, 0.02109235191345215, 0.020740095138549804, 0.021223424911499023, 0.020355072021484375, 0.020511743545532226, 0.020963327407836914, 0.02106777572631836, 0.021061632156372072, 0.02031820869445801, 0.02046976089477539, 0.020992000579833983, 0.020945920944213867, 0.020966400146484376, 0.02091110420227051, 0.0212490234375, 0.020546560287475587, 0.020610048294067384, 0.020398080825805662, 0.02087116813659668, 0.020864000320434572, 0.020345855712890625, 0.021003263473510742, 0.02109542465209961, 0.02048204803466797, 0.02061311912536621, 0.02103603172302246, 0.020314111709594726, 0.02040934371948242, 0.02084351921081543, 0.021122047424316406, 0.0212674560546875, 0.020570112228393556, 0.021812223434448243, 0.02345881652832031, 0.022337535858154296, 0.0212807674407959, 0.020626432418823244, 
0.02104934310913086, 0.020985855102539062, 0.020299776077270508, 0.02025574493408203, 0.020734975814819336, 0.02085273551940918, 0.020939775466918945, 0.02059878349304199, 0.02024550437927246, 0.020783103942871094, 0.020953088760375976, 0.020899839401245117, 0.020985855102539062, 0.02108415985107422, 0.020611072540283205, 0.02060697555541992, 0.02091110420227051, 0.020797439575195312, 0.02026188850402832, 0.020915199279785156, 0.020564992904663085, 0.020913152694702147, 0.020641792297363282, 0.020350976943969725, 0.020470783233642577, 0.020479999542236327, 0.020411392211914063, 0.020525056838989256, 0.020557823181152343, 0.020404224395751954, 0.020494335174560546, 0.021732351303100587, 0.020702207565307617, 0.020900863647460938, 0.0206561279296875, 0.02105446434020996, 0.02042163276672363, 0.020356096267700196, 0.020832256317138673, 0.020985855102539062, 0.021102592468261717, 0.021013504028320314, 0.020941823959350587, 0.021147647857666017, 0.02210918426513672, 0.021094400405883788, 0.02063974380493164, 0.02045132827758789, 0.021094400405883788, 0.020642816543579103, 0.020970495223999023, 0.02102579116821289, 0.020314111709594726, 0.020727807998657227, 0.020428800582885744, 0.020855808258056642, 0.02046976089477539, 0.020692991256713866, 0.02051584053039551, 0.020395008087158203, 0.020666368484497072, 0.02087424087524414, 0.02104422378540039, 0.020968448638916014, 0.02120806312561035, 0.022574079513549804, 0.021742591857910155, 0.021371904373168944, 0.02106265640258789, 0.02119987106323242, 0.02126028823852539, 0.02124492835998535, 0.020946943283081054, 0.020960256576538085, 0.020941823959350587, 0.020997119903564454, 0.02088243293762207, 0.020928512573242186, 0.020853759765625, 0.020980735778808594, 0.020963327407836914, 0.021206016540527343, 0.02147532844543457, 0.021292032241821288, 0.021102592468261717, 0.02106982421875, 0.020922367095947265, 0.021021696090698243, 0.021218303680419923, 0.02123980712890625, 0.02127462387084961, 0.02128691291809082, 0.021428224563598632, 0.02131046485900879, 0.021242879867553712, 0.02131455993652344, 0.02128179168701172, 0.02151628875732422, 0.021425151824951173, 0.02132275199890137, 0.021424127578735352, 0.021372928619384765, 0.02126438331604004, 0.021283840179443358, 0.021377023696899415, 0.021140480041503908, 0.021344255447387696, 0.022034431457519533, 0.021749759674072267, 0.021336063385009766, 0.021372928619384765, 0.021584896087646483, 0.021534719467163087, 0.021331968307495116, 0.02125004768371582, 0.021599264144897462, 0.022180831909179688, 0.021378047943115236, 0.020434944152832032, 0.020314111709594726, 0.02024038314819336, 0.02043187141418457, 0.021315584182739256, 0.02127769660949707, 0.021141504287719725, 0.02122854423522949, 0.021160959243774414, 0.021182464599609374, 0.02120806312561035, 0.021275648117065428, 0.02132275199890137, 0.021287935256958008, 0.021526527404785157, 0.021378047943115236, 0.021181440353393553, 0.021104639053344726, 0.021212160110473634, 0.021183488845825195, 0.02168012809753418, 0.021565439224243164, 0.021189632415771483, 0.021178367614746094, 0.021164031982421876, 0.021275648117065428, 0.021605375289916993, 0.022120447158813478, 0.021396480560302734, 0.021485567092895508, 0.02130636787414551, 0.021155839920043946, 0.021163007736206055, 0.021213184356689452, 0.02128179168701172, 0.0212807674407959, 0.02130534362792969, 0.02119987106323242, 0.021209087371826172, 0.0210565128326416, 0.021115903854370118, 0.02109337615966797, 0.02128281593322754, 0.021165056228637694, 0.02124185562133789, 0.021106687545776368, 
0.02105855941772461, 0.020892671585083008, 0.021102592468261717, 0.02253004837036133, 0.02153267288208008, 0.02125619125366211, 0.02123673629760742, 0.021194751739501954, 0.02032537651062012, 0.020405248641967775, 0.02040934371948242, 0.022760448455810548, 0.021812223434448243, 0.02150809669494629, 0.02023321533203125, 0.020588544845581053, 0.021351423263549805, 0.021279743194580078, 0.022559743881225586, 0.02164735984802246, 0.02125823974609375, 0.021577728271484374, 0.021345279693603517, 0.021301248550415038, 0.020668415069580077, 0.02038374328613281, 0.020447231292724608, 0.02035916709899902, 0.020108287811279296, 0.02109644889831543, 0.021202943801879884, 0.02129100799560547, 0.02122444725036621, 0.021061632156372072, 0.0212674560546875, 0.0212674560546875, 0.021287935256958008, 0.02128281593322754, 0.021203968048095705, 0.021212160110473634, 0.02126233673095703, 0.021343231201171875, 0.021178367614746094, 0.021146623611450196, 0.02108006477355957, 0.021186559677124024, 0.021161983489990235, 0.020920320510864256, 0.02101862335205078, 0.021110784530639647, 0.021183488845825195, 0.021394432067871092, 0.021181440353393553, 0.021179391860961915, 0.021090303421020508, 0.021045248031616212, 0.02107904052734375, 0.021129215240478515, 0.021191680908203125, 0.021117952346801756, 0.021127168655395507, 0.021735424041748046, 0.021259263992309572, 0.021309440612792968, 0.021387264251708983, 0.02105753517150879, 0.02327756881713867, 0.022579200744628908, 0.02229452705383301, 0.021608448028564452, 0.021327871322631836, 0.021173248291015623, 0.02122137641906738, 0.02120806312561035, 0.021214208602905273, 0.021212160110473634, 0.02124083137512207]",tokens/s,47.7404053643215,,,1,64,1,,, -4bit-awq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,Qwen/Qwen1.5-72B,Qwen/Qwen1.5-72B,cuda,0,42,,,True,,,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run - report = scenario.run(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run - self.run_model_loading_tracking(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking - backend.load() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load - self.load_transformers_model() - File 
""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model - self.load_transformers_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights - self.load_transformers_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained - self.pretrained_model = self.automodel_loader.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4034, in from_pretrained - dispatch_model(model, **device_map_kwargs) - File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 494, in dispatch_model - model.to(device) - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 2905, in to - return super().to(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1174, in to - return self._apply(convert) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply - module._apply(fn) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply - module._apply(fn) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply - module._apply(fn) - [Previous line repeated 2 more times] - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 854, in _apply - self._buffers[key] = fn(buf) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1160, in convert - return t.to( -torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 96.00 MiB. GPU 0 has a total capacity of 22.18 GiB of which 68.50 MiB is free. Process 66444 has 22.11 GiB memory in use. Of the allocated memory 21.86 GiB is allocated by PyTorch, and 10.74 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) - -",qwen2,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,1,64,1,,, -4bit-awq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,EleutherAI/pythia-1.4b,EleutherAI/pythia-1.4b,cuda,0,42,,,True,,,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,gpt_neox,MB,1576.763392,2057.8304,0.0,1428.160512,1322.516992,s,1,8.14499658203125,8.14499658203125,0.0,8.14499658203125,8.14499658203125,8.14499658203125,8.14499658203125,[8.14499658203125],,kWh,1.6085757661125577e-05,8.800198210647255e-06,2.2276406710042274e-05,4.7162362581815104e-05,,MB,1641.070592,2082.996224,0.0,1434.451968,1320.892416,s,10,2.4189410400390625,0.24189410400390626,0.00015714637372977485,0.24188616943359376,0.24203426513671875,0.24213716583251954,0.24221948638916016,"[0.2422400665283203, 0.2417813720703125, 0.2420113983154297, 0.24171615600585938, 0.24179487609863282, 0.24197080993652345, 0.2419731903076172, 0.24191384887695314, 0.2416808319091797, 0.24185848999023438]",tokens/s,1058.3143440150404,kWh,2.8588274791828824e-06,1.5665003589898838e-06,1.6189788084093803e-05,2.061511592226657e-05,tokens/kWh,12418072.300214045,MB,1674.56768,2082.996224,0.0,1434.451968,1373.031936,s,10,12.66516650390625,1.2665166503906249,0.008444081786023462,1.2642487182617188,1.272671252441406,1.2809900451660157,1.287645079345703,"[1.289308837890625, 1.26822412109375, 1.265739990234375, 1.2570731201171874, 1.2631708984375, 1.2631226806640625, 1.2708226318359375, 1.26035205078125, 1.262025634765625, 1.2653265380859375]",tokens/s,49.7427333312746,kWh,1.5148766041232329e-05,8.301605091066452e-06,2.988122099491349e-05,5.333159212721225e-05,tokens/kWh,1181288.5662540437,,s,630,12.66145176124573,0.02009754247816782,0.0004380035972135553,0.019953664779663087,0.020706509208679197,0.020915916061401366,0.021604567546844484,"[0.02044108772277832, 0.020700159072875975, 0.02107699203491211, 0.02089779281616211, 0.0208240966796875, 0.020793312072753905, 0.02083839988708496, 0.02104934310913086, 0.021567487716674806, 0.020765695571899414, 0.02069811248779297, 0.020750335693359375, 0.020741119384765624, 0.020742143630981445, 0.02068992042541504, 0.020800512313842775, 0.020159488677978517, 0.020121599197387697, 0.020140031814575195, 0.02082611274719238, 0.02083430480957031, 0.020773887634277344, 0.02086502456665039, 0.020952064514160155, 0.021124095916748048, 0.020948991775512696, 0.02105753517150879, 0.020771839141845702, 0.02047488021850586, 0.019903488159179687, 0.019938304901123048, 0.022009855270385743, 0.021908479690551756, 0.021857280731201172, 0.020967424392700194, 0.020389888763427736, 0.019677183151245118, 0.01962598419189453, 0.021130239486694336, 0.02131865692138672, 0.020154367446899413, 0.01995676803588867, 0.01972425651550293, 0.019679231643676756, 0.01966694450378418, 0.01965772819519043, 0.01968230438232422, 0.01977446365356445, 0.020769792556762694, 0.02060492706298828, 0.0196628475189209, 0.01997209548950195, 
0.019991552352905274, 0.019734527587890623, 0.01970278358459473, 0.019812351226806642, 0.019942399978637695, 0.019926015853881835, 0.019986431121826173, 0.01982156753540039, 0.019706880569458008, 0.019989503860473632, 0.01988812828063965, 0.019876863479614256, 0.019920896530151368, 0.019899391174316407, 0.01988096046447754, 0.019997695922851562, 0.020153343200683595, 0.02007142448425293, 0.02008678436279297, 0.019998720169067383, 0.0198922233581543, 0.019928064346313477, 0.019923967361450197, 0.01993427276611328, 0.01998841667175293, 0.019936256408691407, 0.020183040618896485, 0.019948543548583983, 0.019960832595825196, 0.019965951919555663, 0.019958784103393554, 0.020231168746948244, 0.02082508850097656, 0.020190208435058594, 0.019981311798095702, 0.019927040100097656, 0.019777536392211914, 0.01965875244140625, 0.019895296096801757, 0.019947519302368166, 0.020008960723876954, 0.02004582405090332, 0.02007756805419922, 0.01982361602783203, 0.019689472198486328, 0.0196945915222168, 0.019706880569458008, 0.01984716796875, 0.019924991607666014, 0.019887104034423828, 0.02146201515197754, 0.021929983139038087, 0.02168320083618164, 0.02101043128967285, 0.02067353630065918, 0.020541439056396483, 0.019953664779663087, 0.019964927673339843, 0.019955711364746095, 0.019903488159179687, 0.019945472717285157, 0.019971071243286134, 0.019963903427124022, 0.019962879180908204, 0.0198922233581543, 0.019949567794799804, 0.02003660774230957, 0.020818944931030273, 0.020708351135253905, 0.020725759506225586, 0.020723712921142577, 0.019994623184204103, 0.019738624572753907, 0.019685375213623048, 0.02104115104675293, 0.02086809539794922, 0.020807680130004884, 0.02065407943725586, 0.020478975296020507, 0.019950592041015625, 0.019958784103393554, 0.020015104293823242, 0.019939327239990236, 0.019895296096801757, 0.019963903427124022, 0.019904512405395508, 0.019983360290527344, 0.019893247604370116, 0.019986431121826173, 0.02083635139465332, 0.021192703247070312, 0.021175296783447265, 0.0208353271484375, 0.019999744415283204, 0.020011007308959963, 0.02000383949279785, 0.019955711364746095, 0.020002815246582033, 0.019983360290527344, 0.01988403129577637, 0.019929088592529298, 0.019937280654907227, 0.02000588798522949, 0.019935232162475586, 0.019950592041015625, 0.02003660774230957, 0.01982054328918457, 0.019935232162475586, 0.019914751052856446, 0.01991372871398926, 0.02004377555847168, 0.019938304901123048, 0.019999744415283204, 0.020182016372680665, 0.020007936477661133, 0.019903488159179687, 0.019952640533447266, 0.01985228729248047, 0.019951616287231445, 0.01987379264831543, 0.019911680221557617, 0.020153343200683595, 0.019965951919555663, 0.019788799285888673, 0.019894271850585937, 0.019907583236694337, 0.019959808349609375, 0.01988198471069336, 0.020001792907714845, 0.019970048904418947, 0.019975168228149414, 0.019899391174316407, 0.019993600845336915, 0.019924991607666014, 0.019968000411987305, 0.019926015853881835, 0.019926015853881835, 0.01987583923339844, 0.019951616287231445, 0.019901439666748046, 0.019956735610961913, 0.01999667167663574, 0.01997209548950195, 0.019957759857177734, 0.019966976165771484, 0.02000588798522949, 0.02003455924987793, 0.019975168228149414, 0.019965951919555663, 0.019961856842041017, 0.019919872283935547, 0.019983360290527344, 0.02007859230041504, 0.019899391174316407, 0.019929088592529298, 0.019952640533447266, 0.019950592041015625, 0.019994623184204103, 0.02000588798522949, 0.019899391174316407, 0.020033536911010744, 0.02004377555847168, 0.01987583923339844, 0.019877887725830077, 
0.01988812828063965, 0.01985228729248047, 0.019895296096801757, 0.01985536003112793, 0.019937280654907227, 0.0198922233581543, 0.019952640533447266, 0.020135936737060548, 0.019954687118530275, 0.019939327239990236, 0.019945472717285157, 0.019914751052856446, 0.01997315216064453, 0.019963872909545897, 0.019944448471069336, 0.01983692741394043, 0.01991372871398926, 0.019891199111938478, 0.019908607482910155, 0.019948543548583983, 0.019970048904418947, 0.019923967361450197, 0.020016128540039063, 0.02001203155517578, 0.019927040100097656, 0.019979263305664064, 0.019923967361450197, 0.019900415420532228, 0.0200447998046875, 0.019998720169067383, 0.019915775299072267, 0.019929088592529298, 0.019933183670043944, 0.019960895538330078, 0.019883968353271483, 0.019891199111938478, 0.01979903984069824, 0.019893247604370116, 0.01990656089782715, 0.019887104034423828, 0.019987455368041994, 0.01986764717102051, 0.01983692741394043, 0.019957759857177734, 0.019901439666748046, 0.01986355209350586, 0.01986764717102051, 0.019934207916259765, 0.019961856842041017, 0.019918848037719726, 0.019886079788208007, 0.02002739143371582, 0.019990528106689453, 0.019912704467773438, 0.019928064346313477, 0.019918848037719726, 0.019939327239990236, 0.019979263305664064, 0.019982336044311523, 0.020015104293823242, 0.020934656143188478, 0.020185087203979494, 0.020015104293823242, 0.019938304901123048, 0.020220928192138672, 0.019957759857177734, 0.020100095748901366, 0.02007347106933594, 0.021024768829345702, 0.02072985649108887, 0.020617216110229493, 0.020489215850830078, 0.0204400634765625, 0.019978239059448243, 0.019730432510375977, 0.02007142448425293, 0.019907583236694337, 0.01985843276977539, 0.01994758415222168, 0.019929023742675783, 0.020139007568359374, 0.019946495056152345, 0.019895296096801757, 0.019948543548583983, 0.02026188850402832, 0.020358144760131838, 0.019990528106689453, 0.02002124786376953, 0.019984384536743165, 0.01991372871398926, 0.020015104293823242, 0.02008166313171387, 0.01989836883544922, 0.019941375732421874, 0.019968000411987305, 0.020016128540039063, 0.02025984001159668, 0.019922943115234376, 0.01988915252685547, 0.019974143981933593, 0.019953664779663087, 0.01987276840209961, 0.019842048645019532, 0.02028646469116211, 0.02042470359802246, 0.020168703079223634, 0.019997695922851562, 0.019929088592529298, 0.019927040100097656, 0.019985408782958985, 0.019887104034423828, 0.019952640533447266, 0.019939327239990236, 0.019992576599121094, 0.02002841567993164, 0.01993011283874512, 0.019911680221557617, 0.01992192077636719, 0.019914751052856446, 0.019943424224853516, 0.02017692756652832, 0.020495328903198242, 0.020262912750244142, 0.019920896530151368, 0.019961856842041017, 0.019984384536743165, 0.019985408782958985, 0.019976192474365235, 0.019911680221557617, 0.019999744415283204, 0.019948543548583983, 0.01985843276977539, 0.020351999282836913, 0.01984000015258789, 0.019907583236694337, 0.019918848037719726, 0.019920896530151368, 0.020315135955810547, 0.0200581111907959, 0.020015104293823242, 0.02007347106933594, 0.019954687118530275, 0.020447231292724608, 0.019938304901123048, 0.019965951919555663, 0.020017152786254884, 0.02008166313171387, 0.02046668815612793, 0.02007756805419922, 0.019938304901123048, 0.019969024658203126, 0.019923967361450197, 0.019907583236694337, 0.020015104293823242, 0.020978687286376953, 0.020406272888183592, 0.01988812828063965, 0.020173824310302735, 0.019939327239990236, 0.019986431121826173, 0.019939327239990236, 0.020075519561767577, 0.019968000411987305, 
0.020189184188842774, 0.02005504035949707, 0.020125696182250977, 0.019891199111938478, 0.019973119735717772, 0.019919872283935547, 0.019971071243286134, 0.020291584014892578, 0.019952640533447266, 0.019936256408691407, 0.019901439666748046, 0.019932159423828123, 0.020176895141601564, 0.019940351486206053, 0.020131839752197265, 0.020033536911010744, 0.020008960723876954, 0.020134912490844727, 0.019914751052856446, 0.019928064346313477, 0.020158464431762696, 0.02024448013305664, 0.020564992904663085, 0.02004172706604004, 0.01986662483215332, 0.0198922233581543, 0.01979084777832031, 0.01984515190124512, 0.020540384292602538, 0.020922367095947265, 0.020155391693115234, 0.019954687118530275, 0.01984000015258789, 0.019911680221557617, 0.01985843276977539, 0.019923967361450197, 0.01993011283874512, 0.019893247604370116, 0.020024320602416993, 0.019926015853881835, 0.02082713508605957, 0.026071039199829102, 0.02148454475402832, 0.020747264862060546, 0.02108518409729004, 0.02068889617919922, 0.02000486373901367, 0.019926015853881835, 0.019977216720581056, 0.019938304901123048, 0.019861503601074217, 0.019891199111938478, 0.01988198471069336, 0.019945472717285157, 0.01983180809020996, 0.01966694450378418, 0.019602432250976562, 0.019875871658325196, 0.019831775665283203, 0.019907583236694337, 0.019861536026000978, 0.01989731216430664, 0.01985740852355957, 0.019861503601074217, 0.0198604793548584, 0.019927040100097656, 0.02072985649108887, 0.020706304550170897, 0.02104729652404785, 0.02068377685546875, 0.02063052749633789, 0.019909631729125975, 0.019885055541992186, 0.019958784103393554, 0.019933183670043944, 0.019975168228149414, 0.019910655975341796, 0.02003455924987793, 0.019968000411987305, 0.019970048904418947, 0.019957759857177734, 0.019953664779663087, 0.019961856842041017, 0.019953664779663087, 0.019943424224853516, 0.020000768661499024, 0.019958784103393554, 0.019927040100097656, 0.01998028755187988, 0.019876863479614256, 0.01983897590637207, 0.019833856582641602, 0.019903488159179687, 0.019922943115234376, 0.019877887725830077, 0.02006425666809082, 0.019952640533447266, 0.01988403129577637, 0.019910655975341796, 0.020118528366088868, 0.019997695922851562, 0.02002841567993164, 0.019963903427124022, 0.01985740852355957, 0.019956735610961913, 0.02000588798522949, 0.019946495056152345, 0.019964927673339843, 0.019938304901123048, 0.019922943115234376, 0.019912704467773438, 0.019904512405395508, 0.01985740852355957, 0.019940351486206053, 0.019912704467773438, 0.01987993621826172, 0.01990553665161133, 0.02002841567993164, 0.019918848037719726, 0.019919872283935547, 0.01993011283874512, 0.019812351226806642, 0.019936256408691407, 0.019942399978637695, 0.019907583236694337, 0.019811328887939454, 0.01983078384399414, 0.01983283233642578, 0.019912704467773438, 0.01987379264831543, 0.019900415420532228, 0.019946495056152345, 0.019974143981933593, 0.0198973445892334, 0.01984000015258789, 0.01987379264831543, 0.021005311965942384, 0.02070425605773926, 0.02046463966369629, 0.020257791519165038, 0.02107904052734375, 0.020247615814208985, 0.019980224609375, 0.01998028755187988, 0.019912704467773438, 0.019902463912963866, 0.019876863479614256, 0.01990553665161133, 0.020158464431762696, 0.019903488159179687, 0.01987174415588379, 0.01987993621826172, 0.019907583236694337, 0.0198973445892334, 0.02002022361755371, 0.020131839752197265, 0.02002841567993164, 0.01988915252685547, 0.019971071243286134, 0.01984716796875, 0.0198922233581543, 0.019949567794799804, 0.01989836883544922, 0.01989836883544922, 
0.019964927673339843, 0.0198604793548584, 0.019718143463134767, 0.019869695663452147, 0.01983180809020996, 0.019891199111938478, 0.019949567794799804, 0.02004172706604004, 0.020908031463623047, 0.021619712829589844, 0.02006220817565918, 0.019963903427124022, 0.019941375732421874, 0.019861503601074217, 0.019893247604370116, 0.019903488159179687, 0.019916799545288084, 0.019928064346313477, 0.019803136825561524, 0.019942399978637695, 0.01988198471069336, 0.019954687118530275, 0.019919872283935547, 0.01985945510864258, 0.01983180809020996, 0.019936256408691407, 0.019870719909667968, 0.02025164794921875, 0.02004582405090332, 0.019903488159179687, 0.019907583236694337, 0.020023296356201172, 0.019978239059448243, 0.019983360290527344, 0.019950592041015625, 0.019957759857177734, 0.019962879180908204, 0.019987455368041994, 0.019974143981933593, 0.020110336303710938, 0.019971071243286134, 0.019939327239990236, 0.019937280654907227, 0.01990656089782715, 0.019927040100097656, 0.019947519302368166, 0.019933183670043944, 0.02001203155517578, 0.019969024658203126, 0.019896320343017578, 0.01997209548950195, 0.0198799991607666, 0.019945407867431642, 0.019901439666748046, 0.01985740852355957, 0.019935232162475586, 0.019945472717285157, 0.01987993621826172, 0.019904512405395508, 0.019909631729125975, 0.01989740753173828, 0.01996793556213379, 0.019957759857177734, 0.02007961654663086, 0.019904512405395508, 0.019909631729125975, 0.0198604793548584, 0.019908607482910155, 0.019899391174316407, 0.019906591415405274, 0.01993827247619629, 0.019963903427124022, 0.020907007217407226, 0.02030080032348633, 0.02048204803466797, 0.020543487548828124, 0.020537343978881836, 0.021028863906860353, 0.02029363250732422, 0.020521984100341797, 0.02089369583129883, 0.020324352264404297, 0.020295679092407228, 0.019951616287231445, 0.02068172836303711, 0.02052403259277344, 0.02025267219543457]",tokens/s,49.75732734916773,,,1,64,1,,, -4bit-awq-exllama-v1-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,facebook/xglm-7.5B,facebook/xglm-7.5B,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File 
""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - self.load_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3907, in from_pretrained - hf_quantizer.postprocess_model(model) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/base.py"", line 195, in postprocess_model - return self._process_model_after_weight_loading(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/quantizer_awq.py"", line 107, in _process_model_after_weight_loading - model = post_init_awq_exllama_modules(model, self.quantization_config.exllama_config) - File ""/usr/local/lib/python3.10/dist-packages/transformers/integrations/awq.py"", line 462, in post_init_awq_exllama_modules - model = exllama_post_init(model) - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllama.py"", line 144, in exllama_post_init - submodule.post_init() - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllama.py"", line 77, in post_init - self.q4 = exl_ext.make_q4( -NameError: name 'exl_ext' is not defined - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,1,64,1,,, -4bit-awq-exllama-v1-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,huggyllama/llama-30b,huggyllama/llama-30b,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - self.load_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3907, in from_pretrained - hf_quantizer.postprocess_model(model) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/base.py"", line 195, in postprocess_model - return self._process_model_after_weight_loading(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/quantizer_awq.py"", line 107, in _process_model_after_weight_loading - model = post_init_awq_exllama_modules(model, self.quantization_config.exllama_config) - File ""/usr/local/lib/python3.10/dist-packages/transformers/integrations/awq.py"", line 462, in post_init_awq_exllama_modules - model = exllama_post_init(model) - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllama.py"", line 144, in exllama_post_init - submodule.post_init() - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllama.py"", line 77, in post_init - self.q4 = exl_ext.make_q4( -NameError: name 'exl_ext' is not defined - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,1,64,1,,, -4bit-awq-exllama-v1-eager,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,openai-community/gpt2-large,openai-community/gpt2-large,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.1,,0.30.1,,,,1.19.2,,,,0.11.1,,,,,,,,,,,,,,,,,,,,,,,,,,,MB,1254.875136,2645.03296,0.0,1998.585856,1692.285952,s,10,0.24200246238708498,0.0242002462387085,0.000612125689531125,0.024116847991943358,0.02477244110107422,0.025168700790405275,0.025485708541870117,"[0.025564960479736328, 0.023542015075683594, 0.024684383392333985, 0.023764448165893556, 0.024505792617797853, 0.02447939109802246, 0.023851680755615234, 0.024382015228271485, 0.02353171157836914, 0.023696063995361328]",tokens/s,10578.40475980471,kWh,2.807038850191559e-07,1.537483499955637e-07,8.364053933254355e-07,1.270857628340155e-06,tokens/kWh,201438771.9687823,MB,1255.440384,2645.03296,0.0,1998.585856,1740.091904,s,10,13.98928271484375,1.398928271484375,0.009250422421391818,1.3987031860351564,1.4114088623046874,1.4117007690429688,1.4119342944335938,"[1.4011539306640626, 1.41199267578125, 1.4039649658203126, 1.3913382568359376, 1.411343994140625, 1.4078441162109374, 1.3920347900390626, 1.39625244140625, 1.3845965576171875, 1.388760986328125]",tokens/s,45.034474807741184,kWh,1.6551164256870412e-05,9.07002460188642e-06,3.349914985287281e-05,5.912033871162965e-05,tokens/kWh,1065623.1234955217,,s,629,14.182389726638794,0.022547519438217475,0.0029122915922720257,0.022378463745117188,0.022657024002075195,0.02286632957458496,0.04563361907958984,"[0.02266111946105957, 0.022467584609985353, 0.022542335510253905, 0.02246451187133789, 0.022589439392089843, 0.022599679946899414, 0.022360063552856444, 0.02248806381225586, 0.02263039970397949, 0.022633472442626954, 0.02250547218322754, 0.02248294448852539, 0.022863872528076173, 
0.023409664154052736, 0.023657472610473632, 0.023362560272216795, 0.023547903060913086, 0.023163904190063478, 0.02222591972351074, 0.02307379150390625, 0.02348646354675293, 0.022845439910888672, 0.02269491195678711, 0.022561792373657227, 0.02246451187133789, 0.022376447677612304, 0.022429695129394533, 0.02270207977294922, 0.022359039306640623, 0.02206208038330078, 0.021753856658935547, 0.021375999450683594, 0.02146406364440918, 0.021550079345703126, 0.0216944637298584, 0.021514240264892577, 0.021566463470458985, 0.02152448081970215, 0.021552127838134767, 0.021534719467163087, 0.021593088150024413, 0.02186649513244629, 0.022435840606689454, 0.02308095932006836, 0.02249113655090332, 0.021839872360229492, 0.021519359588623048, 0.021755903244018555, 0.02161667251586914, 0.021600223541259764, 0.02187264060974121, 0.022023168563842774, 0.021592063903808592, 0.021515264511108398, 0.021596160888671875, 0.021569536209106444, 0.021582847595214845, 0.021592063903808592, 0.02168422317504883, 0.022158336639404298, 0.02163609504699707, 0.02167193603515625, 0.04547686386108398, 0.021605375289916993, 0.021596160888671875, 0.021605375289916993, 0.021635072708129883, 0.022408191680908202, 0.022987775802612305, 0.022617088317871094, 0.022552576065063477, 0.02248089599609375, 0.022358015060424806, 0.0216627197265625, 0.02272768020629883, 0.022161407470703123, 0.021736448287963867, 0.021808128356933593, 0.021787647247314454, 0.021617664337158202, 0.022231039047241212, 0.02268876838684082, 0.022399999618530272, 0.022468608856201173, 0.02253004837036133, 0.02234982490539551, 0.022375423431396483, 0.022451200485229493, 0.02247270393371582, 0.022508544921875, 0.025584640502929686, 0.023030784606933592, 0.023574527740478517, 0.022610944747924806, 0.02266726493835449, 0.02264473533630371, 0.022552576065063477, 0.022425600051879883, 0.022433792114257813, 0.022609920501708985, 0.022460447311401368, 0.022352863311767578, 0.02246963119506836, 0.022428672790527345, 0.022574079513549804, 0.022376447677612304, 0.02248908805847168, 0.0224849910736084, 0.02279734420776367, 0.02256585693359375, 0.022495231628417968, 0.02246246337890625, 0.02231091117858887, 0.021749759674072267, 0.02169036865234375, 0.021618688583374023, 0.0221214714050293, 0.022444032669067384, 0.022716415405273437, 0.022600704193115235, 0.02253824043273926, 0.022766592025756836, 0.022545408248901368, 0.022372352600097657, 0.02264678382873535, 0.04748287963867188, 0.02241535949707031, 0.02246553611755371, 0.022574079513549804, 0.022425600051879883, 0.02245529556274414, 0.02246963119506836, 0.022468608856201173, 0.022520832061767578, 0.0224901123046875, 0.022559743881225586, 0.022330368041992187, 0.022426624298095704, 0.0227194881439209, 0.022578176498413087, 0.02250649642944336, 0.02247372817993164, 0.022339584350585938, 0.022377504348754882, 0.022376415252685546, 0.022382591247558595, 0.022501375198364256, 0.02234060859680176, 0.022625280380249024, 0.022508544921875, 0.022467584609985353, 0.022540288925170897, 0.022409215927124023, 0.022548479080200197, 0.022412288665771486, 0.022406143188476564, 0.02249830436706543, 0.022558719635009765, 0.02249318313598633, 0.022424575805664062, 0.022597631454467772, 0.022608896255493165, 0.022436864852905275, 0.02167091178894043, 0.02172211265563965, 0.02166374397277832, 0.021729280471801758, 0.02165247917175293, 0.021629951477050782, 0.02152448081970215, 0.021518335342407227, 0.021576704025268553, 0.022010879516601564, 0.02165657615661621, 0.02165247917175293, 0.021729280471801758, 0.021611520767211914, 
0.02169139289855957, 0.02255564880371094, 0.02243891143798828, 0.022483968734741212, 0.0224849910736084, 0.022394880294799805, 0.02249932861328125, 0.022280191421508787, 0.022421503067016603, 0.022451200485229493, 0.02246143913269043, 0.045770751953125, 0.021746688842773438, 0.021728256225585937, 0.021703680038452147, 0.021639167785644533, 0.021617664337158202, 0.02170163154602051, 0.02166067123413086, 0.021626880645751953, 0.02168422317504883, 0.02163711929321289, 0.021738496780395508, 0.02183475112915039, 0.021724159240722657, 0.021720064163208007, 0.021741567611694337, 0.02190540885925293, 0.022610944747924806, 0.02248089599609375, 0.021700607299804688, 0.02162073516845703, 0.02164735984802246, 0.02163609504699707, 0.02162483215332031, 0.021567487716674806, 0.02170675277709961, 0.021619712829589844, 0.021646335601806642, 0.021635072708129883, 0.02166988754272461, 0.021695487976074217, 0.021622783660888673, 0.02164121627807617, 0.02167398452758789, 0.02165043258666992, 0.02211123275756836, 0.023779327392578126, 0.022837247848510742, 0.022750207901000977, 0.022556671142578123, 0.022378496170043945, 0.022426624298095704, 0.022381568908691408, 0.02231500816345215, 0.02289254379272461, 0.022403072357177735, 0.022542335510253905, 0.022353919982910156, 0.022362112045288086, 0.022404096603393556, 0.022500352859497072, 0.02253107261657715, 0.02250444793701172, 0.022428672790527345, 0.022418432235717774, 0.02233241653442383, 0.022389759063720704, 0.022434816360473633, 0.022420480728149415, 0.022503423690795898, 0.02242355155944824, 0.022587392807006838, 0.02253004837036133, 0.04750950241088867, 0.02255462455749512, 0.02245529556274414, 0.02269388771057129, 0.022742015838623047, 0.02252390480041504, 0.022503423690795898, 0.02253107261657715, 0.022559743881225586, 0.022559743881225586, 0.02245734405517578, 0.02246963119506836, 0.022397951126098634, 0.02254643249511719, 0.022470687866210936, 0.022647775650024415, 0.02243891143798828, 0.02228326416015625, 0.022187007904052734, 0.02170982360839844, 0.02269388771057129, 0.022635520935058592, 0.022399007797241213, 0.022482912063598634, 0.022408191680908202, 0.022483968734741212, 0.0225167350769043, 0.02253004837036133, 0.02247372817993164, 0.0224849910736084, 0.02231808090209961, 0.0224532470703125, 0.02246246337890625, 0.02240716743469238, 0.022417407989501953, 0.022451200485229493, 0.02234163284301758, 0.022402048110961914, 0.022230016708374024, 0.022042623519897463, 0.0220579833984375, 0.022452255249023438, 0.022406112670898436, 0.022346752166748047, 0.022376447677612304, 0.02267852783203125, 0.02265088081359863, 0.0224768009185791, 0.022940671920776368, 0.022603776931762694, 0.0219238395690918, 0.021757951736450197, 0.0216944637298584, 0.02168217658996582, 0.021613567352294923, 0.022289407730102538, 0.02246963119506836, 0.02243174362182617, 0.022377471923828125, 0.0224399356842041, 0.022323200225830078, 0.02244915199279785, 0.022559743881225586, 0.047440895080566405, 0.022466560363769532, 0.022579200744628908, 0.022544384002685547, 0.022822912216186524, 0.023026687622070312, 0.022591487884521484, 0.02247372817993164, 0.022510623931884764, 0.022584287643432618, 0.022615039825439453, 0.02259660720825195, 0.022487039566040038, 0.022495231628417968, 0.022602752685546876, 0.0225218563079834, 0.02249728012084961, 0.022600704193115235, 0.02282700729370117, 0.022567935943603516, 0.02230271911621094, 0.02247270393371582, 0.022608896255493165, 0.022347776412963868, 0.022796287536621093, 0.02242767906188965, 0.02233852767944336, 0.022391807556152343, 
0.022312959671020507, 0.02224844741821289, 0.02244710350036621, 0.022391807556152343, 0.02162892723083496, 0.022434816360473633, 0.022419456481933595, 0.023061504364013673, 0.023794687271118165, 0.02291814422607422, 0.02275328063964844, 0.0224737606048584, 0.022846431732177735, 0.022739967346191405, 0.0224399356842041, 0.022252544403076172, 0.02245529556274414, 0.022370304107666016, 0.022350847244262697, 0.022475776672363282, 0.021917695999145507, 0.021703680038452147, 0.0216494083404541, 0.021736448287963867, 0.021687295913696288, 0.02165452766418457, 0.02166374397277832, 0.02164735984802246, 0.021622783660888673, 0.021659648895263672, 0.02165350341796875, 0.021634048461914062, 0.021605375289916993, 0.021601280212402343, 0.021582847595214845, 0.04577996826171875, 0.021742591857910155, 0.02168012809753418, 0.022021120071411132, 0.02241535949707031, 0.022448160171508788, 0.022464479446411133, 0.022429695129394533, 0.02190540885925293, 0.02169139289855957, 0.02166169548034668, 0.021618688583374023, 0.02166988754272461, 0.021708799362182618, 0.02167705535888672, 0.021703680038452147, 0.02170675277709961, 0.021603328704833984, 0.022540288925170897, 0.022359039306640623, 0.022375455856323244, 0.022378463745117188, 0.022406143188476564, 0.02274508857727051, 0.022146047592163084, 0.021757951736450197, 0.022359039306640623, 0.022399999618530272, 0.022748159408569335, 0.022377471923828125, 0.022382591247558595, 0.022356000900268555, 0.022352863311767578, 0.022393856048583984, 0.021725183486938478, 0.021651456832885742, 0.021613567352294923, 0.021739519119262696, 0.021606399536132814, 0.021602304458618164, 0.021587968826293946, 0.021557247161865235, 0.021554176330566405, 0.021601280212402343, 0.021581823348999024, 0.021748735427856446, 0.02166476821899414, 0.021607423782348634, 0.021644287109375, 0.022733823776245117, 0.02239897537231445, 0.022427648544311524, 0.02243174362182617, 0.022390783309936522, 0.02250444793701172, 0.02248908805847168, 0.022656000137329102, 0.02255462455749512, 0.02251571273803711, 0.02247372817993164, 0.02249932861328125, 0.022417407989501953, 0.02246143913269043, 0.04679884719848633, 0.021774335861206053, 0.0216944637298584, 0.022240255355834963, 0.022406143188476564, 0.02237238311767578, 0.022376415252685546, 0.022520832061767578, 0.022566911697387695, 0.022358015060424806, 0.02244915199279785, 0.02204876708984375, 0.02246348762512207, 0.02243071937561035, 0.022529024124145508, 0.02246553611755371, 0.022758399963378906, 0.022568960189819336, 0.02242355155944824, 0.02242252731323242, 0.022395904541015626, 0.022386688232421875, 0.022372352600097657, 0.022389759063720704, 0.02205695915222168, 0.02208563232421875, 0.02240716743469238, 0.022379520416259766, 0.02263039970397949, 0.02286796760559082, 0.022436864852905275, 0.022495231628417968, 0.02264473533630371, 0.0224768009185791, 0.02244915199279785, 0.022459392547607423, 0.02164735984802246, 0.021584896087646483, 0.02171494483947754, 0.02163199996948242, 0.021613567352294923, 0.02165452766418457, 0.021562368392944335, 0.02166374397277832, 0.021953535079956055, 0.022040576934814454, 0.021609472274780273, 0.021794815063476563, 0.021734432220458986, 0.022769632339477538, 0.02245631980895996, 0.021619712829589844, 0.022328319549560546, 0.02252390480041504, 0.02247987174987793, 0.02167296028137207, 0.02163711929321289, 0.021740543365478517, 0.021622783660888673, 0.022252544403076172, 0.021740543365478517, 0.021704704284667968, 0.021771263122558594, 0.04554956817626953, 0.021817344665527344, 0.021734399795532225, 
0.021558271408081055, 0.021544960021972655, 0.021627904891967774, 0.021627904891967774, 0.02165555191040039, 0.021766143798828123, 0.021760000228881835, 0.021741567611694337, 0.02191974449157715, 0.02168627166748047, 0.021700607299804688, 0.021734399795532225, 0.021758975982666014, 0.021625856399536132, 0.02186444854736328, 0.021780479431152345, 0.02165247917175293, 0.02168217658996582, 0.021432319641113282, 0.02170675277709961, 0.021590015411376954, 0.021515264511108398, 0.021569536209106444, 0.021545984268188476, 0.021519359588623048, 0.021651456832885742, 0.021586944580078125, 0.022597631454467772, 0.023916543960571288, 0.022725664138793945, 0.022460384368896483, 0.022580223083496095, 0.022452224731445314, 0.02228121566772461, 0.02166886329650879, 0.021551103591918946, 0.021612543106079102, 0.02163711929321289, 0.021565439224243164, 0.021635072708129883, 0.021566463470458985, 0.022394912719726563, 0.022438880920410156, 0.022375423431396483, 0.022433792114257813, 0.022394880294799805, 0.022401023864746093, 0.02226380729675293, 0.02230886459350586, 0.02188697624206543, 0.021750783920288085, 0.02182963180541992, 0.023613439559936524, 0.02305843162536621, 0.022502399444580077, 0.022609920501708985, 0.0224768009185791, 0.02185932731628418, 0.02165452766418457, 0.021748735427856446, 0.045666305541992185, 0.0216494083404541, 0.0216944637298584, 0.02168217658996582, 0.02166886329650879, 0.021585920333862304, 0.02225971221923828, 0.022445056915283205, 0.022372352600097657, 0.02244812774658203, 0.02245631980895996, 0.022335487365722655, 0.022558719635009765, 0.022304767608642577, 0.0224532470703125, 0.0224849910736084, 0.02243071937561035, 0.021595136642456055, 0.021551103591918946, 0.0217262077331543, 0.021611520767211914, 0.021566463470458985, 0.021792768478393554, 0.021699583053588867, 0.021734399795532225, 0.02164838409423828, 0.02206515121459961, 0.02191564750671387, 0.021738496780395508, 0.02163609504699707, 0.02168524742126465, 0.021646335601806642, 0.021615615844726564, 0.02164019203186035, 0.02167705535888672, 0.021550079345703126, 0.02182963180541992, 0.02245529556274414, 0.02269388771057129, 0.0224901123046875, 0.022406143188476564, 0.022425600051879883, 0.02234880065917969, 0.022487039566040038, 0.022495231628417968, 0.022410240173339844, 0.022487039566040038, 0.022525951385498046, 0.02243174362182617, 0.022336511611938475, 0.02234060859680176, 0.022337535858154296, 0.02232524871826172, 0.022363136291503907, 0.021784576416015625, 0.021576704025268553, 0.0216760311126709, 0.02170675277709961, 0.021560319900512694, 0.0216627197265625, 0.02201907157897949, 0.02225868797302246, 0.022460416793823244]",tokens/s,44.35077671138517,,,1,64,1,,, -4bit-awq-exllama-v1-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,tiiuae/falcon-180B,tiiuae/falcon-180B,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - 
File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 304, in hf_raise_for_status - response.raise_for_status() - File ""/usr/local/lib/python3.10/dist-packages/requests/models.py"", line 1024, in raise_for_status - raise HTTPError(http_error_msg, response=self) -requests.exceptions.HTTPError: 403 Client Error: Forbidden for url: https://huggingface.co/tiiuae/falcon-180B/resolve/main/config.json - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1722, in _get_metadata_or_catch_error - metadata = get_hf_file_metadata(url=url, proxies=proxies, timeout=etag_timeout, headers=headers) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1645, in get_hf_file_metadata - r = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 372, in _request_wrapper - response = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 396, in _request_wrapper - hf_raise_for_status(response) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 367, in hf_raise_for_status - raise HfHubHTTPError(message, response=response) from e -huggingface_hub.utils._errors.HfHubHTTPError: (Request ID: Root=1-66949914-19bd13bd77fc0356180fdfa7;cbcd402d-5497-4d3b-95ac-74a99c35ada3) - -403 Forbidden: Please enable access to public gated repositories in your fine-grained token settings to view this repository.. -Cannot access content at: https://huggingface.co/tiiuae/falcon-180B/resolve/main/config.json. -If you are trying to create or update content,make sure you have a token with the `write` role. - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1221, in hf_hub_download - return _hf_hub_download_to_cache_dir( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1325, in _hf_hub_download_to_cache_dir - _raise_on_head_call_error(head_call_error, force_download, local_files_only) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1826, in _raise_on_head_call_error - raise LocalEntryNotFoundError( -huggingface_hub.utils._errors.LocalEntryNotFoundError: An error happened while trying to locate the file on the Hub and we cannot find the requested files in the local cache. Please check your connection and try again or make sure your Internet connection is on. 
- -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 445, in cached_file - raise EnvironmentError( -OSError: We couldn't connect to 'https://huggingface.co' to load this file, couldn't find it in the cached files and it looks like tiiuae/falcon-180B is not the path to a directory containing a file named config.json. -Checkout your internet connection or see how to run the library in offline mode at 'https://huggingface.co/docs/transformers/installation#offline-mode'. 
- -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,1,64,1,,, -4bit-awq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,Deci/DeciLM-7B,Deci/DeciLM-7B,cuda,0,42,,,True,,,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,deci,MB,4384.84992,5589.434368,0.0,4959.76448,4769.731072,s,1,10.6732900390625,10.6732900390625,0.0,10.6732900390625,10.6732900390625,10.6732900390625,10.6732900390625,[10.6732900390625],,kWh,4.359199812847376e-05,2.3860498165852596e-05,8.132978728600526e-05,0.0001487822835803316,,MB,1626.681344,5612.50304,0.0,4966.055936,4251.027456,s,10,12.88134387207031,1.2881343872070312,0.00011800751775398599,1.2880999755859375,1.28822685546875,1.2883319946289062,1.2884161059570314,"[1.2884371337890625, 1.28810107421875, 1.2880789794921874, 1.288098876953125, 1.288014892578125, 1.2880887451171874, 1.2882034912109375, 1.2881893310546875, 1.288000244140625, 1.288131103515625]",tokens/s,198.73702817224398,kWh,1.5222095251041532e-05,8.341441687673412e-06,8.634101351719936e-05,0.0001099045504559143,tokens/kWh,2329293.9094700045,MB,1638.887424,5627.183104,0.0,4980.736,4251.030016,s,10,16.68578271484375,1.668578271484375,0.010873003862068732,1.665667236328125,1.67797109375,1.6874754760742188,1.6950789819335936,"[1.6713468017578126, 1.6689512939453126, 1.6671597900390625, 1.6630877685546874, 1.66185693359375, 1.6758590087890626, 1.65916357421875, 1.6572030029296876, 1.6641746826171875, 1.6969798583984375]",tokens/s,37.7566944725673,kWh,2.0056157090972862e-05,1.0992546964624318e-05,5.795418525220142e-05,8.90028893077986e-05,tokens/kWh,707842.1890566627,,s,630,16.6831913013458,0.026481256033882256,0.0004960790615836686,0.026242048263549804,0.027233485031127932,0.027346892547607422,0.027762769680023194,"[0.026998783111572267, 0.026380287170410157, 0.026335231781005858, 0.026166271209716797, 0.02735103988647461, 0.02721075248718262, 0.02670796775817871, 0.026104831695556642, 0.026077184677124023, 0.02613145637512207, 0.026211328506469726, 0.026070016860961914, 0.02610585594177246, 0.0261345272064209, 0.026229759216308594, 0.026252288818359375, 0.02613862419128418, 0.026275840759277344, 0.026229759216308594, 0.026053632736206055, 0.026044416427612304, 0.02613145637512207, 0.026171392440795898, 0.026220544815063477, 0.026239999771118162, 0.02613555145263672, 0.026247167587280275, 0.026170368194580077, 0.02596249580383301, 0.02614886474609375, 0.026239999771118162, 0.026113023757934572, 0.026468351364135743, 0.030619647979736327, 0.027810815811157227, 0.027246591567993163, 0.027261951446533202, 0.027413503646850586, 0.02726092720031738, 0.027183103561401366, 0.02707865524291992, 0.027088895797729492, 0.02673459243774414, 0.027320320129394532, 0.027075584411621095, 0.027527168273925783, 0.027219968795776366, 0.026605567932128905, 0.02613145637512207, 0.026179584503173828, 0.02610585594177246, 0.026187776565551758, 0.02608332824707031, 0.02613350486755371, 0.02615193557739258, 0.026186752319335937, 0.02617344093322754, 0.026080255508422853, 0.0261529598236084, 
0.02609459114074707, 0.026223615646362306, 0.02615500831604004, 0.026167295455932618, 0.026427391052246094, 0.026456064224243164, 0.027006975173950197, 0.027165695190429686, 0.0269434871673584, 0.027148288726806642, 0.0271779842376709, 0.026695680618286134, 0.02612838363647461, 0.027527168273925783, 0.027114496231079102, 0.027016191482543944, 0.027175935745239257, 0.027091968536376954, 0.02660966491699219, 0.026183679580688478, 0.026149887084960938, 0.02612428855895996, 0.026231807708740236, 0.02627993583679199, 0.026193920135498046, 0.026241024017333983, 0.026220544815063477, 0.026145792007446288, 0.026238975524902345, 0.026238975524902345, 0.026203136444091796, 0.026302463531494142, 0.026238975524902345, 0.026200063705444337, 0.026225664138793944, 0.026145792007446288, 0.026065919876098635, 0.02611814308166504, 0.026242048263549804, 0.026226688385009765, 0.026178560256958007, 0.026192895889282225, 0.02631679916381836, 0.026435583114624024, 0.02634649658203125, 0.026224639892578124, 0.026053632736206055, 0.026186752319335937, 0.026484735488891603, 0.02634854316711426, 0.026163200378417968, 0.02615193557739258, 0.026181631088256836, 0.026184703826904295, 0.02617344093322754, 0.026171392440795898, 0.026185728073120116, 0.026218496322631835, 0.026170368194580077, 0.026101760864257813, 0.026432512283325195, 0.027777023315429687, 0.027604991912841798, 0.027233280181884766, 0.027167743682861328, 0.027033599853515625, 0.02707967948913574, 0.026615808486938477, 0.026434560775756837, 0.02634035110473633, 0.02615910339355469, 0.026093568801879883, 0.02608639907836914, 0.026192895889282225, 0.025996288299560546, 0.028043264389038085, 0.026628095626831053, 0.02631884765625, 0.026104831695556642, 0.026216447830200194, 0.02614067268371582, 0.02612326431274414, 0.026177536010742186, 0.02610688018798828, 0.026198015213012696, 0.0261529598236084, 0.026209280014038085, 0.026262527465820314, 0.026237951278686524, 0.026292224884033204, 0.02631167984008789, 0.026252288818359375, 0.026234880447387695, 0.026209280014038085, 0.026215423583984376, 0.026193920135498046, 0.02614784049987793, 0.026187776565551758, 0.026237951278686524, 0.026081279754638673, 0.026274816513061523, 0.026194944381713867, 0.026273792266845702, 0.026088447570800782, 0.026230783462524415, 0.02612531280517578, 0.026202112197875976, 0.026185728073120116, 0.026302463531494142, 0.026181631088256836, 0.026161151885986327, 0.02627174377441406, 0.02616831970214844, 0.02628915214538574, 0.02772787284851074, 0.027586559295654296, 0.027248640060424805, 0.026686464309692383, 0.026831872940063478, 0.027143167495727538, 0.027089920043945313, 0.027045888900756834, 0.027039743423461913, 0.02715648078918457, 0.02721075248718262, 0.027167743682861328, 0.027085823059082033, 0.026208255767822267, 0.02614784049987793, 0.02731622314453125, 0.026442752838134766, 0.026242048263549804, 0.026183679580688478, 0.026042367935180662, 0.026174463272094727, 0.027241472244262696, 0.026727424621582032, 0.026037248611450195, 0.026209280014038085, 0.026039295196533203, 0.026242048263549804, 0.026377216339111328, 0.026104831695556642, 0.02608639907836914, 0.02614169692993164, 0.026221567153930665, 0.026175487518310548, 0.026219520568847656, 0.026255359649658205, 0.026006528854370117, 0.026230783462524415, 0.026169343948364256, 0.026193920135498046, 0.027270143508911132, 0.028770303726196288, 0.02769715118408203, 0.027281408309936524, 0.02716876792907715, 0.027108352661132814, 0.02709401512145996, 0.02635366439819336, 0.02616524887084961, 0.02609152030944824, 
0.026277887344360353, 0.026228736877441407, 0.026218496322631835, 0.026231807708740236, 0.02611712074279785, 0.026182655334472657, 0.026286079406738282, 0.026186752319335937, 0.026248191833496092, 0.026458112716674805, 0.026443775177001954, 0.026247167587280275, 0.02633318328857422, 0.026222591400146485, 0.02652569580078125, 0.02634752082824707, 0.026231807708740236, 0.026427391052246094, 0.026818559646606444, 0.02616831970214844, 0.026247167587280275, 0.02609868812561035, 0.026255359649658205, 0.02617651176452637, 0.026080255508422853, 0.02615091133117676, 0.026238975524902345, 0.02614886474609375, 0.026202112197875976, 0.02627993583679199, 0.026895360946655275, 0.026270719528198243, 0.0261345272064209, 0.026002431869506838, 0.02610380744934082, 0.026014720916748047, 0.026230783462524415, 0.02618880081176758, 0.02609152030944824, 0.02615500831604004, 0.026203136444091796, 0.026068992614746093, 0.026054655075073242, 0.02611609649658203, 0.02615193557739258, 0.02607411193847656, 0.026193920135498046, 0.026720256805419923, 0.026574848175048828, 0.026233856201171874, 0.02614169692993164, 0.026241024017333983, 0.025995264053344725, 0.026209280014038085, 0.02676633644104004, 0.02649395179748535, 0.026242048263549804, 0.026219520568847656, 0.026184703826904295, 0.02613350486755371, 0.026222591400146485, 0.025788415908813478, 0.025644031524658203, 0.026269695281982423, 0.026101760864257813, 0.026201087951660155, 0.02613657569885254, 0.026238975524902345, 0.02613248062133789, 0.02631270408630371, 0.026167295455932618, 0.02607513618469238, 0.026637311935424804, 0.027183103561401366, 0.0271779842376709, 0.027143167495727538, 0.027106304168701172, 0.027064319610595702, 0.02709708786010742, 0.027447296142578126, 0.026649599075317384, 0.026203136444091796, 0.02691481590270996, 0.027200511932373047, 0.026274816513061523, 0.02631372833251953, 0.026863616943359377, 0.02673459243774414, 0.026214399337768556, 0.0263372802734375, 0.026241024017333983, 0.026255359649658205, 0.02616422462463379, 0.026900480270385742, 0.026398719787597655, 0.02611814308166504, 0.026038272857666016, 0.02614169692993164, 0.025981952667236328, 0.02638643264770508, 0.027198463439941405, 0.027158527374267577, 0.02715443229675293, 0.027209728240966798, 0.026622976303100586, 0.026184703826904295, 0.027100160598754884, 0.026529792785644532, 0.02616831970214844, 0.02613862419128418, 0.026149887084960938, 0.026239999771118162, 0.026235904693603516, 0.02616524887084961, 0.026213375091552735, 0.026621952056884765, 0.026746879577636717, 0.026163200378417968, 0.026237951278686524, 0.026196992874145508, 0.026195968627929687, 0.026242048263549804, 0.026248191833496092, 0.02631475257873535, 0.026161151885986327, 0.02607411193847656, 0.026291200637817383, 0.026048511505126954, 0.02716364860534668, 0.027208703994750977, 0.027484159469604492, 0.02740121650695801, 0.02729471969604492, 0.027444223403930663, 0.027192319869995117, 0.027235328674316408, 0.027142143249511717, 0.02712985610961914, 0.027259904861450194, 0.027197439193725585, 0.027321344375610353, 0.026918912887573244, 0.02632499122619629, 0.026027008056640624, 0.027249664306640626, 0.026266624450683593, 0.026266624450683593, 0.02610688018798828, 0.026217472076416014, 0.0261345272064209, 0.026204160690307617, 0.026214399337768556, 0.026256383895874022, 0.026193920135498046, 0.027197439193725585, 0.027503616333007814, 0.026860544204711914, 0.026257408142089843, 0.02611916732788086, 0.02606185531616211, 0.026020832061767578, 0.026072063446044923, 0.025983999252319336, 0.02608332824707031, 
0.027151359558105468, 0.027291648864746092, 0.02712985610961914, 0.027140096664428712, 0.02715443229675293, 0.027100160598754884, 0.027025407791137695, 0.026294271469116212, 0.027011072158813477, 0.027077632904052733, 0.026261503219604493, 0.026183679580688478, 0.02634649658203125, 0.026459135055541993, 0.026181631088256836, 0.026195968627929687, 0.026229759216308594, 0.02611609649658203, 0.026267648696899414, 0.026702848434448243, 0.026648576736450196, 0.026161151885986327, 0.026171392440795898, 0.026109952926635743, 0.026077184677124023, 0.026184703826904295, 0.02615193557739258, 0.026047487258911133, 0.026287103652954103, 0.026113023757934572, 0.026216447830200194, 0.026242048263549804, 0.02615910339355469, 0.026163200378417968, 0.0261396484375, 0.026226688385009765, 0.02612224006652832, 0.026137599945068358, 0.02629631996154785, 0.026145792007446288, 0.026167295455932618, 0.02590617561340332, 0.02576896095275879, 0.026158079147338868, 0.026277887344360353, 0.026656768798828126, 0.02632908821105957, 0.026190847396850587, 0.026212352752685547, 0.026208255767822267, 0.026283008575439453, 0.025825279235839844, 0.02610380744934082, 0.02611507225036621, 0.026145792007446288, 0.0270960636138916, 0.026037248611450195, 0.02592153549194336, 0.026066944122314452, 0.02612633514404297, 0.026096639633178712, 0.026198015213012696, 0.026216447830200194, 0.02613043212890625, 0.026178560256958007, 0.027386880874633788, 0.02674073600769043, 0.026263551712036134, 0.026200063705444337, 0.02618880081176758, 0.026306560516357422, 0.026223615646362306, 0.027378688812255858, 0.02713804817199707, 0.02615193557739258, 0.025987071990966795, 0.025980928421020507, 0.026275840759277344, 0.02611814308166504, 0.026256383895874022, 0.02612838363647461, 0.026181631088256836, 0.026639360427856446, 0.026577951431274414, 0.026248159408569335, 0.026020864486694335, 0.026175487518310548, 0.026038272857666016, 0.02674995231628418, 0.026680320739746095, 0.026212352752685547, 0.026215423583984376, 0.026068992614746093, 0.02631475257873535, 0.026166271209716797, 0.02614784049987793, 0.026078208923339844, 0.026204160690307617, 0.026776575088500978, 0.026264575958251952, 0.02631884765625, 0.026287103652954103, 0.026228736877441407, 0.026190847396850587, 0.02616012763977051, 0.026210304260253905, 0.026270719528198243, 0.026200063705444337, 0.026270719528198243, 0.026208255767822267, 0.02633420753479004, 0.026202112197875976, 0.026224639892578124, 0.026166271209716797, 0.026450944900512696, 0.026246143341064454, 0.026277887344360353, 0.02614067268371582, 0.027059200286865235, 0.02631987190246582, 0.0261396484375, 0.026080255508422853, 0.026053632736206055, 0.02609868812561035, 0.026199039459228517, 0.026092544555664062, 0.02611609649658203, 0.026703872680664063, 0.027459583282470702, 0.0271779842376709, 0.027296768188476563, 0.026664960861206056, 0.026449920654296875, 0.026213375091552735, 0.026275840759277344, 0.026244096755981446, 0.02631782341003418, 0.026286079406738282, 0.02628096008300781, 0.026283008575439453, 0.026239999771118162, 0.026195968627929687, 0.026274816513061523, 0.026229759216308594, 0.026265600204467773, 0.026249216079711913, 0.026259456634521484, 0.026185728073120116, 0.026249216079711913, 0.026504192352294922, 0.02792755126953125, 0.027394048690795897, 0.02715238380432129, 0.026433536529541016, 0.02618880081176758, 0.026661888122558593, 0.027518976211547853, 0.02733158493041992, 0.026401792526245117, 0.026244096755981446, 0.026044416427612304, 0.026275840759277344, 0.026196992874145508, 0.026259456634521484, 
0.026246143341064454, 0.026207231521606447, 0.026200063705444337, 0.026153984069824218, 0.026200063705444337, 0.026129535675048828, 0.026068864822387697, 0.026260480880737305, 0.02614476776123047, 0.02618880081176758, 0.026200063705444337, 0.026468351364135743, 0.02614886474609375, 0.02629631996154785, 0.026207231521606447, 0.0261079044342041, 0.026183679580688478, 0.026833919525146483, 0.02632089614868164, 0.02616012763977051, 0.026642431259155275, 0.027222015380859374, 0.027635711669921875, 0.027609088897705077, 0.02734182357788086, 0.02715545654296875, 0.027140096664428712, 0.027287551879882813, 0.02714931106567383, 0.027072511672973632, 0.026936319351196288, 0.02666803169250488, 0.027282432556152345, 0.027089920043945313, 0.027273216247558595, 0.026674175262451173, 0.026225664138793944, 0.026789888381958008, 0.027173887252807616, 0.026687488555908204, 0.026299392700195313, 0.02633830451965332, 0.02609561538696289, 0.026798080444335938, 0.026248191833496092, 0.026248191833496092, 0.02627174377441406, 0.02636595153808594, 0.026398719787597655, 0.027283456802368163, 0.02730188751220703, 0.02715648078918457, 0.027244543075561522, 0.027286527633666992, 0.027213823318481444, 0.02734182357788086, 0.027257856369018556, 0.02713497543334961, 0.026262527465820314, 0.026332160949707032, 0.02679193687438965, 0.027862016677856444, 0.027643903732299805, 0.0272988166809082, 0.027205631256103514, 0.027239423751831054, 0.027261951446533202, 0.027357183456420898, 0.027390975952148438, 0.02717695999145508, 0.0271329288482666, 0.02739200019836426, 0.02727731132507324, 0.02713702392578125, 0.027470848083496095, 0.02677350425720215, 0.026360832214355468, 0.02630143928527832, 0.026283008575439453, 0.02612838363647461]",tokens/s,37.7625592502304,,,1,64,1,,, -4bit-awq-exllama-v1-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,facebook/opt-2.7b,facebook/opt-2.7b,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - self.load_model_from_pretrained() - File 
""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3907, in from_pretrained - hf_quantizer.postprocess_model(model) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/base.py"", line 195, in postprocess_model - return self._process_model_after_weight_loading(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/quantizer_awq.py"", line 107, in _process_model_after_weight_loading - model = post_init_awq_exllama_modules(model, self.quantization_config.exllama_config) - File ""/usr/local/lib/python3.10/dist-packages/transformers/integrations/awq.py"", line 462, in post_init_awq_exllama_modules - model = exllama_post_init(model) - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllama.py"", line 144, in exllama_post_init - submodule.post_init() - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllama.py"", line 77, in post_init - self.q4 = exl_ext.make_q4( -NameError: name 'exl_ext' is not defined - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,1,64,1,,, -4bit-awq-exllama-v1-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,huggyllama/llama-7b,huggyllama/llama-7b,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - self.load_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File 
""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3907, in from_pretrained - hf_quantizer.postprocess_model(model) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/base.py"", line 195, in postprocess_model - return self._process_model_after_weight_loading(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/quantizer_awq.py"", line 107, in _process_model_after_weight_loading - model = post_init_awq_exllama_modules(model, self.quantization_config.exllama_config) - File ""/usr/local/lib/python3.10/dist-packages/transformers/integrations/awq.py"", line 462, in post_init_awq_exllama_modules - model = exllama_post_init(model) - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllama.py"", line 144, in exllama_post_init - submodule.post_init() - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllama.py"", line 77, in post_init - self.q4 = exl_ext.make_q4( -NameError: name 'exl_ext' is not defined - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,1,64,1,,, -4bit-awq-exllama-v1-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,facebook/opt-13b,facebook/opt-13b,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - self.load_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3907, in from_pretrained - hf_quantizer.postprocess_model(model) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/base.py"", line 195, in postprocess_model - return self._process_model_after_weight_loading(model, **kwargs) - File 
""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/quantizer_awq.py"", line 107, in _process_model_after_weight_loading - model = post_init_awq_exllama_modules(model, self.quantization_config.exllama_config) - File ""/usr/local/lib/python3.10/dist-packages/transformers/integrations/awq.py"", line 462, in post_init_awq_exllama_modules - model = exllama_post_init(model) - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllama.py"", line 144, in exllama_post_init - submodule.post_init() - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllama.py"", line 77, in post_init - self.q4 = exl_ext.make_q4( -NameError: name 'exl_ext' is not defined - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,1,64,1,,, -4bit-awq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,EleutherAI/pythia-70m,EleutherAI/pythia-70m,cuda,0,42,,,True,,,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,gpt_neox,MB,882.241536,793.247744,0.0,163.577856,152.009216,s,1,7.2200234375,7.2200234375,0.0,7.2200234375,7.2200234375,7.2200234375,7.2200234375,[7.2200234375],,kWh,4.802818856262548e-06,2.615667419403761e-06,6.508060762022705e-06,1.3926547037689014e-05,,MB,1479.159808,847.773696,0.0,201.326592,184.525824,s,27,0.21294092750549315,0.00788670101872197,0.0001532123029251601,0.007824063777923584,0.008046630096435548,0.008237395000457764,0.008361637783050536,"[0.008380000114440917, 0.008031423568725587, 0.007868800163269043, 0.007796768188476562, 0.007759168148040772, 0.007929920196533203, 0.0078076162338256835, 0.007786943912506104, 0.007829792022705078, 0.008069439888000489, 0.008309375762939454, 0.007952703952789306, 0.007893472194671632, 0.0078115520477294925, 0.007718944072723389, 0.00782153606414795, 0.007946623802185058, 0.007836544036865234, 0.007947231769561768, 0.007824063777923584, 0.0077749757766723635, 0.007793056011199951, 0.007808159828186035, 0.00782313585281372, 0.007860320091247559, 0.007791520118713379, 0.007767839908599854]",tokens/s,32459.706459302866,kWh,9.18103815322004e-08,5.030740518100638e-08,3.6558412221749884e-07,5.077019089307057e-07,tokens/kWh,504232888.4269381,MB,1524.748288,849.870848,0.0,201.326592,184.528384,s,27,9.931189697265623,0.3678218406394675,0.00503236682494468,0.36698208618164063,0.37491141357421875,0.3770038269042969,0.3775230407714844,"[0.3760689697265625, 0.3774044799804688, 0.36698208618164063, 0.3620142517089844, 0.3693927917480469, 0.37413970947265623, 0.370105712890625, 0.3700155029296875, 0.3665720825195313, 0.3678841857910156, 0.3667320861816406, 0.36874908447265625, 0.36658755493164064, 0.36078482055664063, 0.36280450439453127, 0.37342999267578125, 0.3714714050292969, 0.3702291564941406, 0.372423095703125, 0.3623826599121094, 0.3630744323730469, 0.36201910400390624, 0.36080987548828125, 0.3661871643066406, 0.36288211059570313, 0.3624781799316406, 
0.377564697265625]",tokens/s,171.27857304632298,kWh,4.312218921245273e-06,2.362457652634456e-06,6.950121318820927e-06,1.3624797892700656e-05,tokens/kWh,4623921.800245683,,s,1701,9.920265247344974,0.005832019545764239,0.00015545601890409457,0.0057784318923950195,0.006023168087005615,0.006055935859680176,0.0064102401733398436,"[0.00657203197479248, 0.006536191940307618, 0.006631423950195312, 0.006409215927124024, 0.006052864074707031, 0.0060405759811401364, 0.006025216102600098, 0.006021120071411133, 0.006014976024627685, 0.006240255832672119, 0.006037504196166992, 0.00603545618057251, 0.005984255790710449, 0.006047743797302246, 0.005992447853088379, 0.005873663902282715, 0.005971968173980713, 0.006075424194335937, 0.005959648132324219, 0.0060067839622497555, 0.005988351821899414, 0.006011903762817383, 0.006103040218353272, 0.00602726411819458, 0.005989376068115234, 0.006015999794006348, 0.006014976024627685, 0.0059361281394958495, 0.0059361281394958495, 0.0060067839622497555, 0.005993472099304199, 0.005790719985961914, 0.005687295913696289, 0.005728256225585937, 0.005790719985961914, 0.00577023983001709, 0.00572211217880249, 0.005698560237884521, 0.005710847854614258, 0.005689343929290771, 0.005743616104125977, 0.005772287845611572, 0.00577023983001709, 0.005799935817718506, 0.005740543842315674, 0.005720064163208007, 0.005691391944885254, 0.005718016147613526, 0.005784575939178467, 0.005766143798828125, 0.005852159976959229, 0.005780479907989502, 0.0057825279235839844, 0.006038527965545654, 0.006033408164978027, 0.006024191856384278, 0.0060282878875732426, 0.005931007862091065, 0.005991487979888916, 0.006345664024353027, 0.006089759826660156, 0.006064095973968506, 0.006055935859680176, 0.005932032108306885, 0.005988351821899414, 0.006089727878570556, 0.0060067839622497555, 0.005983232021331787, 0.005837823867797852, 0.005807104110717773, 0.006065152168273926, 0.006025216102600098, 0.006055935859680176, 0.006003712177276611, 0.006025216102600098, 0.005980160236358643, 0.006069248199462891, 0.005938176155090332, 0.005958655834197998, 0.005876736164093017, 0.005979135990142822, 0.005863423824310303, 0.006005760192871094, 0.005858304023742676, 0.006023168087005615, 0.005881855964660645, 0.005982207775115967, 0.005876736164093017, 0.0059688959121704105, 0.005830656051635743, 0.005952511787414551, 0.005825535774230957, 0.005970943927764893, 0.005893119812011719, 0.005984255790710449, 0.005917695999145508, 0.0059658241271972655, 0.0058716158866882326, 0.005970943927764893, 0.0058726401329040525, 0.005942272186279297, 0.005864448070526123, 0.005914624214172363, 0.005897215843200684, 0.005924863815307617, 0.005991424083709717, 0.005899263858795166, 0.005958655834197998, 0.006013951778411865, 0.006082592010498047, 0.006118400096893311, 0.006281184196472168, 0.006436863899230957, 0.006173696041107178, 0.006037504196166992, 0.006153247833251953, 0.006576096057891846, 0.006251520156860352, 0.006043647766113281, 0.006000639915466309, 0.006038527965545654, 0.005979135990142822, 0.005952511787414551, 0.005783552169799804, 0.0058091521263122555, 0.005747744083404541, 0.005876736164093017, 0.006120448112487793, 0.005987328052520752, 0.005951488018035888, 0.005986303806304932, 0.005945343971252442, 0.005975039958953857, 0.005950463771820068, 0.005990399837493897, 0.005893119812011719, 0.0059023361206054685, 0.0058787841796875, 0.00591871976852417, 0.006024191856384278, 0.005760000228881836, 0.005694464206695556, 0.005709824085235596, 0.005697535991668701, 0.005710847854614258, 0.005732351779937744, 
0.005769216060638428, 0.005801983833312988, 0.0057784318923950195, 0.005781504154205322, 0.005760000228881836, 0.005743616104125977, 0.005723135948181152, 0.005756927967071533, 0.005788671970367432, 0.005768191814422607, 0.005863423824310303, 0.005727231979370117, 0.005780479907989502, 0.005798912048339844, 0.005755904197692871, 0.005716991901397705, 0.005673024177551269, 0.00572819185256958, 0.0056780800819396975, 0.005710847854614258, 0.005691391944885254, 0.005697535991668701, 0.005725183963775635, 0.005692416191101074, 0.005725247859954834, 0.0056728959083557125, 0.0057118721008300784, 0.005703680038452149, 0.005682176113128662, 0.006050816059112549, 0.0059996161460876465, 0.0060590081214904785, 0.006007808208465576, 0.005966847896575928, 0.005876736164093017, 0.005914624214172363, 0.005863423824310303, 0.005921792030334472, 0.005889023780822754, 0.005764095783233642, 0.005675007820129394, 0.00570470380783081, 0.005786623954772949, 0.005566463947296142, 0.005751808166503906, 0.005744639873504639, 0.005754879951477051, 0.005731328010559082, 0.005695487976074219, 0.0057415680885314945, 0.00576204776763916, 0.005763072013854981, 0.005744703769683838, 0.005785535812377929, 0.005746687889099121, 0.005791744232177734, 0.00572108793258667, 0.005727231979370117, 0.005682176113128662, 0.005666816234588623, 0.005696512222290039, 0.005668863773345947, 0.005766143798828125, 0.0057415680885314945, 0.0057825279235839844, 0.005698560237884521, 0.005666816234588623, 0.0057077760696411135, 0.005681151866912842, 0.005703680038452149, 0.00568012809753418, 0.005750783920288086, 0.005755904197692871, 0.005742623805999756, 0.005794784069061279, 0.005731328010559082, 0.005788703918457031, 0.005747680187225342, 0.005724160194396972, 0.005672959804534912, 0.005690368175506591, 0.0058009600639343266, 0.0057487359046936035, 0.005790719985961914, 0.005760000228881836, 0.0057784318923950195, 0.005746687889099121, 0.005767168045043946, 0.0057487359046936035, 0.005742591857910156, 0.005700607776641845, 0.005606400012969971, 0.005703680038452149, 0.005932032108306885, 0.005786623954772949, 0.0057497601509094234, 0.005761023998260498, 0.00576204776763916, 0.0057487359046936035, 0.005787648200988769, 0.005754879951477051, 0.005768191814422607, 0.0057497601509094234, 0.005805056095123291, 0.005757952213287353, 0.005751808166503906, 0.005617663860321045, 0.005758975982666016, 0.00572211217880249, 0.0056852478981018065, 0.005706751823425293, 0.00567193603515625, 0.005702655792236328, 0.005693439960479736, 0.00568832015991211, 0.005703680038452149, 0.005696512222290039, 0.00570470380783081, 0.005683199882507324, 0.00568012809753418, 0.005728256225585937, 0.005693439960479736, 0.006508543968200684, 0.005951488018035888, 0.0058419198989868165, 0.005793791770935058, 0.005827583789825439, 0.005755904197692871, 0.0059699201583862304, 0.005865471839904785, 0.005984255790710449, 0.005989376068115234, 0.006055935859680176, 0.005858304023742676, 0.005816319942474365, 0.005787680149078369, 0.005824480056762695, 0.005776383876800537, 0.005789696216583252, 0.006069248199462891, 0.00620851182937622, 0.0059955201148986816, 0.005799935817718506, 0.005833727836608887, 0.005829631805419922, 0.005836800098419189, 0.0057415680885314945, 0.005794816017150879, 0.005754879951477051, 0.00581324815750122, 0.005765120029449463, 0.005790719985961914, 0.005850111961364746, 0.005830656051635743, 0.005839871883392334, 0.0058757119178771975, 0.005917695999145508, 0.005944320201873779, 0.005937151908874512, 0.0059351038932800295, 0.006071296215057373, 
0.006007808208465576, 0.006004735946655273, 0.005996543884277344, 0.006002719879150391, 0.0059944639205932616, 0.006004735946655273, 0.005982207775115967, 0.006022143840789795, 0.005849088191986084, 0.006020095825195313, 0.005924863815307617, 0.006464511871337891, 0.00606822395324707, 0.005980160236358643, 0.0059985918998718265, 0.006080512046813965, 0.006007808208465576, 0.006024191856384278, 0.005962751865386963, 0.0060293121337890625, 0.0060364799499511715, 0.0060364799499511715, 0.0059688959121704105, 0.0060293121337890625, 0.006076416015625, 0.006011903762817383, 0.005868544101715088, 0.0059269118309021, 0.00588595199584961, 0.005934144020080566, 0.00592787218093872, 0.006053887844085694, 0.005945343971252442, 0.005903359889984131, 0.005744639873504639, 0.00601907205581665, 0.005811200141906738, 0.0059351038932800295, 0.005979135990142822, 0.006039552211761475, 0.005964799880981446, 0.006096896171569824, 0.005911551952362061, 0.0059351038932800295, 0.005874688148498535, 0.005806111812591553, 0.005848031997680664, 0.005789696216583252, 0.005899263858795166, 0.006409279823303223, 0.005912511825561523, 0.005944320201873779, 0.005923840045928955, 0.005976064205169678, 0.005985280036926269, 0.005923840045928955, 0.005945343971252442, 0.005974016189575195, 0.005944320201873779, 0.005709824085235596, 0.005748799800872803, 0.005739456176757813, 0.005823488235473633, 0.005738495826721191, 0.005839871883392334, 0.005745664119720459, 0.005743616104125977, 0.005709856033325195, 0.005721055984497071, 0.0058419198989868165, 0.005777408123016357, 0.00562278413772583, 0.005754879951477051, 0.0060293121337890625, 0.006072319984436035, 0.00603545618057251, 0.005953536033630371, 0.006047743797302246, 0.005966847896575928, 0.006001664161682129, 0.005888000011444092, 0.005907455921173096, 0.005852159976959229, 0.0059351038932800295, 0.006829055786132812, 0.0065669121742248536, 0.006115359783172607, 0.00604259204864502, 0.005941247940063477, 0.005901311874389649, 0.005803008079528809, 0.005868544101715088, 0.00586240005493164, 0.005947391986846923, 0.005856256008148194, 0.005986303806304932, 0.005907455921173096, 0.006087679862976075, 0.005938208103179932, 0.006011871814727783, 0.005962751865386963, 0.006052864074707031, 0.0059688959121704105, 0.005979135990142822, 0.005772287845611572, 0.0057292799949646, 0.005703680038452149, 0.005692416191101074, 0.005769216060638428, 0.005769216060638428, 0.005783616065979004, 0.005755839824676513, 0.005833727836608887, 0.005752831935882568, 0.005811200141906738, 0.005745664119720459, 0.005736447811126709, 0.005698560237884521, 0.005700607776641845, 0.005725183963775635, 0.005679103851318359, 0.005699584007263184, 0.0056514558792114256, 0.005705728054046631, 0.005696512222290039, 0.005739552021026611, 0.00570576000213623, 0.005769184112548828, 0.005772255897521973, 0.00568832015991211, 0.005670911788940429, 0.005753856182098388, 0.005703680038452149, 0.005803008079528809, 0.00562278413772583, 0.0057712640762329105, 0.005896192073822021, 0.005986303806304932, 0.005952672004699707, 0.005898079872131348, 0.00586956787109375, 0.005876736164093017, 0.0058419198989868165, 0.005689343929290771, 0.005752831935882568, 0.0057712640762329105, 0.005759007930755615, 0.00568828821182251, 0.005727231979370117, 0.0060282878875732426, 0.006135807991027832, 0.005970943927764893, 0.005830656051635743, 0.005817344188690185, 0.0058132801055908205, 0.005802976131439209, 0.005769248008728027, 0.0058418879508972165, 0.00576204776763916, 0.0058419198989868165, 0.005730303764343261, 
0.005739520072937012, 0.0056852478981018065, 0.0058419198989868165, 0.005745664119720459, 0.005775360107421875, 0.005767168045043946, 0.005766143798828125, 0.005794816017150879, 0.005787648200988769, 0.005797920227050782, 0.005753824234008789, 0.005816319942474365, 0.0057784318923950195, 0.0057825279235839844, 0.005803008079528809, 0.005790719985961914, 0.005958655834197998, 0.0060375680923461916, 0.006038464069366455, 0.00602726411819458, 0.005942336082458496, 0.0059862399101257326, 0.0060282878875732426, 0.0060026879310607914, 0.0060282878875732426, 0.005985280036926269, 0.00601907205581665, 0.006005760192871094, 0.006017024040222168, 0.006022143840789795, 0.006030335903167725, 0.005986303806304932, 0.005996543884277344, 0.0060067839622497555, 0.005826560020446778, 0.005801983833312988, 0.005636096000671387, 0.005753856182098388, 0.005807104110717773, 0.005745664119720459, 0.005994495868682862, 0.005903359889984131, 0.0059351038932800295, 0.005865471839904785, 0.005900288105010986, 0.0058787841796875, 0.005865471839904785, 0.005886975765228271, 0.0058716158866882326, 0.005731328010559082, 0.005689343929290771, 0.00572108793258667, 0.005694464206695556, 0.005687295913696289, 0.005720064163208007, 0.0057118721008300784, 0.0057610878944396975, 0.005952447891235352, 0.006031360149383545, 0.005986303806304932, 0.005991424083709717, 0.005949600219726563, 0.005947264194488525, 0.005972959995269775, 0.005972032070159912, 0.00592787218093872, 0.00591974401473999, 0.005880832195281982, 0.005784639835357666, 0.005714879989624023, 0.005677055835723877, 0.0057794561386108395, 0.005756927967071533, 0.005805056095123291, 0.00571289587020874, 0.005744639873504639, 0.00568012809753418, 0.00568832015991211, 0.005723135948181152, 0.00568832015991211, 0.005733376026153565, 0.005684288024902344, 0.00574560022354126, 0.005681151866912842, 0.0057118721008300784, 0.0057077760696411135, 0.0057077760696411135, 0.005715968132019043, 0.00573747205734253, 0.005731328010559082, 0.00572211217880249, 0.005684224128723145, 0.005739520072937012, 0.005697535991668701, 0.005811200141906738, 0.005790719985961914, 0.005856256008148194, 0.006179840087890625, 0.0064174079895019534, 0.005864448070526123, 0.00607539176940918, 0.006004735946655273, 0.00603545618057251, 0.005964799880981446, 0.006146048069000244, 0.006024191856384278, 0.006008831977844238, 0.006037504196166992, 0.005983232021331787, 0.005996543884277344, 0.005691423892974854, 0.005699552059173584, 0.005725183963775635, 0.005689343929290771, 0.005696512222290039, 0.00571289587020874, 0.005801983833312988, 0.00575494384765625, 0.005801919937133789, 0.005803008079528809, 0.005772287845611572, 0.005803008079528809, 0.0059023361206054685, 0.006024191856384278, 0.00586956787109375, 0.005745664119720459, 0.005725183963775635, 0.005691391944885254, 0.005726208209991455, 0.005668863773345947, 0.005726208209991455, 0.005675007820129394, 0.005728256225585937, 0.005777408123016357, 0.005784575939178467, 0.0057784318923950195, 0.00577023983001709, 0.005803008079528809, 0.005744639873504639, 0.005806079864501953, 0.005734399795532226, 0.005817344188690185, 0.005745664119720459, 0.0057415680885314945, 0.005683199882507324, 0.005735424041748047, 0.005642240047454834, 0.005643263816833496, 0.005734399795532226, 0.005724160194396972, 0.0061562881469726565, 0.006022143840789795, 0.00597811222076416, 0.0060026879310607914, 0.0059433279037475585, 0.005857247829437256, 0.005882880210876465, 0.005900288105010986, 0.005945343971252442, 0.005950463771820068, 0.005865471839904785, 
0.005745664119720459, 0.006520832061767578, 0.00601907205581665, 0.006053887844085694, 0.005857279777526855, 0.005948448181152343, 0.005868512153625488, 0.005886975765228271, 0.005829631805419922, 0.005873663902282715, 0.005830656051635743, 0.006449151992797852, 0.006073344230651856, 0.005787648200988769, 0.005738495826721191, 0.005682176113128662, 0.005715968132019043, 0.005661695957183838, 0.00570470380783081, 0.005677055835723877, 0.005699584007263184, 0.005696512222290039, 0.00572211217880249, 0.005720064163208007, 0.00567193603515625, 0.0057825279235839844, 0.005764095783233642, 0.00567193603515625, 0.0057415680885314945, 0.005734399795532226, 0.005792768001556397, 0.005752831935882568, 0.005725183963775635, 0.0056780800819396975, 0.005698560237884521, 0.005700607776641845, 0.005681151866912842, 0.00570470380783081, 0.005675007820129394, 0.00571289587020874, 0.005653503894805908, 0.00567193603515625, 0.005668863773345947, 0.00568012809753418, 0.0056975679397583, 0.005666783809661865, 0.0057118721008300784, 0.005668863773345947, 0.005803008079528809, 0.00581324815750122, 0.005703680038452149, 0.005727231979370117, 0.005668863773345947, 0.005744639873504639, 0.00568012809753418, 0.005662720203399658, 0.0062156801223754886, 0.006710271835327148, 0.0060067839622497555, 0.006045695781707764, 0.00591871976852417, 0.005898240089416504, 0.005891071796417236, 0.005911551952362061, 0.005824512004852295, 0.0059361281394958495, 0.0059658241271972655, 0.005990431785583496, 0.005990367889404297, 0.0059269118309021, 0.005947391986846923, 0.005992447853088379, 0.005959680080413818, 0.005979135990142822, 0.005956607818603516, 0.005985280036926269, 0.0058388481140136715, 0.005866496086120605, 0.005858304023742676, 0.005854207992553711, 0.00588595199584961, 0.005754879951477051, 0.005703680038452149, 0.005745728015899659, 0.005769152164459229, 0.00572108793258667, 0.005761023998260498, 0.005755904197692871, 0.00566476821899414, 0.005691391944885254, 0.00566374397277832, 0.005698560237884521, 0.005679103851318359, 0.00572111988067627, 0.005751776218414307, 0.005760000228881836, 0.005773312091827393, 0.005743616104125977, 0.006003712177276611, 0.005790719985961914, 0.005789696216583252, 0.005743616104125977, 0.005769216060638428, 0.005700607776641845, 0.005667840003967285, 0.00572211217880249, 0.0056780800819396975, 0.005695487976074219, 0.005720064163208007, 0.005754879951477051, 0.005746687889099121, 0.005744639873504639, 0.006156320095062256, 0.005938144207000733, 0.0059996161460876465, 0.005959680080413818, 0.006014976024627685, 0.005952511787414551, 0.005922815799713135, 0.0058716158866882326, 0.0059351038932800295, 0.006015999794006348, 0.0059985918998718265, 0.005957695960998535, 0.006006720066070556, 0.005954559803009033, 0.006005760192871094, 0.005731328010559082, 0.005901311874389649, 0.005832704067230224, 0.0059054079055786135, 0.00586240005493164, 0.005745664119720459, 0.005666816234588623, 0.005665791988372803, 0.005709824085235596, 0.005667840003967285, 0.005718016147613526, 0.005658624172210694, 0.005805056095123291, 0.005726208209991455, 0.005735424041748047, 0.005752831935882568, 0.005746687889099121, 0.005794816017150879, 0.0057487359046936035, 0.005992447853088379, 0.005967872142791748, 0.006024191856384278, 0.005967872142791748, 0.006004735946655273, 0.005961728096008301, 0.006005760192871094, 0.005913599967956543, 0.005940224170684814, 0.0059699201583862304, 0.006005792140960693, 0.0059576001167297365, 0.005835775852203369, 0.005945343971252442, 0.005904384136199951, 
0.005844992160797119, 0.00586137580871582, 0.005788671970367432, 0.005666816234588623, 0.005684224128723145, 0.005638144016265869, 0.005681183815002442, 0.005653471946716309, 0.0057077760696411135, 0.00603545618057251, 0.005971968173980713, 0.005985280036926269, 0.0057794561386108395, 0.006020095825195313, 0.006061056137084961, 0.005987328052520752, 0.005738495826721191, 0.005784575939178467, 0.005701632022857666, 0.00572211217880249, 0.005662720203399658, 0.005678143978118897, 0.005683135986328125, 0.00566374397277832, 0.005690368175506591, 0.0056483840942382815, 0.005698560237884521, 0.00570470380783081, 0.005653503894805908, 0.005518335819244385, 0.005647424221038818, 0.005769152164459229, 0.005723135948181152, 0.005797887802124023, 0.00568832015991211, 0.005702655792236328, 0.005783552169799804, 0.005742591857910156, 0.005822463989257813, 0.0057497601509094234, 0.005792768001556397, 0.005764095783233642, 0.005752863883972168, 0.005768159866333008, 0.005758975982666016, 0.005777408123016357, 0.005758975982666016, 0.0057784318923950195, 0.005694496154785156, 0.005703648090362549, 0.005684224128723145, 0.005687359809875488, 0.005707712173461914, 0.005692416191101074, 0.005723135948181152, 0.005951519966125488, 0.005822432041168213, 0.005755904197692871, 0.005790719985961914, 0.00573747205734253, 0.005752831935882568, 0.005760000228881836, 0.005730303764343261, 0.0057784318923950195, 0.005768191814422607, 0.005857279777526855, 0.0057149438858032225, 0.005704768180847168, 0.005682112216949463, 0.00566476821899414, 0.005720064163208007, 0.005668896198272705, 0.005702655792236328, 0.005663712024688721, 0.0056780800819396975, 0.005695487976074219, 0.005658624172210694, 0.0056986560821533205, 0.005567391872406006, 0.00566476821899414, 0.005666816234588623, 0.005662720203399658, 0.005695487976074219, 0.005668896198272705, 0.005713888168334961, 0.005673984050750733, 0.005670911788940429, 0.005697535991668701, 0.005683199882507324, 0.0057077760696411135, 0.005675007820129394, 0.005687295913696289, 0.0055797758102417, 0.005752831935882568, 0.005766143798828125, 0.005731328010559082, 0.005788671970367432, 0.005752831935882568, 0.005701632022857666, 0.0056555519104003905, 0.005653503894805908, 0.005697535991668701, 0.005673984050750733, 0.005733376026153565, 0.005808127880096436, 0.00577132797241211, 0.005722047805786133, 0.006073344230651856, 0.005781504154205322, 0.005751808166503906, 0.005788671970367432, 0.0057825279235839844, 0.005751808166503906, 0.005684224128723145, 0.00572108793258667, 0.005666816234588623, 0.005686272144317627, 0.005679103851318359, 0.005659647941589355, 0.005694464206695556, 0.005769216060638428, 0.005747712135314941, 0.005755904197692871, 0.005873663902282715, 0.005703680038452149, 0.005659647941589355, 0.005726208209991455, 0.00567193603515625, 0.005692416191101074, 0.005659647941589355, 0.005724160194396972, 0.005698560237884521, 0.005667840003967285, 0.005692416191101074, 0.005665791988372803, 0.005703680038452149, 0.005734399795532226, 0.005915647983551026, 0.005879807949066162, 0.005890048027038574, 0.005921792030334472, 0.005944320201873779, 0.005884928226470947, 0.0058419198989868165, 0.005892096042633056, 0.005857279777526855, 0.00587059211730957, 0.005772287845611572, 0.005732351779937744, 0.005775360107421875, 0.005753920078277588, 0.005755839824676513, 0.005689343929290771, 0.0057118721008300784, 0.00566476821899414, 0.005575679779052735, 0.005711904048919678, 0.00568726396560669, 0.005672959804534912, 0.005656576156616211, 0.0057190399169921875, 
0.005653503894805908, 0.005661856174468994, 0.005560160160064697, 0.0056483840942382815, 0.005687295913696289, 0.005895167827606201, 0.005848063945770264, 0.005799935817718506, 0.006073344230651856, 0.006303743839263916, 0.006230016231536865, 0.006028319835662842, 0.006081503868103027, 0.0059996161460876465, 0.006039552211761475, 0.005957632064819336, 0.00602623987197876, 0.005967872142791748, 0.006021120071411133, 0.005975039958953857, 0.006000639915466309, 0.005801983833312988, 0.005729311943054199, 0.005742559909820557, 0.005831679821014404, 0.0057487359046936035, 0.005767199993133545, 0.005752799987792968, 0.006120448112487793, 0.006359039783477783, 0.006173696041107178, 0.0059955201148986816, 0.005998623847961426, 0.0059985599517822265, 0.005983232021331787, 0.005988351821899414, 0.006009856224060059, 0.005986303806304932, 0.005996543884277344, 0.005989376068115234, 0.006012928009033203, 0.005976064205169678, 0.006005760192871094, 0.005946368217468262, 0.005907455921173096, 0.006000639915466309, 0.006012928009033203, 0.005970943927764893, 0.005987328052520752, 0.005979135990142822, 0.005988351821899414, 0.005992447853088379, 0.005987328052520752, 0.006004735946655273, 0.005990399837493897, 0.005923840045928955, 0.005866496086120605, 0.006487040042877197, 0.006783999919891357, 0.0063498239517211915, 0.006260735988616943, 0.0064245758056640625, 0.0060631041526794435, 0.006076416015625, 0.00601804780960083, 0.006045695781707764, 0.006005760192871094, 0.00601907205581665, 0.0059985918998718265, 0.006013951778411865, 0.005882880210876465, 0.005899263858795166, 0.005931007862091065, 0.005880832195281982, 0.005992447853088379, 0.005983232021331787, 0.00602623987197876, 0.006001664161682129, 0.006001664161682129, 0.005970943927764893, 0.005957632064819336, 0.005760000228881836, 0.005705728054046631, 0.005667840003967285, 0.005643263816833496, 0.0056269440650939945, 0.005862336158752441, 0.005718016147613526, 0.005746687889099121, 0.005683199882507324, 0.005750783920288086, 0.00582041597366333, 0.005760064125061035, 0.0058111357688903804, 0.005751840114593506, 0.005757919788360596, 0.005679103851318359, 0.00572211217880249, 0.005684224128723145, 0.005761023998260498, 0.00577023983001709, 0.005789696216583252, 0.00577023983001709, 0.005815296173095703, 0.005718016147613526, 0.00571289587020874, 0.005670911788940429, 0.005731328010559082, 0.005656576156616211, 0.0057487359046936035, 0.005710847854614258, 0.005698560237884521, 0.005913599967956543, 0.006072319984436035, 0.00591871976852417, 0.005904384136199951, 0.005874688148498535, 0.005840896129608154, 0.0059054079055786135, 0.0058388481140136715, 0.005574656009674072, 0.005761023998260498, 0.005776383876800537, 0.0057487359046936035, 0.00576204776763916, 0.005724192142486572, 0.005719007968902588, 0.005751808166503906, 0.005757952213287353, 0.005815296173095703, 0.005746687889099121, 0.005815296173095703, 0.005744639873504639, 0.005947391986846923, 0.0059770879745483394, 0.006030335903167725, 0.0058787841796875, 0.005948416233062744, 0.005880832195281982, 0.00602623987197876, 0.005982207775115967, 0.0060067839622497555, 0.005963776111602783, 0.006005760192871094, 0.005982207775115967, 0.006046720027923584, 0.005993472099304199, 0.006023168087005615, 0.0059688959121704105, 0.006030335903167725, 0.005982207775115967, 0.006082560062408447, 0.006008831977844238, 0.006078464031219482, 0.005997568130493164, 0.0060364799499511715, 0.006014976024627685, 0.006014976024627685, 0.0059064321517944334, 0.005798912048339844, 0.005691391944885254, 
0.005674079895019531, 0.005718944072723389, 0.005666816234588623, 0.005742591857910156, 0.005656576156616211, 0.00572108793258667, 0.005679103851318359, 0.005775519847869873, 0.005739359855651855, 0.005776383876800537, 0.0057784318923950195, 0.0057784318923950195, 0.005777440071105957, 0.005990367889404297, 0.006031360149383545, 0.005943295955657959, 0.005888000011444092, 0.00586137580871582, 0.005892096042633056, 0.005815296173095703, 0.005889023780822754, 0.006025216102600098, 0.005763072013854981, 0.005974016189575195, 0.005985280036926269, 0.005976064205169678, 0.005974016189575195, 0.005980160236358643, 0.005975039958953857, 0.005981184005737304, 0.005962751865386963, 0.005975039958953857, 0.005990431785583496, 0.006032351970672607, 0.0059955201148986816, 0.006049791812896729, 0.005942272186279297, 0.006031360149383545, 0.006280191898345947, 0.00637440013885498, 0.006182911872863769, 0.0059391999244689945, 0.005975039958953857, 0.00601804780960083, 0.006034431934356689, 0.005990399837493897, 0.006012928009033203, 0.006033408164978027, 0.005994495868682862, 0.0059351038932800295, 0.006004735946655273, 0.006012928009033203, 0.006015999794006348, 0.005990399837493897, 0.00591871976852417, 0.0058787841796875, 0.005910528182983398, 0.006045695781707764, 0.006001664161682129, 0.005876736164093017, 0.005876736164093017, 0.005819392204284668, 0.005718016147613526, 0.005807104110717773, 0.005763072013854981, 0.005798912048339844, 0.005740543842315674, 0.00587059211730957, 0.005830656051635743, 0.005733376026153565, 0.005691455841064453, 0.0056954240798950196, 0.005706783771514893, 0.005700575828552246, 0.005753856182098388, 0.005689343929290771, 0.005804031848907471, 0.00576204776763916, 0.0057784318923950195, 0.0058009600639343266, 0.005798912048339844, 0.005728256225585937, 0.005708799839019775, 0.005751808166503906, 0.005691391944885254, 0.005610496044158936, 0.005773312091827393, 0.0057784318923950195, 0.005769216060638428, 0.005750912189483643, 0.005679999828338623, 0.005743616104125977, 0.005673984050750733, 0.0057077760696411135, 0.005672959804534912, 0.005735424041748047, 0.00566476821899414, 0.005713920116424561, 0.005753856182098388, 0.005764095783233642, 0.005776383876800537, 0.005725183963775635, 0.005746687889099121, 0.005673984050750733, 0.005755904197692871, 0.005716991901397705, 0.005788671970367432, 0.005731328010559082, 0.005772287845611572, 0.005785600185394287, 0.005766143798828125, 0.005698560237884521, 0.005703743934631348, 0.0056943998336792, 0.005693439960479736, 0.005650432109832764, 0.005703680038452149, 0.005694464206695556, 0.0057118721008300784, 0.006056960105895996, 0.0058429441452026365, 0.005773312091827393, 0.0058122239112854005, 0.005790719985961914, 0.005784607887268066, 0.005783520221710205, 0.005739520072937012, 0.005784575939178467, 0.005754879951477051, 0.005808127880096436, 0.005755904197692871, 0.005814271926879883, 0.005743616104125977, 0.005985407829284668, 0.005675903797149658, 0.0057497601509094234, 0.005822527885437012, 0.005755839824676513, 0.005795839786529541, 0.005751808166503906, 0.005730303764343261, 0.005684224128723145, 0.005700607776641845, 0.005696512222290039, 0.005706751823425293, 0.00568832015991211, 0.005700607776641845, 0.005695487976074219, 0.005588992118835449, 0.005788671970367432, 0.005751840114593506, 0.005758944034576416, 0.005751808166503906, 0.005738495826721191, 0.005669888019561768, 0.00568012809753418, 0.005672959804534912, 0.005706751823425293, 0.005670911788940429, 0.005807104110717773, 0.005764128208160401, 
0.0057671360969543456, 0.005773312091827393, 0.005768191814422607, 0.0057712640762329105, 0.005742591857910156, 0.005785600185394287, 0.005683199882507324, 0.005738495826721191, 0.00572108793258667, 0.005727231979370117, 0.005683199882507324, 0.005764095783233642, 0.005772287845611572, 0.005764095783233642, 0.005781504154205322, 0.005652480125427246, 0.005732351779937744, 0.005733376026153565, 0.005797887802124023, 0.005750783920288086, 0.005708799839019775, 0.005703680038452149, 0.005705728054046631, 0.005721151828765869, 0.00580294418334961, 0.005806079864501953, 0.005800992012023926, 0.005705696105957031, 0.005915647983551026, 0.005959680080413818, 0.006071296215057373, 0.00587775993347168, 0.005889023780822754, 0.00566476821899414, 0.005736447811126709, 0.005744639873504639, 0.005769216060638428, 0.0057487359046936035, 0.005766208171844482, 0.00577836799621582, 0.005753856182098388, 0.0057784318923950195, 0.005731359958648681, 0.005759967803955078, 0.005733376026153565, 0.005756927967071533, 0.005732351779937744, 0.005740543842315674, 0.0057794561386108395, 0.005780479907989502, 0.005554175853729248, 0.005757952213287353, 0.0062863359451293946, 0.006039552211761475, 0.006025216102600098, 0.005778495788574219, 0.005877696037292481, 0.005691391944885254, 0.005825535774230957, 0.00573747205734253, 0.0057118721008300784, 0.005715968132019043, 0.005718016147613526, 0.005710847854614258, 0.005680160045623779, 0.005748703956604004, 0.0057118721008300784, 0.005695487976074219, 0.005703680038452149, 0.005724160194396972, 0.005707808017730713, 0.005755936145782471, 0.005767104148864746, 0.005695487976074219, 0.005677120208740234, 0.0057363839149475095, 0.005848063945770264, 0.005826560020446778, 0.005765120029449463, 0.005796864032745362, 0.005683199882507324, 0.005793791770935058, 0.005690368175506591, 0.005669888019561768, 0.005753856182098388, 0.005694464206695556, 0.005745664119720459, 0.005677055835723877, 0.0057497920989990235, 0.005697504043579101, 0.005731328010559082, 0.005695487976074219, 0.005822463989257813, 0.005718016147613526, 0.005695487976074219, 0.00572108793258667, 0.005689343929290771, 0.005659679889678955, 0.005713888168334961, 0.00567193603515625, 0.005700607776641845, 0.005576704025268555, 0.00567193603515625, 0.005720064163208007, 0.0057292799949646, 0.005754879951477051, 0.005676032066345215, 0.0057292799949646, 0.005692416191101074, 0.00572108793258667, 0.005694464206695556, 0.005708799839019775, 0.005705728054046631, 0.00553984022140503, 0.0058091521263122555, 0.005742591857910156, 0.0057794561386108395, 0.005747712135314941, 0.005756927967071533, 0.005768191814422607, 0.005736447811126709, 0.005690368175506591, 0.005666816234588623, 0.005706751823425293, 0.005645311832427978, 0.005687295913696289, 0.005864448070526123, 0.005786623954772949, 0.005772287845611572, 0.005765120029449463, 0.005796864032745362, 0.005718016147613526, 0.0057118721008300784, 0.005693439960479736, 0.00567193603515625, 0.005715968132019043, 0.005668863773345947, 0.005715968132019043, 0.005666816234588623, 0.005760000228881836, 0.005772287845611572, 0.005682176113128662, 0.005849088191986084, 0.005603328227996827, 0.005666816234588623, 0.005668863773345947, 0.005681151866912842, 0.005702655792236328, 0.005696512222290039, 0.005673984050750733, 0.0056852478981018065, 0.005692416191101074, 0.005695487976074219, 0.005736447811126709, 0.005804031848907471, 0.005742591857910156, 0.005777408123016357, 0.005799935817718506, 0.005698560237884521, 0.005694464206695556, 0.005691391944885254, 
0.005756927967071533, 0.005675007820129394, 0.005734399795532226, 0.0056555519104003905, 0.005730303764343261, 0.00568012809753418, 0.005696544170379639, 0.00568828821182251, 0.005689343929290771, 0.005756927967071533, 0.00572211217880249, 0.005775360107421875, 0.005736447811126709, 0.005788671970367432, 0.005735424041748047, 0.005615615844726562, 0.0057825279235839844, 0.005766143798828125, 0.005803008079528809, 0.005744639873504639, 0.005829631805419922, 0.005757952213287353, 0.005728256225585937, 0.00568832015991211, 0.005693439960479736, 0.005617663860321045, 0.005747712135314941, 0.005711904048919678, 0.005793759822845459, 0.005882880210876465, 0.00592793607711792, 0.00597811222076416, 0.006007808208465576, 0.005980160236358643, 0.006004735946655273, 0.005984255790710449, 0.0059996161460876465, 0.005812255859375, 0.005811168193817139, 0.005795839786529541, 0.005792768001556397, 0.0058122239112854005, 0.005796864032745362, 0.005776383876800537, 0.005938176155090332, 0.006038527965545654, 0.005893119812011719, 0.00591871976852417, 0.0058757119178771975, 0.0058429441452026365, 0.005698560237884521, 0.00576204776763916, 0.005676032066345215, 0.005755904197692871, 0.005686272144317627, 0.005746687889099121, 0.005703680038452149, 0.005705728054046631, 0.0057190399169921875, 0.005726208209991455, 0.005723135948181152, 0.005665791988372803, 0.0057487359046936035, 0.005687295913696289, 0.005852159976959229, 0.005740608215332031, 0.005741504192352295, 0.006015999794006348, 0.006020095825195313, 0.006045695781707764, 0.005823488235473633, 0.0058419198989868165, 0.005744639873504639, 0.005745664119720459, 0.005745664119720459, 0.00576204776763916, 0.005819392204284668, 0.005703680038452149, 0.005620736122131348, 0.005826560020446778, 0.005739520072937012, 0.0057118721008300784, 0.005710847854614258, 0.005697535991668701, 0.006000639915466309, 0.005996543884277344, 0.006025216102600098, 0.005985280036926269, 0.005962751865386963, 0.0058757119178771975, 0.005896192073822021, 0.005950496196746826, 0.005920735836029053, 0.005868544101715088, 0.00567193603515625, 0.005709824085235596, 0.005702655792236328, 0.005710847854614258, 0.005709824085235596, 0.005773312091827393, 0.005825535774230957, 0.005732351779937744, 0.005773312091827393, 0.00568012809753418, 0.005746687889099121, 0.005697535991668701, 0.005803008079528809, 0.005752831935882568, 0.005786623954772949, 0.005708799839019775, 0.00561356782913208, 0.005676032066345215, 0.00568832015991211, 0.00576204776763916, 0.005683199882507324, 0.005702655792236328, 0.0057292799949646, 0.005690368175506591, 0.005700607776641845, 0.005691391944885254, 0.005700607776641845, 0.005698560237884521, 0.005662720203399658, 0.0057292799949646, 0.005662720203399658, 0.005730303764343261, 0.005700607776641845, 0.005707808017730713, 0.005668831825256348, 0.005662720203399658, 0.005727231979370117, 0.005660672187805176, 0.0057118721008300784, 0.005697535991668701, 0.005724160194396972, 0.005669888019561768, 0.005687295913696289, 0.006012928009033203, 0.005766143798828125, 0.0057190399169921875, 0.005683199882507324, 0.005604351997375488, 0.005736447811126709, 0.005794816017150879, 0.005742591857910156, 0.005760000228881836, 0.005765120029449463, 0.005761023998260498, 0.005743680000305176, 0.0056954240798950196, 0.005863423824310303, 0.00576204776763916, 0.005694464206695556, 0.005708799839019775, 0.00573747205734253, 0.005716991901397705, 0.00568832015991211, 0.005683199882507324, 0.0056852478981018065, 0.005656576156616211, 0.005734399795532226, 0.00568832015991211, 
0.005731391906738281, 0.005655488014221191, 0.0057077760696411135, 0.00566476821899414, 0.0057118721008300784, 0.005705728054046631, 0.005665791988372803, 0.00572108793258667, 0.005677055835723877, 0.0057784318923950195, 0.005761023998260498, 0.005723135948181152, 0.00573747205734253, 0.005720064163208007, 0.005697535991668701, 0.005698560237884521, 0.005715968132019043, 0.005709824085235596, 0.005731328010559082, 0.005792768001556397, 0.005731328010559082, 0.005796864032745362, 0.00568012809753418, 0.005687327861785889, 0.0057077441215515135, 0.005669888019561768, 0.005807104110717773, 0.005757952213287353, 0.005990399837493897, 0.005949440002441406, 0.006000639915466309, 0.005938176155090332, 0.006045695781707764, 0.005895167827606201, 0.005750783920288086, 0.005708896160125733, 0.005798816204071045, 0.0057487678527832035, 0.0057702078819274905, 0.005699584007263184, 0.005716991901397705, 0.005715968132019043, 0.005692416191101074, 0.006049791812896729, 0.005963776111602783, 0.006047743797302246, 0.005945343971252442, 0.006048768043518066, 0.005945343971252442, 0.006048768043518066, 0.005920767784118652, 0.006030335903167725, 0.005860352039337159, 0.005929984092712402, 0.0059361281394958495, 0.00607539176940918, 0.005983232021331787, 0.006048768043518066, 0.005953536033630371, 0.006062079906463623, 0.005948416233062744, 0.006137856006622314, 0.00601087999343872, 0.006015999794006348, 0.005856256008148194, 0.005917695999145508, 0.005888000011444092, 0.005923840045928955, 0.005909503936767578, 0.006004735946655273, 0.005981184005737304, 0.006013951778411865, 0.005924863815307617, 0.005895167827606201, 0.00591974401473999, 0.005827583789825439, 0.0060282878875732426, 0.005949440002441406, 0.006102015972137451, 0.0064102401733398436, 0.006411263942718506, 0.006145023822784424, 0.006050816059112549, 0.006007808208465576, 0.006053887844085694, 0.006017024040222168, 0.006043647766113281, 0.006015999794006348, 0.006000639915466309, 0.005990399837493897, 0.006023168087005615, 0.006194176197052002, 0.006048768043518066, 0.005940224170684814, 0.00586137580871582, 0.0058419198989868165, 0.005883903980255127, 0.005865471839904785, 0.005924863815307617, 0.005910592079162598, 0.00593503999710083, 0.005900288105010986, 0.005925888061523437, 0.005916672229766846, 0.00603545618057251]",tokens/s,171.46718939347417,,,1,64,1,,, -4bit-awq-exllama-v1-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,meta-llama/Llama-2-13b-hf,meta-llama/Llama-2-13b-hf,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent 
call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - self.load_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3907, in from_pretrained - hf_quantizer.postprocess_model(model) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/base.py"", line 195, in postprocess_model - return self._process_model_after_weight_loading(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/quantizer_awq.py"", line 107, in _process_model_after_weight_loading - model = post_init_awq_exllama_modules(model, self.quantization_config.exllama_config) - File ""/usr/local/lib/python3.10/dist-packages/transformers/integrations/awq.py"", line 462, in post_init_awq_exllama_modules - model = exllama_post_init(model) - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllama.py"", line 144, in exllama_post_init - submodule.post_init() - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllama.py"", line 77, in post_init - self.q4 = exl_ext.make_q4( -NameError: name 'exl_ext' is not defined - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,1,64,1,,, -4bit-awq-exllama-v1-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,x,x,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 304, in hf_raise_for_status - response.raise_for_status() - File ""/usr/local/lib/python3.10/dist-packages/requests/models.py"", line 1024, in raise_for_status - raise HTTPError(http_error_msg, response=self) -requests.exceptions.HTTPError: 404 Client Error: Not Found for url: 
https://huggingface.co/x/resolve/main/config.json - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1221, in hf_hub_download - return _hf_hub_download_to_cache_dir( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1325, in _hf_hub_download_to_cache_dir - _raise_on_head_call_error(head_call_error, force_download, local_files_only) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1823, in _raise_on_head_call_error - raise head_call_error - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1722, in _get_metadata_or_catch_error - metadata = get_hf_file_metadata(url=url, proxies=proxies, timeout=etag_timeout, headers=headers) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1645, in get_hf_file_metadata - r = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 372, in _request_wrapper - response = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 396, in _request_wrapper - hf_raise_for_status(response) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status - raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-669492b1-3921fa19586c45085fc01876;2d3b220d-7f3c-4e7f-9c37-ec7b22bfa61a) - -Repository Not Found for url: https://huggingface.co/x/resolve/main/config.json. -Please make sure you specified the correct `repo_id` and `repo_type`. -If you are trying to access a private or gated repo, make sure you are authenticated. 
- -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 425, in cached_file - raise EnvironmentError( -OSError: x is not a local folder and is not a valid model identifier listed on 'https://huggingface.co/models' -If this is a private repository, make sure to pass a token having permission to this repo either by logging in with `huggingface-cli login` or by passing `token=` - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,1,64,1,,, -4bit-awq-exllama-v1-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,/,/,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - 
self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 373, in cached_file - raise EnvironmentError( -OSError: / does not appear to have a file named config.json. Checkout 'https://huggingface.co///tree/None' for available files. - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,1,64,1,,, -4bit-awq-exllama-v1-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,togethercomputer/RedPajama-INCITE-Base-7B-v0.1,togethercomputer/RedPajama-INCITE-Base-7B-v0.1,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - self.load_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3907, in from_pretrained - hf_quantizer.postprocess_model(model) - File 
""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/base.py"", line 195, in postprocess_model - return self._process_model_after_weight_loading(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/quantizer_awq.py"", line 107, in _process_model_after_weight_loading - model = post_init_awq_exllama_modules(model, self.quantization_config.exllama_config) - File ""/usr/local/lib/python3.10/dist-packages/transformers/integrations/awq.py"", line 462, in post_init_awq_exllama_modules - model = exllama_post_init(model) - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllama.py"", line 144, in exllama_post_init - submodule.post_init() - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllama.py"", line 77, in post_init - self.q4 = exl_ext.make_q4( -NameError: name 'exl_ext' is not defined - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,1,64,1,,, -4bit-awq-exllama-v1-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,facebook/xglm-564M,facebook/xglm-564M,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - self.load_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3907, in from_pretrained - hf_quantizer.postprocess_model(model) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/base.py"", line 195, in postprocess_model - return self._process_model_after_weight_loading(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/quantizer_awq.py"", line 107, in _process_model_after_weight_loading - model = 
post_init_awq_exllama_modules(model, self.quantization_config.exllama_config) - File ""/usr/local/lib/python3.10/dist-packages/transformers/integrations/awq.py"", line 462, in post_init_awq_exllama_modules - model = exllama_post_init(model) - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllama.py"", line 144, in exllama_post_init - submodule.post_init() - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllama.py"", line 77, in post_init - self.q4 = exl_ext.make_q4( -NameError: name 'exl_ext' is not defined - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,1,64,1,,, -4bit-awq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,EleutherAI/gpt-neo-125m,EleutherAI/gpt-neo-125m,cuda,0,42,,,True,,,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,gpt_neo,MB,892.997632,851.968,0.0,222.298112,199.93344,s,1,7.457958984375,7.457958984375,0.0,7.457958984375,7.457958984375,7.457958984375,7.457958984375,[7.457958984375],,kWh,6.575278239574801e-06,3.5877671099892042e-06,9.905285701994337e-06,2.0068331051558344e-05,,MB,1484.57472,908.591104,0.0,262.144,220.883456,s,18,0.39692291069030766,0.0220512728161282,4.760562240820497e-05,0.022047327995300292,0.02211508102416992,0.022132221984863282,0.022134681091308596,"[0.02201759910583496, 0.022135295867919923, 0.022007455825805666, 0.02209868812561035, 0.022082015991210936, 0.022007295608520508, 0.021976480484008788, 0.022035680770874023, 0.022017248153686525, 0.022080192565917967, 0.022107967376708983, 0.022055776596069335, 0.022081279754638673, 0.021973983764648437, 0.022039775848388673, 0.022054880142211915, 0.02213167953491211, 0.022019615173339845]",tokens/s,11609.307187599747,kWh,2.596236279304051e-07,1.4226063089938357e-07,1.3015193562329394e-06,1.7034036150627278e-06,tokens/kWh,150287341.02491194,MB,1529.856,925.36832,0.0,276.824064,220.886016,s,18,10.064231689453123,0.5591239827473957,0.007700323302095533,0.5602023620605469,0.5692203430175781,0.5702013641357422,0.5702080462646485,"[0.562080322265625, 0.5643646850585937, 0.5678374633789063, 0.5488248901367188, 0.5486480102539063, 0.54913427734375, 0.5701998901367188, 0.5474310302734375, 0.55794287109375, 0.5623279418945313, 0.570209716796875, 0.5482300415039062, 0.5583244018554687, 0.562546630859375, 0.5567232055664062, 0.568800537109375, 0.5627931518554687, 0.5578126220703125]",tokens/s,112.67626133730434,kWh,6.556157990857928e-06,3.5924805854408392e-06,1.1293182077546977e-05,2.144182065384574e-05,tokens/kWh,2938183.3295345893,,s,1134,10.056579080581665,0.00886823552079512,0.00024967952660023916,0.008747008323669434,0.009103360176086426,0.009140224456787109,0.009635041131973273,"[0.00831488037109375, 0.008679424285888672, 0.008689663887023925, 0.008660991668701172, 0.008690719604492187, 0.00866812801361084, 0.008696831703186036, 0.00867743968963623, 0.008681407928466797, 0.008687616348266602, 0.008744959831237792, 0.008764415740966798, 0.008706048011779785, 0.008744031906127929, 0.008696736335754395, 0.00871833610534668, 
0.008778752326965332, 0.008695839881896972, 0.008696800231933593, 0.008699904441833496, 0.008682496070861816, 0.00871014404296875, 0.008731648445129395, 0.008714240074157715, 0.008705023765563966, 0.00869478416442871, 0.00870195198059082, 0.009589759826660157, 0.00912179183959961, 0.009072640419006347, 0.00910643196105957, 0.009038847923278808, 0.008996864318847657, 0.0090316801071167, 0.009099264144897461, 0.009074687957763672, 0.009029631614685058, 0.008992768287658692, 0.009067520141601563, 0.009042943954467773, 0.009035840034484863, 0.009080767631530762, 0.009142271995544434, 0.009092096328735352, 0.009051136016845703, 0.009058303833007812, 0.009137151718139648, 0.009003007888793945, 0.009053183555603026, 0.00903270435333252, 0.009120767593383788, 0.00909823989868164, 0.009209856033325196, 0.009116671562194823, 0.009068575859069825, 0.009074655532836914, 0.009104448318481445, 0.009071552276611328, 0.009055232048034668, 0.008969216346740723, 0.009064448356628419, 0.009052160263061524, 0.009057279586791991, 0.008836095809936523, 0.009034751892089844, 0.009143296241760255, 0.00908902359008789, 0.009047039985656738, 0.008999936103820801, 0.00902143955230713, 0.009152511596679687, 0.009057279586791991, 0.009033727645874023, 0.009085951805114746, 0.009072640419006347, 0.009043968200683594, 0.009069567680358886, 0.009064448356628419, 0.009094143867492676, 0.009081855773925781, 0.00909004783630371, 0.00981503963470459, 0.009449472427368164, 0.00909004783630371, 0.008996864318847657, 0.008975359916687011, 0.009074687957763672, 0.00912281608581543, 0.009101311683654785, 0.00906550407409668, 0.00905519962310791, 0.009192447662353515, 0.0095283203125, 0.009182208061218262, 0.009074687957763672, 0.009068544387817384, 0.009050111770629882, 0.009078783988952637, 0.009065471649169921, 0.009107456207275391, 0.009043968200683594, 0.008922112464904786, 0.00872652816772461, 0.008742912292480469, 0.00872652816772461, 0.008698944091796876, 0.00871110439300537, 0.008697855949401855, 0.008668160438537598, 0.008671232223510742, 0.00870809555053711, 0.008689663887023925, 0.008655872344970703, 0.008631296157836914, 0.008689663887023925, 0.008672256469726563, 0.008699904441833496, 0.008729599952697753, 0.008788991928100585, 0.008655872344970703, 0.008737792015075683, 0.008675328254699707, 0.00871014404296875, 0.008680447578430176, 0.008759296417236329, 0.008742912292480469, 0.008436736106872558, 0.00871833610534668, 0.008725503921508788, 0.008732704162597657, 0.008703968048095704, 0.008722432136535644, 0.008725503921508788, 0.009178175926208496, 0.009421759605407715, 0.009398271560668945, 0.009327615737915039, 0.009682944297790527, 0.008848383903503418, 0.009086976051330567, 0.00911359977722168, 0.009160703659057617, 0.009164799690246582, 0.009045023918151856, 0.00909001636505127, 0.009192447662353515, 0.009092096328735352, 0.009005056381225587, 0.00902451229095459, 0.009059328079223633, 0.00901734447479248, 0.009042943954467773, 0.009033727645874023, 0.00910848045349121, 0.00902348804473877, 0.009058303833007812, 0.009118720054626465, 0.009042943954467773, 0.00903270435333252, 0.00903987216949463, 0.008901632308959961, 0.008785920143127441, 0.008705023765563966, 0.008688639640808106, 0.008657919883728027, 0.008689663887023925, 0.008712191581726075, 0.008665120124816895, 0.00878281593322754, 0.008728575706481934, 0.0087326717376709, 0.009809920310974121, 0.009198592185974122, 0.009140224456787109, 0.00912384033203125, 0.009063424110412598, 0.009040896415710448, 0.009034751892089844, 0.00912179183959961, 
0.00901632022857666, 0.009051136016845703, 0.00901632022857666, 0.009054207801818847, 0.009037823677062988, 0.009250816345214843, 0.00909823989868164, 0.009048064231872559, 0.00902451229095459, 0.009052160263061524, 0.008452095985412598, 0.008764415740966798, 0.00870911979675293, 0.008657983779907227, 0.008680383682250976, 0.008730624198913574, 0.008675328254699707, 0.008675328254699707, 0.008688639640808106, 0.008753151893615722, 0.008677375793457032, 0.008688672065734863, 0.008775648117065429, 0.00871014404296875, 0.008743935585021973, 0.008740863800048827, 0.008739839553833008, 0.008744959831237792, 0.008691712379455567, 0.008730624198913574, 0.008728575706481934, 0.008714240074157715, 0.008686592102050781, 0.00871014404296875, 0.008697855949401855, 0.008717311859130859, 0.008672256469726563, 0.008668160438537598, 0.00871628761291504, 0.008713215827941894, 0.008687616348266602, 0.008757247924804687, 0.008582143783569337, 0.008705023765563966, 0.008728575706481934, 0.008733695983886718, 0.008692768096923828, 0.008696800231933593, 0.008697855949401855, 0.008729599952697753, 0.008777728080749512, 0.008707072257995606, 0.008729599952697753, 0.008712191581726075, 0.008734720230102539, 0.008734720230102539, 0.00872447967529297, 0.008612863540649414, 0.008608768463134766, 0.008581119537353516, 0.008631296157836914, 0.00859340763092041, 0.008680447578430176, 0.00871014404296875, 0.009095168113708496, 0.008797183990478515, 0.008687616348266602, 0.008753151893615722, 0.00870297622680664, 0.008697855949401855, 0.00878816032409668, 0.008599360466003418, 0.008676351547241211, 0.00851251220703125, 0.008715264320373535, 0.008672256469726563, 0.008683520317077637, 0.008734720230102539, 0.008699904441833496, 0.008692735671997071, 0.008691712379455567, 0.008745984077453613, 0.008737792015075683, 0.008766464233398438, 0.008697855949401855, 0.008674304008483886, 0.008688639640808106, 0.00882380771636963, 0.00872447967529297, 0.008794112205505371, 0.008723456382751465, 0.008713215827941894, 0.008688639640808106, 0.008662015914916991, 0.008720383644104004, 0.00876035213470459, 0.008712160110473632, 0.00861184024810791, 0.008869888305664063, 0.008686592102050781, 0.008696831703186036, 0.008683520317077637, 0.008700927734375, 0.008700927734375, 0.008674304008483886, 0.008728575706481934, 0.008692735671997071, 0.008686592102050781, 0.008562687873840333, 0.008604672431945801, 0.008557567596435547, 0.008565759658813477, 0.008627200126647949, 0.00861695957183838, 0.008695808410644532, 0.008696831703186036, 0.008769536018371582, 0.008715264320373535, 0.008742912292480469, 0.008783871650695801, 0.008812543869018554, 0.008712191581726075, 0.008747008323669434, 0.008748031616210938, 0.008705023765563966, 0.008741888046264648, 0.008553471565246582, 0.008670207977294921, 0.008700927734375, 0.008748064041137696, 0.008689632415771484, 0.008771583557128907, 0.00872652816772461, 0.00872755241394043, 0.008715264320373535, 0.008757247924804687, 0.008470527648925781, 0.008764415740966798, 0.008704000473022461, 0.008736767768859864, 0.008803327560424805, 0.0087193603515625, 0.008768511772155761, 0.0087193603515625, 0.00872652816772461, 0.008755200386047364, 0.008741888046264648, 0.008739839553833008, 0.008741888046264648, 0.008751104354858399, 0.008748031616210938, 0.008747008323669434, 0.008766464233398438, 0.008704095840454102, 0.008713120460510254, 0.00871116828918457, 0.008737792015075683, 0.008704000473022461, 0.008744959831237792, 0.00860262393951416, 0.008576000213623047, 0.00863644790649414, 0.00868348789215088, 
0.008687616348266602, 0.008749055862426757, 0.008692735671997071, 0.0087193603515625, 0.008650752067565918, 0.008744959831237792, 0.00870297622680664, 0.008753151893615722, 0.008738816261291504, 0.00870297622680664, 0.008691712379455567, 0.008956928253173829, 0.00879308795928955, 0.008762368202209473, 0.008760319709777833, 0.00869375991821289, 0.00868556785583496, 0.008676351547241211, 0.00862003231048584, 0.008714240074157715, 0.008738816261291504, 0.008695808410644532, 0.008684543609619141, 0.008675328254699707, 0.008731648445129395, 0.008722432136535644, 0.008639488220214844, 0.00868556785583496, 0.008683520317077637, 0.008684543609619141, 0.008649727821350098, 0.008644607543945313, 0.008664064407348633, 0.00861184024810791, 0.008749055862426757, 0.008731648445129395, 0.008451071739196778, 0.008675328254699707, 0.00873574447631836, 0.008730624198913574, 0.008697855949401855, 0.008674304008483886, 0.008671232223510742, 0.008652799606323243, 0.008679424285888672, 0.008728575706481934, 0.008752127647399903, 0.008700927734375, 0.008721407890319824, 0.008967167854309082, 0.008904704093933105, 0.009187328338623046, 0.009356287956237793, 0.009154560089111329, 0.009060352325439454, 0.009110527992248535, 0.009104448318481445, 0.009124799728393555, 0.009020416259765626, 0.009063455581665039, 0.009119711875915527, 0.00910643196105957, 0.009095168113708496, 0.009752575874328612, 0.010116095542907716, 0.009757696151733398, 0.009220095634460449, 0.009152511596679687, 0.00910540771484375, 0.00921292781829834, 0.009092096328735352, 0.009043968200683594, 0.00898252773284912, 0.009052224159240722, 0.009059264183044433, 0.009069567680358886, 0.009124863624572753, 0.009051136016845703, 0.009075712203979493, 0.009022496223449706, 0.009028575897216796, 0.009063424110412598, 0.009033727645874023, 0.009047039985656738, 0.009112575531005859, 0.00903987216949463, 0.009029631614685058, 0.009076736450195312, 0.009099264144897461, 0.009076736450195312, 0.0090316801071167, 0.009142271995544434, 0.009140224456787109, 0.00910540771484375, 0.009194496154785157, 0.009116671562194823, 0.009119744300842286, 0.009070591926574707, 0.009101311683654785, 0.008458239555358887, 0.008742912292480469, 0.00873574447631836, 0.00870297622680664, 0.008706048011779785, 0.00870195198059082, 0.00871628761291504, 0.008767487525939942, 0.008640512466430664, 0.00871833610534668, 0.008704000473022461, 0.008692735671997071, 0.008690688133239746, 0.008725503921508788, 0.008740863800048827, 0.008684543609619141, 0.008707072257995606, 0.008670207977294921, 0.008683520317077637, 0.008901632308959961, 0.008670207977294921, 0.008728575706481934, 0.008669183731079102, 0.008674304008483886, 0.00870297622680664, 0.008653823852539062, 0.008682496070861816, 0.00859545612335205, 0.008677375793457032, 0.008566783905029298, 0.008557567596435547, 0.008552448272705078, 0.008583168029785156, 0.008679424285888672, 0.008662015914916991, 0.008647680282592773, 0.008643584251403809, 0.008676351547241211, 0.008696831703186036, 0.00869375991821289, 0.008640576362609863, 0.008707008361816405, 0.008674304008483886, 0.008714240074157715, 0.008673279762268067, 0.008739839553833008, 0.00868556785583496, 0.008674304008483886, 0.008711199760437011, 0.00866198444366455, 0.00870195198059082, 0.008676351547241211, 0.008654848098754882, 0.008655872344970703, 0.008692735671997071, 0.008678400039672851, 0.008597503662109375, 0.008872960090637207, 0.008683520317077637, 0.008632320404052735, 0.00879923152923584, 0.008664064407348633, 0.008720383644104004, 0.008444928169250488, 
0.008706048011779785, 0.008686592102050781, 0.008744959831237792, 0.008670207977294921, 0.00871833610534668, 0.008675328254699707, 0.008665151596069336, 0.00870905590057373, 0.00870195198059082, 0.008690688133239746, 0.00871014404296875, 0.00869375991821289, 0.0087326717376709, 0.008706048011779785, 0.008706080436706542, 0.00866915225982666, 0.008747008323669434, 0.008660991668701172, 0.008657919883728027, 0.008669183731079102, 0.008691712379455567, 0.008689663887023925, 0.008713215827941894, 0.008670207977294921, 0.008713215827941894, 0.00869478416442871, 0.008675328254699707, 0.008682496070861816, 0.008700927734375, 0.008678400039672851, 0.008676351547241211, 0.00870297622680664, 0.00870809555053711, 0.00894156837463379, 0.00901529598236084, 0.009048064231872559, 0.009044992446899413, 0.009082880020141602, 0.009059328079223633, 0.009059328079223633, 0.009120767593383788, 0.009120767593383788, 0.009037823677062988, 0.00910540771484375, 0.00903270435333252, 0.008992768287658692, 0.009029631614685058, 0.008879103660583497, 0.009020416259765626, 0.009102335929870605, 0.009011199951171875, 0.009020416259765626, 0.009109503746032714, 0.009035776138305664, 0.009005056381225587, 0.009055232048034668, 0.009052160263061524, 0.00908083152770996, 0.009034751892089844, 0.008999936103820801, 0.009051136016845703, 0.00900710391998291, 0.008450048446655273, 0.008757247924804687, 0.009083904266357423, 0.00892518424987793, 0.009030719757080079, 0.009002943992614747, 0.009040896415710448, 0.009033727645874023, 0.009046015739440917, 0.009020416259765626, 0.008903679847717285, 0.008980480194091797, 0.009046015739440917, 0.009050111770629882, 0.009048064231872559, 0.009053183555603026, 0.009075712203979493, 0.009079808235168458, 0.009029696464538575, 0.008751071929931641, 0.008756192207336425, 0.008670207977294921, 0.008681471824645997, 0.008666111946105956, 0.008656895637512207, 0.0087193603515625, 0.008691712379455567, 0.00870911979675293, 0.008705023765563966, 0.008689663887023925, 0.008689727783203125, 0.008686528205871583, 0.008699904441833496, 0.008671232223510742, 0.008663040161132812, 0.008648703575134278, 0.00868556785583496, 0.008672256469726563, 0.008696831703186036, 0.00913100814819336, 0.009030655860900879, 0.009001983642578124, 0.009076736450195312, 0.009036800384521485, 0.009087039947509765, 0.009054143905639649, 0.009086976051330567, 0.009086976051330567, 0.00910848045349121, 0.009054207801818847, 0.009040896415710448, 0.009134143829345702, 0.008992704391479492, 0.009067520141601563, 0.009042943954467773, 0.009048064231872559, 0.009047039985656738, 0.009077759742736816, 0.009030655860900879, 0.008999936103820801, 0.009085951805114746, 0.009034751892089844, 0.009071616172790528, 0.008766464233398438, 0.0088985595703125, 0.009059328079223633, 0.009082880020141602, 0.009005056381225587, 0.009039936065673828, 0.00910431957244873, 0.00909823989868164, 0.009066495895385742, 0.009051136016845703, 0.009062399864196777, 0.009053183555603026, 0.009051136016845703, 0.009088000297546387, 0.00912179183959961, 0.009077759742736816, 0.009057279586791991, 0.008896512031555176, 0.008928256034851074, 0.008919039726257324, 0.008863743782043456, 0.009010175704956054, 0.009156607627868652, 0.009091072082519532, 0.009059328079223633, 0.009088000297546387, 0.009117695808410644, 0.009035776138305664, 0.009075712203979493, 0.008973312377929688, 0.00905833625793457, 0.009016287803649903, 0.009077759742736816, 0.009022463798522949, 0.009109503746032714, 0.009056256294250489, 0.009022463798522949, 0.00909721565246582, 
0.009030655860900879, 0.009053183555603026, 0.009040896415710448, 0.009111552238464356, 0.009033791542053223, 0.009095104217529296, 0.009010175704956054, 0.009036800384521485, 0.00901734447479248, 0.009234432220458985, 0.009037823677062988, 0.009076736450195312, 0.009051136016845703, 0.0090316801071167, 0.009103360176086426, 0.009085951805114746, 0.009082880020141602, 0.009064448356628419, 0.00909004783630371, 0.008896512031555176, 0.009088000297546387, 0.00903270435333252, 0.009076736450195312, 0.009026559829711914, 0.009020416259765626, 0.008457216262817382, 0.00872652816772461, 0.008725503921508788, 0.0087193603515625, 0.008713215827941894, 0.008668160438537598, 0.00870195198059082, 0.00871014404296875, 0.008673279762268067, 0.00871014404296875, 0.008766464233398438, 0.008682496070861816, 0.008692735671997071, 0.00871116828918457, 0.008691712379455567, 0.0087326717376709, 0.008686592102050781, 0.008681471824645997, 0.008767487525939942, 0.00889958381652832, 0.008607744216918945, 0.008697855949401855, 0.008699904441833496, 0.008668160438537598, 0.008656895637512207, 0.008676351547241211, 0.008678400039672851, 0.00870911979675293, 0.008691712379455567, 0.008642560005187988, 0.00868556785583496, 0.008656895637512207, 0.008671232223510742, 0.008770560264587402, 0.008646656036376953, 0.008653823852539062, 0.008736767768859864, 0.008653823852539062, 0.008695808410644532, 0.00870297622680664, 0.008721407890319824, 0.008667136192321777, 0.008684543609619141, 0.008665087699890137, 0.008714240074157715, 0.008699904441833496, 0.008666111946105956, 0.008651776313781738, 0.008697855949401855, 0.008557567596435547, 0.00869375991821289, 0.00868556785583496, 0.008689663887023925, 0.008754176139831543, 0.008681535720825195, 0.008690624237060547, 0.008743935585021973, 0.0087326717376709, 0.0087193603515625, 0.008744959831237792, 0.008725503921508788, 0.008758272171020508, 0.008734720230102539, 0.008459263801574708, 0.008714240074157715, 0.008695808410644532, 0.008698880195617676, 0.008692735671997071, 0.00868556785583496, 0.00869478416442871, 0.008680447578430176, 0.0087193603515625, 0.010169343948364258, 0.009315327644348144, 0.009075712203979493, 0.009033727645874023, 0.0090316801071167, 0.00899891185760498, 0.009247743606567382, 0.009046015739440917, 0.009041919708251953, 0.009022463798522949, 0.009041919708251953, 0.009048128128051757, 0.009078720092773437, 0.00903987216949463, 0.009034751892089844, 0.008996864318847657, 0.009026559829711914, 0.00903987216949463, 0.009013248443603515, 0.009034815788269044, 0.009152447700500489, 0.009244671821594238, 0.009112575531005859, 0.009076736450195312, 0.008734720230102539, 0.00870911979675293, 0.008707072257995606, 0.008679424285888672, 0.008714271545410155, 0.008778719902038575, 0.00870195198059082, 0.008566783905029298, 0.008696831703186036, 0.008695808410644532, 0.008747008323669434, 0.008690688133239746, 0.008652799606323243, 0.008682527542114259, 0.008744928359985351, 0.008672256469726563, 0.008737792015075683, 0.008665087699890137, 0.008674304008483886, 0.008669183731079102, 0.00870297622680664, 0.008672256469726563, 0.008707072257995606, 0.008725503921508788, 0.008683520317077637, 0.008729599952697753, 0.008679424285888672, 0.008692735671997071, 0.008721407890319824, 0.00871116828918457, 0.008481792449951172, 0.00869375991821289, 0.009118720054626465, 0.009124992370605468, 0.0090632963180542, 0.00902348804473877, 0.009076736450195312, 0.008961024284362793, 0.00871014404296875, 0.008700927734375, 0.00870809555053711, 0.00872755241394043, 
0.008677375793457032, 0.008697855949401855, 0.0087326717376709, 0.008709152221679688, 0.008684512138366698, 0.008671232223510742, 0.008692735671997071, 0.008673279762268067, 0.008756223678588868, 0.008625151634216309, 0.00871833610534668, 0.008652799606323243, 0.008691712379455567, 0.008673279762268067, 0.0087326717376709, 0.008671232223510742, 0.009066495895385742, 0.009018367767333984, 0.009075712203979493, 0.00900607967376709, 0.008979455947875976, 0.009042943954467773, 0.009048064231872559, 0.009013248443603515, 0.009003007888793945, 0.008991744041442871, 0.009060352325439454, 0.00903270435333252, 0.00902451229095459, 0.00903270435333252, 0.009070624351501464, 0.009051103591918946, 0.00913920021057129, 0.009116671562194823, 0.00903987216949463, 0.009013248443603515, 0.00901529598236084, 0.009052224159240722, 0.009065407752990723, 0.009082880020141602, 0.009047039985656738, 0.009052160263061524, 0.009048064231872559, 0.009025535583496093, 0.009077759742736816, 0.009058303833007812, 0.009058303833007812, 0.00910540771484375, 0.009003007888793945, 0.009093119621276855, 0.009056256294250489, 0.008438783645629883, 0.008774656295776367, 0.00868556785583496, 0.008698880195617676, 0.008695808410644532, 0.008721407890319824, 0.008673279762268067, 0.008672256469726563, 0.008706048011779785, 0.00870809555053711, 0.008688703536987304, 0.008743871688842773, 0.008756223678588868, 0.008720383644104004, 0.00870195198059082, 0.008700927734375, 0.00870911979675293, 0.008680447578430176, 0.008671232223510742, 0.008787967681884766, 0.008860671997070312, 0.008713215827941894, 0.00871628761291504, 0.008613887786865235, 0.008634367942810058, 0.00875216007232666, 0.008681440353393554, 0.008713215827941894, 0.008698880195617676, 0.00871116828918457, 0.008690688133239746, 0.008687616348266602, 0.008691712379455567, 0.00873574447631836, 0.008690688133239746, 0.008695808410644532, 0.008659968376159668, 0.008782848358154297, 0.008700960159301758, 0.008711135864257813, 0.008681471824645997, 0.00872652816772461, 0.008697855949401855, 0.008695808410644532, 0.008686623573303222, 0.008766495704650878, 0.008672191619873046, 0.008737792015075683, 0.009657343864440919, 0.009433088302612304, 0.009406463623046875, 0.00971571159362793, 0.00935321617126465, 0.009082880020141602, 0.009233407974243164, 0.009093119621276855, 0.009069567680358886, 0.009046015739440917, 0.009191424369812011, 0.009084927558898925, 0.009047039985656738, 0.009053183555603026, 0.009084927558898925, 0.008468480110168456, 0.00870911979675293, 0.008672287940979004, 0.008721376419067383, 0.009027584075927735, 0.009041919708251953, 0.009116671562194823, 0.009056256294250489, 0.009025535583496093, 0.009062399864196777, 0.009061375617980956, 0.008962047576904298, 0.008995840072631836, 0.009062399864196777, 0.00889241600036621, 0.009183232307434081, 0.009027584075927735, 0.0090316801071167, 0.009000960350036622, 0.009018367767333984, 0.009028608322143555, 0.009188384056091308, 0.009044960021972656, 0.009056256294250489, 0.008995840072631836, 0.009082880020141602, 0.009074687957763672, 0.00899071979522705, 0.008992768287658692, 0.009109503746032714, 0.008869888305664063, 0.008795136451721192, 0.00910848045349121, 0.009033727645874023, 0.009012224197387696, 0.009026559829711914, 0.009034751892089844, 0.009072640419006347, 0.00899788761138916, 0.009033727645874023, 0.009009152412414552, 0.009050111770629882, 0.008886272430419923, 0.009093119621276855, 0.009137151718139648, 0.009048064231872559, 0.008976384162902832, 0.00903987216949463, 0.009084927558898925, 
0.009029631614685058, 0.009088000297546387, 0.009009152412414552, 0.009079808235168458, 0.009018367767333984, 0.00912281608581543, 0.00901734447479248, 0.009082880020141602, 0.009116671562194823, 0.009240639686584473, 0.009273280143737793, 0.00910547161102295, 0.009064384460449219, 0.009088000297546387, 0.008429568290710449, 0.008659968376159668, 0.008689663887023925, 0.008749055862426757, 0.008857600212097168, 0.00869478416442871, 0.008683520317077637, 0.008664064407348633, 0.008721407890319824, 0.00869478416442871, 0.008757247924804687, 0.008766464233398438, 0.008645631790161134, 0.008698880195617676, 0.008720383644104004, 0.008676351547241211, 0.008706080436706542, 0.008646623611450195, 0.008683520317077637, 0.008692735671997071, 0.00870809555053711, 0.00871628761291504, 0.008717311859130859, 0.008695808410644532, 0.008681504249572754, 0.008701919555664062, 0.008677375793457032, 0.008730624198913574, 0.008668160438537598, 0.008731648445129395, 0.008680447578430176, 0.009151488304138184, 0.009117695808410644, 0.008975359916687011, 0.009019392013549805, 0.009095168113708496, 0.009095168113708496, 0.009070591926574707, 0.009083904266357423, 0.009044992446899413, 0.009034751892089844, 0.010358783721923828, 0.012613632202148438, 0.009104384422302245, 0.00908902359008789, 0.009127936363220214, 0.009083904266357423, 0.00910848045349121, 0.009103360176086426, 0.009037823677062988, 0.009019392013549805, 0.009059328079223633, 0.009046015739440917, 0.009088000297546387, 0.0090316801071167, 0.009010175704956054, 0.009141247749328613, 0.008698975563049317, 0.008760224342346192, 0.008687616348266602, 0.008715264320373535, 0.008578047752380372, 0.008681471824645997, 0.008460288047790527, 0.00871833610534668, 0.00871628761291504, 0.00870809555053711, 0.008674304008483886, 0.008707072257995606, 0.00872447967529297, 0.008744959831237792, 0.00870195198059082, 0.008686592102050781, 0.00871833610534668, 0.008683520317077637, 0.008761343955993652, 0.008688639640808106, 0.008785920143127441, 0.008777759552001952, 0.008703968048095704, 0.008678400039672851, 0.00871833610534668, 0.008762368202209473, 0.008683520317077637, 0.008648703575134278, 0.00869379234313965, 0.008707039833068848, 0.008624128341674805, 0.008653823852539062, 0.008740863800048827, 0.008674304008483886, 0.008684543609619141, 0.008683520317077637, 0.008723456382751465, 0.008657919883728027, 0.008704000473022461, 0.008673279762268067, 0.008673279762268067, 0.008674304008483886, 0.008679424285888672, 0.008674304008483886, 0.008739839553833008, 0.008854528427124024, 0.01030246353149414, 0.009282624244689942, 0.009136063575744629, 0.009104384422302245, 0.009034751892089844, 0.009049087524414063, 0.009029631614685058, 0.009026559829711914, 0.0090316801071167, 0.009004032135009766, 0.009020416259765626, 0.009040896415710448, 0.009020416259765626, 0.008948736190795899, 0.009035776138305664, 0.008996864318847657, 0.009088000297546387, 0.00899891185760498, 0.009044992446899413, 0.00900710391998291, 0.00902560043334961, 0.009087936401367187, 0.009095168113708496]",tokens/s,112.76200295482688,,,1,64,1,,, 
-4bit-awq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,EleutherAI/gpt-j-6b,EleutherAI/gpt-j-6b,cuda,0,42,,,True,,,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,gptj,MB,3839.770624,5463.605248,0.0,4833.93536,4546.659328,s,1,9.553228515625,9.553228515625,0.0,9.553228515625,9.553228515625,9.553228515625,9.553228515625,[9.553228515625],,kWh,3.2837779534719406e-05,1.7981711500260706e-05,6.295699481001504e-05,0.00011377648584499515,,MB,2117.71392,5499.256832,0.0,4852.809728,4095.21408,s,10,10.624811401367186,1.0624811401367187,0.0002482317813519991,1.062448974609375,1.0627955200195311,1.062807000732422,1.0628161853027345,"[1.0624339599609376, 1.0619945068359375, 1.0624639892578125, 1.06242822265625, 1.06229052734375, 1.0622933349609376, 1.06278173828125, 1.06279296875, 1.062513671875, 1.0628184814453125]",tokens/s,240.94545336311407,kWh,1.2552405042777777e-05,6.878212019287394e-06,7.141052935060177e-05,9.084114641266694e-05,tokens/kWh,2818106.222890019,MB,2130.259968,5501.353984,0.0,4852.809728,4197.764096,s,10,19.669857177734375,1.9669857177734371,0.009482922557337323,1.9642777709960937,1.9774749999999999,1.9822195556640625,1.9860152001953124,"[1.968271728515625, 1.9626697998046876, 1.964132080078125, 1.963879150390625, 1.9531826171875, 1.9557996826171875, 1.9644234619140626, 1.9741138916015626, 1.986964111328125, 1.976420654296875]",tokens/s,32.02870230868474,kWh,2.346265063881902e-05,1.2859384875086486e-05,6.301977263799552e-05,9.934180815190103e-05,tokens/kWh,634174.0821112125,,s,630,19.66747543144228,0.031218214970543275,0.00041704740300292463,0.03105894374847412,0.0317863935470581,0.03198735332489014,0.0327709991836548,"[0.03149004745483398, 0.031031328201293944, 0.031108064651489256, 0.031100927352905275, 0.031098880767822266, 0.031221824645996092, 0.03109779167175293, 0.03128422355651855, 0.031105024337768555, 0.031121408462524414, 0.031063039779663085, 0.0310118408203125, 0.03101081657409668, 0.03131596755981445, 0.03112550354003906, 0.03143987274169922, 0.03103539276123047, 0.03102720069885254, 0.031128576278686523, 0.03103027153015137, 0.031086591720581053, 0.031076351165771485, 0.03178291130065918, 0.032884735107421875, 0.03219968032836914, 0.03191398429870605, 0.031093759536743162, 0.031080448150634765, 0.031064064025878906, 0.03102617645263672, 0.031067136764526368, 0.03204198455810547, 0.03121766471862793, 0.03101081657409668, 0.03120742416381836, 0.031065088272094726, 0.03105075263977051, 0.031074304580688477, 0.031076351165771485, 0.03138047981262207, 0.031046655654907225, 0.031068159103393556, 0.031079423904418944, 0.031047679901123046, 0.031055871963500976, 0.03180748748779297, 0.031109119415283205, 0.031611904144287106, 0.031156223297119142, 0.03187711906433106, 0.03177471923828125, 0.031112192153930664, 0.031093759536743162, 0.0310118408203125, 0.03117158317565918, 0.03172863960266113, 0.030939136505126953, 0.03100160026550293, 0.030991359710693358, 0.03097702407836914, 0.031038463592529295, 0.031055871963500976, 0.03099443244934082, 0.031927295684814457, 0.032481281280517575, 
0.03144601631164551, 0.031239168167114258, 0.03095961570739746, 0.030867456436157226, 0.030842880249023437, 0.030991359710693358, 0.031545343399047854, 0.03100876808166504, 0.030913536071777343, 0.030940160751342774, 0.031699968338012696, 0.030852127075195312, 0.030911455154418944, 0.031297536849975584, 0.030827520370483398, 0.030891008377075195, 0.030954496383666992, 0.030891008377075195, 0.030897151947021483, 0.030904319763183592, 0.030883840560913086, 0.030858240127563476, 0.03098419189453125, 0.03099852752685547, 0.030926847457885744, 0.030911487579345705, 0.03095347213745117, 0.030917631149291993, 0.030955520629882813, 0.03093708801269531, 0.030901248931884766, 0.031871999740600586, 0.031920127868652344, 0.03091967964172363, 0.030860288619995117, 0.030872575759887694, 0.030924800872802735, 0.03093008041381836, 0.031104864120483397, 0.031082496643066407, 0.03092889595031738, 0.03094118309020996, 0.03163955116271973, 0.031185920715332032, 0.030962688446044922, 0.031441919326782225, 0.030689279556274415, 0.030685216903686523, 0.030939104080200196, 0.03094118309020996, 0.031075328826904298, 0.03062272071838379, 0.03206246566772461, 0.031321088790893556, 0.030934015274047853, 0.030900224685668946, 0.03054489517211914, 0.030872575759887694, 0.03212595367431641, 0.03363430404663086, 0.031955968856811526, 0.031222784042358398, 0.030862335205078126, 0.03135078430175781, 0.03202969741821289, 0.030988288879394532, 0.030923776626586914, 0.03099443244934082, 0.031063039779663085, 0.03159552001953125, 0.031285247802734374, 0.030929920196533203, 0.031270912170410156, 0.031039487838745116, 0.031406080245971676, 0.031091712951660157, 0.031075328826904298, 0.03103436851501465, 0.03098624038696289, 0.03122790336608887, 0.031102975845336913, 0.03101900863647461, 0.0315043830871582, 0.031091712951660157, 0.03096883201599121, 0.03095347213745117, 0.030912511825561522, 0.0309749755859375, 0.031066112518310547, 0.03095142364501953, 0.030867456436157226, 0.031147008895874025, 0.03098419189453125, 0.03096985626220703, 0.03098214340209961, 0.03102003288269043, 0.03170816040039062, 0.03168767929077149, 0.031044607162475587, 0.031083520889282228, 0.030921728134155273, 0.03139993667602539, 0.03115110397338867, 0.031072256088256835, 0.03127910423278808, 0.031308799743652346, 0.03103436851501465, 0.0313702392578125, 0.03103539276123047, 0.030917631149291993, 0.03101900863647461, 0.030929920196533203, 0.030924800872802735, 0.030910463333129884, 0.03096063995361328, 0.031021055221557618, 0.030905344009399413, 0.03096985626220703, 0.030926847457885744, 0.03231129455566406, 0.03323392105102539, 0.03124019241333008, 0.030891008377075195, 0.0317255687713623, 0.03121971130371094, 0.030887935638427736, 0.030884864807128907, 0.030913536071777343, 0.03096985626220703, 0.03169177627563476, 0.031388671875, 0.031177728652954102, 0.031036415100097657, 0.031526912689208986, 0.03201638412475586, 0.031425535202026365, 0.031529983520507815, 0.0309749755859375, 0.030946304321289062, 0.030962688446044922, 0.030916608810424805, 0.030883840560913086, 0.03159654426574707, 0.031006719589233397, 0.03149004745483398, 0.030962688446044922, 0.03097804832458496, 0.03093708801269531, 0.03095347213745117, 0.030946304321289062, 0.031101951599121092, 0.030980096817016602, 0.030905344009399413, 0.03097599983215332, 0.03099443244934082, 0.030885887145996094, 0.031078399658203124, 0.03094425582885742, 0.030911487579345705, 0.030972959518432618, 0.03107529640197754, 0.030920703887939452, 0.03182387161254883, 0.031441919326782225, 0.030948352813720704, 
0.03141119956970215, 0.03143680000305176, 0.031160320281982422, 0.03179929542541504, 0.031081472396850586, 0.03091967964172363, 0.03096883201599121, 0.03099545669555664, 0.03094425582885742, 0.030916608810424805, 0.03100979232788086, 0.03187302398681641, 0.03233587265014649, 0.03198873519897461, 0.031888383865356446, 0.031006719589233397, 0.030988288879394532, 0.030913536071777343, 0.030905344009399413, 0.03099750328063965, 0.03099238395690918, 0.030920703887939452, 0.031246335983276367, 0.03082137680053711, 0.030910463333129884, 0.030903295516967775, 0.03100569534301758, 0.030929920196533203, 0.031115264892578126, 0.03160166358947754, 0.03138764762878418, 0.031014911651611327, 0.030954496383666992, 0.031258655548095704, 0.031523807525634766, 0.03075379180908203, 0.03082342338562012, 0.030877695083618165, 0.03097292709350586, 0.030906368255615234, 0.031079423904418944, 0.030899200439453125, 0.03101286315917969, 0.031034463882446288, 0.030914464950561524, 0.030907392501831055, 0.030669824600219726, 0.030527488708496094, 0.03060736083984375, 0.030678016662597656, 0.030929920196533203, 0.030555135726928712, 0.030923776626586914, 0.03241984176635742, 0.03153919982910156, 0.03115519905090332, 0.03094118309020996, 0.030857215881347655, 0.03057254409790039, 0.030686208724975586, 0.03094528007507324, 0.03100467109680176, 0.031456256866455076, 0.031531007766723636, 0.03101286315917969, 0.030938112258911132, 0.03179007911682129, 0.030864383697509764, 0.03074662399291992, 0.03058176040649414, 0.03064729690551758, 0.03081113624572754, 0.030939136505126953, 0.030993408203125, 0.030999551773071288, 0.030913536071777343, 0.031052799224853517, 0.031079423904418944, 0.03101900863647461, 0.031112192153930664, 0.03099545669555664, 0.030867456436157226, 0.030856191635131838, 0.030909439086914063, 0.03097395133972168, 0.031237119674682616, 0.030902271270751954, 0.03135385513305664, 0.03097599983215332, 0.030931968688964844, 0.030884864807128907, 0.03096780776977539, 0.031056896209716797, 0.03098419189453125, 0.03161702346801758, 0.03172249603271484, 0.03156172752380371, 0.031455232620239255, 0.030847999572753908, 0.030940160751342774, 0.030843904495239258, 0.031071231842041015, 0.03081625556945801, 0.031235071182250978, 0.030938112258911132, 0.030956544876098634, 0.031037439346313478, 0.031107072830200196, 0.030845951080322266, 0.030943231582641603, 0.03098111915588379, 0.030851072311401367, 0.031032320022583007, 0.03096575927734375, 0.030932992935180665, 0.031080448150634765, 0.03094425582885742, 0.030866432189941406, 0.03098521614074707, 0.030962688446044922, 0.030897151947021483, 0.030950399398803712, 0.030958591461181642, 0.030916608810424805, 0.030948352813720704, 0.030925823211669923, 0.031498239517211916, 0.0310118408203125, 0.03160166358947754, 0.031542272567749025, 0.030955520629882813, 0.031044607162475587, 0.031024127960205077, 0.030849023818969725, 0.03094528007507324, 0.030947328567504883, 0.031060991287231447, 0.030887935638427736, 0.03100569534301758, 0.030993408203125, 0.031007743835449218, 0.03095756721496582, 0.031084543228149415, 0.03097804832458496, 0.030882816314697265, 0.03096575927734375, 0.0310118408203125, 0.030888959884643553, 0.03130876731872559, 0.030876672744750977, 0.030849023818969725, 0.03077631950378418, 0.030908416748046875, 0.030894079208374024, 0.03102617645263672, 0.030918655395507814, 0.031014911651611327, 0.030912511825561522, 0.03143270492553711, 0.031036415100097657, 0.03175014305114746, 0.03152383995056152, 0.03120639991760254, 0.03098521614074707, 
0.030910463333129884, 0.030874624252319335, 0.03101286315917969, 0.03096780776977539, 0.031069183349609376, 0.031923200607299806, 0.0315228157043457, 0.03135385513305664, 0.031086591720581053, 0.031355903625488284, 0.031038463592529295, 0.030988288879394532, 0.030891008377075195, 0.030810111999511718, 0.03098419189453125, 0.03095244789123535, 0.031528959274291994, 0.031033344268798828, 0.03098624038696289, 0.031119359970092773, 0.03083776092529297, 0.030863359451293947, 0.031088640213012695, 0.03109174346923828, 0.0311234245300293, 0.0314019832611084, 0.03129343986511231, 0.03134982490539551, 0.0311582088470459, 0.031139839172363282, 0.03189555168151856, 0.0314769287109375, 0.031021888732910157, 0.03100876808166504, 0.03165798377990723, 0.031226879119873048, 0.031229951858520507, 0.0311592960357666, 0.031048704147338867, 0.031062015533447264, 0.032322559356689456, 0.032215038299560544, 0.03136614418029785, 0.031129600524902344, 0.031080448150634765, 0.0310435848236084, 0.031074304580688477, 0.03138047981262207, 0.03100057601928711, 0.031054847717285155, 0.030938112258911132, 0.03099443244934082, 0.03114291191101074, 0.031308799743652346, 0.03099443244934082, 0.03120128059387207, 0.030955520629882813, 0.031460351943969726, 0.03129043197631836, 0.031227840423583984, 0.03192940711975098, 0.03194771194458008, 0.032247806549072264, 0.031579135894775394, 0.03239324951171875, 0.03145110321044922, 0.031056896209716797, 0.03198259162902832, 0.03130060768127441, 0.031512575149536134, 0.030958591461181642, 0.03158527946472168, 0.031355903625488284, 0.03137945556640625, 0.03096575927734375, 0.031046655654907225, 0.03137843132019043, 0.03125760078430176, 0.031287296295166016, 0.031748096466064454, 0.031069183349609376, 0.031039487838745116, 0.031290367126464845, 0.030915584564208985, 0.03094425582885742, 0.0314521598815918, 0.031047679901123046, 0.031128576278686523, 0.031470592498779294, 0.031079423904418944, 0.030909439086914063, 0.03120128059387207, 0.031604736328125, 0.031080448150634765, 0.031115264892578126, 0.03219660949707031, 0.03153510475158691, 0.031104000091552734, 0.031111167907714843, 0.03160678482055664, 0.031222816467285155, 0.03125859260559082, 0.030920703887939452, 0.03163852882385254, 0.031056896209716797, 0.030991359710693358, 0.031006719589233397, 0.032043006896972655, 0.03218227386474609, 0.031337472915649416, 0.03143270492553711, 0.031265792846679685, 0.03116748809814453, 0.03120128059387207, 0.031213567733764647, 0.03139379119873047, 0.03180441665649414, 0.03172352027893066, 0.031526912689208986, 0.03154841613769531, 0.031749120712280275, 0.031715328216552735, 0.031529983520507815, 0.03160678482055664, 0.031308799743652346, 0.031848447799682614, 0.031940607070922854, 0.0321003532409668, 0.03227545547485351, 0.03163955116271973, 0.03158220863342285, 0.031648767471313476, 0.031714303970336914, 0.031865856170654294, 0.031749120712280275, 0.03177881622314453, 0.03292671966552734, 0.03186380767822266, 0.0316753921508789, 0.03169177627563476, 0.03158527946472168, 0.031748096466064454, 0.03129855918884277, 0.03128934478759766, 0.03189043235778809, 0.03152076721191406, 0.03134976005554199, 0.03203481674194336, 0.03175628852844238, 0.03155558395385742, 0.03189452743530274, 0.031900672912597655, 0.03158118438720703, 0.03127603149414063, 0.031307775497436525, 0.031068159103393556, 0.03140812873840332, 0.03194572830200195, 0.031101951599121092, 0.031470592498779294, 0.03234611129760742, 0.03099033546447754, 0.030857215881347655, 0.030958591461181642, 0.030886911392211915, 
0.03139686393737793, 0.03101900863647461, 0.03099033546447754, 0.030925823211669923, 0.03169484710693359, 0.030962688446044922, 0.03141119956970215, 0.03082956886291504, 0.03178598403930664, 0.03102822494506836, 0.030918655395507814, 0.030862335205078126, 0.031661056518554685, 0.03196723175048828, 0.031699968338012696, 0.03198566436767578, 0.031768575668334964, 0.03141836738586426, 0.031821823120117186, 0.03096063995361328, 0.031014911651611327, 0.030950399398803712, 0.031681535720825195, 0.031547391891479495, 0.030834688186645507, 0.03099238395690918, 0.031663103103637694, 0.031473663330078124, 0.03141734313964844, 0.030923776626586914, 0.033152000427246094, 0.03314688110351562, 0.03222016143798828, 0.03325439834594727, 0.03160883140563965, 0.03186073684692383, 0.03159552001953125, 0.03095142364501953, 0.031153152465820313, 0.03118796730041504, 0.031139839172363282, 0.031122432708740235, 0.031038463592529295, 0.030938112258911132, 0.031252479553222655, 0.031263744354248044, 0.030938112258911132, 0.0319109115600586, 0.030958591461181642, 0.031122432708740235, 0.03119206428527832, 0.03113369560241699, 0.032492542266845705, 0.032010238647460935, 0.031509504318237305, 0.031079423904418944, 0.030996480941772462, 0.03095961570739746, 0.03102617645263672, 0.031029247283935548, 0.030900224685668946, 0.03094528007507324, 0.031081472396850586, 0.031065088272094726, 0.03098111915588379, 0.030676992416381835, 0.03102822494506836, 0.031015935897827147, 0.031076351165771485, 0.030879776000976564, 0.030922719955444336]",tokens/s,32.03258100899026,,,1,64,1,,, -4bit-awq-exllama-v1-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,stabilityai/stablelm-2-12b,stabilityai/stablelm-2-12b,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - self.load_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3907, in from_pretrained - hf_quantizer.postprocess_model(model) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/base.py"", line 195, in postprocess_model - return self._process_model_after_weight_loading(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/quantizer_awq.py"", line 107, in _process_model_after_weight_loading - model = post_init_awq_exllama_modules(model, self.quantization_config.exllama_config) - File ""/usr/local/lib/python3.10/dist-packages/transformers/integrations/awq.py"", line 462, in post_init_awq_exllama_modules - model = exllama_post_init(model) - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllama.py"", line 144, in exllama_post_init - submodule.post_init() - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllama.py"", line 77, in post_init - self.q4 = exl_ext.make_q4( -NameError: name 'exl_ext' is not defined - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,1,64,1,,, -4bit-awq-exllama-v1-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,google/gemma-2b,google/gemma-2b,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 304, in hf_raise_for_status - response.raise_for_status() - File ""/usr/local/lib/python3.10/dist-packages/requests/models.py"", line 1024, in raise_for_status - raise HTTPError(http_error_msg, response=self) -requests.exceptions.HTTPError: 403 Client Error: Forbidden for url: https://huggingface.co/google/gemma-2b/resolve/main/config.json - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1722, in _get_metadata_or_catch_error - metadata = get_hf_file_metadata(url=url, proxies=proxies, timeout=etag_timeout, headers=headers) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1645, in get_hf_file_metadata - r = _request_wrapper( - File 
""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 372, in _request_wrapper - response = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 396, in _request_wrapper - hf_raise_for_status(response) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 367, in hf_raise_for_status - raise HfHubHTTPError(message, response=response) from e -huggingface_hub.utils._errors.HfHubHTTPError: (Request ID: Root=1-669480ff-6c65b85b439f9c476f1d69e8;83ed4b0a-083f-438d-916d-9a7838613487) - -403 Forbidden: Please enable access to public gated repositories in your fine-grained token settings to view this repository.. -Cannot access content at: https://huggingface.co/google/gemma-2b/resolve/main/config.json. -If you are trying to create or update content,make sure you have a token with the `write` role. - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1221, in hf_hub_download - return _hf_hub_download_to_cache_dir( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1325, in _hf_hub_download_to_cache_dir - _raise_on_head_call_error(head_call_error, force_download, local_files_only) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1826, in _raise_on_head_call_error - raise LocalEntryNotFoundError( -huggingface_hub.utils._errors.LocalEntryNotFoundError: An error happened while trying to locate the file on the Hub and we cannot find the requested files in the local cache. Please check your connection and try again or make sure your Internet connection is on. 
- -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 445, in cached_file - raise EnvironmentError( -OSError: We couldn't connect to 'https://huggingface.co' to load this file, couldn't find it in the cached files and it looks like google/gemma-2b is not the path to a directory containing a file named config.json. -Checkout your internet connection or see how to run the library in offline mode at 'https://huggingface.co/docs/transformers/installation#offline-mode'. 
- -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,1,64,1,,, -4bit-awq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,EleutherAI/polyglot-ko-12.8b,EleutherAI/polyglot-ko-12.8b,cuda,0,42,,,True,,,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,gpt_neox,MB,7421.386752,9691.46368,0.0,9061.793792,8463.626752,s,1,11.71444921875,11.71444921875,0.0,11.71444921875,11.71444921875,11.71444921875,11.71444921875,[11.71444921875],,kWh,5.7973470254863616e-05,3.174750187391831e-05,0.00011447898047201033,0.00020419995260079226,,MB,1803.93984,9708.240896,0.0,9061.793792,7991.22432,s,10,23.824510498046877,2.3824510498046876,0.00020037926386997176,2.38242919921875,2.3826262451171876,2.382775769042969,2.3828953881835937,"[2.382374267578125, 2.382155029296875, 2.38246728515625, 2.382593017578125, 2.38239111328125, 2.38292529296875, 2.382479736328125, 2.382517578125, 2.382231689453125, 2.38237548828125]",tokens/s,107.45236508468318,kWh,2.8129367959027405e-05,1.5415721646139327e-05,0.00015833340444439958,0.00020187849404956632,tokens/kWh,1268089.5070335995,MB,1816.154112,9710.338048,0.0,9061.793792,8265.583104,s,10,21.238251220703123,2.123825122070312,0.004956021277464406,2.1224349365234376,2.1303528564453122,2.1315880493164063,2.132576203613281,"[2.1169951171875, 2.1244716796875, 2.1193916015625, 2.123501220703125, 2.130078369140625, 2.129014892578125, 2.119614501953125, 2.12136865234375, 2.1328232421875, 2.120991943359375]",tokens/s,29.663459267581967,kWh,2.5864412252638495e-05,1.417595664538818e-05,9.336496358079984e-05,0.0001334053324788264,tokens/kWh,472244.99073152954,,s,630,21.235358753204356,0.033706918655879915,0.0004907431164797837,0.03352115249633789,0.03438704605102539,0.034763059616088866,0.03561154514312745,"[0.033890304565429685, 0.03346944046020508, 0.033530879974365234, 0.03332710266113281, 0.03345305633544922, 0.03440435028076172, 0.03421388626098633, 0.033549312591552735, 0.0334837760925293, 0.0335093765258789, 0.033462272644042966, 0.03342950439453125, 0.033396736145019534, 0.03350630569458008, 0.03391897583007813, 0.03500236892700195, 0.033511425018310545, 0.03356467056274414, 0.03407462310791016, 0.03381964874267578, 0.03337420654296875, 0.033527809143066405, 0.03338137435913086, 0.0334837760925293, 0.03338137435913086, 0.033476608276367184, 0.03367424011230469, 0.03358617782592774, 0.03347148895263672, 0.03354521560668945, 0.033459201812744144, 0.033568767547607424, 0.033446910858154294, 0.033355777740478515, 0.03364044952392578, 0.03485184097290039, 0.033607711791992186, 0.03348988723754883, 0.03354009628295898, 0.033430526733398434, 0.03340902328491211, 0.033459201812744144, 0.03342131042480469, 0.033498111724853515, 0.033576961517333984, 0.03395686340332031, 0.034044929504394535, 0.03342233657836914, 0.03323699188232422, 0.033546241760253906, 0.03339980697631836, 0.03344179153442383, 0.03342335891723633, 0.033492992401123044, 0.03338547134399414, 0.03340697479248047, 0.033498111724853515, 0.03349913787841797, 0.033463294982910154, 0.03353702545166016, 
0.03349708938598633, 0.033463294982910154, 0.0335810546875, 0.034016254425048825, 0.03393740844726562, 0.03379097747802735, 0.03397529602050781, 0.03418828964233398, 0.033516544342041016, 0.03341926574707031, 0.033576961517333984, 0.03361996841430664, 0.03357798385620117, 0.03349606323242187, 0.03350630569458008, 0.03373056030273437, 0.03383705520629883, 0.03354316711425781, 0.03377766418457031, 0.034759681701660154, 0.034753536224365236, 0.03386982345581055, 0.033459201812744144, 0.03356979370117188, 0.03354111862182617, 0.03343462371826172, 0.03351244735717773, 0.03337011337280273, 0.03352371215820313, 0.03357798385620117, 0.0335994873046875, 0.03345100784301758, 0.033565696716308595, 0.03351551818847656, 0.03350732803344727, 0.033500160217285156, 0.033470462799072266, 0.03393024063110352, 0.03380223846435547, 0.03350630569458008, 0.033547264099121094, 0.03347148895263672, 0.03348070526123047, 0.03353190231323242, 0.0334837760925293, 0.03343462371826172, 0.034533374786376955, 0.034852863311767575, 0.03381043243408203, 0.03359743881225586, 0.03360870361328125, 0.03350630569458008, 0.033511425018310545, 0.034219009399414066, 0.033560577392578124, 0.033568767547607424, 0.03359743881225586, 0.03345510482788086, 0.033516544342041016, 0.033396736145019534, 0.033516544342041016, 0.033484798431396484, 0.033452030181884765, 0.033465343475341795, 0.033737728118896484, 0.03604889678955078, 0.033838081359863284, 0.03360255813598633, 0.034369537353515625, 0.03403776168823242, 0.03357491302490234, 0.0335728645324707, 0.03359539031982422, 0.03351347351074219, 0.03356159973144531, 0.03352166366577149, 0.033448959350585936, 0.0335022087097168, 0.033331199645996096, 0.033532928466796875, 0.03376639938354492, 0.03529011154174805, 0.03431219100952149, 0.033882110595703126, 0.03357900619506836, 0.03346432113647461, 0.033501182556152344, 0.03344384002685547, 0.03350527954101563, 0.0343818244934082, 0.033849342346191406, 0.0335810546875, 0.03360153579711914, 0.0333834228515625, 0.03341209411621094, 0.0335175666809082, 0.03361894226074219, 0.03351039886474609, 0.03355750274658203, 0.033438720703125, 0.03377151870727539, 0.03413094329833984, 0.03345305633544922, 0.033452030181884765, 0.0335206413269043, 0.033454078674316406, 0.03345612716674805, 0.03364147186279297, 0.03341823959350586, 0.033519615173339845, 0.033650688171386715, 0.03365478515625, 0.033478656768798826, 0.033432575225830076, 0.03334860610961914, 0.03339059066772461, 0.033288192749023435, 0.0334202880859375, 0.03340288162231445, 0.03368755340576172, 0.03427020645141601, 0.033732608795166014, 0.03344179153442383, 0.03329228973388672, 0.03345305633544922, 0.033555454254150394, 0.03384320068359375, 0.03353190231323242, 0.03385036849975586, 0.03376128005981445, 0.03343155288696289, 0.03350630569458008, 0.033410049438476565, 0.033360897064208986, 0.033329151153564454, 0.03298611068725586, 0.03345612716674805, 0.03335372924804687, 0.033414142608642575, 0.03337932968139649, 0.03403776168823242, 0.0339681282043457, 0.033723392486572266, 0.033658878326416015, 0.03371929550170898, 0.033516544342041016, 0.03372851181030274, 0.03363123321533203, 0.0335022087097168, 0.0340316162109375, 0.03352883148193359, 0.033413120269775394, 0.03342540740966797, 0.03340800094604492, 0.033454078674316406, 0.033465343475341795, 0.03342950439453125, 0.03329228973388672, 0.033468414306640625, 0.033584129333496096, 0.03534643173217773, 0.03501567840576172, 0.03380633544921875, 0.03359231948852539, 0.03332710266113281, 0.033377281188964845, 0.033519615173339845, 
0.033588222503662106, 0.03347558212280274, 0.03404083251953125, 0.036155391693115234, 0.03503104019165039, 0.03382067108154297, 0.03366400146484375, 0.03366809463500976, 0.03352883148193359, 0.03386265563964844, 0.033570816040039066, 0.03361280059814453, 0.03347455978393555, 0.0335093765258789, 0.03345510482788086, 0.034520065307617184, 0.033898494720458985, 0.033546241760253906, 0.0346060791015625, 0.033508350372314456, 0.033312767028808594, 0.033532928466796875, 0.03340595245361328, 0.03342540740966797, 0.03368038558959961, 0.034148353576660156, 0.03385651016235352, 0.03346944046020508, 0.03353497695922852, 0.033481727600097655, 0.03522355270385742, 0.033544193267822264, 0.03367833709716797, 0.03358310317993164, 0.033562625885009766, 0.03413913726806641, 0.03402137756347656, 0.03342233657836914, 0.033767425537109375, 0.03333017730712891, 0.03367424011230469, 0.03439308929443359, 0.03369881439208984, 0.03354521560668945, 0.033721343994140625, 0.03376537704467773, 0.03343667221069336, 0.03383603286743164, 0.033508350372314456, 0.03382067108154297, 0.034525184631347655, 0.034062335968017575, 0.03362611389160156, 0.03326566314697266, 0.03360358428955078, 0.03436544036865234, 0.03401420974731445, 0.03371110534667969, 0.03347967910766601, 0.03365478515625, 0.03440332794189453, 0.03331379318237305, 0.03349913787841797, 0.0335206413269043, 0.03343667221069336, 0.033463294982910154, 0.03348582458496094, 0.033633281707763675, 0.03429683303833008, 0.03554611206054688, 0.035535873413085936, 0.03476582336425781, 0.033713153839111325, 0.033484798431396484, 0.033686527252197264, 0.03351039886474609, 0.033416191101074216, 0.03358924865722656, 0.03355136108398438, 0.033667072296142575, 0.033873920440673826, 0.03381350326538086, 0.03347148895263672, 0.033375232696533204, 0.033976318359375, 0.03346636962890625, 0.03381350326538086, 0.03402649688720703, 0.03377151870727539, 0.033363967895507815, 0.03339059066772461, 0.03346636962890625, 0.03342540740966797, 0.03343462371826172, 0.033372161865234375, 0.03356671905517578, 0.03366092681884766, 0.033544193267822264, 0.03356467056274414, 0.0347064323425293, 0.03366604614257813, 0.033600513458251956, 0.03469311904907227, 0.03506995010375977, 0.0335022087097168, 0.033416191101074216, 0.03346944046020508, 0.03505766296386719, 0.03488460922241211, 0.03381043243408203, 0.03337420654296875, 0.033964031219482424, 0.03438796615600586, 0.03343155288696289, 0.033478656768798826, 0.03348992156982422, 0.03341209411621094, 0.03337318420410156, 0.033549312591552735, 0.03342335891723633, 0.033532928466796875, 0.03342540740966797, 0.03341516876220703, 0.03356979370117188, 0.03345817565917969, 0.03339878463745117, 0.03362611389160156, 0.03344076919555664, 0.03478220748901367, 0.03406950378417969, 0.033732608795166014, 0.03412377548217774, 0.033974273681640625, 0.0335994873046875, 0.03344076919555664, 0.033484798431396484, 0.03339775848388672, 0.03445145416259766, 0.034320384979248046, 0.03365273666381836, 0.03343462371826172, 0.03385343933105469, 0.03351859283447266, 0.03345817565917969, 0.03345510482788086, 0.03343667221069336, 0.03340595245361328, 0.03467366409301758, 0.035846145629882815, 0.03436032104492188, 0.03408588790893555, 0.03391897583007813, 0.03341209411621094, 0.033323009490966796, 0.033448959350585936, 0.033524734497070316, 0.03334860610961914, 0.03333631896972656, 0.033914878845214845, 0.034301952362060545, 0.03369472122192383, 0.03359539031982422, 0.03346022415161133, 0.033454078674316406, 0.033328128814697267, 0.03342540740966797, 0.03341516876220703, 
0.03323699188232422, 0.033476608276367184, 0.033432575225830076, 0.03343462371826172, 0.03342745590209961, 0.033393665313720705, 0.03347558212280274, 0.03342131042480469, 0.03319910430908203, 0.03468185424804687, 0.034544639587402344, 0.03360358428955078, 0.033544193267822264, 0.03355852890014648, 0.0333568000793457, 0.03314176177978516, 0.033672191619873046, 0.0334202880859375, 0.035214336395263675, 0.03438694381713867, 0.033587200164794925, 0.033708030700683594, 0.03439513778686523, 0.03442892837524414, 0.03465420913696289, 0.03456512069702149, 0.03383705520629883, 0.03353395080566406, 0.03356979370117188, 0.03352883148193359, 0.03336703872680664, 0.033478656768798826, 0.03338854217529297, 0.033495040893554685, 0.03322367858886719, 0.03349401473999023, 0.03315097427368164, 0.033050624847412106, 0.03378073501586914, 0.03397119903564453, 0.03328307342529297, 0.033465343475341795, 0.033452030181884765, 0.03373056030273437, 0.03362201690673828, 0.033347583770751955, 0.03373056030273437, 0.03382271957397461, 0.03337932968139649, 0.03316633605957031, 0.033413120269775394, 0.033314815521240236, 0.03341926574707031, 0.033345535278320314, 0.03341516876220703, 0.033459201812744144, 0.03325747299194336, 0.03367833709716797, 0.033570816040039066, 0.03341107177734375, 0.03339571380615235, 0.033364990234375, 0.033495040893554685, 0.033501182556152344, 0.03340595245361328, 0.033468414306640625, 0.03519385528564453, 0.03604377746582031, 0.0348671989440918, 0.033508350372314456, 0.03424153518676758, 0.03419340896606445, 0.03369574356079102, 0.03361280059814453, 0.03392921447753906, 0.034283519744873044, 0.03348992156982422, 0.03338956832885742, 0.03332198333740234, 0.033414142608642575, 0.033426433563232424, 0.033306625366210936, 0.03383091354370117, 0.03459379196166992, 0.033737728118896484, 0.033446910858154294, 0.03331071853637695, 0.034560001373291016, 0.03427123260498047, 0.03362918472290039, 0.03375513458251953, 0.03349708938598633, 0.03362508773803711, 0.033410049438476565, 0.03339571380615235, 0.03331584167480469, 0.03338444900512695, 0.03341107177734375, 0.03341926574707031, 0.033410049438476565, 0.03355852890014648, 0.03283251190185547, 0.03346636962890625, 0.03388927841186523, 0.03369062423706055, 0.03410636901855469, 0.03393228912353516, 0.0333568000793457, 0.03334860610961914, 0.03373567962646484, 0.033860607147216795, 0.03645644760131836, 0.03485388946533203, 0.034751487731933595, 0.034543617248535156, 0.03346636962890625, 0.033724414825439454, 0.03351039886474609, 0.03465011215209961, 0.034514942169189454, 0.03330559921264648, 0.033500160217285156, 0.03407462310791016, 0.036350975036621096, 0.034813953399658204, 0.03440947341918945, 0.03537408065795898, 0.03376844787597656, 0.0346951675415039, 0.034783233642578126, 0.03373875045776367, 0.033478656768798826, 0.03343564987182617, 0.033498111724853515, 0.03439718246459961, 0.03400089645385742, 0.03347251129150391, 0.03339263916015625, 0.033393665313720705, 0.03349606323242187, 0.03348889541625977, 0.0334202880859375, 0.03333324813842774, 0.03367424011230469, 0.03353190231323242, 0.033463294982910154, 0.03340185546875, 0.03346636962890625, 0.0334837760925293, 0.03327897644042969, 0.03340800094604492, 0.03345817565917969, 0.033514495849609374, 0.03397119903564453, 0.03369062423706055, 0.03338649749755859, 0.033683456420898435, 0.03424358367919922, 0.033554431915283206, 0.033554431915283206, 0.03348992156982422, 0.033530879974365234, 0.03350527954101563, 0.033620990753173825, 0.03394355010986328, 0.03353702545166016, 0.03337625503540039, 
0.0334919662475586, 0.0335093765258789, 0.033511425018310545, 0.033391616821289063, 0.033416191101074216, 0.03353497695922852, 0.03381760025024414, 0.03370700836181641, 0.033576961517333984, 0.033323009490966796, 0.03344076919555664, 0.03427942276000977, 0.03369062423706055, 0.033585151672363284, 0.033448959350585936, 0.033535999298095705, 0.0333834228515625, 0.03343462371826172, 0.033393665313720705, 0.0335206413269043, 0.03355955123901367, 0.03357593536376953, 0.033396736145019534, 0.03339878463745117, 0.034915328979492184, 0.0340766716003418, 0.033478656768798826, 0.03333324813842774, 0.03337318420410156, 0.03336601638793945, 0.03334348678588867, 0.03329228973388672, 0.033454078674316406, 0.03333222579956055, 0.03340390396118164, 0.03339980697631836, 0.03338137435913086, 0.033445888519287106, 0.03342950439453125, 0.033203201293945314, 0.03337011337280273, 0.033587200164794925, 0.03419647979736328, 0.033653759002685545, 0.03342233657836914, 0.033516544342041016, 0.033413120269775394, 0.033306625366210936, 0.033538047790527346, 0.03340492630004883, 0.03339571380615235, 0.03386777496337891, 0.035345409393310545, 0.033576961517333984, 0.035140609741210936, 0.03563827133178711, 0.03458560180664062, 0.03351244735717773, 0.0336732177734375, 0.034411518096923825, 0.03331891250610351, 0.033410049438476565, 0.03337113571166992, 0.03344384002685547, 0.03404390335083008, 0.03380223846435547, 0.03333017730712891, 0.033492992401123044, 0.03367628860473633]",tokens/s,29.667499726367232,,,1,64,1,,, -4bit-awq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,Qwen/Qwen-72B,Qwen/Qwen-72B,cuda,0,42,,,True,,,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run - report = scenario.run(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run - self.run_model_loading_tracking(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking - backend.load() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load - self.load_transformers_model() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model - self.load_transformers_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in 
load_transformers_model_with_no_weights - self.load_transformers_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained - self.pretrained_model = self.automodel_loader.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 551, in from_pretrained - model_class = get_class_from_dynamic_module( - File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 502, in get_class_from_dynamic_module - final_module = get_cached_module_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 327, in get_cached_module_file - modules_needed = check_imports(resolved_module_file) - File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 182, in check_imports - raise ImportError( -ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. Run `pip install transformers_stream_generator` - -",qwen,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,1,64,1,,, -4bit-awq-exllama-v1-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,i,i,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 304, in hf_raise_for_status - response.raise_for_status() - File ""/usr/local/lib/python3.10/dist-packages/requests/models.py"", line 1024, in raise_for_status - raise HTTPError(http_error_msg, response=self) -requests.exceptions.HTTPError: 404 Client Error: Not Found for url: https://huggingface.co/i/resolve/main/config.json - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1221, in hf_hub_download - return _hf_hub_download_to_cache_dir( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1325, in _hf_hub_download_to_cache_dir - _raise_on_head_call_error(head_call_error, force_download, 
local_files_only) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1823, in _raise_on_head_call_error - raise head_call_error - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1722, in _get_metadata_or_catch_error - metadata = get_hf_file_metadata(url=url, proxies=proxies, timeout=etag_timeout, headers=headers) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1645, in get_hf_file_metadata - r = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 372, in _request_wrapper - response = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 396, in _request_wrapper - hf_raise_for_status(response) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status - raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-66949002-26fdf5626dc5efa91db5ec71;22bfa71f-82db-4115-acfb-e8f0138cbf69) - -Repository Not Found for url: https://huggingface.co/i/resolve/main/config.json. -Please make sure you specified the correct `repo_id` and `repo_type`. -If you are trying to access a private or gated repo, make sure you are authenticated. - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 425, in cached_file - raise EnvironmentError( -OSError: i is not a local folder and is not a valid model identifier listed on 'https://huggingface.co/models' -If this is a private repository, make sure to pass a token having permission to this repo either by logging in with `huggingface-cli login` or by passing `token=` - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,1,64,1,,, 
-4bit-awq-exllama-v1-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,stabilityai/stablelm-3b-4e1t,stabilityai/stablelm-3b-4e1t,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - self.load_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3907, in from_pretrained - hf_quantizer.postprocess_model(model) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/base.py"", line 195, in postprocess_model - return self._process_model_after_weight_loading(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/quantizer_awq.py"", line 107, in _process_model_after_weight_loading - model = post_init_awq_exllama_modules(model, self.quantization_config.exllama_config) - File ""/usr/local/lib/python3.10/dist-packages/transformers/integrations/awq.py"", line 462, in post_init_awq_exllama_modules - model = exllama_post_init(model) - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllama.py"", line 144, in exllama_post_init - submodule.post_init() - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllama.py"", line 77, in post_init - self.q4 = exl_ext.make_q4( -NameError: name 'exl_ext' is not defined - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,1,64,1,,, 
-4bit-awq-exllama-v1-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,tiiuae/falcon-rw-1b,tiiuae/falcon-rw-1b,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - self.load_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 559, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3907, in from_pretrained - hf_quantizer.postprocess_model(model) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/base.py"", line 195, in postprocess_model - return self._process_model_after_weight_loading(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/quantizer_awq.py"", line 107, in _process_model_after_weight_loading - model = post_init_awq_exllama_modules(model, self.quantization_config.exllama_config) - File ""/usr/local/lib/python3.10/dist-packages/transformers/integrations/awq.py"", line 462, in post_init_awq_exllama_modules - model = exllama_post_init(model) - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllama.py"", line 144, in exllama_post_init - submodule.post_init() - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllama.py"", line 77, in post_init - self.q4 = exl_ext.make_q4( -NameError: name 'exl_ext' is not defined - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,1,64,1,,, 
-4bit-awq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,Qwen/Qwen-14B,Qwen/Qwen-14B,cuda,0,42,,,True,,,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run - report = scenario.run(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run - self.run_model_loading_tracking(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking - backend.load() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load - self.load_transformers_model() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model - self.load_transformers_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights - self.load_transformers_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained - self.pretrained_model = self.automodel_loader.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 551, in from_pretrained - model_class = get_class_from_dynamic_module( - File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 502, in get_class_from_dynamic_module - final_module = get_cached_module_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 327, in get_cached_module_file - modules_needed = check_imports(resolved_module_file) - File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 182, in check_imports - raise ImportError( -ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. 
Run `pip install transformers_stream_generator` - -",qwen,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,1,64,1,,, -4bit-awq-exllama-v1-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,stabilityai/stablelm-2-1_6b,stabilityai/stablelm-2-1_6b,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - self.load_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3907, in from_pretrained - hf_quantizer.postprocess_model(model) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/base.py"", line 195, in postprocess_model - return self._process_model_after_weight_loading(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/quantizer_awq.py"", line 107, in _process_model_after_weight_loading - model = post_init_awq_exllama_modules(model, self.quantization_config.exllama_config) - File ""/usr/local/lib/python3.10/dist-packages/transformers/integrations/awq.py"", line 462, in post_init_awq_exllama_modules - model = exllama_post_init(model) - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllama.py"", line 144, in exllama_post_init - submodule.post_init() - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllama.py"", line 77, in post_init - self.q4 = exl_ext.make_q4( -NameError: name 'exl_ext' is not defined - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,1,64,1,,, 
-4bit-awq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,EleutherAI/pythia-6.7b,EleutherAI/pythia-6.7b,cuda,0,42,,,True,,,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,gpt_neox,MB,4173.66016,6019.350528,0.0,5389.68064,5000.446464,s,1,10.22592578125,10.22592578125,0.0,10.22592578125,10.22592578125,10.22592578125,10.22592578125,[10.22592578125],,kWh,3.887307934861484e-05,2.1289899686768474e-05,7.229644672596125e-05,0.00013245942576134456,,MB,1588.133888,6042.4192,0.0,5393.874944,4706.596864,s,10,11.987768066406248,1.198776806640625,0.00030645578340983003,1.1986395263671876,1.1990952026367188,1.199296746826172,1.1994579821777345,"[1.19888037109375, 1.1985648193359375, 1.1990504150390624, 1.1984163818359375, 1.1985748291015625, 1.199498291015625, 1.1989476318359376, 1.198670166015625, 1.19860888671875, 1.1985562744140625]",tokens/s,213.55101181628459,kWh,1.4161041328889169e-05,7.759861756403327e-06,8.294603857900151e-05,0.00010486694166429401,tokens/kWh,2441188.7668043347,MB,1629.544448,6042.4192,0.0,5393.874944,4876.091904,s,10,17.1221904296875,1.71221904296875,0.007299119267096573,1.711336669921875,1.722142041015625,1.7247170166015626,1.7267769970703126,"[1.7097828369140624, 1.72156982421875, 1.7063389892578125, 1.715531005859375, 1.7132410888671874, 1.707018310546875, 1.706662841796875, 1.701863037109375, 1.7128905029296875, 1.7272919921875]",tokens/s,36.79435774220029,kWh,2.0461371633889233e-05,1.1211823510599526e-05,6.044415946639958e-05,9.211735461088837e-05,tokens/kWh,683910.2172019314,,s,630,17.11993345642092,0.027174497549874443,0.0004996715815677546,0.02693734359741211,0.02789693374633789,0.028045158195495606,0.028697303848266607,"[0.02731827163696289, 0.027052032470703126, 0.02692915153503418, 0.026928127288818358, 0.027982847213745117, 0.027816959381103516, 0.02779955291748047, 0.02795929527282715, 0.02747494316101074, 0.02695680046081543, 0.026893312454223633, 0.026830848693847657, 0.02695680046081543, 0.026879999160766603, 0.026894336700439454, 0.02696294403076172, 0.02695577621459961, 0.026778623580932616, 0.026845184326171875, 0.026870784759521486, 0.02699673652648926, 0.027068416595458986, 0.026970111846923828, 0.02691379165649414, 0.026888191223144533, 0.026832895278930666, 0.026844160079956055, 0.02673971176147461, 0.02688204765319824, 0.027030527114868166, 0.027439104080200196, 0.02715545654296875, 0.02691276741027832, 0.026859519958496093, 0.02690355110168457, 0.02697932815551758, 0.02676019287109375, 0.026918912887573244, 0.026784767150878908, 0.02679910469055176, 0.026885120391845704, 0.02719539260864258, 0.027006975173950197, 0.02689740753173828, 0.02694655990600586, 0.02682368087768555, 0.02700595283508301, 0.027084800720214845, 0.02711756706237793, 0.02756505584716797, 0.028008447647094727, 0.02773708724975586, 0.02789580726623535, 0.027871231079101562, 0.02798899269104004, 0.028015615463256836, 0.028241920471191406, 0.027009023666381835, 0.026925056457519532, 0.02690559959411621, 0.02693017578125, 0.026829824447631836, 0.026909696578979493, 0.027580415725708008, 
0.02680012893676758, 0.02696703910827637, 0.026878976821899415, 0.026802175521850585, 0.02698240089416504, 0.02691276741027832, 0.027033599853515625, 0.026909696578979493, 0.026888191223144533, 0.026862592697143556, 0.02690559959411621, 0.026860544204711914, 0.027012096405029298, 0.026874879837036132, 0.027053056716918947, 0.026901504516601563, 0.026976255416870116, 0.026789888381958008, 0.027891712188720705, 0.028894208908081056, 0.028077056884765625, 0.027889663696289063, 0.02879795265197754, 0.028013568878173828, 0.02815488052368164, 0.027849727630615235, 0.027834367752075196, 0.026936319351196288, 0.026878976821899415, 0.02670694351196289, 0.02706329536437988, 0.026867712020874023, 0.026926080703735353, 0.02689638328552246, 0.02707046318054199, 0.02712678337097168, 0.028379135131835938, 0.027851776123046876, 0.02794291114807129, 0.02792550468444824, 0.02791731262207031, 0.027760639190673828, 0.026991615295410155, 0.027098112106323242, 0.027075584411621095, 0.026797056198120117, 0.027090944290161133, 0.027065343856811523, 0.027027456283569336, 0.027190271377563476, 0.02710425567626953, 0.027374591827392578, 0.027062271118164064, 0.026961919784545898, 0.02717184066772461, 0.026805248260498047, 0.027999231338500977, 0.027671552658081053, 0.02711244773864746, 0.02774220848083496, 0.02838118362426758, 0.02798182487487793, 0.02710937690734863, 0.0267509765625, 0.02681548881530762, 0.02671308708190918, 0.026927104949951174, 0.027248640060424805, 0.027092992782592775, 0.026857471466064452, 0.026779647827148437, 0.026909696578979493, 0.026866687774658202, 0.026845184326171875, 0.026847232818603517, 0.026850303649902343, 0.026834943771362304, 0.02689740753173828, 0.02695577621459961, 0.026856447219848634, 0.02698854446411133, 0.026756095886230468, 0.026893312454223633, 0.026785791397094725, 0.0268984317779541, 0.02697216033935547, 0.02690457534790039, 0.027356159210205077, 0.02798591995239258, 0.027870208740234374, 0.027732992172241212, 0.027701248168945314, 0.027782144546508788, 0.027839487075805663, 0.026832895278930666, 0.027741184234619142, 0.026442752838134766, 0.02675814437866211, 0.02688921546936035, 0.026813440322875977, 0.026822656631469727, 0.026977279663085937, 0.027371519088745116, 0.027049983978271484, 0.02701312065124512, 0.027236352920532225, 0.027246591567993163, 0.026888191223144533, 0.026876928329467774, 0.026945535659790038, 0.027034624099731445, 0.026874879837036132, 0.02695577621459961, 0.02693734359741211, 0.02854297637939453, 0.028082176208496092, 0.02813337516784668, 0.027053056716918947, 0.026900480270385742, 0.026820608139038086, 0.026806272506713868, 0.026861568450927735, 0.02696499252319336, 0.026797056198120117, 0.026822656631469727, 0.02716876792907715, 0.026772480010986328, 0.026851327896118164, 0.026936319351196288, 0.026851327896118164, 0.026861568450927735, 0.026999807357788085, 0.027016191482543944, 0.026885120391845704, 0.026841087341308592, 0.026762239456176756, 0.026875904083251953, 0.027449344635009764, 0.027131904602050783, 0.02692095947265625, 0.027002880096435547, 0.026822656631469727, 0.026845184326171875, 0.027229183197021483, 0.02778316879272461, 0.02780364799499512, 0.027862016677856444, 0.027782144546508788, 0.027876352310180662, 0.027663360595703124, 0.027876352310180662, 0.026665983200073243, 0.026852352142333984, 0.0267007999420166, 0.02690355110168457, 0.026887168884277345, 0.02691379165649414, 0.026869760513305665, 0.026832895278930666, 0.02688204765319824, 0.026852352142333984, 0.026844160079956055, 0.026828800201416016, 0.02753638458251953, 
0.0279685115814209, 0.027812864303588865, 0.027827199935913087, 0.027867136001586915, 0.027847679138183593, 0.02779136085510254, 0.027842559814453126, 0.02773811149597168, 0.02862387275695801, 0.027467775344848632, 0.027060224533081056, 0.027449344635009764, 0.027196416854858397, 0.026917888641357423, 0.02679091262817383, 0.026874879837036132, 0.028009471893310548, 0.027651071548461914, 0.02775142478942871, 0.026814464569091798, 0.026861568450927735, 0.026852352142333984, 0.026995712280273438, 0.026855424880981447, 0.02713907241821289, 0.02687283134460449, 0.026874879837036132, 0.026875904083251953, 0.026653696060180664, 0.026863616943359377, 0.026831872940063478, 0.026995712280273438, 0.027677696228027345, 0.028338176727294922, 0.028031999588012696, 0.02833919906616211, 0.02793267250061035, 0.027700223922729493, 0.0277391357421875, 0.027870208740234374, 0.02691379165649414, 0.027230207443237304, 0.028115968704223632, 0.02855219268798828, 0.02732339286804199, 0.027618303298950195, 0.027236352920532225, 0.027021312713623048, 0.026926080703735353, 0.02676838493347168, 0.02714419174194336, 0.026829824447631836, 0.026787839889526367, 0.026496000289916992, 0.026813440322875977, 0.027232255935668945, 0.027631616592407225, 0.027806720733642577, 0.02777190399169922, 0.02775142478942871, 0.027122688293457032, 0.02697318458557129, 0.026868736267089844, 0.02697932815551758, 0.026851327896118164, 0.02676531219482422, 0.026809343338012694, 0.026856447219848634, 0.026850303649902343, 0.026840063095092775, 0.026818559646606444, 0.02696499252319336, 0.02735820770263672, 0.02717081642150879, 0.02675302314758301, 0.02687283134460449, 0.02694963264465332, 0.026876928329467774, 0.026944511413574217, 0.0271646728515625, 0.027046911239624022, 0.02776166343688965, 0.027283456802368163, 0.026816511154174806, 0.026851327896118164, 0.026875904083251953, 0.026879999160766603, 0.028456960678100586, 0.028219392776489258, 0.02795929527282715, 0.027749376296997072, 0.027882495880126954, 0.02776473617553711, 0.027806720733642577, 0.026952703475952147, 0.026860544204711914, 0.026597375869750976, 0.02656051254272461, 0.026835968017578125, 0.026827775955200195, 0.02676940727233887, 0.026829824447631836, 0.02696703910827637, 0.026747903823852538, 0.026635263442993166, 0.026772480010986328, 0.026861568450927735, 0.026696704864501954, 0.026792959213256837, 0.026840063095092775, 0.026834943771362304, 0.02751590347290039, 0.028280832290649413, 0.02860851287841797, 0.027850751876831056, 0.027891712188720705, 0.02794803237915039, 0.02751487922668457, 0.027003904342651368, 0.02688921546936035, 0.026836992263793946, 0.026805248260498047, 0.02674278450012207, 0.026884096145629883, 0.026834943771362304, 0.026549247741699217, 0.026841087341308592, 0.027456512451171877, 0.027395072937011718, 0.026916864395141602, 0.026811391830444335, 0.02677452850341797, 0.026677248001098632, 0.026838016510009766, 0.02687385559082031, 0.02674483108520508, 0.026806272506713868, 0.02676019287109375, 0.026491903305053712, 0.026641408920288087, 0.026867712020874023, 0.02669158363342285, 0.026883071899414062, 0.026851327896118164, 0.02675814437866211, 0.026772480010986328, 0.026848255157470705, 0.026762239456176756, 0.02690457534790039, 0.028041215896606447, 0.02734182357788086, 0.026854400634765626, 0.026852352142333984, 0.026789888381958008, 0.02672537612915039, 0.027478015899658204, 0.02678374481201172, 0.026910720825195314, 0.026871807098388673, 0.026829824447631836, 0.027022335052490236, 0.02777497673034668, 0.027830272674560546, 0.027817983627319336, 
0.027883520126342775, 0.027667455673217774, 0.027037696838378908, 0.026818559646606444, 0.02711142349243164, 0.02695680046081543, 0.026862592697143556, 0.026727424621582032, 0.027043840408325196, 0.027056127548217773, 0.026871807098388673, 0.028803071975708007, 0.02817945671081543, 0.027543552398681642, 0.027059200286865235, 0.02690457534790039, 0.026868736267089844, 0.02698137664794922, 0.02699776077270508, 0.027490304946899413, 0.02709708786010742, 0.02694041633605957, 0.02682368087768555, 0.02689945602416992, 0.027006975173950197, 0.02712883186340332, 0.02715443229675293, 0.02680012893676758, 0.027371519088745116, 0.026910720825195314, 0.026841087341308592, 0.026820608139038086, 0.02693836784362793, 0.026916864395141602, 0.026855424880981447, 0.027098112106323242, 0.02651136016845703, 0.026834943771362304, 0.02675814437866211, 0.026828800201416016, 0.02707148742675781, 0.026951679229736326, 0.026831872940063478, 0.027000831604003905, 0.026770431518554686, 0.026764287948608398, 0.026832895278930666, 0.026738687515258788, 0.028181503295898438, 0.02714726448059082, 0.027208703994750977, 0.027517951965332032, 0.027213823318481444, 0.026779647827148437, 0.026586111068725587, 0.02672537612915039, 0.02692095947265625, 0.026856447219848634, 0.026927104949951174, 0.02693017578125, 0.02698137664794922, 0.026831872940063478, 0.027047935485839843, 0.026934272766113283, 0.026970111846923828, 0.026784767150878908, 0.026870784759521486, 0.02690559959411621, 0.026792959213256837, 0.026858495712280273, 0.026805248260498047, 0.026891263961791992, 0.027023359298706053, 0.026902528762817384, 0.02681548881530762, 0.026825727462768553, 0.026816511154174806, 0.027019264221191407, 0.026908672332763672, 0.0269117431640625, 0.027031551361083983, 0.026942464828491212, 0.026900480270385742, 0.02691276741027832, 0.02697318458557129, 0.026826751708984374, 0.026985471725463867, 0.026900480270385742, 0.027056127548217773, 0.027862016677856444, 0.02728550338745117, 0.02688102340698242, 0.028823551177978517, 0.027419647216796874, 0.026893312454223633, 0.026868736267089844, 0.026978303909301758, 0.026836992263793946, 0.026909696578979493, 0.02690764808654785, 0.026909696578979493, 0.026816511154174806, 0.026798080444335938, 0.026888191223144533, 0.027295743942260742, 0.028089344024658205, 0.027907072067260744, 0.026801151275634767, 0.02691481590270996, 0.026802175521850585, 0.026779647827148437, 0.026728448867797853, 0.02727731132507324, 0.026810367584228514, 0.02690355110168457, 0.026824703216552736, 0.02756710433959961, 0.02775961685180664, 0.027379711151123046, 0.026816511154174806, 0.0268984317779541, 0.03073023986816406, 0.028286975860595705, 0.027914239883422853, 0.0273756160736084, 0.027029504776000978, 0.026866687774658202, 0.02676019287109375, 0.026863616943359377, 0.026866687774658202, 0.026728448867797853, 0.02672435188293457, 0.02672230339050293, 0.026764287948608398, 0.026730495452880858, 0.026858495712280273, 0.026770431518554686, 0.026719232559204102, 0.02672127914428711, 0.026702848434448243, 0.026746879577636717, 0.02666700744628906, 0.026771455764770507, 0.026854400634765626, 0.027131904602050783, 0.02700595283508301, 0.026852352142333984, 0.026566656112670898, 0.026883071899414062, 0.026893312454223633, 0.02671308708190918, 0.02690662384033203, 0.026866687774658202, 0.027142143249511717, 0.027085823059082033, 0.026871807098388673, 0.02696294403076172, 0.026894336700439454, 0.02682368087768555, 0.027996160507202147, 0.029246463775634765, 0.027880447387695313, 0.027858943939208985, 0.027158527374267577, 
0.026866687774658202, 0.026819583892822265, 0.02676019287109375, 0.026868736267089844, 0.02755891227722168, 0.027626495361328125, 0.027789312362670897, 0.027777023315429687, 0.02776473617553711, 0.02791935920715332, 0.02775347137451172, 0.02734694480895996, 0.027232255935668945, 0.026659839630126952, 0.026862592697143556, 0.026859519958496093, 0.02672332763671875, 0.026828800201416016, 0.026855424880981447, 0.026833919525146483, 0.027253759384155272, 0.027020288467407227, 0.026858495712280273, 0.026871807098388673, 0.026895360946655275, 0.026844160079956055, 0.02700595283508301, 0.027510784149169923, 0.027623424530029295, 0.026909696578979493, 0.02691584014892578, 0.026829824447631836, 0.02693734359741211, 0.026870784759521486, 0.027006975173950197, 0.02675200080871582, 0.02694041633605957, 0.027015167236328123, 0.028727296829223634, 0.0281661434173584, 0.027979776382446288, 0.02791935920715332, 0.027966463088989257, 0.027236352920532225, 0.026801151275634767, 0.026859519958496093, 0.028049407958984376, 0.028048383712768556, 0.027889663696289063, 0.027854848861694335, 0.027982847213745117, 0.02796236801147461, 0.027845632553100585, 0.02774220848083496, 0.027849727630615235, 0.02771968078613281, 0.027837440490722655, 0.02778726387023926, 0.027971584320068358, 0.027674623489379883, 0.02818662452697754, 0.027844608306884764, 0.02771353530883789, 0.02797260856628418, 0.02798899269104004, 0.02752102470397949, 0.027053056716918947, 0.026978303909301758, 0.02690355110168457, 0.027832319259643554, 0.027679744720458983, 0.027636735916137696, 0.027820032119750978, 0.027794431686401368]",tokens/s,36.799208455084035,,,1,64,1,,, -4bit-awq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,01-ai/Yi-34B,01-ai/Yi-34B,cuda,0,42,,,True,,,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,llama,MB,17656.827904,22207.266816,0.0,21577.596928,20724.970496,s,1,18.49708203125,18.49708203125,0.0,18.49708203125,18.49708203125,18.49708203125,18.49708203125,[18.49708203125],,kWh,0.00013618724680694358,7.462657754008782e-05,0.00029252384512999663,0.000503337669477028,,MB,1760.980992,22286.958592,0.0,21640.511488,19428.81536,s,10,63.306844726562495,6.33068447265625,0.00038012203264821637,6.330644775390625,6.3311208984375,6.331218896484375,6.3312972949218755,"[6.33014892578125, 6.33009521484375, 6.330443359375, 6.330564453125, 6.330607421875, 6.33068212890625, 6.33109912109375, 6.3308291015625, 6.33105810546875, 6.33131689453125]",tokens/s,40.43796545313948,kWh,7.475986627013885e-05,4.097124827558707e-05,0.0004241964782458002,0.0005399275927915261,tokens/kWh,474137.6499697531,MB,1781.02272,22316.31872,0.0,21667.774464,19428.81792,s,10,33.664468505859375,3.3664468505859375,0.007727167167994785,3.36631103515625,3.373078466796875,3.3778151123046873,3.3816044287109377,"[3.37167138671875, 3.351987548828125, 3.37202587890625, 3.36561962890625, 3.367631103515625, 3.361351806640625, 3.3825517578125, 3.3603505859375, 3.3642763671875, 
3.36700244140625]",tokens/s,18.71409316592499,kWh,3.9716523283264034e-05,2.1770299128468594e-05,0.00018830553953319974,0.00024979236194493234,tokens/kWh,252209.47313788795,,s,630,33.66177074432375,0.0534313821338472,0.0005238483602445055,0.05332172966003418,0.05402582969665527,0.05428966464996338,0.0553656018447876,"[0.053664768218994144, 0.052789249420166016, 0.05400678253173828, 0.05279948806762695, 0.052770816802978515, 0.05271859359741211, 0.05283225631713867, 0.05284659194946289, 0.053047359466552736, 0.052835296630859375, 0.05344457626342773, 0.05392998504638672, 0.05394636917114258, 0.05379379272460937, 0.053644287109375, 0.05399347305297852, 0.05311897659301758, 0.05371596908569336, 0.05399859237670898, 0.053771263122558595, 0.05340774536132813, 0.05295308685302735, 0.052985855102539066, 0.0538152961730957, 0.05301964950561523, 0.05312921524047851, 0.05366067123413086, 0.05303603363037109, 0.05400064086914062, 0.05388800048828125, 0.05394944000244141, 0.05408768081665039, 0.053703678131103515, 0.053596160888671876, 0.05326540756225586, 0.053612545013427736, 0.05358694458007812, 0.05384703826904297, 0.05360025787353516, 0.05326847839355469, 0.05362995147705078, 0.05367295837402344, 0.056954879760742184, 0.05415116882324219, 0.05320806503295898, 0.05318656158447266, 0.05323571014404297, 0.053379070281982424, 0.05324800109863281, 0.05379891204833984, 0.053308414459228515, 0.05331455993652344, 0.05327872085571289, 0.05331455993652344, 0.05327974319458008, 0.05421977615356445, 0.05387059020996094, 0.05357158279418945, 0.05337190246582031, 0.053318656921386716, 0.05337497711181641, 0.053308414459228515, 0.05334425735473633, 0.053720062255859374, 0.05288857650756836, 0.052765697479248044, 0.05278003311157226, 0.05276671981811523, 0.052754432678222656, 0.05287014389038086, 0.05280153656005859, 0.052749313354492185, 0.05276774215698242, 0.05285990524291992, 0.05284352111816406, 0.05284044647216797, 0.05286502456665039, 0.05288345718383789, 0.05294387054443359, 0.052961280822753906, 0.052924415588378904, 0.05355724716186523, 0.05344972610473633, 0.05308927917480469, 0.052923393249511716, 0.05304115295410156, 0.05304422378540039, 0.05310771179199219, 0.05353472137451172, 0.053443584442138675, 0.053029888153076174, 0.05308006286621094, 0.05302067184448242, 0.05310464096069336, 0.05307392120361328, 0.05302579116821289, 0.0530882568359375, 0.05306675338745117, 0.05305548858642578, 0.05315071868896484, 0.05305753707885742, 0.05324492645263672, 0.05327667236328125, 0.05326847839355469, 0.053133312225341796, 0.053185535430908204, 0.053217281341552736, 0.05321113586425781, 0.053207038879394535, 0.05318143844604492, 0.05326233673095703, 0.05324492645263672, 0.056025089263916014, 0.05377228927612305, 0.05353472137451172, 0.05330022430419922, 0.053292030334472655, 0.05331148910522461, 0.053305343627929686, 0.05333708953857422, 0.05327872085571289, 0.05339136123657227, 0.05347225570678711, 0.053370880126953124, 0.054245376586914064, 0.053735424041748046, 0.05539737701416016, 0.05393920135498047, 0.05370163345336914, 0.05402521514892578, 0.05370675277709961, 0.05360435104370117, 0.05283020782470703, 0.05279641723632812, 0.052969470977783206, 0.05414912033081055, 0.053943294525146485, 0.05462527847290039, 0.05465292739868164, 0.05294182586669922, 0.0529356803894043, 0.05292544174194336, 0.05292031860351563, 0.05295820617675781, 0.05348863983154297, 0.05335039901733398, 0.053526527404785154, 0.05297459030151367, 0.053569534301757815, 0.05403955078125, 0.053539840698242185, 0.05349683380126953, 
0.053610496520996094, 0.05373132705688476, 0.05304729461669922, 0.05302579116821289, 0.05311590576171875, 0.05309439849853516, 0.05319270324707031, 0.05308313751220703, 0.05307289505004883, 0.053215232849121094, 0.054296577453613284, 0.0545873908996582, 0.05363507080078125, 0.05362073516845703, 0.053580799102783204, 0.053651454925537106, 0.05335244750976562, 0.05333606338500976, 0.053343231201171876, 0.05345587158203125, 0.05332070541381836, 0.05337190246582031, 0.05320601654052735, 0.05318143844604492, 0.05326131057739258, 0.053425151824951174, 0.05323571014404297, 0.053272575378417966, 0.053275646209716795, 0.0542371826171875, 0.05390438461303711, 0.05365248107910156, 0.053684223175048826, 0.05363814544677734, 0.05376716613769531, 0.05366681671142578, 0.05360639953613281, 0.054529022216796875, 0.05361356735229492, 0.053372928619384766, 0.05279948806762695, 0.05507891082763672, 0.05562265777587891, 0.05379072189331055, 0.05366988754272461, 0.053482494354248046, 0.053460990905761716, 0.053782527923583984, 0.05282406234741211, 0.05291929626464844, 0.0528271369934082, 0.05289984130859375, 0.052962303161621094, 0.05291929626464844, 0.0541030387878418, 0.053926910400390625, 0.054056961059570315, 0.05385420989990235, 0.05386547088623047, 0.053969921112060545, 0.0530882568359375, 0.054079486846923826, 0.0529879035949707, 0.05306163024902344, 0.05301862335205078, 0.05309542465209961, 0.053010433197021485, 0.053117950439453124, 0.05305241775512695, 0.05306675338745117, 0.05303398513793945, 0.053070846557617186, 0.05308108901977539, 0.05358796691894531, 0.05303603363037109, 0.05328384017944336, 0.05310976028442383, 0.05305446243286133, 0.05303807830810547, 0.05319782257080078, 0.05332275390625, 0.05319475173950195, 0.05321011352539062, 0.053193729400634764, 0.0533309440612793, 0.05322444915771484, 0.05321420669555664, 0.0532490234375, 0.05331660842895508, 0.05323878479003906, 0.05320601654052735, 0.05326540756225586, 0.053523456573486325, 0.05331558227539063, 0.05329510498046875, 0.05336678314208984, 0.05333606338500976, 0.053349376678466794, 0.05332275390625, 0.05547315216064453, 0.053736446380615234, 0.052751361846923826, 0.05276467132568359, 0.05281792068481445, 0.052798465728759764, 0.052738048553466796, 0.05280255889892578, 0.05280767822265625, 0.05279132843017578, 0.05282403182983399, 0.05285068893432617, 0.05475430297851563, 0.05335039901733398, 0.05285068893432617, 0.052915199279785156, 0.05311897659301758, 0.054389759063720705, 0.053866497039794924, 0.054004737854003906, 0.05390643310546875, 0.053812225341796874, 0.05371187210083008, 0.054833152770996096, 0.05394124984741211, 0.05305036926269531, 0.052954113006591794, 0.053539840698242185, 0.053525505065917967, 0.05332889556884766, 0.05351116943359375, 0.05366886520385742, 0.05397196960449219, 0.05347020721435547, 0.05307494354248047, 0.05428121566772461, 0.05456588745117188, 0.053787647247314455, 0.05317631912231445, 0.0537077751159668, 0.0536893424987793, 0.05310976028442383, 0.053119998931884765, 0.053220352172851565, 0.053460990905761716, 0.05392895889282227, 0.05320806503295898, 0.05318963241577149, 0.0533309440612793, 0.05318963241577149, 0.053215232849121094, 0.05324492645263672, 0.053319679260253904, 0.05325107192993164, 0.05399961471557617, 0.053357566833496094, 0.05343743896484375, 0.053364734649658206, 0.053288959503173826, 0.05334425735473633, 0.05342310333251953, 0.05412351989746094, 0.053910526275634765, 0.05388390350341797, 0.05367295837402344, 0.052760574340820314, 0.052806655883789064, 0.05279334259033203, 
0.052749313354492185, 0.0527011833190918, 0.05281792068481445, 0.05282611083984375, 0.052732929229736325, 0.05287116622924805, 0.05343743896484375, 0.05351116943359375, 0.05333913421630859, 0.05359001541137695, 0.052894718170166014, 0.052964351654052735, 0.05510860824584961, 0.05410713577270508, 0.05356851196289063, 0.052913150787353515, 0.052954113006591794, 0.05297663879394531, 0.053542911529541014, 0.05304217529296875, 0.05300326538085937, 0.05300121688842773, 0.05304422378540039, 0.05312921524047851, 0.054251518249511715, 0.05304422378540039, 0.05308620834350586, 0.053070846557617186, 0.05304524612426758, 0.053031936645507816, 0.053610496520996094, 0.05370265579223633, 0.05333401489257812, 0.0534466552734375, 0.05344460678100586, 0.05310976028442383, 0.053387264251708984, 0.0538787841796875, 0.053201919555664064, 0.05319168090820312, 0.05316812896728516, 0.05317529678344726, 0.053169151306152344, 0.054675457000732425, 0.05379891204833984, 0.05321932983398438, 0.05325107192993164, 0.05568000030517578, 0.0540497932434082, 0.05330636978149414, 0.05335347366333008, 0.05363916778564453, 0.053340160369873046, 0.053441535949707034, 0.05359206390380859, 0.05347020721435547, 0.05338214492797851, 0.053285888671875, 0.053411838531494144, 0.05374566268920898, 0.05292134475708008, 0.05278105545043945, 0.052787200927734375, 0.05271551895141602, 0.05333606338500976, 0.0534200325012207, 0.053319679260253904, 0.05397401428222656, 0.05367193603515625, 0.053733375549316405, 0.0528353271484375, 0.053812225341796874, 0.05365248107910156, 0.05348556900024414, 0.05392895889282227, 0.053085182189941404, 0.05388595199584961, 0.05404876708984375, 0.05351731109619141, 0.054406143188476565, 0.055201793670654295, 0.055585792541503906, 0.05420441436767578, 0.05384499359130859, 0.053743614196777346, 0.053582847595214846, 0.05413273620605469, 0.05365964889526367, 0.0535551986694336, 0.05309747314453125, 0.05305446243286133, 0.05306777572631836, 0.053075969696044924, 0.053098495483398435, 0.05319782257080078, 0.053187583923339846, 0.05350502395629883, 0.053515262603759765, 0.054340606689453126, 0.05419724655151367, 0.05386547088623047, 0.053804031372070314, 0.0534200325012207, 0.05378559875488281, 0.05359001541137695, 0.054591487884521485, 0.054095870971679685, 0.053759998321533206, 0.053678081512451174, 0.053582847595214846, 0.05380812835693359, 0.05399552154541016, 0.053967872619628904, 0.05379072189331055, 0.05333196640014649, 0.05447270584106445, 0.053272575378417966, 0.054313983917236325, 0.05427199935913086, 0.05381119918823242, 0.053564414978027344, 0.053607425689697265, 0.05372415924072266, 0.05277286529541016, 0.05281280136108398, 0.05281689453125, 0.05273395156860351, 0.052762622833251956, 0.05278617477416992, 0.052770816802978515, 0.05379379272460937, 0.05276671981811523, 0.052838401794433595, 0.052822017669677736, 0.05286195373535156, 0.053000190734863284, 0.05287628936767578, 0.052967422485351565, 0.0529090576171875, 0.052910079956054686, 0.05351116943359375, 0.05296332931518555, 0.052975616455078124, 0.0530513916015625, 0.053085182189941404, 0.05391667175292969, 0.05312409591674805, 0.05294079971313476, 0.05302272033691406, 0.05344563293457031, 0.05406412887573242, 0.053456897735595706, 0.05418707275390625, 0.054241214752197266, 0.05365657424926758, 0.0540313606262207, 0.053515262603759765, 0.05306060791015625, 0.05343436813354492, 0.0532490234375, 0.053735424041748046, 0.054844417572021485, 0.05386240005493164, 0.053768192291259766, 0.053835777282714846, 0.05355110549926758, 0.053195777893066405, 
0.05319168090820312, 0.053185535430908204, 0.05325823974609375, 0.05319782257080078, 0.0532408332824707, 0.0535654411315918, 0.05430886459350586, 0.05388083267211914, 0.05330124664306641, 0.053362686157226565, 0.05332070541381836, 0.05329919815063477, 0.05327769470214844, 0.05333606338500976, 0.05337395095825195, 0.053612545013427736, 0.05332582473754883, 0.053400577545166014, 0.05375692749023438, 0.05275852966308594, 0.052836353302001954, 0.05282611083984375, 0.05280460739135742, 0.05310259246826172, 0.05279129409790039, 0.05278822326660156, 0.05391769790649414, 0.05278412628173828, 0.052836353302001954, 0.053866497039794924, 0.053819393157958986, 0.05394636917114258, 0.05371903991699219, 0.052929534912109374, 0.05385523223876953, 0.05375385665893555, 0.05376102447509765, 0.05366988754272461, 0.05296025466918945, 0.05353574371337891, 0.05383270263671875, 0.05394432067871094, 0.05448396682739258, 0.05417574310302734, 0.05306880187988281, 0.05301760101318359, 0.05343743896484375, 0.05379379272460937, 0.0530780143737793, 0.054370304107666016, 0.05306470489501953, 0.05528780746459961, 0.053184513092041016, 0.0531671028137207, 0.053190654754638675, 0.05300428771972656, 0.05340671920776367, 0.053891071319580076, 0.05311283111572265, 0.05311897659301758, 0.05323980712890625, 0.053215232849121094, 0.05332889556884766, 0.05329510498046875, 0.05317529678344726, 0.053282817840576174, 0.05325619125366211, 0.053220352172851565, 0.05324288177490234, 0.053343231201171876, 0.05323468780517578, 0.05324492645263672, 0.05333606338500976, 0.05333913421630859, 0.05331455993652344, 0.05330636978149414, 0.053343231201171876, 0.053372928619384766, 0.05341286468505859, 0.05332787322998047, 0.05340364837646484, 0.05420339202880859, 0.054452224731445314, 0.053698558807373044, 0.053602302551269534, 0.053763072967529295, 0.05369139099121094, 0.053713920593261716, 0.05401497650146484, 0.052792320251464846, 0.052792320251464846, 0.05383065414428711, 0.05426688003540039, 0.05383065414428711, 0.05415116882324219, 0.05352755355834961, 0.052947967529296876, 0.05289267349243164, 0.05303398513793945, 0.052994049072265625, 0.05292748641967773, 0.05297151947021484, 0.05293670272827149, 0.05300735855102539, 0.0530063362121582, 0.05303807830810547, 0.053013504028320314, 0.05305753707885742, 0.053032958984375, 0.053028865814208986, 0.0530063362121582, 0.05310566329956055, 0.053059585571289064, 0.05303500747680664, 0.05307289505004883, 0.05306675338745117, 0.05307494354248047, 0.053302272796630856, 0.053131263732910154, 0.0532674560546875, 0.0551280632019043, 0.05396480178833008, 0.05391360092163086, 0.05415321731567383, 0.053822463989257815, 0.05317529678344726, 0.053722110748291016, 0.05402009582519531, 0.05406412887573242, 0.053806079864501956, 0.05380300903320313, 0.05373440170288086, 0.053305343627929686, 0.05327360153198242, 0.0532940788269043, 0.05330739212036133, 0.05332070541381836, 0.05345382308959961, 0.05329510498046875, 0.05334220886230469, 0.053367809295654295, 0.05336576080322265, 0.05332787322998047, 0.053441535949707034]",tokens/s,18.715592972964284,,,1,64,1,,, 
-4bit-awq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,EleutherAI/gpt-neo-1.3B,EleutherAI/gpt-neo-1.3B,cuda,0,42,,,True,,,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,gpt_neo,MB,1540.01408,1766.326272,0.0,1136.656384,1111.384576,s,1,8.3298486328125,8.3298486328125,0.0,8.3298486328125,8.3298486328125,8.3298486328125,8.3298486328125,[8.3298486328125],,kWh,1.8036368756949314e-05,9.834478953931343e-06,2.9932246168012933e-05,5.7803093878893586e-05,,MB,1684.312064,1839.726592,0.0,1193.279488,1029.128704,s,10,2.381833938598633,0.23818339385986328,0.00018681056399988985,0.23815821838378906,0.23848440551757813,0.23849834442138673,0.2385094955444336,"[0.23804328918457032, 0.23796035766601562, 0.23848130798339845, 0.23821932983398436, 0.23815852355957032, 0.2380793914794922, 0.2385122833251953, 0.2381579132080078, 0.2382821807861328, 0.2379393615722656]",tokens/s,1074.802050014533,kWh,2.816296173379637e-06,1.5427773037005763e-06,1.577993987261838e-05,2.0139013349698596e-05,tokens/kWh,12711645.578398276,MB,1730.048,1843.920896,0.0,1195.37664,1083.494912,s,10,10.5440283203125,1.05440283203125,0.011867693527136184,1.059742126464844,1.0658725463867187,1.0659790588378906,1.066064268798828,"[1.06100390625, 1.0483153076171876, 1.0615164794921874, 1.065848876953125, 1.0592333984375, 1.0520281982421875, 1.0660855712890625, 1.0252896728515626, 1.0444560546875, 1.0602508544921876]",tokens/s,59.749460155217825,kWh,1.2130962730371237e-05,6.648030103115862e-06,2.593892683578281e-05,4.47179196692699e-05,tokens/kWh,1408831.1904029273,,s,630,10.540060646057144,0.016730254993741474,0.0003969647653570955,0.01683507251739502,0.017232999038696288,0.017346970081329346,0.0176713733291626,"[0.01621401596069336, 0.016615423202514648, 0.01683865547180176, 0.01678745651245117, 0.01678233528137207, 0.016866304397583007, 0.016842752456665038, 0.016932928085327148, 0.017164224624633788, 0.01701785659790039, 0.017572864532470703, 0.0170199031829834, 0.01704652786254883, 0.016895999908447267, 0.016937984466552734, 0.01703321647644043, 0.016871423721313478, 0.016912384033203123, 0.0170199031829834, 0.01722265625, 0.017472511291503907, 0.0172042236328125, 0.01683865547180176, 0.01636966323852539, 0.016300031661987305, 0.016324607849121094, 0.01619660758972168, 0.016249856948852538, 0.01637068748474121, 0.016857088088989256, 0.016877567291259766, 0.017140735626220704, 0.01698099136352539, 0.01685196876525879, 0.016871423721313478, 0.016899072647094726, 0.016903167724609376, 0.016934911727905275, 0.016885759353637696, 0.01698406410217285, 0.016943103790283204, 0.01677107238769531, 0.016914432525634765, 0.016879615783691407, 0.016939008712768554, 0.016894975662231446, 0.016901119232177735, 0.016958463668823243, 0.016896032333374025, 0.017545183181762694, 0.01716633605957031, 0.016965631484985352, 0.01698508834838867, 0.016854015350341797, 0.016887807846069337, 0.01697689628601074, 0.016881664276123046, 0.016745471954345705, 0.016339967727661133, 0.016277503967285157, 0.016290815353393554, 0.016315391540527344, 0.016227327346801757, 
0.016307199478149414, 0.016290815353393554, 0.016281600952148437, 0.016242687225341796, 0.016863231658935548, 0.01658982467651367, 0.016316415786743164, 0.016372735977172852, 0.016264192581176756, 0.01614847946166992, 0.016315391540527344, 0.016270336151123048, 0.016244735717773438, 0.016236543655395508, 0.016877567291259766, 0.016937984466552734, 0.016915456771850586, 0.016871423721313478, 0.01683967971801758, 0.01685196876525879, 0.016890880584716796, 0.01702707290649414, 0.016862207412719727, 0.016974847793579103, 0.016805919647216797, 0.016881631851196288, 0.016903167724609376, 0.016905216217041014, 0.016865280151367186, 0.016301055908203126, 0.01620582389831543, 0.016540672302246092, 0.01722265625, 0.016917503356933594, 0.016850944519042968, 0.01685196876525879, 0.01686016082763672, 0.01686835289001465, 0.01677107238769531, 0.01681407928466797, 0.01684480094909668, 0.016904191970825197, 0.01686835289001465, 0.016855039596557618, 0.016902143478393555, 0.016929792404174804, 0.01680691146850586, 0.016939008712768554, 0.016691200256347655, 0.016226303100585936, 0.01620889663696289, 0.01639321517944336, 0.016304128646850585, 0.01626016044616699, 0.016236480712890626, 0.016260095596313476, 0.016227327346801757, 0.016278528213500978, 0.016453632354736326, 0.01636249542236328, 0.016865280151367186, 0.016949247360229493, 0.01682431983947754, 0.016333824157714845, 0.016257024765014647, 0.016242687225341796, 0.01620992088317871, 0.01624166488647461, 0.01619660758972168, 0.01622528076171875, 0.01621708869934082, 0.01622323226928711, 0.01618841552734375, 0.016183296203613282, 0.01621299171447754, 0.01616998481750488, 0.016158784866333008, 0.016202688217163086, 0.017312768936157227, 0.017524736404418945, 0.017314912796020508, 0.01694607925415039, 0.01685606384277344, 0.016913408279418944, 0.016889856338500975, 0.016846847534179688, 0.016875520706176757, 0.016874496459960937, 0.016850944519042968, 0.016845823287963867, 0.016899072647094726, 0.01681510353088379, 0.01683456039428711, 0.016915456771850586, 0.017556480407714844, 0.018880512237548826, 0.01747148895263672, 0.01760153579711914, 0.01702604866027832, 0.017079296112060546, 0.016978015899658205, 0.016919519424438475, 0.016970687866210938, 0.0170199031829834, 0.017161216735839844, 0.016951295852661134, 0.016884735107421875, 0.016907264709472656, 0.016888832092285155, 0.01698307228088379, 0.016963552474975586, 0.016955392837524414, 0.016615423202514648, 0.017266687393188478, 0.017299455642700197, 0.016833536148071288, 0.016916479110717773, 0.016887807846069337, 0.01703628730773926, 0.016941055297851563, 0.016931840896606445, 0.016954399108886718, 0.016950239181518556, 0.016861183166503906, 0.01682022476196289, 0.01683660888671875, 0.016321535110473632, 0.01699737548828125, 0.016845888137817382, 0.016884672164916993, 0.016871423721313478, 0.016851999282836913, 0.017101791381835936, 0.01697177505493164, 0.017077247619628907, 0.016916479110717773, 0.01680998420715332, 0.0167956485748291, 0.016912384033203123, 0.016925695419311524, 0.01686425590515137, 0.016963584899902344, 0.01683251190185547, 0.01680076789855957, 0.016879615783691407, 0.016858112335205077, 0.01678745651245117, 0.016849920272827147, 0.0170199031829834, 0.017176576614379883, 0.016936960220336913, 0.016849920272827147, 0.01685196876525879, 0.016915456771850586, 0.01685606384277344, 0.016949247360229493, 0.016854015350341797, 0.016945152282714843, 0.016857088088989256, 0.01661235237121582, 0.017539072036743163, 0.017329151153564454, 0.016907264709472656, 0.01696460723876953, 
0.01705779266357422, 0.01700044822692871, 0.016911359786987306, 0.01682329559326172, 0.01684480094909668, 0.016882688522338866, 0.016873472213745116, 0.016870399475097657, 0.016896064758300782, 0.01693280029296875, 0.016910335540771485, 0.01680588722229004, 0.01683558464050293, 0.01680793571472168, 0.01680588722229004, 0.016884735107421875, 0.01686016082763672, 0.016911359786987306, 0.017342464447021484, 0.017108991622924806, 0.01679769515991211, 0.016873472213745116, 0.01681407928466797, 0.01696870422363281, 0.01697177505493164, 0.016892927169799805, 0.01745510482788086, 0.01698918342590332, 0.016983039855957033, 0.016857088088989256, 0.0173353271484375, 0.017005535125732423, 0.016986112594604492, 0.01681817626953125, 0.01679462432861328, 0.016257055282592775, 0.016173023223876953, 0.01624678421020508, 0.01622220802307129, 0.016176128387451173, 0.016397312164306642, 0.016261184692382812, 0.016994239807128907, 0.017285120010375975, 0.017361919403076173, 0.017293312072753905, 0.01683558464050293, 0.01676288032531738, 0.016865280151367186, 0.016825344085693358, 0.016880640029907225, 0.01680691146850586, 0.016753696441650392, 0.016720863342285158, 0.016767999649047852, 0.016866304397583007, 0.01681305694580078, 0.016667648315429686, 0.016750591278076172, 0.016767999649047852, 0.01683456039428711, 0.01681612777709961, 0.016731136322021483, 0.017087488174438475, 0.016936960220336913, 0.016777215957641603, 0.01679769515991211, 0.01683148765563965, 0.01680384063720703, 0.016872480392456056, 0.01687651252746582, 0.01686016082763672, 0.01680486488342285, 0.0167956485748291, 0.016854015350341797, 0.016756736755371093, 0.01684377670288086, 0.01682022476196289, 0.016780319213867186, 0.016837600708007813, 0.016892927169799805, 0.016756736755371093, 0.01679462432861328, 0.016751615524291993, 0.01683865547180176, 0.01682841682434082, 0.016732191085815428, 0.01668502426147461, 0.0167096004486084, 0.01685606384277344, 0.01767731285095215, 0.01738956832885742, 0.016891904830932617, 0.016892927169799805, 0.016908287048339844, 0.016887807846069337, 0.016873472213745116, 0.016886783599853517, 0.01685196876525879, 0.016831520080566407, 0.016829408645629883, 0.016869375228881836, 0.016867328643798828, 0.01681920051574707, 0.016825344085693358, 0.016788480758666992, 0.01684787178039551, 0.016916479110717773, 0.017302528381347656, 0.016963615417480468, 0.01687958335876465, 0.016962560653686523, 0.01699430465698242, 0.01701478385925293, 0.016507904052734376, 0.016251903533935547, 0.016300031661987305, 0.016258047103881835, 0.016103424072265626, 0.016303104400634767, 0.01624575996398926, 0.016234527587890624, 0.016283615112304688, 0.016184320449829103, 0.016236543655395508, 0.016692224502563476, 0.016879615783691407, 0.016669696807861328, 0.01622323226928711, 0.01618534469604492, 0.01624166488647461, 0.01618636894226074, 0.01621504020690918, 0.016126976013183594, 0.016299007415771484, 0.01623961639404297, 0.0162478084564209, 0.016276575088500975, 0.016641952514648437, 0.016781312942504883, 0.01682841682434082, 0.01679974365234375, 0.016916479110717773, 0.016879615783691407, 0.01683660888671875, 0.01682329559326172, 0.01685606384277344, 0.01680588722229004, 0.016686080932617187, 0.016719871520996094, 0.017993728637695314, 0.016268287658691406, 0.016497663497924805, 0.01621299171447754, 0.01620889663696289, 0.016207872390747072, 0.016223295211791992, 0.016225215911865234, 0.016347135543823242, 0.01624166488647461, 0.01619558334350586, 0.01618841552734375, 0.01617203140258789, 0.016227327346801757, 0.01613926315307617, 
0.016091136932373046, 0.01638400077819824, 0.016149503707885742, 0.016145408630371092, 0.01625497627258301, 0.01619558334350586, 0.01640243148803711, 0.01683456039428711, 0.016657407760620118, 0.016290815353393554, 0.016216064453125, 0.01695232009887695, 0.01744486427307129, 0.017334272384643554, 0.017245183944702147, 0.0171909122467041, 0.0172677116394043, 0.017317888259887695, 0.017898496627807618, 0.017282047271728516, 0.017277952194213866, 0.01725132751464844, 0.017262592315673828, 0.01718681526184082, 0.01726361656188965, 0.017316864013671874, 0.01721958351135254, 0.01724723243713379, 0.01725132751464844, 0.0171909122467041, 0.017289215087890625, 0.01739776039123535, 0.017253376007080077, 0.017369087219238282, 0.017504255294799806, 0.017884159088134767, 0.017656831741333007, 0.017271808624267578, 0.01738137626647949, 0.017320959091186524, 0.01724825668334961, 0.017350656509399414, 0.017314815521240236, 0.0172728328704834, 0.017142784118652343, 0.017361919403076173, 0.01723391914367676, 0.01723289680480957, 0.017723392486572266, 0.01613107109069824, 0.016290815353393554, 0.01623859214782715, 0.016313343048095702, 0.016265216827392577, 0.016320512771606444, 0.016291839599609375, 0.016305152893066405, 0.016281663894653322, 0.016244672775268556, 0.016273408889770507, 0.016133119583129883, 0.016297983169555663, 0.016265216827392577, 0.01624678421020508, 0.016296960830688476, 0.01620070457458496, 0.016280576705932616, 0.01620684814453125, 0.016244735717773438, 0.016207872390747072, 0.01625497627258301, 0.016258047103881835, 0.016319488525390623, 0.016356351852416993, 0.016242687225341796, 0.016117759704589844, 0.016418880462646484, 0.016324544906616213, 0.016235519409179687, 0.016277503967285157, 0.016299007415771484, 0.0163450870513916, 0.016508928298950197, 0.016941055297851563, 0.016434175491333008, 0.016250879287719726, 0.01621504020690918, 0.016257024765014647, 0.016268287658691406, 0.01623142433166504, 0.01618124771118164, 0.01620172882080078, 0.01619558334350586, 0.016248832702636717, 0.016194559097290038, 0.016260095596313476, 0.01655193519592285, 0.01620582389831543, 0.01623049545288086, 0.016238496780395507, 0.01618841552734375, 0.01620582389831543, 0.01617817687988281, 0.016236543655395508, 0.016352256774902343, 0.016264192581176756, 0.016176128387451173, 0.016192512512207033, 0.01616383934020996, 0.01620479965209961, 0.016236543655395508, 0.01614847946166992, 0.016284671783447266, 0.016308223724365235, 0.016242687225341796, 0.0162293758392334, 0.016249856948852538, 0.016219135284423827, 0.01621811294555664, 0.016289791107177733, 0.016202751159667968, 0.01616998481750488, 0.016125951766967773, 0.016150527954101563, 0.01623961639404297, 0.01619046401977539, 0.01616486358642578, 0.016262144088745118, 0.016121856689453123, 0.016151552200317384, 0.01617100715637207, 0.016352256774902343, 0.016879615783691407, 0.01655705642700195, 0.015930368423461915, 0.016235519409179687, 0.01617407989501953, 0.016137216567993166, 0.01601740837097168, 0.01617305564880371, 0.016128000259399415, 0.016153600692749022, 0.01738035202026367, 0.0169564151763916, 0.01676192092895508, 0.016796607971191407, 0.0167956485748291, 0.01678950309753418, 0.01683967971801758, 0.01675984001159668, 0.016749536514282227, 0.016931840896606445, 0.01677107238769531, 0.0167587833404541, 0.016829439163208008, 0.01680281639099121, 0.016875520706176757, 0.016918527603149415, 0.01683660888671875, 0.016902143478393555, 0.016881664276123046, 0.01678335952758789, 0.01685305595397949, 0.016850879669189453, 0.016788480758666992, 
0.01717350387573242, 0.01698918342590332, 0.01757900810241699, 0.017137664794921875, 0.016929792404174804, 0.016881664276123046, 0.016743423461914063, 0.01678233528137207, 0.016712703704833985, 0.016777215957641603, 0.016503807067871093, 0.017153024673461914, 0.01696051216125488, 0.01683660888671875, 0.01723187255859375, 0.017296384811401368, 0.016846847534179688, 0.016862207412719727, 0.016892927169799805, 0.016773120880126953, 0.01697996711730957, 0.01741721534729004, 0.01743155288696289, 0.01701580810546875, 0.016889856338500975, 0.016963584899902344, 0.016891904830932617, 0.01681407928466797, 0.01685196876525879, 0.016496639251708984, 0.01620902442932129, 0.01621388816833496, 0.01616998481750488, 0.016161792755126952, 0.016249856948852538, 0.01636761665344238, 0.016920576095581053, 0.01680998420715332, 0.0168222713470459, 0.01692166328430176, 0.016850976943969725, 0.016850847244262696, 0.016942079544067384, 0.018127872467041017, 0.017544191360473634, 0.01716223907470703, 0.016942079544067384, 0.0168222713470459, 0.016707712173461915, 0.016681856155395507, 0.016264192581176756, 0.01616896057128906, 0.01619558334350586, 0.01616998481750488, 0.01619558334350586, 0.01759539222717285, 0.01700249671936035, 0.016859136581420898, 0.01678745651245117, 0.016890880584716796, 0.016909311294555664, 0.016852991104125976, 0.01682537651062012, 0.016730079650878905, 0.016785408020019533, 0.01700044822692871, 0.016882688522338866, 0.016932863235473633, 0.01680076789855957, 0.01678643226623535, 0.01686425590515137, 0.016861183166503906, 0.016928768157958983]",tokens/s,59.77195209362226,,,1,64,1,,, -4bit-awq-exllama-v1-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,tiiuae/falcon-7b,tiiuae/falcon-7b,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - self.load_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 559, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3907, in from_pretrained - hf_quantizer.postprocess_model(model) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/base.py"", line 195, in postprocess_model - return self._process_model_after_weight_loading(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/quantizer_awq.py"", line 107, in _process_model_after_weight_loading - model = post_init_awq_exllama_modules(model, self.quantization_config.exllama_config) - File ""/usr/local/lib/python3.10/dist-packages/transformers/integrations/awq.py"", line 462, in post_init_awq_exllama_modules - model = exllama_post_init(model) - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllama.py"", line 144, in exllama_post_init - submodule.post_init() - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllama.py"", line 77, in post_init - self.q4 = exl_ext.make_q4( -NameError: name 'exl_ext' is not defined - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,1,64,1,,, -4bit-awq-exllama-v1-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,huggyllama/llama-65b,huggyllama/llama-65b,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - self.load_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3904, in from_pretrained - dispatch_model(model, **device_map_kwargs) - File 
""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 489, in dispatch_model - model.to(device) - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 2796, in to - return super().to(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1173, in to - return self._apply(convert) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 779, in _apply - module._apply(fn) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 779, in _apply - module._apply(fn) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 779, in _apply - module._apply(fn) - [Previous line repeated 2 more times] - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 853, in _apply - self._buffers[key] = fn(buf) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1159, in convert - return t.to( -torch.cuda.OutOfMemoryError: CUDA out of memory. Tried to allocate 86.00 MiB. GPU - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,1,64,1,,, -4bit-awq-exllama-v1-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,1,1,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 304, in hf_raise_for_status - response.raise_for_status() - File ""/usr/local/lib/python3.10/dist-packages/requests/models.py"", line 1024, in raise_for_status - raise HTTPError(http_error_msg, response=self) -requests.exceptions.HTTPError: 404 Client Error: Not Found for url: https://huggingface.co/1/resolve/main/config.json - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1221, in hf_hub_download - return _hf_hub_download_to_cache_dir( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1325, in _hf_hub_download_to_cache_dir - _raise_on_head_call_error(head_call_error, force_download, local_files_only) - File 
""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1823, in _raise_on_head_call_error - raise head_call_error - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1722, in _get_metadata_or_catch_error - metadata = get_hf_file_metadata(url=url, proxies=proxies, timeout=etag_timeout, headers=headers) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1645, in get_hf_file_metadata - r = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 372, in _request_wrapper - response = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 396, in _request_wrapper - hf_raise_for_status(response) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status - raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-669495a3-42ea630c2d33350a7418178a;234132d0-87ad-4257-8d61-0454a01b01c7) - -Repository Not Found for url: https://huggingface.co/1/resolve/main/config.json. -Please make sure you specified the correct `repo_id` and `repo_type`. -If you are trying to access a private or gated repo, make sure you are authenticated. - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 425, in cached_file - raise EnvironmentError( -OSError: 1 is not a local folder and is not a valid model identifier listed on 'https://huggingface.co/models' -If this is a private repository, make sure to pass a token having permission to this repo either by logging in with `huggingface-cli login` or by passing `token=` - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,1,64,1,,, 
-4bit-awq-exllama-v1-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,internlm/internlm-20b,internlm/internlm-20b,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - self.load_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 559, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3907, in from_pretrained - hf_quantizer.postprocess_model(model) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/base.py"", line 195, in postprocess_model - return self._process_model_after_weight_loading(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/quantizer_awq.py"", line 107, in _process_model_after_weight_loading - model = post_init_awq_exllama_modules(model, self.quantization_config.exllama_config) - File ""/usr/local/lib/python3.10/dist-packages/transformers/integrations/awq.py"", line 462, in post_init_awq_exllama_modules - model = exllama_post_init(model) - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllama.py"", line 144, in exllama_post_init - submodule.post_init() - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllama.py"", line 77, in post_init - self.q4 = exl_ext.make_q4( -NameError: name 'exl_ext' is not defined - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,1,64,1,,, 
-4bit-awq-exllama-v1-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,internlm/internlm2-20b,internlm/internlm2-20b,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - self.load_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 559, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3907, in from_pretrained - hf_quantizer.postprocess_model(model) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/base.py"", line 195, in postprocess_model - return self._process_model_after_weight_loading(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/quantizer_awq.py"", line 107, in _process_model_after_weight_loading - model = post_init_awq_exllama_modules(model, self.quantization_config.exllama_config) - File ""/usr/local/lib/python3.10/dist-packages/transformers/integrations/awq.py"", line 462, in post_init_awq_exllama_modules - model = exllama_post_init(model) - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllama.py"", line 144, in exllama_post_init - submodule.post_init() - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllama.py"", line 77, in post_init - self.q4 = exl_ext.make_q4( -NameError: name 'exl_ext' is not defined - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,1,64,1,,, 
-4bit-awq-exllama-v1-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,facebook/opt-30b,facebook/opt-30b,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - self.load_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3907, in from_pretrained - hf_quantizer.postprocess_model(model) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/base.py"", line 195, in postprocess_model - return self._process_model_after_weight_loading(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/quantizer_awq.py"", line 107, in _process_model_after_weight_loading - model = post_init_awq_exllama_modules(model, self.quantization_config.exllama_config) - File ""/usr/local/lib/python3.10/dist-packages/transformers/integrations/awq.py"", line 462, in post_init_awq_exllama_modules - model = exllama_post_init(model) - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllama.py"", line 144, in exllama_post_init - submodule.post_init() - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllama.py"", line 77, in post_init - self.q4 = exl_ext.make_q4( -NameError: name 'exl_ext' is not defined - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,1,64,1,,, 
-4bit-awq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,Qwen/Qwen1.5-MoE-A2.7B,Qwen/Qwen1.5-MoE-A2.7B,cuda,0,42,,,True,,,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run - report = scenario.run(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run - self.run_model_loading_tracking(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking - backend.load() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load - self.load_transformers_model() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model - self.load_transformers_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights - self.load_transformers_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained - self.pretrained_model = self.automodel_loader.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4037, in from_pretrained - hf_quantizer.postprocess_model(model) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/base.py"", line 195, in postprocess_model - return self._process_model_after_weight_loading(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/quantizer_awq.py"", line 107, in _process_model_after_weight_loading - model = post_init_awq_exllama_modules(model, self.quantization_config.exllama_config) - File ""/usr/local/lib/python3.10/dist-packages/transformers/integrations/awq.py"", line 462, in post_init_awq_exllama_modules - model = exllama_post_init(model) - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllama.py"", line 146, in exllama_post_init - submodule.post_init() - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllama.py"", line 79, in post_init - self.q4 = exl_ext.make_q4( -RuntimeError: scales and qweight have incompatible shapes - 
-",qwen2_moe,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,1,64,1,,, -4bit-awq-exllama-v1-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,mistralai/Mixtral-8x7B-v0.1,mistralai/Mixtral-8x7B-v0.1,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - self.load_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3904, in from_pretrained - dispatch_model(model, **device_map_kwargs) - File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 489, in dispatch_model - model.to(device) - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 2796, in to - return super().to(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1173, in to - return self._apply(convert) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 779, in _apply - module._apply(fn) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 779, in _apply - module._apply(fn) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 779, in _apply - module._apply(fn) - [Previous line repeated 2 more times] - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 853, in _apply - self._buffers[key] = fn(buf) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1159, in convert - return t.to( -torch.cuda.OutOfMemoryError: CUDA out of memory. Tried to allocate 20.00 MiB. 
GPU - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,1,64,1,,, -4bit-awq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,EleutherAI/pythia-410m,EleutherAI/pythia-410m,cuda,0,42,,,True,,,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,gpt_neox,MB,1030.856704,1200.095232,0.0,570.425344,525.840896,s,1,7.61223583984375,7.61223583984375,0.0,7.61223583984375,7.61223583984375,7.61223583984375,7.61223583984375,[7.61223583984375],,kWh,9.794321869450566e-06,5.351182460273372e-06,1.5117789871954557e-05,3.0263294201678493e-05,,MB,1548.537856,1246.232576,0.0,597.68832,584.940544,s,10,0.689325340270996,0.0689325340270996,0.00012436152765737653,0.0689906234741211,0.06903243942260742,0.06906364402770995,0.06908860771179198,"[0.06868972778320312, 0.06878717041015625, 0.0690252456665039, 0.06901042938232421, 0.06879936218261719, 0.0690948486328125, 0.06899241638183594, 0.06902550506591797, 0.06891180419921875, 0.06898883056640626]",tokens/s,3713.7761379751128,kWh,8.132719494197407e-07,4.4563174950132343e-07,4.388858153397044e-06,5.647761852318108e-06,tokens/kWh,45327690.2769768,MB,1593.93792,1260.91264,0.0,612.368384,597.290496,s,10,13.235438964843748,1.3235438964843749,0.012282302679344853,1.3238765258789063,1.3398249633789063,1.3404457458496093,1.340942371826172,"[1.3020943603515625, 1.3301856689453124, 1.33968701171875, 1.3216077880859376, 1.314635986328125, 1.306641357421875, 1.3317672119140624, 1.323687255859375, 1.3240657958984374, 1.3410665283203125]",tokens/s,47.599479070805224,kWh,1.5861059970232643e-05,8.691756887632361e-06,2.634819421400516e-05,5.0901011071870166e-05,tokens/kWh,1237696.4361483224,,s,630,13.230677831649782,0.02100107592325362,0.0004137619063062231,0.021088768005371093,0.02137518005371094,0.021585304737091065,0.022566277942657476,"[0.020348928451538087, 0.020556800842285155, 0.02080460739135742, 0.02083430480957031, 0.020703231811523438, 0.021132287979125978, 0.020810752868652343, 0.02062848091125488, 0.02065920066833496, 0.02082611274719238, 0.020624383926391602, 0.020642816543579103, 0.02053222465515137, 0.021028863906860353, 0.020661247253417968, 0.020591615676879883, 0.020576255798339844, 0.020632575988769532, 0.020535295486450195, 0.020668415069580077, 0.020584447860717774, 0.020567039489746093, 0.020520959854125977, 0.020599807739257812, 0.020634624481201173, 0.020618240356445314, 0.020722688674926756, 0.020583423614501953, 0.020815872192382814, 0.02064076805114746, 0.02058137512207031, 0.020601919174194336, 0.02078303909301758, 0.02058137512207031, 0.020594688415527345, 0.02063871955871582, 0.02068172836303711, 0.020617216110229493, 0.020755456924438476, 0.020969472885131835, 0.020775936126708985, 0.02109132766723633, 0.021124095916748048, 0.020653055191040038, 0.020593664169311524, 0.02029465675354004, 0.020342784881591795, 0.02037555122375488, 0.020559871673583984, 0.020455423355102538, 0.020376575469970702, 0.020537343978881836, 0.02062335968017578, 0.020754432678222655, 0.0208721923828125, 0.020783103942871094, 0.020538368225097657, 
0.02065920066833496, 0.020485120773315428, 0.020765695571899414, 0.020685823440551757, 0.02070528030395508, 0.02066431999206543, 0.020435968399047853, 0.020726783752441406, 0.020767744064331056, 0.020742143630981445, 0.020752384185791017, 0.020737024307250978, 0.020674560546875, 0.020733951568603515, 0.020725759506225586, 0.020760576248168947, 0.02149171257019043, 0.020666368484497072, 0.020677631378173827, 0.020744192123413087, 0.020707328796386718, 0.020686847686767578, 0.020529151916503906, 0.0206376953125, 0.020697120666503907, 0.02064076805114746, 0.02067555236816406, 0.020718591690063477, 0.02063155174255371, 0.0206561279296875, 0.021259263992309572, 0.020946943283081054, 0.022898687362670898, 0.022386688232421875, 0.02147020721435547, 0.021308416366577147, 0.021389312744140625, 0.021145599365234375, 0.0212992000579834, 0.021214208602905273, 0.02128895950317383, 0.021921791076660157, 0.02162483215332031, 0.02148249626159668, 0.02130534362792969, 0.02127359962463379, 0.021261375427246095, 0.021386175155639647, 0.021195775985717775, 0.021135360717773437, 0.02128998374938965, 0.02128895950317383, 0.021275648117065428, 0.021275648117065428, 0.02119987106323242, 0.021171199798583985, 0.02127052879333496, 0.021206016540527343, 0.02127359962463379, 0.021358591079711914, 0.021158912658691405, 0.0212490234375, 0.021136383056640624, 0.021180416107177736, 0.021169151306152344, 0.02108518409729004, 0.02126438331604004, 0.021255168914794922, 0.021197824478149413, 0.020742143630981445, 0.021168127059936523, 0.021227519989013673, 0.02127257537841797, 0.021133312225341795, 0.021192703247070312, 0.0215285758972168, 0.021207040786743164, 0.021164031982421876, 0.021405696868896484, 0.02123263931274414, 0.02125823974609375, 0.021145599365234375, 0.021218303680419923, 0.02110054397583008, 0.021207040786743164, 0.021144575119018554, 0.021373952865600586, 0.021135360717773437, 0.02124799919128418, 0.02143539237976074, 0.021300224304199217, 0.021307392120361326, 0.021161983489990235, 0.021163007736206055, 0.02126438331604004, 0.021024768829345702, 0.021219327926635743, 0.021246976852416992, 0.02125312042236328, 0.02166476821899414, 0.021342208862304687, 0.022619136810302733, 0.021925888061523437, 0.021200895309448242, 0.021124095916748048, 0.02129305648803711, 0.0212490234375, 0.021149696350097655, 0.021195808410644532, 0.021224416732788087, 0.02127667236328125, 0.021133312225341795, 0.021243904113769533, 0.020941823959350587, 0.021195775985717775, 0.021161983489990235, 0.021362688064575194, 0.02145280075073242, 0.021529600143432616, 0.02104422378540039, 0.02129100799560547, 0.021444608688354492, 0.02125209617614746, 0.021147647857666017, 0.021163007736206055, 0.021169151306152344, 0.02126643180847168, 0.021138431549072266, 0.02127769660949707, 0.021005311965942384, 0.021132287979125978, 0.0211015682220459, 0.020378623962402344, 0.020512767791748047, 0.020512767791748047, 0.020610048294067384, 0.02060905647277832, 0.020626399993896486, 0.020578304290771485, 0.020617216110229493, 0.020611072540283205, 0.020603904724121092, 0.020556800842285155, 0.020591615676879883, 0.02067865562438965, 0.020550655364990233, 0.020725759506225586, 0.0206059513092041, 0.020610048294067384, 0.02104729652404785, 0.021234687805175782, 0.021179391860961915, 0.021189632415771483, 0.021128192901611328, 0.021142528533935546, 0.020996095657348633, 0.021106687545776368, 0.021038080215454103, 0.021109760284423826, 0.021174272537231444, 0.021185535430908203, 0.02125823974609375, 0.02122547149658203, 0.021386240005493166, 
0.021183488845825195, 0.02144256019592285, 0.021180416107177736, 0.021147647857666017, 0.021221439361572267, 0.020971456527709962, 0.02125721549987793, 0.021144575119018554, 0.021160959243774414, 0.021212160110473634, 0.021118976593017577, 0.020967424392700194, 0.021832704544067383, 0.021363712310791014, 0.021167104721069335, 0.021172224044799806, 0.021300224304199217, 0.020496383666992187, 0.020594688415527345, 0.02062233543395996, 0.020393983840942383, 0.022436864852905275, 0.021695487976074217, 0.020817920684814452, 0.020316160202026368, 0.020264991760253905, 0.0216790714263916, 0.021150720596313476, 0.02061516761779785, 0.020447231292724608, 0.021372928619384765, 0.020039680480957032, 0.02063974380493164, 0.02050048065185547, 0.020297727584838866, 0.02039910316467285, 0.020412416458129884, 0.02066227149963379, 0.020559871673583984, 0.02065100860595703, 0.020661247253417968, 0.020616191864013672, 0.020625408172607423, 0.020734975814819336, 0.02068377685546875, 0.02047488021850586, 0.02113747215270996, 0.020668352127075195, 0.020706304550170897, 0.020700159072875975, 0.020543487548828124, 0.020708351135253905, 0.020669439315795898, 0.020649984359741212, 0.020633600234985353, 0.020658176422119142, 0.020765695571899414, 0.021573631286621094, 0.022772735595703125, 0.021734399795532225, 0.021346303939819337, 0.021374975204467773, 0.020694015502929687, 0.020800512313842775, 0.02069811248779297, 0.020768768310546876, 0.020761600494384767, 0.020813823699951172, 0.020780031204223632, 0.020337663650512695, 0.02046463966369629, 0.020601856231689454, 0.02068992042541504, 0.020684799194335936, 0.02069820785522461, 0.020743072509765623, 0.02068070411682129, 0.02045030403137207, 0.020452352523803712, 0.02041548728942871, 0.02106368064880371, 0.021180416107177736, 0.020970495223999023, 0.021835775375366212, 0.02147225570678711, 0.021212160110473634, 0.02124083137512207, 0.02132275199890137, 0.021332992553710937, 0.02123980712890625, 0.021352447509765626, 0.021361663818359376, 0.021279743194580078, 0.021164031982421876, 0.02068070411682129, 0.021313535690307618, 0.021317632675170898, 0.02125823974609375, 0.021117952346801756, 0.02131865692138672, 0.02107187271118164, 0.02228428840637207, 0.021206047058105467, 0.02072470474243164, 0.020447231292724608, 0.02043289566040039, 0.020545536041259766, 0.02067967987060547, 0.02063155174255371, 0.020988927841186524, 0.020344831466674804, 0.02081177520751953, 0.02062131118774414, 0.020728832244873048, 0.020468736648559572, 0.02046668815612793, 0.020455423355102538, 0.020344831466674804, 0.020570112228393556, 0.020595712661743162, 0.0206376953125, 0.020529151916503906, 0.0204769287109375, 0.020371456146240235, 0.020357120513916017, 0.020371456146240235, 0.020473888397216797, 0.020404224395751954, 0.020281312942504882, 0.02040012741088867, 0.020296703338623046, 0.020343807220458983, 0.02045030403137207, 0.02064384078979492, 0.02039910316467285, 0.020855808258056642, 0.020701183319091796, 0.020685823440551757, 0.02036735916137695, 0.021007360458374022, 0.020487167358398437, 0.020395008087158203, 0.02043903923034668, 0.02039193534851074, 0.020644863128662108, 0.02081279945373535, 0.020708351135253905, 0.020799488067626954, 0.02064691162109375, 0.02064588737487793, 0.021770240783691407, 0.02122547149658203, 0.021169151306152344, 0.021137407302856445, 0.02119987106323242, 0.021111808776855468, 0.0210882568359375, 0.02103500747680664, 0.02122137641906738, 0.021102592468261717, 0.021215232849121093, 0.021123104095458985, 0.02128892707824707, 0.021542911529541017, 
0.022915071487426757, 0.021598207473754884, 0.021513216018676756, 0.021377023696899415, 0.021163007736206055, 0.02129100799560547, 0.021135360717773437, 0.021145631790161133, 0.021200864791870118, 0.020997119903564454, 0.021152767181396484, 0.021004287719726563, 0.02110771179199219, 0.021181440353393553, 0.021172256469726564, 0.021225439071655274, 0.021287935256958008, 0.02123366355895996, 0.02129305648803711, 0.02162892723083496, 0.02122547149658203, 0.02122854423522949, 0.020858879089355468, 0.020550655364990233, 0.02086502456665039, 0.020766719818115235, 0.020773887634277344, 0.020619264602661135, 0.020791296005249024, 0.02106777572631836, 0.02121625518798828, 0.021153791427612305, 0.021213184356689452, 0.020928512573242186, 0.02088960075378418, 0.020760576248168947, 0.020765695571899414, 0.02087936019897461, 0.020747264862060546, 0.020959232330322267, 0.020878400802612305, 0.02116499137878418, 0.021222400665283202, 0.022649856567382814, 0.02123776054382324, 0.02110054397583008, 0.021198848724365234, 0.021597183227539063, 0.021001216888427734, 0.020521984100341797, 0.020619264602661135, 0.020875263214111327, 0.02027008056640625, 0.020315135955810547, 0.02188800048828125, 0.021337087631225587, 0.020624191284179687, 0.02119987106323242, 0.021214208602905273, 0.021368831634521485, 0.02145792007446289, 0.02127359962463379, 0.021588991165161133, 0.02120806312561035, 0.0212674560546875, 0.021394432067871092, 0.02124083137512207, 0.020706304550170897, 0.020978687286376953, 0.021325824737548828, 0.020946943283081054, 0.0210831356048584, 0.02125823974609375, 0.021226495742797852, 0.02200371170043945, 0.02128998374938965, 0.021393407821655275, 0.02118969535827637, 0.021725120544433593, 0.02121625518798828, 0.020910112380981446, 0.02068374443054199, 0.020719615936279297, 0.02084556770324707, 0.020947967529296875, 0.020780031204223632, 0.020943872451782225, 0.021128223419189452, 0.02093155288696289, 0.021128192901611328, 0.020940799713134766, 0.021348352432250976, 0.02132275199890137, 0.021255168914794922, 0.02128281593322754, 0.020954111099243163, 0.020867071151733398, 0.021219327926635743, 0.021145599365234375, 0.02086195182800293, 0.020592639923095703, 0.020324352264404297, 0.020237312316894532, 0.020996095657348633, 0.02073299217224121, 0.02103494453430176, 0.020351999282836913, 0.020618240356445314, 0.02109132766723633, 0.02059775924682617, 0.020543487548828124, 0.02083635139465332, 0.020734975814819336, 0.02048307228088379, 0.02039910316467285, 0.0210565128326416, 0.02026905632019043, 0.021380096435546874, 0.020549631118774413, 0.020600831985473633, 0.020540416717529295, 0.020948991775512696, 0.02059775924682617, 0.02066739273071289, 0.020699136734008788, 0.020592639923095703, 0.020600831985473633, 0.020578304290771485, 0.020583423614501953, 0.020619264602661135, 0.0206561279296875, 0.02067148780822754, 0.02062335968017578, 0.02071347236633301, 0.02122547149658203, 0.021021728515625, 0.02073084831237793, 0.020727807998657227, 0.02069196891784668, 0.021109760284423826, 0.021393407821655275, 0.02085478401184082, 0.02063564872741699, 0.020937728881835937, 0.02067251205444336, 0.020975616455078124, 0.02070425605773926, 0.020570112228393556, 0.020766719818115235, 0.02060697555541992, 0.020933631896972657, 0.020661312103271483, 0.02055776023864746, 0.02101043128967285, 0.021186559677124024, 0.021090303421020508, 0.021627904891967774, 0.020984832763671874, 0.021246976852416992, 0.023129119873046874, 0.02188591957092285, 0.021399551391601563, 0.02150092887878418, 0.021123071670532227, 
0.0212674560546875, 0.021283840179443358, 0.021356544494628905, 0.021481472015380858, 0.021312511444091797, 0.02122035217285156, 0.021242879867553712, 0.021163007736206055, 0.021198848724365234, 0.021136383056640624, 0.021081087112426757, 0.021265439987182617, 0.021111839294433592, 0.021273536682128905, 0.021177343368530274, 0.02123161506652832, 0.021090303421020508, 0.02103398323059082, 0.021117952346801756, 0.021381120681762695, 0.02122547149658203, 0.021142528533935546, 0.021163007736206055, 0.021203968048095705, 0.02126540756225586, 0.021149696350097655, 0.0212541446685791, 0.021192703247070312, 0.021134336471557616, 0.021296127319335938, 0.021227519989013673, 0.021329919815063478, 0.021234687805175782, 0.021120000839233398, 0.021303295135498047, 0.021763071060180664, 0.021365760803222656, 0.021155839920043946, 0.02126335906982422, 0.021211135864257814, 0.021123071670532227, 0.021235712051391603, 0.021339136123657225, 0.02124492835998535, 0.021227519989013673, 0.02149478340148926, 0.021580799102783203, 0.021391359329223633, 0.021279743194580078, 0.02126950454711914, 0.021136383056640624, 0.02131865692138672, 0.021136383056640624, 0.021141504287719725, 0.021196800231933592, 0.021171199798583985, 0.021259263992309572, 0.02127769660949707, 0.021304319381713867, 0.021171199798583985, 0.021339136123657225, 0.022004735946655272, 0.02269081687927246, 0.022141952514648438, 0.02152038383483887, 0.021238784790039062, 0.021219327926635743, 0.021295103073120117, 0.021170175552368165, 0.021368831634521485, 0.020761632919311525, 0.021210079193115235, 0.021179391860961915, 0.021089279174804687, 0.020986879348754883, 0.02099404716491699, 0.021109760284423826, 0.021016576766967773, 0.02108415985107422, 0.021189632415771483, 0.02117635154724121]",tokens/s,47.616608008770704,,,1,64,1,,, -4bit-awq-exllama-v1-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,databricks/dbrx-base,databricks/dbrx-base,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 304, in hf_raise_for_status - response.raise_for_status() - File ""/usr/local/lib/python3.10/dist-packages/requests/models.py"", line 1024, in raise_for_status - raise HTTPError(http_error_msg, response=self) -requests.exceptions.HTTPError: 403 Client Error: Forbidden for url: https://huggingface.co/databricks/dbrx-base/resolve/main/config.json - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): 
- File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1722, in _get_metadata_or_catch_error - metadata = get_hf_file_metadata(url=url, proxies=proxies, timeout=etag_timeout, headers=headers) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1645, in get_hf_file_metadata - r = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 372, in _request_wrapper - response = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 396, in _request_wrapper - hf_raise_for_status(response) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 367, in hf_raise_for_status - raise HfHubHTTPError(message, response=response) from e -huggingface_hub.utils._errors.HfHubHTTPError: (Request ID: Root=1-66947b27-2f11209560e5e59808fcd139;2cdc5181-f4ba-4dac-967f-6415cd5a6ecf) - -403 Forbidden: Please enable access to public gated repositories in your fine-grained token settings to view this repository.. -Cannot access content at: https://huggingface.co/databricks/dbrx-base/resolve/main/config.json. -If you are trying to create or update content,make sure you have a token with the `write` role. - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1221, in hf_hub_download - return _hf_hub_download_to_cache_dir( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1325, in _hf_hub_download_to_cache_dir - _raise_on_head_call_error(head_call_error, force_download, local_files_only) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1826, in _raise_on_head_call_error - raise LocalEntryNotFoundError( -huggingface_hub.utils._errors.LocalEntryNotFoundError: An error happened while trying to locate the file on the Hub and we cannot find the requested files in the local cache. Please check your connection and try again or make sure your Internet connection is on. 
- -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 445, in cached_file - raise EnvironmentError( -OSError: We couldn't connect to 'https://huggingface.co' to load this file, couldn't find it in the cached files and it looks like databricks/dbrx-base is not the path to a directory containing a file named config.json. -Checkout your internet connection or see how to run the library in offline mode at 'https://huggingface.co/docs/transformers/installation#offline-mode'. 
- -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,1,64,1,,, -4bit-awq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,Qwen/Qwen1.5-7B,Qwen/Qwen1.5-7B,cuda,0,42,,,True,,,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,qwen2,MB,4926.066688,8141.668352,0.0,7511.998464,6895.682048,s,1,10.896322265625,10.896322265625,0.0,10.896322265625,10.896322265625,10.896322265625,10.896322265625,[10.896322265625],,kWh,4.840031531600744e-05,2.6511123187815943e-05,8.388978933404623e-05,0.0001588012278378696,,MB,2525.093888,8160.54272,0.0,7514.095616,6822.141952,s,10,12.336150024414062,1.233615002441406,0.00013443096958069256,1.2336131591796875,1.233785595703125,1.23381943359375,1.23384650390625,"[1.2336898193359376, 1.2333668212890625, 1.2336474609375, 1.233578857421875, 1.233486083984375, 1.2335616455078124, 1.233532958984375, 1.233655029296875, 1.233853271484375, 1.233778076171875]",tokens/s,207.52017403595042,kWh,1.4581505123265338e-05,7.990333682714313e-06,8.624712455319594e-05,0.0001088189633591756,tokens/kWh,2352531.1406894056,MB,2529.419264,8164.737024,0.0,7516.192768,6822.144512,s,10,18.738342041015624,1.8738342041015623,0.0204998251795438,1.8652894287109376,1.9000093872070314,1.910690789794922,1.9192359118652342,"[1.866954345703125, 1.8482406005859375, 1.8722596435546874, 1.8976357421875, 1.9213721923828124, 1.8838333740234374, 1.863087890625, 1.86362451171875, 1.8591259765625, 1.862207763671875]",tokens/s,33.62090405976247,kWh,2.2014401797153413e-05,1.2065795797872041e-05,6.572830258260165e-05,9.980850017762704e-05,tokens/kWh,631208.7636612138,,s,630,18.736427013397225,0.029740360338725742,0.0006487899794547459,0.029504000663757322,0.030612685012817383,0.030800845336914063,0.031394785270690916,"[0.030288896560668944, 0.029229055404663085, 0.028767232894897462, 0.029029375076293946, 0.029425664901733397, 0.029817855834960938, 0.029343744277954102, 0.029328384399414063, 0.02919219207763672, 0.029303808212280274, 0.02959052848815918, 0.029470720291137696, 0.029271039962768555, 0.02898739242553711, 0.028854272842407228, 0.02898124885559082, 0.02879078483581543, 0.0307957763671875, 0.03061452865600586, 0.029129728317260743, 0.030072832107543947, 0.03085312080383301, 0.02934988784790039, 0.02979532814025879, 0.0295598087310791, 0.029499391555786132, 0.02935603141784668, 0.02924236869812012, 0.031054847717285155, 0.02951475143432617, 0.029418495178222655, 0.030005247116088866, 0.029560831069946288, 0.02937548828125, 0.02993356704711914, 0.029128704071044922, 0.029016063690185546, 0.029280256271362305, 0.03094937515258789, 0.029953023910522462, 0.029995008468627928, 0.029634559631347656, 0.029404159545898437, 0.028984319686889647, 0.030102527618408204, 0.0321607666015625, 0.03054591941833496, 0.030119935989379884, 0.029878271102905272, 0.02952908706665039, 0.02896588706970215, 0.02944819259643555, 0.03053670310974121, 0.0294072322845459, 0.029430784225463868, 0.029641727447509765, 0.029280256271362305, 0.029886463165283202, 0.029672447204589843, 0.029412351608276367, 
0.029298688888549803, 0.029005823135375978, 0.029296640396118165, 0.030327808380126952, 0.029427711486816405, 0.029329408645629884, 0.0292096004486084, 0.029228031158447267, 0.030567424774169922, 0.02953625679016113, 0.02937446403503418, 0.029446144104003907, 0.029411327362060546, 0.029232128143310547, 0.029148160934448244, 0.028809215545654295, 0.028824575424194337, 0.02877952003479004, 0.02899660873413086, 0.029080575942993164, 0.029290496826171877, 0.029267967224121092, 0.02944819259643555, 0.031140863418579103, 0.03054489517211914, 0.03018649673461914, 0.030427135467529298, 0.02937651252746582, 0.029032447814941405, 0.029412351608276367, 0.02933148765563965, 0.029391839981079103, 0.029215744018554687, 0.02893824005126953, 0.029410303115844725, 0.029076480865478517, 0.029422592163085938, 0.029055999755859374, 0.02920140838623047, 0.029173759460449217, 0.02894233512878418, 0.028916736602783204, 0.02944000053405762, 0.02996735954284668, 0.02935603141784668, 0.02931711959838867, 0.02933964729309082, 0.029124607086181642, 0.029241344451904298, 0.02889625549316406, 0.029083648681640626, 0.028892160415649414, 0.028894208908081056, 0.0289751033782959, 0.029329408645629884, 0.028924928665161134, 0.029039615631103514, 0.029503488540649415, 0.029456384658813478, 0.029295616149902344, 0.02895462417602539, 0.02894643211364746, 0.0293570556640625, 0.02935807991027832, 0.0294021110534668, 0.029017087936401367, 0.02991923141479492, 0.029414400100708008, 0.029441024780273436, 0.029355007171630858, 0.029253631591796874, 0.029865983963012696, 0.029817855834960938, 0.02954547119140625, 0.029035520553588868, 0.029084672927856447, 0.028899328231811523, 0.0289751033782959, 0.02895359992980957, 0.028872703552246092, 0.02893414306640625, 0.029051904678344728, 0.029025279998779296, 0.029064191818237304, 0.028845056533813477, 0.028895231246948243, 0.028925952911376954, 0.030209024429321288, 0.030296064376831053, 0.030672895431518556, 0.030476287841796876, 0.03054489517211914, 0.02996019172668457, 0.030027776718139648, 0.029920255661010742, 0.030113792419433592, 0.030436351776123048, 0.030098432540893554, 0.029863935470581054, 0.029812736511230467, 0.030203903198242187, 0.03040563201904297, 0.030446592330932616, 0.030225408554077147, 0.030100479125976562, 0.030216192245483397, 0.03101081657409668, 0.029706239700317383, 0.02916966438293457, 0.029920255661010742, 0.02941644859313965, 0.029388799667358398, 0.029297664642333986, 0.030456832885742188, 0.03036467170715332, 0.030448640823364258, 0.03022233581542969, 0.03016499137878418, 0.029244415283203123, 0.029638656616210936, 0.029048831939697265, 0.029327360153198243, 0.029403135299682616, 0.02919526481628418, 0.02934681510925293, 0.0294072322845459, 0.02959872055053711, 0.030587903976440428, 0.030499839782714845, 0.030297088623046874, 0.02916454315185547, 0.029616128921508788, 0.029222911834716796, 0.029298688888549803, 0.029844480514526366, 0.029592575073242186, 0.02942361640930176, 0.029285375595092773, 0.030128128051757814, 0.028919807434082033, 0.029510656356811524, 0.030050304412841795, 0.02977484893798828, 0.02977382469177246, 0.031169536590576172, 0.030442495346069336, 0.029503488540649415, 0.029290496826171877, 0.029379583358764647, 0.03038310432434082, 0.029938688278198244, 0.029869056701660155, 0.030518272399902343, 0.030485504150390624, 0.02977894401550293, 0.030349311828613282, 0.0294072322845459, 0.029386751174926756, 0.02958233642578125, 0.030511104583740234, 0.029634559631347656, 0.0295731201171875, 0.03124940872192383, 0.031101951599121092, 
0.030508031845092775, 0.030766080856323243, 0.03056844711303711, 0.030683135986328124, 0.031323135375976564, 0.031085567474365236, 0.030518272399902343, 0.030361600875854492, 0.030494720458984374, 0.03015065574645996, 0.02993971252441406, 0.03017420768737793, 0.030655487060546875, 0.030402559280395508, 0.030061567306518554, 0.030144512176513674, 0.030227455139160156, 0.030219263076782226, 0.030053375244140625, 0.03042406463623047, 0.03079475212097168, 0.03053670310974121, 0.030455808639526367, 0.030070783615112305, 0.030224384307861327, 0.030436351776123048, 0.030320640563964843, 0.030382080078125, 0.031286272048950195, 0.030699520111083983, 0.03218739318847656, 0.030273536682128906, 0.03167334365844727, 0.03061452865600586, 0.030513151168823242, 0.030688255310058594, 0.030499839782714845, 0.030427135467529298, 0.030492671966552733, 0.03041177558898926, 0.030651391983032225, 0.030542848587036132, 0.030521343231201172, 0.030670848846435547, 0.03073023986816406, 0.03052441596984863, 0.030523391723632814, 0.030333951950073244, 0.03052441596984863, 0.030486528396606444, 0.030485504150390624, 0.030693376541137695, 0.03062681579589844, 0.03014143943786621, 0.03013324737548828, 0.03037183952331543, 0.030642175674438478, 0.03022233581542969, 0.030457855224609375, 0.030596096038818358, 0.030667776107788085, 0.03059097671508789, 0.030700544357299804, 0.030458879470825196, 0.03058585548400879, 0.031031295776367186, 0.0313118724822998, 0.030907392501831055, 0.03116339111328125, 0.031784959793090824, 0.030678016662597656, 0.030671871185302735, 0.030241792678833007, 0.03057459259033203, 0.030192640304565428, 0.030297088623046874, 0.0305664005279541, 0.02933145523071289, 0.030683135986328124, 0.03080499267578125, 0.030489599227905274, 0.029282304763793947, 0.0297891845703125, 0.03019059181213379, 0.02916761589050293, 0.02954751968383789, 0.030271488189697264, 0.02916761589050293, 0.029667327880859375, 0.029851648330688478, 0.030859264373779297, 0.031235071182250978, 0.030216192245483397, 0.030091264724731445, 0.030096384048461915, 0.030523391723632814, 0.03075481605529785, 0.029062143325805666, 0.030703615188598633, 0.029033472061157226, 0.02940108871459961, 0.02959667205810547, 0.03077939224243164, 0.03035238456726074, 0.030444543838500978, 0.029619199752807617, 0.029112319946289062, 0.0293570556640625, 0.02996326446533203, 0.030307327270507813, 0.029363199234008788, 0.030159872055053712, 0.03013324737548828, 0.02972979164123535, 0.029108224868774416, 0.03019059181213379, 0.03153305625915527, 0.029478912353515626, 0.03077222442626953, 0.029445119857788086, 0.02993152046203613, 0.0299233283996582, 0.029282304763793947, 0.02897407913208008, 0.030063615798950196, 0.030458879470825196, 0.030274560928344726, 0.02963763236999512, 0.03146854400634766, 0.03038617515563965, 0.02953113555908203, 0.030043136596679686, 0.03016396713256836, 0.030612480163574218, 0.031172607421875, 0.029377536773681642, 0.02919424057006836, 0.02921062469482422, 0.02937343978881836, 0.02982809638977051, 0.02979635238647461, 0.02894438362121582, 0.02920140838623047, 0.02895052719116211, 0.029106176376342774, 0.029305856704711915, 0.029207551956176758, 0.029730815887451172, 0.029739007949829102, 0.030038015365600586, 0.029889535903930665, 0.030611455917358397, 0.030476287841796876, 0.02917888069152832, 0.03139686393737793, 0.030909439086914063, 0.03082444763183594, 0.029327360153198243, 0.02915225601196289, 0.028824575424194337, 0.029454336166381836, 0.02938163185119629, 0.029072383880615234, 0.028869632720947266, 0.028850175857543944, 
0.030195711135864257, 0.028917760848999025, 0.028873727798461913, 0.028836864471435547, 0.031064064025878906, 0.029831167221069335, 0.030106624603271483, 0.02895257568359375, 0.02898124885559082, 0.02897407913208008, 0.03000831985473633, 0.030530559539794923, 0.03042815971374512, 0.030414848327636718, 0.02949836730957031, 0.02929254341125488, 0.028924928665161134, 0.029306880950927733, 0.02936832046508789, 0.029247488021850586, 0.029287424087524414, 0.02919424057006836, 0.029380607604980468, 0.02938368034362793, 0.029319168090820313, 0.02977894401550293, 0.029685760498046877, 0.02933350372314453, 0.029395967483520507, 0.030244863510131836, 0.029096960067749023, 0.028931072235107422, 0.02995609664916992, 0.031122432708740235, 0.030269439697265626, 0.029259775161743166, 0.029247488021850586, 0.02900377655029297, 0.02938982391357422, 0.029446144104003907, 0.02918502426147461, 0.029055999755859374, 0.029784063339233398, 0.029478912353515626, 0.031389696121215824, 0.029429792404174804, 0.03046703910827637, 0.02940620803833008, 0.02916044807434082, 0.02895871925354004, 0.029064191818237304, 0.0289751033782959, 0.03037593650817871, 0.029717504501342775, 0.029575168609619142, 0.029241344451904298, 0.02935398483276367, 0.029305856704711915, 0.029434879302978514, 0.029504512786865233, 0.029040639877319335, 0.030135295867919923, 0.02898739242553711, 0.029213695526123046, 0.02921062469482422, 0.029914112091064454, 0.030467071533203126, 0.029351936340332032, 0.029107200622558595, 0.029241344451904298, 0.029851648330688478, 0.029944831848144532, 0.03037286376953125, 0.03038719940185547, 0.029616128921508788, 0.029628416061401368, 0.02953727912902832, 0.029405183792114258, 0.02934783935546875, 0.029446144104003907, 0.02895564842224121, 0.029414400100708008, 0.029131776809692384, 0.029214719772338867, 0.029239295959472656, 0.029121536254882813, 0.029231103897094726, 0.029072383880615234, 0.030192640304565428, 0.03055615997314453, 0.029231103897094726, 0.029508607864379883, 0.029328384399414063, 0.029306880950927733, 0.030229503631591798, 0.029360128402709962, 0.0291276798248291, 0.029310976028442383, 0.029378559112548826, 0.029473791122436522, 0.030268415451049805, 0.030441471099853516, 0.03015577507019043, 0.029896703720092774, 0.030666751861572264, 0.03062272071838379, 0.030361600875854492, 0.030479360580444335, 0.028887039184570314, 0.028887039184570314, 0.029035520553588868, 0.029162496566772462, 0.029265920639038087, 0.02933964729309082, 0.028859392166137695, 0.02992742347717285, 0.02960383987426758, 0.029873151779174805, 0.029318143844604492, 0.029162496566772462, 0.02975027275085449, 0.030297088623046874, 0.030559232711791992, 0.029146112442016602, 0.029305856704711915, 0.029363199234008788, 0.029321216583251954, 0.029277183532714843, 0.029441024780273436, 0.029666303634643554, 0.029600767135620116, 0.029807615280151366, 0.030729215621948244, 0.029113344192504883, 0.030112768173217775, 0.030398464202880858, 0.030263296127319338, 0.02897817611694336, 0.0295280647277832, 0.030455808639526367, 0.029265920639038087, 0.02939084815979004, 0.029361152648925783, 0.02929151916503906, 0.02914303970336914, 0.029058048248291016, 0.02944000053405762, 0.029212671279907225, 0.02951372718811035, 0.02893721580505371, 0.02936832046508789, 0.02956800079345703, 0.030055423736572266, 0.029627391815185547, 0.029663232803344725, 0.03002470397949219, 0.029899776458740233, 0.0293703670501709, 0.02894540786743164, 0.0291409912109375, 0.02896384048461914, 0.028919807434082033, 0.029016063690185546, 0.02938368034362793, 
0.029232128143310547, 0.029995008468627928, 0.030470144271850585, 0.030070783615112305, 0.029377536773681642, 0.02893414306640625, 0.029191167831420898, 0.02894233512878418, 0.029275136947631834, 0.029091840744018556, 0.028843008041381835, 0.029243392944335936, 0.02935398483276367, 0.029344768524169923, 0.02991923141479492, 0.030288896560668944, 0.03079167938232422, 0.029508607864379883, 0.02937343978881836, 0.029257823944091797, 0.02924943923950195, 0.029155328750610353, 0.028863487243652345, 0.029275136947631834, 0.029329408645629884, 0.030243839263916016, 0.02933247947692871, 0.03003392028808594, 0.030089216232299806, 0.029298688888549803, 0.028900352478027344, 0.02932326316833496, 0.029046783447265623, 0.028892160415649414, 0.02973388862609863, 0.03036467170715332, 0.02938163185119629, 0.031121408462524414, 0.030121984481811522, 0.029880319595336914, 0.02894745635986328, 0.02918809509277344, 0.029221887588500976, 0.02899456024169922, 0.02993152046203613, 0.029211648941040037, 0.029284351348876952, 0.029306880950927733, 0.02933350372314453, 0.0301527042388916, 0.030216192245483397, 0.030309375762939454, 0.029642751693725586, 0.02920140838623047, 0.029159423828125, 0.02934681510925293, 0.029297664642333986, 0.02895871925354004, 0.029219839096069337, 0.03076812744140625, 0.03055820846557617, 0.030641151428222657, 0.029274112701416017, 0.029394943237304686, 0.02944000053405762, 0.02941542434692383, 0.029430784225463868, 0.02936729621887207, 0.029419519424438476, 0.029240320205688477, 0.029288543701171874, 0.02936515235900879, 0.02939187240600586, 0.029669376373291017, 0.02934681510925293, 0.029369344711303712, 0.02893414306640625]",tokens/s,33.624340411836656,,,1,64,1,,, -4bit-awq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,Qwen/Qwen2-beta-14B,Qwen/Qwen2-beta-14B,cuda,0,42,,,True,,,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,,qwen2,MB,7844.036608,12374.769664,0.0,11737.759744,11171.24352,s,1,12.8456513671875,12.8456513671875,0.0,12.8456513671875,12.8456513671875,12.8456513671875,12.8456513671875,[12.8456513671875],,kWh,7.146892641735892e-05,3.913188574104515e-05,0.0001392428891719838,0.0002498437013303879,,MB,3638.206464,12393.644032,0.0,11739.856896,10925.606912,s,10,24.169941406249997,2.4169941406249995,5.532110201226042e-05,2.4169991455078126,2.4170465087890625,2.417067932128906,2.417085070800781,"[2.417036865234375, 2.416976318359375, 2.416998046875, 2.41708935546875, 2.417041748046875, 2.416967529296875, 2.4168779296875, 2.417000244140625, 2.41701025390625, 2.416943115234375]",tokens/s,105.91668208752965,kWh,2.854925527277803e-05,1.5645295418531838e-05,0.00016724607824120246,0.00021144062893251235,tokens/kWh,1210741.7637397877,MB,3642.51136,12397.838336,0.0,11741.954048,10925.609472,s,10,23.733299560546875,2.3733299560546874,0.021191656493858457,2.3682368164062497,2.4075067626953124,2.413476403808594,2.4182521166992186,"[2.355775634765625, 2.37031298828125, 2.3564130859375, 2.36616064453125, 2.374303466796875, 2.352482421875, 2.419446044921875, 2.3593876953125, 2.40618017578125, 
2.37283740234375]",tokens/s,26.54498159401664,kWh,2.786394980298554e-05,1.5272161836775215e-05,0.00010150622009379577,0.00014464233173355645,tokens/kWh,435557.1376991584,,s,630,23.731200008392324,0.0376685714418926,0.0006051399213285353,0.03735654258728027,0.03857213516235351,0.03876428642272949,0.03924187152862549,"[0.038316032409667966, 0.037544960021972655, 0.03727052688598633, 0.03736883163452148, 0.038384639739990234, 0.0374128646850586, 0.037644287109375, 0.037493759155273435, 0.03732992172241211, 0.03754086303710937, 0.03726131057739258, 0.03731148910522461, 0.03728179168701172, 0.03731660842895508, 0.03735551834106445, 0.037250049591064455, 0.036967422485351564, 0.03734220886230469, 0.03718860626220703, 0.03714048004150391, 0.03717324829101563, 0.03723775863647461, 0.03718963241577149, 0.037236736297607424, 0.03725209426879883, 0.037253120422363284, 0.037250049591064455, 0.03729510498046875, 0.03748761749267578, 0.037498878479003905, 0.03749068832397461, 0.037599231719970705, 0.03816550445556641, 0.0373831672668457, 0.03731455993652344, 0.037348350524902346, 0.03731660842895508, 0.0373309440612793, 0.03852492904663086, 0.03808665466308594, 0.03739648056030274, 0.03721625518798828, 0.03725414276123047, 0.03723775863647461, 0.03720499038696289, 0.03713740921020508, 0.037177345275878904, 0.037285888671875, 0.0371701774597168, 0.03722854232788086, 0.037084159851074217, 0.03721625518798828, 0.037147647857666014, 0.037125118255615236, 0.03705548858642578, 0.037285888671875, 0.03721830368041992, 0.03724288177490234, 0.03715071868896484, 0.03791667175292969, 0.038160385131835936, 0.037341182708740234, 0.03715584182739258, 0.03817062377929688, 0.037168128967285156, 0.03711590576171875, 0.03727360153198242, 0.037114879608154294, 0.0371671028137207, 0.0370882568359375, 0.037133312225341795, 0.03671039962768555, 0.03704115295410156, 0.037176319122314457, 0.03714252853393555, 0.03718656158447266, 0.03753267288208008, 0.03768217468261719, 0.03782144165039063, 0.03738521575927734, 0.03735859298706055, 0.03716403198242187, 0.038215679168701173, 0.03749273681640625, 0.037456897735595705, 0.03728793716430664, 0.037700607299804685, 0.037961727142333986, 0.03838259124755859, 0.03925196838378906, 0.0392171516418457, 0.03770880126953125, 0.03739033508300781, 0.03743334579467773, 0.037362686157226564, 0.037375999450683595, 0.03696230316162109, 0.03790643310546875, 0.03841535949707031, 0.03740467071533203, 0.037233665466308595, 0.037978111267089845, 0.038042625427246096, 0.03767091369628906, 0.03744460678100586, 0.03814297485351562, 0.03842969512939453, 0.037466110229492186, 0.0380497932434082, 0.03743231964111328, 0.037212158203125, 0.03730124664306641, 0.03727360153198242, 0.037804031372070314, 0.03730739212036133, 0.03738828659057617, 0.03730944061279297, 0.03720499038696289, 0.03731763076782227, 0.03723980712890625, 0.03802521514892578, 0.0397916145324707, 0.03851776123046875, 0.03831398391723633, 0.03758694458007812, 0.03725823974609375, 0.038073345184326174, 0.037233665466308595, 0.03706163024902344, 0.03709439849853516, 0.03711385726928711, 0.03702169418334961, 0.03715891265869141, 0.0368455696105957, 0.03709030532836914, 0.037168128967285156, 0.03708927917480469, 0.03707187271118164, 0.037130241394042966, 0.037119998931884765, 0.03711078262329102, 0.03710259246826172, 0.037114879608154294, 0.037098495483398435, 0.03717324829101563, 0.03706367874145508, 0.037179393768310545, 0.03712716674804688, 0.037157886505126955, 0.03706572723388672, 0.037477375030517575, 0.03750604629516602, 
0.03713740921020508, 0.03709439849853516, 0.03706982421875, 0.03704627227783203, 0.03710566329956055, 0.03704729461669922, 0.03721113586425781, 0.037070846557617186, 0.03762790298461914, 0.0385873908996582, 0.03852799987792969, 0.03845529556274414, 0.03880550384521484, 0.038491134643554685, 0.03720499038696289, 0.03710464096069336, 0.037101566314697264, 0.037154815673828126, 0.037179393768310545, 0.03809791946411133, 0.03799859237670898, 0.03721420669555664, 0.037015552520751956, 0.03705036926269531, 0.03723571014404297, 0.03723468780517578, 0.037789695739746096, 0.03734425735473633, 0.037185535430908204, 0.0370882568359375, 0.03715379333496094, 0.03780710220336914, 0.03713740921020508, 0.0371671028137207, 0.03781324768066406, 0.039327743530273435, 0.03906355285644531, 0.03811123275756836, 0.03725721740722656, 0.037160961151123044, 0.03712102508544922, 0.03713433456420898, 0.03719372940063476, 0.037282817840576174, 0.03729100799560547, 0.036782081604003904, 0.037292030334472655, 0.03704217529296875, 0.037179393768310545, 0.03727360153198242, 0.03718963241577149, 0.03712716674804688, 0.0371671028137207, 0.037184513092041016, 0.03722854232788086, 0.03711283111572266, 0.03719782257080078, 0.03718143844604492, 0.03718860626220703, 0.037154815673828126, 0.037160961151123044, 0.037171199798583986, 0.037179393768310545, 0.03717222213745117, 0.03724697494506836, 0.03715071868896484, 0.03711180877685547, 0.037138431549072266, 0.03722956848144531, 0.0371701774597168, 0.03726131057739258, 0.037174270629882815, 0.03711897659301758, 0.03783782577514649, 0.03868672180175781, 0.039128063201904296, 0.03810508728027344, 0.03734527969360352, 0.03729612731933594, 0.03724492645263672, 0.03713945770263672, 0.03727667236328125, 0.03724697494506836, 0.03866419219970703, 0.038542335510253906, 0.038383617401123046, 0.03829145431518555, 0.038371326446533204, 0.038329345703125, 0.037754878997802735, 0.03849420928955078, 0.038406143188476564, 0.03841024017333984, 0.03849728012084961, 0.03745280075073242, 0.03743027114868164, 0.03768832015991211, 0.037966846466064456, 0.03830886459350586, 0.03851059341430664, 0.03809894561767578, 0.037250049591064455, 0.03721932983398438, 0.03719475173950195, 0.037266433715820314, 0.03727667236328125, 0.037294078826904296, 0.03720294570922852, 0.03739340972900391, 0.0373043212890625, 0.039119873046875, 0.038578174591064454, 0.03762483215332031, 0.037182464599609374, 0.037926910400390625, 0.037370880126953124, 0.03785932922363281, 0.038348800659179685, 0.037548030853271484, 0.03849932861328125, 0.0374128646850586, 0.03732582473754883, 0.03804876708984375, 0.037217281341552735, 0.03836928176879883, 0.03864678573608398, 0.03736678314208984, 0.03764019012451172, 0.037663745880126956, 0.03727667236328125, 0.03725414276123047, 0.03750707244873047, 0.03759820938110352, 0.03728998565673828, 0.038624256134033204, 0.03863859176635742, 0.037967872619628903, 0.037318656921386716, 0.03864780807495117, 0.03774566268920899, 0.03738726425170898, 0.037323776245117186, 0.03890687942504883, 0.03752959823608398, 0.037449726104736326, 0.03730739212036133, 0.038591487884521485, 0.037348350524902346, 0.03739340972900391, 0.038299648284912106, 0.03772825622558594, 0.03763302230834961, 0.03729919815063477, 0.03724595260620117, 0.03742822265625, 0.03727769470214844, 0.037416961669921874, 0.03728179168701172, 0.03882393646240234, 0.037340160369873046, 0.037326847076416016, 0.03730124664306641, 0.03733913421630859, 0.03804159927368164, 0.03735756683349609, 0.038007808685302735, 0.037323776245117186, 
0.03733196640014649, 0.03722956848144531, 0.037233665466308595, 0.03719168090820312, 0.03708313751220703, 0.03706880187988281, 0.03706572723388672, 0.03719987106323242, 0.03712204742431641, 0.03711897659301758, 0.037059585571289064, 0.03714252853393555, 0.037310462951660156, 0.038332416534423826, 0.037179393768310545, 0.037146625518798826, 0.03714048004150391, 0.0371671028137207, 0.03724595260620117, 0.037367809295654295, 0.037169151306152344, 0.03726847839355469, 0.03720806503295898, 0.037182464599609374, 0.03719782257080078, 0.03722956848144531, 0.03729305648803711, 0.03722547149658203, 0.03714048004150391, 0.03713433456420898, 0.03716403198242187, 0.03711795043945312, 0.037222400665283206, 0.03714048004150391, 0.037169151306152344, 0.03715584182739258, 0.03716505432128906, 0.03715071868896484, 0.03720806503295898, 0.037154815673828126, 0.03724492645263672, 0.03722444915771484, 0.037207038879394534, 0.03720191955566406, 0.037574657440185545, 0.040651775360107424, 0.03865599822998047, 0.037288959503173826, 0.03725107192993164, 0.037217281341552735, 0.03730636978149414, 0.037203968048095705, 0.037275646209716795, 0.03738828659057617, 0.03745894241333008, 0.03724390411376953, 0.03727052688598633, 0.03715891265869141, 0.03726847839355469, 0.038214656829833986, 0.037324798583984374, 0.03753676986694336, 0.03728998565673828, 0.037318656921386716, 0.03743948745727539, 0.037353473663330077, 0.03739136123657227, 0.03738828659057617, 0.038188030242919925, 0.038626304626464845, 0.03822182464599609, 0.0385351676940918, 0.03842764663696289, 0.03839487838745117, 0.03859763336181641, 0.038572032928466796, 0.03864780807495117, 0.0384983024597168, 0.03849420928955078, 0.03802521514892578, 0.039532543182373044, 0.03875942230224609, 0.037820415496826174, 0.03824844741821289, 0.038403072357177735, 0.038645759582519534, 0.037855232238769534, 0.038558719635009765, 0.038449153900146485, 0.038629375457763675, 0.03884646224975586, 0.03860889434814453, 0.038779903411865234, 0.03886489486694336, 0.038830078125, 0.03911167907714844, 0.03859763336181641, 0.03820748901367187, 0.03851980972290039, 0.03849216079711914, 0.03849318313598633, 0.03883827209472656, 0.03856486511230469, 0.03888127899169922, 0.03839487838745117, 0.03873484802246094, 0.038504447937011715, 0.038742015838623044, 0.03835903930664063, 0.03841535949707031, 0.037566463470458986, 0.03852185440063476, 0.03854848098754883, 0.03857305526733398, 0.038381568908691405, 0.039002113342285157, 0.0389119987487793, 0.038168575286865236, 0.038735870361328126, 0.038504447937011715, 0.039384063720703126, 0.0387665901184082, 0.03833446502685547, 0.037269504547119144, 0.03716403198242187, 0.037722110748291016, 0.03725414276123047, 0.03728998565673828, 0.03724288177490234, 0.03769241714477539, 0.03850137710571289, 0.03790848159790039, 0.0373309440612793, 0.03764121627807617, 0.03737497711181641, 0.03721932983398438, 0.03732275390625, 0.03810508728027344, 0.037410816192626956, 0.03721932983398438, 0.037288959503173826, 0.03722956848144531, 0.0373493766784668, 0.038084606170654296, 0.03747430419921875, 0.037222400665283206, 0.03717836761474609, 0.03721830368041992, 0.03717529678344727, 0.03726438522338867, 0.037250049591064455, 0.0372592658996582, 0.03717529678344727, 0.03720294570922852, 0.03712716674804688, 0.037190654754638675, 0.03721625518798828, 0.03723263931274414, 0.037326847076416016, 0.037348350524902346, 0.03731660842895508, 0.037379070281982424, 0.03736371231079102, 0.03729100799560547, 0.037272575378417966, 0.0373043212890625, 0.037372928619384765, 
0.03728998565673828, 0.03732787322998047, 0.037459968566894535, 0.037372928619384765, 0.03741798400878906, 0.03728384017944336, 0.03726028823852539, 0.03728384017944336, 0.037338111877441404, 0.037324798583984374, 0.03741491317749023, 0.03733708953857422, 0.03731660842895508, 0.03747020721435547, 0.03785420989990235, 0.03896319961547851, 0.03789209747314453, 0.03845529556274414, 0.03852288055419922, 0.038470657348632815, 0.03750297546386719, 0.03856486511230469, 0.03743436813354492, 0.03734425735473633, 0.03720191955566406, 0.037207038879394534, 0.03734630584716797, 0.037256191253662106, 0.037220352172851565, 0.0377149429321289, 0.03725414276123047, 0.0374200325012207, 0.03731148910522461, 0.03735859298706055, 0.038383617401123046, 0.038432769775390625, 0.037852161407470705, 0.038940673828125, 0.038796287536621094, 0.03860889434814453, 0.038577152252197267, 0.03844095993041992, 0.038591487884521485, 0.037466110229492186, 0.03743743896484375, 0.038520832061767575, 0.03824947357177735, 0.03826483154296875, 0.03858124923706055, 0.038529022216796875, 0.04020121765136719, 0.03818393707275391, 0.03828224182128906, 0.03822182464599609, 0.0383744010925293, 0.03843686294555664, 0.03832627105712891, 0.03829248046875, 0.03860275268554687, 0.03839897537231445, 0.03880448150634765, 0.03864985656738281, 0.038793216705322264, 0.03881267166137695, 0.03910451126098633, 0.0379504623413086, 0.03838259124755859, 0.038591487884521485, 0.038509567260742186, 0.03776921463012695, 0.038335487365722655, 0.037324798583984374, 0.038401023864746094, 0.038391807556152346, 0.03826483154296875, 0.03859251022338867, 0.03794739151000977, 0.03827711868286133, 0.03812351989746094, 0.03853619384765625, 0.038286334991455076, 0.03829043197631836, 0.03739340972900391, 0.03724288177490234, 0.03721113586425781, 0.03724595260620117, 0.037163009643554686, 0.03721318435668945, 0.03724697494506836, 0.037198848724365234, 0.037217281341552735, 0.03794841766357422, 0.037561344146728515, 0.03734425735473633, 0.03732992172241211, 0.03735551834106445, 0.037282817840576174, 0.0373125114440918, 0.03728076934814453, 0.037217281341552735, 0.037321727752685545, 0.037335041046142575, 0.03728486251831055, 0.03736678314208984, 0.03758182525634766, 0.0378869743347168, 0.03880857467651367, 0.03855462265014648, 0.038214656829833986, 0.03733401489257813, 0.037370880126953124, 0.03837542343139649, 0.037369857788085936, 0.03760025787353516, 0.03836723327636719, 0.03786956787109375, 0.03803750228881836, 0.0373493766784668, 0.037718017578125, 0.03876147079467773, 0.03739136123657227, 0.03764326477050781, 0.037379070281982424, 0.03736576080322265, 0.03829350280761719, 0.03731148910522461, 0.037324798583984374, 0.03812249755859375, 0.037787647247314454, 0.03853004837036133, 0.037736446380615234, 0.03802009582519531, 0.03847987365722656, 0.037784576416015625, 0.038386688232421876, 0.03818086242675781, 0.037318656921386716, 0.03763097763061524, 0.03722854232788086, 0.03722547149658203, 0.037953536987304685, 0.03743027114868164, 0.03721932983398438, 0.038316032409667966]",tokens/s,26.54733008769914,,,1,64,1,,, -4bit-awq-exllama-v1-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,a,a,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 
7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 304, in hf_raise_for_status - response.raise_for_status() - File ""/usr/local/lib/python3.10/dist-packages/requests/models.py"", line 1024, in raise_for_status - raise HTTPError(http_error_msg, response=self) -requests.exceptions.HTTPError: 404 Client Error: Not Found for url: https://huggingface.co/a/resolve/main/config.json - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1221, in hf_hub_download - return _hf_hub_download_to_cache_dir( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1325, in _hf_hub_download_to_cache_dir - _raise_on_head_call_error(head_call_error, force_download, local_files_only) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1823, in _raise_on_head_call_error - raise head_call_error - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1722, in _get_metadata_or_catch_error - metadata = get_hf_file_metadata(url=url, proxies=proxies, timeout=etag_timeout, headers=headers) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1645, in get_hf_file_metadata - r = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 372, in _request_wrapper - response = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 396, in _request_wrapper - hf_raise_for_status(response) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status - raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-66949155-7056511327fcda72778b4f6c;a5810a58-112b-4c8c-b677-59d406d81771) - -Repository Not Found for url: https://huggingface.co/a/resolve/main/config.json. -Please make sure you specified the correct `repo_id` and `repo_type`. -If you are trying to access a private or gated repo, make sure you are authenticated. 
- -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 425, in cached_file - raise EnvironmentError( -OSError: a is not a local folder and is not a valid model identifier listed on 'https://huggingface.co/models' -If this is a private repository, make sure to pass a token having permission to this repo either by logging in with `huggingface-cli login` or by passing `token=` - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,1,64,1,,, -4bit-awq-exllama-v1-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,-,-,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 106, in _inner_fn - validate_repo_id(arg_value) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 160, in validate_repo_id - raise HFValidationError( 
-huggingface_hub.errors.HFValidationError: Repo id must use alphanumeric chars or '-', '_', '.', '--' and '..' are forbidden, '-' and '.' cannot start or end the name, max length is 96: '-'. - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 466, in cached_file - raise EnvironmentError( -OSError: Incorrect path_or_model_id: '-'. Please provide either the path to a local folder or the repo_id of a model on the Hub. 
- -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,1,64,1,,, -4bit-awq-exllama-v1-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,facebook/opt-350m,facebook/opt-350m,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - self.load_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3907, in from_pretrained - hf_quantizer.postprocess_model(model) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/base.py"", line 195, in postprocess_model - return self._process_model_after_weight_loading(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/quantizer_awq.py"", line 107, in _process_model_after_weight_loading - model = post_init_awq_exllama_modules(model, self.quantization_config.exllama_config) - File ""/usr/local/lib/python3.10/dist-packages/transformers/integrations/awq.py"", line 462, in post_init_awq_exllama_modules - model = exllama_post_init(model) - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllama.py"", line 144, in exllama_post_init - submodule.post_init() - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllama.py"", line 77, in post_init - self.q4 = exl_ext.make_q4( -NameError: name 'exl_ext' is not defined - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,1,64,1,,, 
-4bit-awq-exllama-v1-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,m,m,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 304, in hf_raise_for_status - response.raise_for_status() - File ""/usr/local/lib/python3.10/dist-packages/requests/models.py"", line 1024, in raise_for_status - raise HTTPError(http_error_msg, response=self) -requests.exceptions.HTTPError: 404 Client Error: Not Found for url: https://huggingface.co/m/resolve/main/config.json - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1221, in hf_hub_download - return _hf_hub_download_to_cache_dir( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1325, in _hf_hub_download_to_cache_dir - _raise_on_head_call_error(head_call_error, force_download, local_files_only) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1823, in _raise_on_head_call_error - raise head_call_error - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1722, in _get_metadata_or_catch_error - metadata = get_hf_file_metadata(url=url, proxies=proxies, timeout=etag_timeout, headers=headers) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1645, in get_hf_file_metadata - r = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 372, in _request_wrapper - response = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 396, in _request_wrapper - hf_raise_for_status(response) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status - raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. 
(Request ID: Root=1-66948c37-134fabb91c0cd58328069fde;b3b530aa-5ad1-434d-b422-e7ac6fc493fd) - -Repository Not Found for url: https://huggingface.co/m/resolve/main/config.json. -Please make sure you specified the correct `repo_id` and `repo_type`. -If you are trying to access a private or gated repo, make sure you are authenticated. - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 425, in cached_file - raise EnvironmentError( -OSError: m is not a local folder and is not a valid model identifier listed on 'https://huggingface.co/models' -If this is a private repository, make sure to pass a token having permission to this repo either by logging in with `huggingface-cli login` or by passing `token=` - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,1,64,1,,, -4bit-awq-exllama-v1-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,stabilityai/stablelm-base-alpha-7b,stabilityai/stablelm-base-alpha-7b,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File 
""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - self.load_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3907, in from_pretrained - hf_quantizer.postprocess_model(model) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/base.py"", line 195, in postprocess_model - return self._process_model_after_weight_loading(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/quantizer_awq.py"", line 107, in _process_model_after_weight_loading - model = post_init_awq_exllama_modules(model, self.quantization_config.exllama_config) - File ""/usr/local/lib/python3.10/dist-packages/transformers/integrations/awq.py"", line 462, in post_init_awq_exllama_modules - model = exllama_post_init(model) - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllama.py"", line 144, in exllama_post_init - submodule.post_init() - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllama.py"", line 77, in post_init - self.q4 = exl_ext.make_q4( -NameError: name 'exl_ext' is not defined - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,1,64,1,,, -4bit-awq-exllama-v1-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,M,M,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 304, in hf_raise_for_status - response.raise_for_status() - File ""/usr/local/lib/python3.10/dist-packages/requests/models.py"", line 1024, in raise_for_status - raise HTTPError(http_error_msg, response=self) -requests.exceptions.HTTPError: 404 Client Error: Not Found for url: 
https://huggingface.co/M/resolve/main/config.json - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1221, in hf_hub_download - return _hf_hub_download_to_cache_dir( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1325, in _hf_hub_download_to_cache_dir - _raise_on_head_call_error(head_call_error, force_download, local_files_only) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1823, in _raise_on_head_call_error - raise head_call_error - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1722, in _get_metadata_or_catch_error - metadata = get_hf_file_metadata(url=url, proxies=proxies, timeout=etag_timeout, headers=headers) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1645, in get_hf_file_metadata - r = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 372, in _request_wrapper - response = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 396, in _request_wrapper - hf_raise_for_status(response) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status - raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-66948faf-59dd5cc513544b7d10041dee;286a7608-1bce-4738-8491-75c34ff23312) - -Repository Not Found for url: https://huggingface.co/M/resolve/main/config.json. -Please make sure you specified the correct `repo_id` and `repo_type`. -If you are trying to access a private or gated repo, make sure you are authenticated. 
- -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 425, in cached_file - raise EnvironmentError( -OSError: M is not a local folder and is not a valid model identifier listed on 'https://huggingface.co/models' -If this is a private repository, make sure to pass a token having permission to this repo either by logging in with `huggingface-cli login` or by passing `token=` - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,1,64,1,,, -4bit-awq-exllama-v1-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,8,8,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 304, in hf_raise_for_status - response.raise_for_status() - File ""/usr/local/lib/python3.10/dist-packages/requests/models.py"", line 1024, in raise_for_status - raise HTTPError(http_error_msg, response=self) -requests.exceptions.HTTPError: 404 Client Error: Not Found for url: https://huggingface.co/8/resolve/main/config.json - -The above exception was the 
direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1221, in hf_hub_download - return _hf_hub_download_to_cache_dir( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1325, in _hf_hub_download_to_cache_dir - _raise_on_head_call_error(head_call_error, force_download, local_files_only) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1823, in _raise_on_head_call_error - raise head_call_error - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1722, in _get_metadata_or_catch_error - metadata = get_hf_file_metadata(url=url, proxies=proxies, timeout=etag_timeout, headers=headers) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1645, in get_hf_file_metadata - r = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 372, in _request_wrapper - response = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 396, in _request_wrapper - hf_raise_for_status(response) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status - raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-6694925d-0baf58ab1d166f8b78697d37;233f7997-9e86-437a-8c02-e00df324e39b) - -Repository Not Found for url: https://huggingface.co/8/resolve/main/config.json. -Please make sure you specified the correct `repo_id` and `repo_type`. -If you are trying to access a private or gated repo, make sure you are authenticated. 
- -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 425, in cached_file - raise EnvironmentError( -OSError: 8 is not a local folder and is not a valid model identifier listed on 'https://huggingface.co/models' -If this is a private repository, make sure to pass a token having permission to this repo either by logging in with `huggingface-cli login` or by passing `token=` - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,1,64,1,,, -4bit-awq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,EleutherAI/gpt-neox-20b,EleutherAI/gpt-neox-20b,cuda,0,42,,,True,,,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,gpt_neox,MB,11152.789504,14821.097472,0.0,14191.427584,13325.783552,s,1,13.9205224609375,13.9205224609375,0.0,13.9205224609375,13.9205224609375,13.9205224609375,13.9205224609375,[13.9205224609375],,kWh,8.317282771040862e-05,4.5569934007850835e-05,0.0001818082010019706,0.0003105509627202301,,MB,2150.326272,14839.97184,0.0,14191.427584,12591.007744,s,10,36.514958496093755,3.6514958496093746,0.0002096501094495581,3.6514576416015627,3.6517500000000003,3.651822509765625,3.651880517578125,"[3.651549560546875, 3.65135888671875, 3.651287841796875, 3.651680908203125, 3.6512353515625, 3.651546875, 3.65173388671875, 3.651368408203125, 3.65189501953125, 
3.6513017578125]",tokens/s,70.10825440959657,kWh,4.312024081256975e-05,2.3632014080227774e-05,0.000262303570953798,0.0003290558258465955,tokens/kWh,777983.4906170182,MB,2163.892224,14839.97184,0.0,14191.427584,12934.19776,s,10,27.44469384765625,2.744469384765625,0.019714273457042058,2.743176513671875,2.7654335693359378,2.7724251586914064,2.778018430175781,"[2.747319580078125, 2.729662353515625, 2.736521240234375, 2.779416748046875, 2.707775634765625, 2.760232421875, 2.739033447265625, 2.72737353515625, 2.7638798828125, 2.75347900390625]",tokens/s,22.955256979621996,kWh,3.108763981499945e-05,1.703668100083878e-05,0.00013185502215060096,0.00017997934296643914,tokens/kWh,350040.17106422957,,s,630,27.428521949768054,0.04353733642820328,0.0011378166872538875,0.043955711364746096,0.04449321098327637,0.04485150718688965,0.045957794494628904,"[0.04183859252929688, 0.04157952117919922, 0.041468929290771485, 0.04135833740234375, 0.04154880142211914, 0.04169625473022461, 0.04174950408935547, 0.04151398468017578, 0.04144537734985351, 0.04150476837158203, 0.04128460693359375, 0.043730945587158204, 0.04513894271850586, 0.04437913513183594, 0.04405350494384765, 0.04384972763061523, 0.043812862396240236, 0.04389888000488281, 0.04386611175537109, 0.044273662567138675, 0.04475392150878906, 0.04398489761352539, 0.043911167144775394, 0.0438579216003418, 0.04391219329833984, 0.04390707015991211, 0.04386406326293945, 0.04389273452758789, 0.044368896484375, 0.04400230407714844, 0.04397055816650391, 0.044109825134277345, 0.043910144805908206, 0.043817985534667966, 0.04413235092163086, 0.04393983840942383, 0.04382822418212891, 0.043865089416503904, 0.043886592864990234, 0.043963390350341795, 0.04382515335083008, 0.04395622253417969, 0.043979774475097655, 0.044644351959228515, 0.044181503295898435, 0.04391424179077148, 0.043979774475097655, 0.044012542724609374, 0.04404326248168945, 0.04406988906860351, 0.04390195083618164, 0.043963390350341795, 0.04393164825439453, 0.044065792083740236, 0.04396441650390625, 0.0440186882019043, 0.04391424179077148, 0.04399718475341797, 0.04390707015991211, 0.044036094665527346, 0.043919361114501954, 0.04390195083618164, 0.04391219329833984, 0.041852928161621096, 0.04136038589477539, 0.04373811340332031, 0.044006401062011716, 0.04411904144287109, 0.0440709114074707, 0.044027904510498046, 0.04400128173828125, 0.04407398223876953, 0.04408422470092774, 0.04408627319335937, 0.044273662567138675, 0.04403507232666016, 0.043791358947753906, 0.04431257629394531, 0.04456447982788086, 0.04115865707397461, 0.04154163360595703, 0.041518081665039064, 0.04141056060791016, 0.0413829116821289, 0.04144537734985351, 0.041425918579101564, 0.041450496673583984, 0.0414648323059082, 0.041545726776123046, 0.04147507095336914, 0.041460735321044925, 0.04136243057250977, 0.041439231872558595, 0.041376766204833985, 0.04147507095336914, 0.04174950408935547, 0.04521984100341797, 0.04476006317138672, 0.04415283203125, 0.04413337707519531, 0.04396543884277344, 0.044391422271728515, 0.044854270935058595, 0.04644761657714844, 0.044763137817382816, 0.04412518310546875, 0.04430950546264648, 0.04333055877685547, 0.04440678405761719, 0.04420198440551758, 0.04419583892822266, 0.04170751953125, 0.041915393829345705, 0.04393574523925781, 0.0438364143371582, 0.04405452728271484, 0.04397772979736328, 0.04386918258666992, 0.044096511840820314, 0.04405759811401367, 0.04405452728271484, 0.04390911865234375, 0.04390707015991211, 0.043936767578125, 0.04407910537719727, 0.04429209518432617, 0.04187443161010742, 
0.04597145462036133, 0.04449894332885742, 0.044063743591308595, 0.04408627319335937, 0.04385177612304687, 0.04449894332885742, 0.044085247039794925, 0.04401356887817383, 0.0443238410949707, 0.04466995239257812, 0.044230655670166014, 0.0440186882019043, 0.04397260665893555, 0.044126209259033204, 0.04424192047119141, 0.044616703033447266, 0.04388454437255859, 0.044290046691894534, 0.041591808319091796, 0.04213452911376953, 0.044109825134277345, 0.041352191925048826, 0.04124364852905273, 0.04311142349243164, 0.04408627319335937, 0.04393881607055664, 0.04401663970947266, 0.04372480010986328, 0.04389376068115235, 0.043734016418457033, 0.04400537490844727, 0.04393164825439453, 0.043930622100830076, 0.0446033935546875, 0.04419686508178711, 0.04382515335083008, 0.04391424179077148, 0.04428083038330078, 0.04410675048828125, 0.043804672241210936, 0.043998207092285156, 0.0437841911315918, 0.043921409606933595, 0.043908096313476565, 0.0437841911315918, 0.043891712188720705, 0.04398387145996094, 0.04393983840942383, 0.044080127716064454, 0.04378316879272461, 0.04159897613525391, 0.04144025421142578, 0.041567230224609376, 0.04140851211547852, 0.041614334106445314, 0.04136345672607422, 0.04145459365844727, 0.042387454986572266, 0.04175564956665039, 0.041455615997314454, 0.041565185546875, 0.04140544128417969, 0.044556289672851565, 0.044012542724609374, 0.044009471893310545, 0.043758590698242186, 0.04405964660644531, 0.044111873626708986, 0.044788734436035156, 0.04432281494140625, 0.044158977508544923, 0.043858943939208986, 0.04410572814941406, 0.0440186882019043, 0.046442497253417966, 0.04465971374511719, 0.0445296630859375, 0.04418764877319336, 0.04446003341674805, 0.04409036636352539, 0.04418560028076172, 0.044485633850097656, 0.04400537490844727, 0.04403712081909179, 0.044147712707519535, 0.04271104049682617, 0.04512255859375, 0.04449689483642578, 0.04394188690185547, 0.044144641876220705, 0.044246017456054686, 0.04541644668579101, 0.0452229118347168, 0.04404838562011719, 0.04386304092407227, 0.04394905471801758, 0.04395929718017578, 0.04395110321044922, 0.04400435256958008, 0.04400128173828125, 0.04405350494384765, 0.044075008392333984, 0.043902976989746094, 0.043889663696289063, 0.043911167144775394, 0.043853824615478515, 0.04397260665893555, 0.04401971054077149, 0.04400332641601563, 0.04396748733520508, 0.04435148620605469, 0.04397568130493164, 0.04400230407714844, 0.043924480438232424, 0.045541374206542966, 0.044548095703125, 0.04402380752563476, 0.04412006378173828, 0.043835391998291014, 0.04393471908569336, 0.04388044738769531, 0.04159078216552734, 0.04141363143920898, 0.04271206283569336, 0.04417740631103516, 0.04221644973754883, 0.04204646301269531, 0.04139212799072266, 0.04137062454223633, 0.041613311767578126, 0.04127334213256836, 0.0413306884765625, 0.04166348648071289, 0.04149862289428711, 0.041355262756347655, 0.04130918502807617, 0.04126822280883789, 0.04137881469726563, 0.04203724670410156, 0.044068862915039066, 0.0440002555847168, 0.04395212936401367, 0.04389683151245117, 0.04384972763061523, 0.04402483367919922, 0.043936767578125, 0.04401049423217773, 0.04386611175537109, 0.043908096313476565, 0.04407295989990234, 0.0447907829284668, 0.043458560943603515, 0.04378112030029297, 0.04392345428466797, 0.043889663696289063, 0.043719680786132815, 0.041543678283691404, 0.041403392791748046, 0.04136038589477539, 0.04136447906494141, 0.04143001556396484, 0.04183859252929688, 0.04405350494384765, 0.04377702331542969, 0.04389888000488281, 0.04381081771850586, 0.04400128173828125, 
0.04372787094116211, 0.041744384765625, 0.04147609710693359, 0.04140236663818359, 0.041436161041259766, 0.04145459365844727, 0.04149555206298828, 0.04211507034301758, 0.04151500701904297, 0.04504780960083008, 0.0451860466003418, 0.04441600036621094, 0.04386816024780273, 0.04375040054321289, 0.04405964660644531, 0.04379238510131836, 0.04380979156494141, 0.044044288635253906, 0.044268543243408204, 0.04612710571289062, 0.044030975341796875, 0.04214374542236328, 0.04146995162963867, 0.04261171340942383, 0.0439183349609375, 0.043998207092285156, 0.04433203125, 0.04403302383422852, 0.043947006225585936, 0.04393164825439453, 0.04232294464111328, 0.0440186882019043, 0.04420710372924805, 0.04420710372924805, 0.04408729553222656, 0.04386099243164063, 0.04205670547485352, 0.044030975341796875, 0.04392755126953125, 0.0445849609375, 0.04497100830078125, 0.04592435073852539, 0.045412353515625, 0.04401049423217773, 0.04414361572265625, 0.04396236801147461, 0.04399411010742187, 0.04392755126953125, 0.04401663970947266, 0.04390911865234375, 0.04385996627807617, 0.04381081771850586, 0.04409241485595703, 0.0443504638671875, 0.043734016418457033, 0.044685310363769534, 0.044268543243408204, 0.043998207092285156, 0.044085247039794925, 0.044028926849365234, 0.04139929580688476, 0.041427967071533206, 0.04195328140258789, 0.043886592864990234, 0.042092544555664066, 0.044943359375, 0.04412211227416992, 0.04399411010742187, 0.043832321166992184, 0.04422246551513672, 0.044065792083740236, 0.04273766326904297, 0.04401561737060547, 0.04389683151245117, 0.043954177856445314, 0.043817985534667966, 0.04398489761352539, 0.04369100952148437, 0.04407603073120117, 0.04395827102661133, 0.04399411010742187, 0.043963390350341795, 0.044052478790283206, 0.04367257690429688, 0.04191231918334961, 0.04151500701904297, 0.042359809875488284, 0.04403507232666016, 0.041540607452392575, 0.04145663833618164, 0.04317900848388672, 0.04385279846191406, 0.04386918258666992, 0.04393574523925781, 0.044339199066162106, 0.04401561737060547, 0.0449536018371582, 0.04338995361328125, 0.04392243194580078, 0.044170238494873046, 0.04488294219970703, 0.04148223876953125, 0.042821632385253904, 0.04144332885742188, 0.04222873687744141, 0.04138598251342773, 0.04134809494018555, 0.041458686828613284, 0.042858497619628906, 0.04415078353881836, 0.04416204833984375, 0.04411699295043945, 0.04391321563720703, 0.043937793731689455, 0.043802623748779294, 0.04399718475341797, 0.04399513626098633, 0.04384460830688477, 0.04399411010742187, 0.044336128234863284, 0.044115966796875, 0.04379955291748047, 0.0439736328125, 0.044431358337402346, 0.04386611175537109, 0.04297011184692383, 0.04408627319335937, 0.04490752029418945, 0.04395110321044922, 0.044065792083740236, 0.04399718475341797, 0.04144332885742188, 0.04318719863891601, 0.046094337463378904, 0.0442347526550293, 0.044085247039794925, 0.04392038345336914, 0.04410675048828125, 0.04389888000488281, 0.04408115386962891, 0.0446300163269043, 0.044170238494873046, 0.04400435256958008, 0.04414156723022461, 0.04154163360595703, 0.04159590530395508, 0.04153036880493164, 0.04303564834594727, 0.04155392074584961, 0.041540607452392575, 0.04361523056030273, 0.044668926239013675, 0.04151295852661133, 0.04166144180297852, 0.04152524948120117, 0.04145971298217774, 0.04244172668457031, 0.04412416076660156, 0.04194918441772461, 0.0419051513671875, 0.04158156967163086, 0.04152217483520508, 0.041485313415527345, 0.04222873687744141, 0.044095489501953126, 0.04407910537719727, 0.044028926849365234, 0.043826175689697267, 
0.043963390350341795, 0.04382310485839844, 0.04389580917358398, 0.0440893440246582, 0.04421017456054688, 0.044025856018066405, 0.044371967315673826, 0.04357222366333008, 0.04377702331542969, 0.04479897689819336, 0.04170035171508789, 0.042145790100097655, 0.04379033660888672, 0.04399513626098633, 0.04465971374511719, 0.041752574920654296, 0.041460735321044925, 0.0414463996887207, 0.041376766204833985, 0.04224512100219727, 0.04227174377441406, 0.044240894317626955, 0.04410163116455078, 0.04401152038574219, 0.043960319519042966, 0.04377088165283203, 0.04396953582763672, 0.04408422470092774, 0.04405145645141602, 0.044278785705566405, 0.04406476974487305, 0.043824127197265625, 0.04395622253417969, 0.04391321563720703, 0.044477439880371096, 0.044098560333251956, 0.044083198547363284, 0.0438364143371582, 0.04493721771240235, 0.041675777435302735, 0.04418764877319336, 0.04492697525024414, 0.04413337707519531, 0.04401049423217773, 0.043873279571533204, 0.042966014862060545, 0.044281856536865234, 0.041578495025634765, 0.0414648323059082, 0.044377086639404296, 0.04449280166625977, 0.0427694091796875, 0.04399718475341797, 0.041468929290771485, 0.04144844818115234, 0.041474048614501956, 0.042031105041503904, 0.0440074234008789, 0.04392345428466797, 0.04404326248168945, 0.04415795135498047, 0.04402073669433594, 0.04400128173828125, 0.043937793731689455, 0.04390092849731445, 0.04428492736816406, 0.044273662567138675, 0.04392038345336914, 0.043872257232666016, 0.04401663970947266, 0.04402073669433594, 0.04401049423217773, 0.04426342391967773, 0.044595199584960936, 0.04413644790649414, 0.04406784057617188, 0.043930622100830076, 0.04403814315795898, 0.04394496154785156, 0.043891712188720705, 0.044631038665771484, 0.04546355056762695, 0.04422860717773437, 0.04489420700073242, 0.04403200149536133, 0.04408115386962891, 0.04395110321044922, 0.04394905471801758, 0.04457574462890625, 0.0440709114074707, 0.04386816024780273, 0.04412006378173828, 0.04404019165039062, 0.04408115386962891, 0.04392652893066406, 0.044848129272460936, 0.044695552825927735, 0.04455014419555664, 0.04161740875244141, 0.043399166107177735, 0.04391321563720703, 0.045295616149902344, 0.04413747024536133, 0.0442347526550293, 0.04397260665893555, 0.04432486343383789, 0.041690113067626954, 0.042202110290527346, 0.04213555145263672, 0.04414361572265625, 0.045096958160400394, 0.045448192596435545, 0.04424192047119141, 0.0439552001953125, 0.044219390869140625, 0.04415999984741211, 0.04392959976196289, 0.04400435256958008, 0.043902976989746094, 0.04422041702270508, 0.043996158599853515, 0.04377190399169922, 0.04440576171875, 0.04611993789672852, 0.04429312133789062, 0.041371646881103515, 0.04151091384887695, 0.0437022705078125, 0.043889663696289063, 0.043911167144775394, 0.044647422790527344, 0.04441292953491211, 0.0443955192565918, 0.044095489501953126, 0.0420208625793457, 0.04448972702026367, 0.04459724807739258, 0.04641791915893555, 0.04494540786743164, 0.043919361114501954, 0.04403712081909179, 0.04390911865234375, 0.04416716766357422, 0.04401152038574219, 0.044063743591308595, 0.043889663696289063, 0.0439552001953125, 0.0440975341796875, 0.04434431838989258, 0.04419686508178711, 0.043911167144775394, 0.044066814422607424, 0.04400537490844727, 0.044273662567138675, 0.04486041641235351, 0.041619457244873044, 0.041488384246826174, 0.04154163360595703, 0.042947582244873043, 0.044186622619628906, 0.044251136779785157, 0.04153548812866211, 0.04142489624023438, 0.04145356750488281, 0.04149964904785156, 0.04153036880493164, 0.04410879898071289, 
0.043878398895263675]",tokens/s,22.968791433740645,,,1,64,1,,, -4bit-awq-exllama-v1-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,s,s,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 304, in hf_raise_for_status - response.raise_for_status() - File ""/usr/local/lib/python3.10/dist-packages/requests/models.py"", line 1024, in raise_for_status - raise HTTPError(http_error_msg, response=self) -requests.exceptions.HTTPError: 404 Client Error: Not Found for url: https://huggingface.co/s/resolve/main/config.json - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1221, in hf_hub_download - return _hf_hub_download_to_cache_dir( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1325, in _hf_hub_download_to_cache_dir - _raise_on_head_call_error(head_call_error, force_download, local_files_only) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1823, in _raise_on_head_call_error - raise head_call_error - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1722, in _get_metadata_or_catch_error - metadata = get_hf_file_metadata(url=url, proxies=proxies, timeout=etag_timeout, headers=headers) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1645, in get_hf_file_metadata - r = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 372, in _request_wrapper - response = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 396, in _request_wrapper - hf_raise_for_status(response) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status - raise RepositoryNotFoundError(message, response) from e 
-huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-66948cd9-239d09731d5c160c64cbd31a;6d714113-4cf7-4041-9e7d-d7dc44ebb53d) - -Repository Not Found for url: https://huggingface.co/s/resolve/main/config.json. -Please make sure you specified the correct `repo_id` and `repo_type`. -If you are trying to access a private or gated repo, make sure you are authenticated. - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 425, in cached_file - raise EnvironmentError( -OSError: s is not a local folder and is not a valid model identifier listed on 'https://huggingface.co/models' -If this is a private repository, make sure to pass a token having permission to this repo either by logging in with `huggingface-cli login` or by passing `token=` - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,1,64,1,,, -4bit-awq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,EleutherAI/pythia-12b,EleutherAI/pythia-12b,cuda,0,42,,,True,,,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,gpt_neox,MB,6655.315968,9406.251008,0.0,8776.58112,8188.314112,s,1,11.4897529296875,11.4897529296875,0.0,11.4897529296875,11.4897529296875,11.4897529296875,11.4897529296875,[11.4897529296875],,kWh,5.571244209860172e-05,3.051902730235648e-05,0.00011269564571197987,0.0001989271151129381,,MB,1610.223616,9425.125376,0.0,8776.58112,7663.089664,s,10,21.44249096679687,2.1442490966796877,0.0003201564697220389,2.1442928466796873,2.1445628662109373,2.1446798706054686,2.1447734741210938,"[2.144536865234375, 2.144796875, 2.1443388671875, 
2.144279541015625, 2.14430615234375, 2.144510009765625, 2.143948486328125, 2.143623779296875, 2.143975341796875, 2.144175048828125]",tokens/s,119.3891140709394,kWh,2.53204952893744e-05,1.3876238804860093e-05,0.00014440414330100238,0.00018360087739523689,tokens/kWh,1394328.8487064787,MB,1651.904512,9425.125376,0.0,8776.58112,7906.289152,s,10,19.362933105468752,1.936293310546875,0.014833410360575191,1.9321217651367188,1.9612183471679687,1.962660137939453,1.9638135705566406,"[1.91702294921875, 1.932384521484375, 1.9343607177734374, 1.96089794921875, 1.9450233154296874, 1.926259033203125, 1.9265582275390625, 1.9244654541015624, 1.9641019287109376, 1.9318590087890626]",tokens/s,32.5363929404924,kWh,2.2814680619792905e-05,1.2502657634857778e-05,8.393923381799784e-05,0.0001192565720726484,tokens/kWh,528272.7727711461,,s,630,19.360483320236202,0.030730925905136836,0.0006592234331792792,0.030484479904174806,0.03143109493255615,0.03168629741668701,0.03259833354949952,"[0.030686208724975586, 0.030418943405151368, 0.03038003158569336, 0.030320640563964843, 0.030196735382080078, 0.030290943145751953, 0.030271488189697264, 0.0310262393951416, 0.030185407638549804, 0.030268415451049805, 0.03018956756591797, 0.030305280685424804, 0.03022643280029297, 0.030296064376831053, 0.030304256439208983, 0.03105075263977051, 0.030868480682373047, 0.030338048934936523, 0.030481407165527344, 0.031072256088256835, 0.03034623908996582, 0.030365695953369142, 0.03062579154968262, 0.03032678413391113, 0.030209024429321288, 0.030313472747802734, 0.030275583267211914, 0.030303232192993163, 0.030442495346069336, 0.03040563201904297, 0.030675968170166015, 0.03032678413391113, 0.030258176803588867, 0.030319616317749022, 0.030272512435913085, 0.030327808380126952, 0.030262271881103517, 0.030319616317749022, 0.030259199142456054, 0.030256128311157225, 0.03038211250305176, 0.03104252815246582, 0.030824480056762697, 0.030512096405029297, 0.03034828758239746, 0.03035852813720703, 0.030323711395263672, 0.030317567825317384, 0.030275583267211914, 0.03077529525756836, 0.03092787170410156, 0.030327808380126952, 0.030273536682128906, 0.030282751083374023, 0.03031449508666992, 0.030736383438110353, 0.030331903457641602, 0.030996480941772462, 0.030312448501586913, 0.030242847442626952, 0.03023868751525879, 0.030287872314453124, 0.030286848068237306, 0.030729215621948244, 0.03034726333618164, 0.0304005126953125, 0.0319815673828125, 0.031492095947265625, 0.03133030319213867, 0.030691328048706053, 0.031078399658203124, 0.030591999053955078, 0.030521343231201172, 0.030323711395263672, 0.030345216751098632, 0.030305280685424804, 0.030358591079711915, 0.03186886405944824, 0.031268863677978515, 0.031226879119873048, 0.03073023986816406, 0.031140863418579103, 0.030551071166992187, 0.030980064392089845, 0.030907392501831055, 0.03129958343505859, 0.031230976104736328, 0.03122790336608887, 0.030323711395263672, 0.03035136032104492, 0.030519296646118164, 0.03061350440979004, 0.03063091278076172, 0.03037900733947754, 0.031429664611816406, 0.030742496490478517, 0.030306304931640625, 0.030280704498291015, 0.030245887756347657, 0.030238719940185548, 0.030240768432617186, 0.030260351181030272, 0.030269311904907226, 0.030310400009155275, 0.030238719940185548, 0.030270463943481447, 0.030284799575805665, 0.030301183700561524, 0.029932544708251952, 0.03018137550354004, 0.030323711395263672, 0.030289920806884765, 0.030315519332885742, 0.030290943145751953, 0.030880767822265624, 0.030857215881347655, 0.030722080230712892, 0.03027452850341797, 
0.030296064376831053, 0.030309375762939454, 0.030909439086914063, 0.030942207336425782, 0.030734336853027344, 0.03146240043640137, 0.031154176712036134, 0.031093759536743162, 0.03059507179260254, 0.03145017623901367, 0.031360960006713866, 0.031291391372680666, 0.03126169586181641, 0.030315519332885742, 0.03119615936279297, 0.03120639991760254, 0.03119308853149414, 0.03036672019958496, 0.030253055572509766, 0.031230976104736328, 0.030237695693969727, 0.030272512435913085, 0.030195711135864257, 0.030301183700561524, 0.030881792068481444, 0.03012915229797363, 0.030631935119628906, 0.03094528007507324, 0.030654464721679688, 0.03078246307373047, 0.031123455047607423, 0.03059404754638672, 0.033018878936767575, 0.031939584732055666, 0.031581216812133786, 0.03124940872192383, 0.030953439712524414, 0.030209024429321288, 0.030471168518066406, 0.030241792678833007, 0.03022643280029297, 0.03021107292175293, 0.030131200790405273, 0.030307327270507813, 0.030224384307861327, 0.030109695434570313, 0.030204927444458008, 0.03019366455078125, 0.03015065574645996, 0.030908416748046875, 0.031185920715332032, 0.031182880401611327, 0.031164384841918944, 0.03031449508666992, 0.030297088623046874, 0.030299135208129883, 0.030308351516723633, 0.030240768432617186, 0.030616575241088868, 0.030288896560668944, 0.03022336006164551, 0.03098419189453125, 0.030638080596923828, 0.030268415451049805, 0.030255104064941408, 0.030241792678833007, 0.03138150405883789, 0.03134464073181152, 0.03114905548095703, 0.030447616577148437, 0.030486528396606444, 0.030563327789306642, 0.030329856872558594, 0.031045631408691408, 0.03140096092224121, 0.03125964736938477, 0.03126067161560059, 0.03157196807861328, 0.031067136764526368, 0.03131289672851562, 0.031474687576293944, 0.031178752899169923, 0.030637056350708007, 0.030924800872802735, 0.030413824081420897, 0.030450687408447266, 0.03193446350097656, 0.030709760665893555, 0.030873600006103515, 0.03170918464660644, 0.03121971130371094, 0.03035955238342285, 0.031528959274291994, 0.03202764892578125, 0.03165286445617676, 0.03135487937927246, 0.031047679901123046, 0.030349311828613282, 0.03057868766784668, 0.031352832794189454, 0.03113471984863281, 0.0303687686920166, 0.031322111129760744, 0.03121561622619629, 0.030966783523559572, 0.03114703941345215, 0.03123708724975586, 0.03128934478759766, 0.031269887924194335, 0.031286272048950195, 0.030038015365600586, 0.030638080596923828, 0.0313384952545166, 0.030323711395263672, 0.030331903457641602, 0.03126067161560059, 0.03131494331359863, 0.030357503890991212, 0.031486976623535154, 0.031204351425170897, 0.030323711395263672, 0.03062272071838379, 0.031321088790893556, 0.03121664047241211, 0.030537727355957032, 0.03184127998352051, 0.033459201812744144, 0.0317573127746582, 0.03138355255126953, 0.03134464073181152, 0.031357952117919925, 0.03142758369445801, 0.03123302459716797, 0.03168767929077149, 0.03128832054138184, 0.030561279296875, 0.030521343231201172, 0.031119359970092773, 0.031236095428466795, 0.03131699180603027, 0.03125657653808594, 0.031304704666137696, 0.03073843193054199, 0.030715904235839843, 0.031093759536743162, 0.031230976104736328, 0.031664127349853514, 0.030246944427490235, 0.030303199768066405, 0.031319040298461914, 0.030240768432617186, 0.03141430473327637, 0.03062883186340332, 0.031068159103393556, 0.030327808380126952, 0.030470144271850585, 0.031040576934814452, 0.030804927825927735, 0.03115110397338867, 0.03151059150695801, 0.03088377571105957, 0.030264320373535155, 0.030235647201538086, 0.030478336334228515, 
0.03129651260375976, 0.031121408462524414, 0.03052851104736328, 0.031015935897827147, 0.03021004867553711, 0.030257152557373046, 0.030299135208129883, 0.03017215919494629, 0.03014041519165039, 0.030212095260620117, 0.031122432708740235, 0.031172607421875, 0.030277631759643556, 0.03023155212402344, 0.03064735984802246, 0.031074239730834962, 0.030386207580566406, 0.030264287948608398, 0.03016499137878418, 0.030261247634887696, 0.030876672744750977, 0.030233600616455077, 0.031558656692504884, 0.03060633659362793, 0.030302207946777345, 0.031524864196777344, 0.03230822372436523, 0.0313384952545166, 0.030312448501586913, 0.030299135208129883, 0.03486003112792969, 0.03180441665649414, 0.031471616744995115, 0.03059507179260254, 0.03037183952331543, 0.03056025505065918, 0.03033087921142578, 0.029951040267944335, 0.030283712387084962, 0.031072256088256835, 0.030850048065185546, 0.03035647964477539, 0.030682111740112306, 0.030317567825317384, 0.030402559280395508, 0.030217216491699218, 0.030290943145751953, 0.030327871322631837, 0.030359487533569336, 0.03077836799621582, 0.030670848846435547, 0.03033497619628906, 0.030444543838500978, 0.03037183952331543, 0.03035647964477539, 0.030273536682128906, 0.030515199661254884, 0.031007743835449218, 0.030457855224609375, 0.030258176803588867, 0.0317706241607666, 0.03233587265014649, 0.03129548835754394, 0.031090688705444337, 0.03131084823608398, 0.031302656173706055, 0.031089664459228516, 0.03082035255432129, 0.030311424255371092, 0.03021107292175293, 0.030232576370239257, 0.03017215919494629, 0.03058278465270996, 0.030473215103149414, 0.0305633602142334, 0.03025814437866211, 0.03021107292175293, 0.030468095779418947, 0.030817279815673827, 0.03021824073791504, 0.03023155212402344, 0.030284799575805665, 0.031124479293823244, 0.03032678413391113, 0.030227455139160156, 0.030252031326293945, 0.03018956756591797, 0.030517248153686522, 0.03150028800964356, 0.030318592071533205, 0.030696447372436524, 0.030911487579345705, 0.03058585548400879, 0.030510080337524413, 0.03022643280029297, 0.0301527042388916, 0.030652416229248046, 0.030327808380126952, 0.0301844482421875, 0.03017420768737793, 0.03016908836364746, 0.03022233581542969, 0.030448640823364258, 0.030621696472167968, 0.030269439697265626, 0.030287872314453124, 0.030220287322998047, 0.030189599990844727, 0.03019363212585449, 0.030259199142456054, 0.03057868766784668, 0.030413888931274415, 0.03029395294189453, 0.030241792678833007, 0.03078451156616211, 0.030740480422973632, 0.030282751083374023, 0.030305280685424804, 0.030261247634887696, 0.030251007080078125, 0.03026639938354492, 0.03019158363342285, 0.030963712692260743, 0.03058892822265625, 0.03019878387451172, 0.03021824073791504, 0.030315519332885742, 0.03124940872192383, 0.03185766410827637, 0.0315156478881836, 0.030251007080078125, 0.03135487937927246, 0.03116851234436035, 0.030286848068237306, 0.03035238456726074, 0.03092889595031738, 0.03017932891845703, 0.030685184478759765, 0.03120025634765625, 0.030220287322998047, 0.0307640323638916, 0.031065088272094726, 0.030251007080078125, 0.031821823120117186, 0.031178815841674805, 0.030197696685791017, 0.030204927444458008, 0.03014963150024414, 0.030233600616455077, 0.03040768051147461, 0.03012915229797363, 0.030270463943481447, 0.03017318344116211, 0.030297088623046874, 0.030327808380126952, 0.03173990440368652, 0.031835136413574217, 0.03144704055786133, 0.031459327697753905, 0.03059507179260254, 0.030280704498291015, 0.03041689682006836, 0.030203903198242187, 0.030641151428222657, 0.030303232192993163, 
0.030253055572509766, 0.030245887756347657, 0.03021824073791504, 0.030307327270507813, 0.03018137550354004, 0.030302207946777345, 0.03060633659362793, 0.030418943405151368, 0.039314430236816404, 0.03309363174438477, 0.03042918395996094, 0.030273536682128906, 0.03035238456726074, 0.030336000442504882, 0.030239744186401366, 0.030303232192993163, 0.03017420768737793, 0.030213119506835938, 0.03035136032104492, 0.030300159454345704, 0.030253055572509766, 0.030728191375732423, 0.03148185539245606, 0.030499839782714845, 0.030501888275146483, 0.030249984741210937, 0.030295040130615233, 0.030229503631591798, 0.03036672019958496, 0.030275583267211914, 0.030244863510131836, 0.03055615997314453, 0.030241792678833007, 0.03023052787780762, 0.03021516799926758, 0.030249984741210937, 0.030276607513427735, 0.03015679931640625, 0.03037183952331543, 0.031080448150634765, 0.030298112869262695, 0.030277631759643556, 0.030225408554077147, 0.030209024429321288, 0.030304256439208983, 0.030268415451049805, 0.030251007080078125, 0.030241792678833007, 0.03015273666381836, 0.030176256179809572, 0.03022640037536621, 0.030394367218017578, 0.03018342399597168, 0.030216192245483397, 0.030234624862670898, 0.03075071907043457, 0.030942207336425782, 0.031648767471313476, 0.03038310432434082, 0.031488000869750975, 0.03118694305419922, 0.030950399398803712, 0.03102617645263672, 0.03121049690246582, 0.031064064025878906, 0.03080294418334961, 0.03158732795715332, 0.03155763244628906, 0.03075584030151367, 0.0303636474609375, 0.030332927703857423, 0.030241792678833007, 0.03095142364501953, 0.031308895111083986, 0.030636959075927735, 0.032317440032958986, 0.03321753692626953, 0.03138457679748535, 0.03144396781921387, 0.03168460845947266, 0.031893503189086916, 0.03134771156311035, 0.03138559913635254, 0.03210956954956055, 0.03061759948730469, 0.03118694305419922, 0.031023103713989256, 0.030627840042114256, 0.030846975326538087, 0.030482431411743165, 0.030256128311157225, 0.031307775497436525, 0.030922752380371094, 0.030252031326293945, 0.030741504669189453, 0.031132671356201173, 0.031160320281982422, 0.03039743995666504, 0.03141222381591797, 0.03270553588867187, 0.03177881622314453, 0.03156582450866699, 0.031076351165771485, 0.03017728042602539, 0.03159347152709961, 0.0314521598815918, 0.031236095428466795, 0.031645696640014646, 0.031662080764770506, 0.03116748809814453, 0.03117158317565918, 0.031318016052246093, 0.031696895599365234, 0.03142451286315918, 0.031409151077270506, 0.03133235168457031, 0.030509056091308592, 0.031267839431762694, 0.0301977596282959, 0.03078860855102539, 0.031696895599365234, 0.030741504669189453, 0.03138047981262207, 0.031546367645263675, 0.03114905548095703, 0.031730688095092774, 0.03177779197692871, 0.030454784393310546, 0.030340095520019532, 0.03038412857055664, 0.030365695953369142, 0.030369792938232422, 0.030611455917358397, 0.03038515281677246, 0.030315519332885742, 0.03036467170715332, 0.030235647201538086, 0.03011075210571289, 0.030915584564208985, 0.030523359298706056, 0.030299135208129883, 0.030423040390014647, 0.030591999053955078, 0.030480384826660156, 0.030305280685424804, 0.030340095520019532, 0.030306304931640625, 0.030331903457641602, 0.030479360580444335, 0.03058585548400879, 0.030938112258911132, 0.03134259223937988, 0.030529535293579102, 0.03119308853149414, 0.03132825660705566, 0.030437376022338865, 0.030294015884399415, 0.03035955238342285, 0.030325759887695314, 0.030500864028930662, 0.03040563201904297, 0.030277631759643556, 0.030422016143798827, 0.030284799575805665, 
0.030341119766235353, 0.03142860794067383, 0.0303687686920166, 0.03022233581542969, 0.031285247802734374, 0.03117568016052246, 0.030316543579101563, 0.030247936248779295, 0.030516223907470705, 0.0313753604888916, 0.030855167388916017, 0.03059916877746582, 0.03037081527709961, 0.030292991638183595, 0.031116287231445314, 0.03037696075439453, 0.030724096298217773, 0.03079782485961914, 0.031023103713989256]",tokens/s,32.54050994385577,,,1,64,1,,, -4bit-awq-exllama-v1-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,2,2,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 304, in hf_raise_for_status - response.raise_for_status() - File ""/usr/local/lib/python3.10/dist-packages/requests/models.py"", line 1024, in raise_for_status - raise HTTPError(http_error_msg, response=self) -requests.exceptions.HTTPError: 404 Client Error: Not Found for url: https://huggingface.co/2/resolve/main/config.json - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1221, in hf_hub_download - return _hf_hub_download_to_cache_dir( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1325, in _hf_hub_download_to_cache_dir - _raise_on_head_call_error(head_call_error, force_download, local_files_only) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1823, in _raise_on_head_call_error - raise head_call_error - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1722, in _get_metadata_or_catch_error - metadata = get_hf_file_metadata(url=url, proxies=proxies, timeout=etag_timeout, headers=headers) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1645, in get_hf_file_metadata - r = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 372, in 
_request_wrapper - response = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 396, in _request_wrapper - hf_raise_for_status(response) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status - raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-6694935a-35f42d024bb07a2971d6deef;ee9a5760-9924-4384-8bd7-8625ad6ae447) - -Repository Not Found for url: https://huggingface.co/2/resolve/main/config.json. -Please make sure you specified the correct `repo_id` and `repo_type`. -If you are trying to access a private or gated repo, make sure you are authenticated. - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 425, in cached_file - raise EnvironmentError( -OSError: 2 is not a local folder and is not a valid model identifier listed on 'https://huggingface.co/models' -If this is a private repository, make sure to pass a token having permission to this repo either by logging in with `huggingface-cli login` or by passing `token=` - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,1,64,1,,, -4bit-awq-exllama-v1-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,tiiuae/falcon-40b,tiiuae/falcon-40b,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = 
Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - self.load_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 559, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3907, in from_pretrained - hf_quantizer.postprocess_model(model) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/base.py"", line 195, in postprocess_model - return self._process_model_after_weight_loading(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/quantizer_awq.py"", line 107, in _process_model_after_weight_loading - model = post_init_awq_exllama_modules(model, self.quantization_config.exllama_config) - File ""/usr/local/lib/python3.10/dist-packages/transformers/integrations/awq.py"", line 462, in post_init_awq_exllama_modules - model = exllama_post_init(model) - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllama.py"", line 144, in exllama_post_init - submodule.post_init() - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllama.py"", line 77, in post_init - self.q4 = exl_ext.make_q4( -NameError: name 'exl_ext' is not defined - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,1,64,1,,, -4bit-awq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,EleutherAI/pythia-160m,EleutherAI/pythia-160m,cuda,0,42,,,True,,,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,gpt_neox,MB,914.755584,925.36832,0.0,295.698432,277.263872,s,1,7.35141796875,7.35141796875,0.0,7.35141796875,7.35141796875,7.35141796875,7.35141796875,[7.35141796875],,kWh,6.233995624999504e-06,3.381914999584419e-06,1.0008619118040407e-05,1.962452974262433e-05,,MB,1490.079744,981.991424,0.0,335.54432,313.833472,s,15,0.3399496002197266,0.022663306681315103,5.7211383432443204e-05,0.022685184478759765,0.0227266242980957,0.022732736206054687,0.022735423583984376,"[0.022736095428466798, 0.02260870361328125, 
0.022541471481323242, 0.02260688018798828, 0.022654272079467772, 0.022688287734985352, 0.022685184478759765, 0.02273129653930664, 0.022719615936279296, 0.02271891212463379, 0.02261903953552246, 0.022593856811523438, 0.022687007904052734, 0.022641727447509766, 0.022717248916625975]",tokens/s,11295.79207481937,kWh,2.665686498029198e-07,1.4603816851541322e-07,1.334549365936983e-06,1.7471561842553161e-06,tokens/kWh,146523820.99950266,MB,1535.93856,1009.2544,0.0,360.710144,313.836032,s,15,10.371762329101564,0.6914508219401042,0.006680039247002462,0.6938456420898438,0.6987757690429688,0.6995920532226563,0.7008718774414062,"[0.6989064331054687, 0.68086376953125, 0.6938456420898438, 0.68159033203125, 0.6929327392578125, 0.6916873168945312, 0.69467822265625, 0.7011918334960937, 0.693891845703125, 0.6985797729492188, 0.680870361328125, 0.695806640625, 0.6839374389648437, 0.6866423950195313, 0.6963375854492188]",tokens/s,91.11277042556942,kWh,8.052412294132352e-06,4.4124098637208345e-06,1.3229073396061313e-05,2.5693895553914503e-05,tokens/kWh,2451944.2708796198,,s,945,10.365152215957652,0.010968415043341421,0.000307329418872117,0.011035648345947266,0.011148902320861817,0.011267891120910645,0.011937669525146483,"[0.01092403221130371, 0.01103769588470459, 0.01109609603881836, 0.011143136024475097, 0.011041791915893554, 0.011183103561401368, 0.011080703735351562, 0.011054080009460449, 0.011104255676269532, 0.011065343856811523, 0.011074560165405273, 0.011065343856811523, 0.011167743682861327, 0.01107968044281006, 0.011082752227783203, 0.011101183891296386, 0.01111244773864746, 0.011108351707458495, 0.01107148838043213, 0.01103052806854248, 0.011026432037353515, 0.011078656196594238, 0.011072511672973634, 0.011072511672973634, 0.011323391914367676, 0.011191295623779298, 0.011150336265563965, 0.011110400199890137, 0.011073535919189453, 0.011131903648376466, 0.011086848258972168, 0.011089920043945312, 0.011066368103027344, 0.011131903648376466, 0.01103769588470459, 0.011099136352539063, 0.0110448637008667, 0.01102950382232666, 0.011064319610595704, 0.01103769588470459, 0.011045887947082519, 0.011085824012756347, 0.011364352226257325, 0.011018239974975585, 0.011051008224487305, 0.011011072158813476, 0.011094016075134277, 0.01102233600616455, 0.01107148838043213, 0.010986495971679687, 0.011045887947082519, 0.011146240234375, 0.011232255935668945, 0.011099136352539063, 0.011142144203186035, 0.011088895797729491, 0.01110431957244873, 0.011065279960632324, 0.01103769588470459, 0.011043840408325196, 0.011039744377136231, 0.01107151985168457, 0.011028448104858398, 0.010381312370300292, 0.01050931167602539, 0.010608639717102051, 0.010675200462341309, 0.010619903564453125, 0.010665984153747558, 0.01063321590423584, 0.010593279838562012, 0.010629119873046875, 0.010791935920715333, 0.010598400115966796, 0.010657792091369628, 0.01062604808807373, 0.01061068820953369, 0.010650624275207519, 0.010651647567749023, 0.010588159561157226, 0.01061683177947998, 0.01063219165802002, 0.010644479751586914, 0.01063424015045166, 0.01064140796661377, 0.010585087776184082, 0.010617856025695802, 0.010552319526672363, 0.011041791915893554, 0.01103052806854248, 0.011026432037353515, 0.01102233600616455, 0.011040831565856933, 0.011064255714416504, 0.011060223579406739, 0.011059200286865235, 0.011051008224487305, 0.011058176040649414, 0.011132927894592285, 0.011039744377136231, 0.011015168190002441, 0.011070464134216309, 0.011059200286865235, 0.011067392349243164, 0.0110448637008667, 0.011052032470703126, 0.011100159645080567, 
0.011109375953674316, 0.0110632963180542, 0.011045887947082519, 0.010989567756652833, 0.011021311759948731, 0.0110632963180542, 0.010942463874816894, 0.010740736007690429, 0.01062399959564209, 0.010611712455749512, 0.010656767845153809, 0.010670080184936523, 0.010613759994506837, 0.01062502384185791, 0.010669055938720704, 0.010629119873046875, 0.010695679664611817, 0.010607616424560547, 0.010619903564453125, 0.0104017915725708, 0.010597375869750977, 0.010612735748291016, 0.010653727531433106, 0.010641375541687011, 0.010512384414672851, 0.010671104431152344, 0.010595328330993652, 0.01063321590423584, 0.010650624275207519, 0.01061580753326416, 0.010653696060180663, 0.010602496147155761, 0.01061683177947998, 0.011652095794677735, 0.011155455589294434, 0.011094047546386718, 0.011023327827453614, 0.011056127548217774, 0.011075584411621094, 0.011133952140808106, 0.011289600372314454, 0.011041791915893554, 0.01103052806854248, 0.011023360252380371, 0.0110632963180542, 0.01100595188140869, 0.011060223579406739, 0.010938367843627929, 0.011053055763244628, 0.01142579174041748, 0.012782591819763184, 0.011768832206726074, 0.011103232383728028, 0.011157504081726074, 0.01103667163848877, 0.01104691219329834, 0.011016192436218262, 0.01095577621459961, 0.011020288467407227, 0.011056127548217774, 0.011081791877746583, 0.011016127586364746, 0.011026432037353515, 0.011048959732055665, 0.011035648345947266, 0.011051008224487305, 0.010978303909301757, 0.01103667163848877, 0.011033599853515624, 0.010984448432922364, 0.011032575607299805, 0.011043840408325196, 0.01103052806854248, 0.011019264221191406, 0.011003904342651367, 0.011095040321350098, 0.011143168449401856, 0.01103872013092041, 0.011020288467407227, 0.011049983978271484, 0.011089920043945312, 0.010998784065246582, 0.010390527725219726, 0.010586112022399903, 0.010650624275207519, 0.010658816337585449, 0.01062604808807373, 0.010683391571044922, 0.01062707233428955, 0.010609663963317872, 0.010572799682617188, 0.010597375869750977, 0.010591232299804687, 0.0106496000289917, 0.010707967758178711, 0.010603615760803223, 0.010621855735778809, 0.010668031692504883, 0.010585087776184082, 0.010675200462341309, 0.010656767845153809, 0.010588159561157226, 0.01061683177947998, 0.010639360427856445, 0.010613759994506837, 0.010644479751586914, 0.01062707233428955, 0.010602496147155761, 0.01061888027191162, 0.010631168365478515, 0.010599424362182617, 0.010580991744995117, 0.010609663963317872, 0.010629119873046875, 0.010654784202575684, 0.010620863914489746, 0.010638336181640624, 0.01064243221282959, 0.010549247741699219, 0.010658816337585449, 0.010680319786071778, 0.011213824272155762, 0.011612159729003906, 0.011681792259216308, 0.011481087684631347, 0.01107148838043213, 0.011068415641784669, 0.01108995246887207, 0.011048928260803222, 0.011090944290161133, 0.011126784324645997, 0.011083776473999024, 0.010987520217895508, 0.010962944030761718, 0.011011072158813476, 0.011010047912597656, 0.010966015815734862, 0.010954751968383789, 0.011041791915893554, 0.011064319610595704, 0.010991616249084473, 0.011100159645080567, 0.011055168151855468, 0.011087807655334472, 0.01112063980102539, 0.01036691188812256, 0.010607680320739747, 0.010598336219787598, 0.010628095626831055, 0.01061068820953369, 0.010711039543151855, 0.010677248001098634, 0.010622976303100586, 0.01063526439666748, 0.010647551536560058, 0.010594304084777833, 0.011035648345947266, 0.01112063980102539, 0.010936320304870606, 0.011115519523620606, 0.010995712280273438, 0.011070464134216309, 0.011026432037353515, 
0.011041791915893554, 0.011885567665100098, 0.010996735572814942, 0.011124735832214355, 0.01104691219329834, 0.011052032470703126, 0.011042816162109375, 0.011066368103027344, 0.011184160232543944, 0.01092092800140381, 0.010875904083251953, 0.011048959732055665, 0.0110448637008667, 0.011023360252380371, 0.010977279663085938, 0.011068448066711426, 0.011055071830749512, 0.011078656196594238, 0.010880000114440918, 0.0110632963180542, 0.011087871551513672, 0.011100159645080567, 0.011134976387023926, 0.011040767669677735, 0.011054112434387206, 0.011039711952209473, 0.011035648345947266, 0.01101414394378662, 0.011064319610595704, 0.011096063613891602, 0.011222016334533692, 0.011051008224487305, 0.01102233600616455, 0.011041791915893554, 0.011019264221191406, 0.010997759819030761, 0.011105279922485351, 0.011052032470703126, 0.01136025619506836, 0.011206656455993653, 0.010975232124328613, 0.011032575607299805, 0.011088895797729491, 0.011069439888000488, 0.011076607704162598, 0.010775551795959473, 0.011309087753295899, 0.011130880355834961, 0.011128800392150879, 0.010812416076660156, 0.010598400115966796, 0.010579999923706054, 0.010659808158874512, 0.010646528244018554, 0.010599424362182617, 0.010686464309692383, 0.01062707233428955, 0.010614784240722656, 0.010630144119262695, 0.01064140796661377, 0.01061888027191162, 0.010592479705810547, 0.010593055725097656, 0.01112063980102539, 0.011104255676269532, 0.011051008224487305, 0.011066368103027344, 0.01103872013092041, 0.011080703735351562, 0.011111424446105958, 0.011041791915893554, 0.011043840408325196, 0.011075584411621094, 0.011108351707458495, 0.011111424446105958, 0.01123532772064209, 0.011111424446105958, 0.011189248085021973, 0.011076607704162598, 0.011042816162109375, 0.01104793643951416, 0.011069439888000488, 0.011108351707458495, 0.01113702392578125, 0.011021311759948731, 0.011020288467407227, 0.011087871551513672, 0.011039744377136231, 0.011026432037353515, 0.010992639541625977, 0.011061247825622558, 0.011091967582702637, 0.011033599853515624, 0.010986495971679687, 0.011043840408325196, 0.01104793643951416, 0.011069439888000488, 0.01102847957611084, 0.011040767669677735, 0.011086848258972168, 0.0110448637008667, 0.011012096405029297, 0.011095040321350098, 0.011108351707458495, 0.011000831604003907, 0.011123711585998536, 0.011035743713378907, 0.01093827247619629, 0.010412032127380372, 0.010594304084777833, 0.010607616424560547, 0.010579968452453613, 0.01061580753326416, 0.010592255592346191, 0.010562560081481934, 0.010630144119262695, 0.01064857578277588, 0.01073151969909668, 0.011073535919189453, 0.0110632963180542, 0.011059200286865235, 0.011065343856811523, 0.011098112106323242, 0.011061247825622558, 0.01105510425567627, 0.011415552139282227, 0.011135999679565429, 0.011079744338989258, 0.011003840446472167, 0.011040767669677735, 0.011017215728759766, 0.011103296279907226, 0.011174847602844238, 0.011084799766540527, 0.011339776039123535, 0.011124735832214355, 0.011083776473999024, 0.011088895797729491, 0.011053055763244628, 0.010978303909301757, 0.011076607704162598, 0.011031552314758301, 0.01107968044281006, 0.010992639541625977, 0.011060223579406739, 0.011074560165405273, 0.011161600112915039, 0.011033599853515624, 0.011067392349243164, 0.011058176040649414, 0.011024383544921875, 0.010960895538330078, 0.011053055763244628, 0.01102950382232666, 0.011012096405029297, 0.010587136268615722, 0.010617856025695802, 0.010685440063476562, 0.010578944206237792, 0.011067392349243164, 0.011158528327941895, 0.01103769588470459, 0.011052032470703126, 
0.012264448165893555, 0.011268095970153809, 0.011296768188476563, 0.012126208305358887, 0.011174912452697755, 0.01112166404724121, 0.011267071723937988, 0.011043840408325196, 0.011446271896362305, 0.011192319869995117, 0.01105510425567627, 0.011045887947082519, 0.011044896125793457, 0.011062239646911621, 0.01194803237915039, 0.01103052806854248, 0.011012096405029297, 0.011060223579406739, 0.011072511672973634, 0.01107151985168457, 0.010985440254211425, 0.011041791915893554, 0.011056127548217774, 0.011107328414916993, 0.011078656196594238, 0.011034624099731445, 0.011149312019348144, 0.011369471549987792, 0.011176959991455078, 0.011075584411621094, 0.011110400199890137, 0.011094016075134277, 0.011132960319519042, 0.011158495903015138, 0.01110431957244873, 0.011121600151062012, 0.011169792175292969, 0.011134976387023926, 0.011083776473999024, 0.011125760078430176, 0.011133952140808106, 0.011097087860107421, 0.01109712028503418, 0.011073504447937012, 0.011096063613891602, 0.011073535919189453, 0.011148287773132324, 0.011051008224487305, 0.01115443229675293, 0.011074560165405273, 0.011403264045715332, 0.011179007530212403, 0.011023360252380371, 0.010973183631896973, 0.011076607704162598, 0.011054112434387206, 0.011023327827453614, 0.011091967582702637, 0.011119615554809571, 0.011052032470703126, 0.011324416160583496, 0.011096063613891602, 0.011189248085021973, 0.011140095710754394, 0.011097087860107421, 0.011095040321350098, 0.0110448637008667, 0.011119615554809571, 0.011092991828918456, 0.011147263526916504, 0.011058176040649414, 0.01042643165588379, 0.010554304122924804, 0.010661888122558593, 0.010563584327697753, 0.010606592178344726, 0.010669055938720704, 0.011553791999816895, 0.011209728240966797, 0.011141152381896972, 0.011108320236206054, 0.011080703735351562, 0.011080703735351562, 0.011109375953674316, 0.01103769588470459, 0.011189248085021973, 0.011067392349243164, 0.011067392349243164, 0.010991616249084473, 0.011080703735351562, 0.011031583786010742, 0.011148256301879883, 0.011061280250549317, 0.011025376319885254, 0.01114527988433838, 0.011065279960632324, 0.01116262435913086, 0.011040767669677735, 0.011123711585998536, 0.010991616249084473, 0.011013119697570802, 0.01101414394378662, 0.011040767669677735, 0.011109375953674316, 0.0110448637008667, 0.01103872013092041, 0.011070464134216309, 0.01103667163848877, 0.011078656196594238, 0.01101414394378662, 0.011061247825622558, 0.011083776473999024, 0.011158528327941895, 0.011053055763244628, 0.01107968044281006, 0.011113471984863281, 0.011097087860107421, 0.010952704429626465, 0.01080835247039795, 0.010745823860168457, 0.010880000114440918, 0.011281408309936524, 0.011013119697570802, 0.010921983718872071, 0.011090944290161133, 0.011042816162109375, 0.011009023666381837, 0.011118592262268067, 0.01107968044281006, 0.010598400115966796, 0.010507264137268067, 0.011070464134216309, 0.011076607704162598, 0.011149312019348144, 0.010867712020874023, 0.010805248260498047, 0.010576895713806152, 0.010609663963317872, 0.010430496215820312, 0.010461152076721192, 0.010785856246948243, 0.011085760116577149, 0.011068415641784669, 0.012213248252868653, 0.015571968078613281, 0.011165696144104004, 0.011365376472473144, 0.011157504081726074, 0.011023360252380371, 0.01082265567779541, 0.010863615989685058, 0.010982432365417481, 0.011000800132751464, 0.011001855850219726, 0.011010047912597656, 0.010854399681091309, 0.011082752227783203, 0.011283455848693847, 0.010651647567749023, 0.010589183807373047, 0.010653696060180663, 0.010611712455749512, 
0.010614784240722656, 0.010540032386779785, 0.01064857578277588, 0.010611712455749512, 0.010639360427856445, 0.01062399959564209, 0.011058176040649414, 0.010994688034057617, 0.010942463874816894, 0.010987520217895508, 0.010988544464111329, 0.010995712280273438, 0.01104691219329834, 0.011040831565856933, 0.011169728279113769, 0.010977279663085938, 0.010989567756652833, 0.011008000373840332, 0.011513855934143067, 0.01107148838043213, 0.01102847957611084, 0.011132927894592285, 0.01102950382232666, 0.010951680183410644, 0.01095680046081543, 0.01235865592956543, 0.01267199993133545, 0.011489279747009277, 0.011286527633666991, 0.01111244773864746, 0.011045887947082519, 0.011021311759948731, 0.01098038387298584, 0.011017184257507324, 0.011002880096435547, 0.010257408142089844, 0.010472448348999023, 0.01051750373840332, 0.010638336181640624, 0.010678272247314453, 0.010571776390075683, 0.010614784240722656, 0.010638336181640624, 0.01062399959564209, 0.010456064224243163, 0.01064345645904541, 0.010599424362182617, 0.01063321590423584, 0.010647551536560058, 0.010594304084777833, 0.010611712455749512, 0.010611712455749512, 0.010576895713806152, 0.01062604808807373, 0.010602496147155761, 0.0108155517578125, 0.011169728279113769, 0.011043840408325196, 0.01101318359375, 0.011002816200256348, 0.011023360252380371, 0.011069439888000488, 0.011019295692443847, 0.010900447845458985, 0.011216896057128906, 0.011082752227783203, 0.010988544464111329, 0.010913791656494141, 0.011008000373840332, 0.011076640129089356, 0.011070431709289551, 0.010981375694274903, 0.011013119697570802, 0.011405311584472656, 0.011207679748535156, 0.011499520301818847, 0.011885567665100098, 0.011226112365722657, 0.011024415969848633, 0.010759136199951173, 0.010683391571044922, 0.01063526439666748, 0.010617888450622559, 0.010602463722229004, 0.010604543685913086, 0.010672127723693848, 0.010611743927001953, 0.010613727569580077, 0.01064352035522461, 0.010744768142700195, 0.010564607620239258, 0.010663935661315918, 0.010669055938720704, 0.01063424015045166, 0.010614784240722656, 0.010595328330993652, 0.01064243221282959, 0.010602496147155761, 0.01030348777770996, 0.010587136268615722, 0.010575872421264648, 0.010607616424560547, 0.010951680183410644, 0.01164185619354248, 0.011061247825622558, 0.011924480438232422, 0.012004351615905762, 0.011126784324645997, 0.01115443229675293, 0.011119615554809571, 0.011109375953674316, 0.010994688034057617, 0.011042816162109375, 0.011066368103027344, 0.01103667163848877, 0.011033599853515624, 0.01103667163848877, 0.011026432037353515, 0.01103052806854248, 0.01102233600616455, 0.011054080009460449, 0.01103769588470459, 0.011076607704162598, 0.01101414394378662, 0.011074560165405273, 0.011069439888000488, 0.011081727981567382, 0.011062272071838379, 0.011078656196594238, 0.011126784324645997, 0.011081727981567382, 0.011048992156982422, 0.010977248191833495, 0.011039744377136231, 0.010936384201049804, 0.011095999717712402, 0.01105510425567627, 0.011092991828918456, 0.011157504081726074, 0.01107968044281006, 0.01104793643951416, 0.011019264221191406, 0.011009023666381837, 0.011059200286865235, 0.011032575607299805, 0.011124735832214355, 0.011111424446105958, 0.011105279922485351, 0.011040767669677735, 0.011024383544921875, 0.011084799766540527, 0.011078656196594238, 0.011057151794433593, 0.011369471549987792, 0.011206656455993653, 0.011052032470703126, 0.010828800201416015, 0.010575872421264648, 0.010619903564453125, 0.010656767845153809, 0.010573823928833008, 0.010895359992980956, 0.010866687774658204, 
0.010784768104553222, 0.011086848258972168, 0.011074560165405273, 0.011039744377136231, 0.011025407791137695, 0.011009023666381837, 0.011042816162109375, 0.011049983978271484, 0.010871808052062988, 0.011072511672973634, 0.011059200286865235, 0.011107328414916993, 0.011214847564697266, 0.011043840408325196, 0.011031552314758301, 0.011009023666381837, 0.010996735572814942, 0.011074560165405273, 0.011076607704162598, 0.011080703735351562, 0.010996735572814942, 0.011002880096435547, 0.010557439804077149, 0.010447872161865235, 0.010537983894348145, 0.010617856025695802, 0.010567680358886718, 0.010667008399963379, 0.010599424362182617, 0.010565631866455078, 0.010555392265319824, 0.010578944206237792, 0.010561535835266114, 0.010571776390075683, 0.010599424362182617, 0.010596351623535156, 0.01061068820953369, 0.01062604808807373, 0.010516480445861816, 0.01063321590423584, 0.010629119873046875, 0.01061888027191162, 0.010630175590515137, 0.010628064155578614, 0.01061888027191162, 0.010685440063476562, 0.010567680358886718, 0.01061996841430664, 0.011008959770202636, 0.01104691219329834, 0.011019264221191406, 0.011059200286865235, 0.01103667163848877, 0.011130880355834961, 0.010986495971679687, 0.011164671897888183, 0.01101414394378662, 0.011035648345947266, 0.01103052806854248, 0.01098140811920166, 0.011067359924316407, 0.01032198429107666, 0.010611647605895997, 0.010672127723693848, 0.010607616424560547, 0.01061068820953369, 0.010605567932128907, 0.010564607620239258, 0.010639360427856445, 0.01061888027191162, 0.010631168365478515, 0.010667008399963379, 0.010652671813964844, 0.01064243221282959, 0.01064345645904541, 0.01063526439666748, 0.01061683177947998, 0.010652671813964844, 0.010650655746459962, 0.010795999526977539, 0.010682368278503418, 0.01061068820953369, 0.010596480369567871, 0.010547072410583495, 0.010631168365478515, 0.010560511589050293, 0.010655743598937988, 0.010617856025695802, 0.011240447998046875, 0.01112883186340332, 0.011074560165405273, 0.01107148838043213, 0.01100595188140869, 0.011068415641784669, 0.01103769588470459, 0.01102847957611084, 0.011016192436218262, 0.010984448432922364, 0.011027456283569336, 0.011133952140808106, 0.011040767669677735, 0.011058176040649414, 0.011133952140808106, 0.011010047912597656, 0.01100595188140869, 0.010941439628601075, 0.01095680046081543, 0.01101414394378662, 0.010989567756652833, 0.011013119697570802, 0.01105510425567627, 0.010982399940490722, 0.010964991569519043, 0.011003904342651367, 0.01103872013092041, 0.01103052806854248, 0.01103052806854248, 0.01102233600616455, 0.011023360252380371, 0.01115443229675293, 0.011359231948852539, 0.011514880180358887, 0.01217843246459961, 0.011117568016052246, 0.01043558406829834, 0.010829824447631836, 0.01092403221130371, 0.011034624099731445, 0.01102233600616455, 0.010977312088012696, 0.011154399871826173, 0.011081727981567382, 0.011067392349243164, 0.0110632963180542, 0.010980352401733399, 0.011020288467407227, 0.011045887947082519, 0.01104793643951416, 0.01102847957611084, 0.011061247825622558, 0.011043840408325196, 0.011031552314758301, 0.01105510425567627, 0.01103667163848877, 0.011031552314758301, 0.011031552314758301, 0.011025407791137695, 0.011043904304504394, 0.011107328414916993, 0.011454400062561034, 0.011119615554809571, 0.011304960250854493, 0.011396096229553223, 0.011134976387023926, 0.011113471984863281, 0.011054080009460449, 0.011116543769836425, 0.011035648345947266, 0.011052032470703126, 0.01098857593536377, 0.010913760185241698, 0.010993663787841796, 0.010980352401733399, 
0.011011072158813476, 0.010975232124328613, 0.011001855850219726, 0.011016192436218262, 0.011216896057128906, 0.011073535919189453, 0.011017215728759766, 0.011027487754821777, 0.010930239677429199, 0.010919839859008788, 0.011016192436218262, 0.011034624099731445, 0.01103667163848877, 0.011095040321350098, 0.011111424446105958, 0.011056192398071289, 0.011031488418579101, 0.011099136352539063, 0.011096063613891602, 0.01111244773864746, 0.011031552314758301, 0.011092991828918456, 0.011013119697570802, 0.011045887947082519]",tokens/s,91.17087528585714,,,1,64,1,,, -4bit-awq-exllama-v1-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,meta-llama/Meta-Llama-3-70B,meta-llama/Meta-Llama-3-70B,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - self.load_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3904, in from_pretrained - dispatch_model(model, **device_map_kwargs) - File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 489, in dispatch_model - model.to(device) - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 2796, in to - return super().to(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1173, in to - return self._apply(convert) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 779, in _apply - module._apply(fn) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 779, in _apply - module._apply(fn) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 779, in _apply - module._apply(fn) - [Previous 
line repeated 2 more times] - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 853, in _apply - self._buffers[key] = fn(buf) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1159, in convert - return t.to( -torch.cuda.OutOfMemoryError: CUDA out of memory. Tried to allocate 20.00 MiB. GPU - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,1,64,1,,, -4bit-awq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,Qwen/Qwen1.5-14B,Qwen/Qwen1.5-14B,cuda,0,42,,,True,,,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,qwen2,MB,7841.718272,12367.429632,0.0,11737.759744,11171.24352,s,1,12.895005859375,12.895005859375,0.0,12.895005859375,12.895005859375,12.895005859375,12.895005859375,[12.895005859375],,kWh,7.179825345138448e-05,3.931415269732902e-05,0.00013130482726586568,0.00024241723341457918,,MB,3582.316544,12386.304,0.0,11739.856896,10925.606912,s,10,24.191484130859376,2.4191484130859378,8.838797338355962e-05,2.4191396484374996,2.4192248046875,2.419283911132813,2.419331196289063,"[2.419184814453125, 2.419147216796875, 2.419211669921875, 2.41912548828125, 2.419171142578125, 2.419343017578125, 2.419014892578125, 2.419024169921875, 2.419129638671875, 2.419132080078125]",tokens/s,105.82236237149203,kWh,2.857803405923682e-05,1.5661670982546436e-05,0.00015832804332900353,0.00020256774837078677,tokens/kWh,1263774.7225753285,MB,3586.64192,12390.498304,0.0,11741.954048,10925.609472,s,10,24.112470214843746,2.4112470214843755,0.031708831988401694,2.3988656005859372,2.4456813720703123,2.4674891235351564,2.484935324707031,"[2.38800048828125, 2.489296875, 2.394822509765625, 2.413488037109375, 2.390289306640625, 2.388175048828125, 2.440835205078125, 2.40290869140625, 2.3791767578125, 2.425477294921875]",tokens/s,26.127559490448593,kWh,2.7620828798540563e-05,1.5138643139702715e-05,9.68689941618028e-05,0.0001396284661000461,tokens/kWh,451197.393766823,,s,630,24.1103278121948,0.038270361606658455,0.0012979806030451542,0.03878707313537598,0.03965573120117188,0.03990143909454346,0.040785470733642575,"[0.04119039916992188, 0.0400711669921875, 0.03986022567749024, 0.039229438781738284, 0.03686707305908203, 0.03658342361450195, 0.037852161407470705, 0.036706302642822264, 0.036441089630126954, 0.03665510559082031, 0.03697663879394531, 0.03706060791015625, 0.03725721740722656, 0.03951001739501953, 0.038776832580566405, 0.03761971282958984, 0.039416831970214845, 0.03919974517822265, 0.03694182586669922, 0.03690496063232422, 0.036880382537841795, 0.03701862335205078, 0.03689574432373047, 0.03681491088867188, 0.03772102355957031, 0.039294975280761715, 0.039365631103515625, 0.039387134552001955, 0.03694182586669922, 0.03877580642700195, 0.0370780143737793, 0.03697868728637695, 0.03681075286865235, 0.038128639221191404, 0.03959296035766602, 0.03689574432373047, 0.036827136993408206, 0.036779041290283206, 0.0368117446899414, 0.038980609893798826, 0.036860927581787106, 0.039446529388427735, 0.03958681488037109, 0.03669913482666016, 
0.03684454345703125, 0.03678412628173828, 0.036787200927734375, 0.03692544174194336, 0.03691417694091797, 0.03684044647216797, 0.03681689453125, 0.03829759979248047, 0.036375553131103515, 0.03638681411743164, 0.03849523162841797, 0.03683327865600586, 0.03849318313598633, 0.03981619262695312, 0.03947622299194336, 0.03946495819091797, 0.039449600219726565, 0.03956531143188476, 0.03953664016723633, 0.04056371307373047, 0.03902668762207031, 0.039482368469238284, 0.03950284957885742, 0.03936870574951172, 0.039482368469238284, 0.039444480895996094, 0.03946700668334961, 0.03918745422363281, 0.03931238555908203, 0.03949465560913086, 0.040205310821533204, 0.03951308822631836, 0.03920588684082031, 0.03904409790039062, 0.03946393585205078, 0.03942195129394531, 0.03972915267944336, 0.03947727966308594, 0.03942294311523437, 0.03894476699829102, 0.03951308822631836, 0.039142398834228515, 0.03905641555786133, 0.039310302734375, 0.04075929641723633, 0.03967692947387695, 0.039360511779785154, 0.03935641479492188, 0.03943116760253906, 0.03892736053466797, 0.03947315216064453, 0.03916287994384766, 0.039521278381347655, 0.03971072006225586, 0.0393175048828125, 0.039570430755615234, 0.040217601776123046, 0.03972198486328125, 0.03952640151977539, 0.039600128173828124, 0.03948953628540039, 0.03990835189819336, 0.03989299011230469, 0.03931545639038086, 0.03987353515625, 0.039667713165283204, 0.03941888046264649, 0.03954483032226563, 0.03956326293945313, 0.039430145263671876, 0.039583744049072264, 0.03954483032226563, 0.039444480895996094, 0.039288833618164064, 0.039411712646484375, 0.03945369720458984, 0.04015411376953125, 0.03970560073852539, 0.039330814361572264, 0.03948134231567383, 0.039193599700927735, 0.03926732635498047, 0.03779993438720703, 0.036604927062988284, 0.03880755233764648, 0.03944755172729492, 0.03683737564086914, 0.03684966278076172, 0.03644313430786133, 0.03662233734130859, 0.03691929626464844, 0.03935232162475586, 0.03929292678833008, 0.03929600143432617, 0.03915776062011719, 0.03929702377319336, 0.03926732635498047, 0.036567039489746093, 0.03672576141357422, 0.03647999954223633, 0.0374466552734375, 0.03680051040649414, 0.03879731369018555, 0.03667660903930664, 0.03943936157226562, 0.036514816284179685, 0.03681075286865235, 0.03662233734130859, 0.0364031982421875, 0.03816758346557617, 0.04008752059936523, 0.0394598388671875, 0.03905023956298828, 0.03918131256103516, 0.03912908935546875, 0.036757503509521484, 0.03666534423828125, 0.03869696044921875, 0.03890790557861328, 0.03654655838012695, 0.03676979064941406, 0.03663359832763672, 0.03654348754882813, 0.036400127410888675, 0.036773887634277344, 0.036762622833251955, 0.036746238708496096, 0.03700121688842774, 0.037195777893066405, 0.03960627365112305, 0.03931340789794922, 0.039465984344482424, 0.03946393585205078, 0.03891507339477539, 0.038902782440185545, 0.03929190444946289, 0.039228416442871096, 0.038964225769042966, 0.03887615966796875, 0.03937484741210937, 0.03925196838378906, 0.039196670532226564, 0.03684249496459961, 0.03832217788696289, 0.03884543991088867, 0.03799961471557617, 0.03666022491455078, 0.036519935607910156, 0.03679846572875976, 0.03679235076904297, 0.03672572708129883, 0.03698175811767578, 0.037171199798583986, 0.037098495483398435, 0.03672780990600586, 0.03680972671508789, 0.04087091064453125, 0.040210430145263674, 0.039599105834960936, 0.03977523040771484, 0.0367718391418457, 0.04013772964477539, 0.03963187026977539, 0.040097793579101565, 0.03934822463989258, 0.03684454345703125, 0.036953086853027346, 
0.03687116622924805, 0.03919462585449219, 0.03928985595703125, 0.039803905487060545, 0.039430145263671876, 0.04001484680175781, 0.03965030288696289, 0.039411712646484375, 0.03697049713134765, 0.03899699020385742, 0.03682099151611328, 0.04012543869018555, 0.039836673736572265, 0.039523326873779296, 0.03919974517822265, 0.0391833610534668, 0.036582401275634766, 0.037615615844726565, 0.03944345474243164, 0.0364031982421875, 0.036724735260009765, 0.0362690544128418, 0.03621993637084961, 0.038568927764892576, 0.039155712127685545, 0.039065601348876954, 0.03680460739135742, 0.03648409652709961, 0.03667251205444336, 0.036746238708496096, 0.03681382369995117, 0.03904000091552735, 0.03979776000976563, 0.03958476638793945, 0.03925503921508789, 0.03917619323730469, 0.03935334396362305, 0.0392806396484375, 0.03907788848876953, 0.03912908935546875, 0.0391649284362793, 0.03786956787109375, 0.03680665588378906, 0.03888025665283203, 0.03674009704589844, 0.037400577545166014, 0.039569408416748046, 0.03955199813842773, 0.039521278381347655, 0.036827136993408206, 0.03791872024536133, 0.039534591674804685, 0.036751361846923826, 0.03732889556884766, 0.03973427200317383, 0.03951103973388672, 0.038056961059570314, 0.03640115356445312, 0.03695001602172852, 0.03929702377319336, 0.036773887634277344, 0.038389759063720705, 0.03919462585449219, 0.0393779182434082, 0.038865921020507815, 0.03680460739135742, 0.03755212783813477, 0.03971072006225586, 0.03980595016479492, 0.039229438781738284, 0.03899903869628906, 0.03757875061035156, 0.039261184692382815, 0.03890585708618164, 0.03680153656005859, 0.03674214553833008, 0.03672883224487305, 0.03727974319458008, 0.03927961730957031, 0.039436286926269534, 0.038967296600341796, 0.039636993408203126, 0.03807129669189453, 0.03664486312866211, 0.03678003311157227, 0.03936358261108398, 0.03954278564453125, 0.03702272033691406, 0.036908031463623044, 0.03697049713134765, 0.036278270721435545, 0.0363765754699707, 0.03658137512207031, 0.036759552001953126, 0.036708351135253905, 0.03687116622924805, 0.03689164733886719, 0.03676671981811523, 0.03789823913574219, 0.039521278381347655, 0.036972545623779295, 0.0369356803894043, 0.037029888153076174, 0.03720806503295898, 0.03792486572265625, 0.038212608337402344, 0.0395489273071289, 0.037166080474853515, 0.037195777893066405, 0.03690291213989258, 0.03686502456665039, 0.036951038360595705, 0.03705036926269531, 0.036787200927734375, 0.03934310531616211, 0.03940249633789063, 0.03943936157226562, 0.03942604827880859, 0.03684864044189453, 0.03683635330200195, 0.03733401489257813, 0.03712921524047851, 0.03970355224609375, 0.03719168090820312, 0.036907009124755856, 0.03656192016601562, 0.036139007568359374, 0.036350975036621096, 0.03742617416381836, 0.03699814224243164, 0.039106559753417966, 0.03943936157226562, 0.03698175811767578, 0.03818393707275391, 0.03941785430908203, 0.03949977493286133, 0.03685478210449219, 0.03703705596923828, 0.037465087890625, 0.03926630401611328, 0.03942707061767578, 0.039444480895996094, 0.0393994255065918, 0.03935027313232422, 0.037814273834228515, 0.03696332931518555, 0.03901235198974609, 0.04174540710449219, 0.03971379089355469, 0.03975065612792969, 0.039605247497558595, 0.03909836959838867, 0.036721664428710936, 0.03686502456665039, 0.03681382369995117, 0.03667251205444336, 0.036706302642822264, 0.0368353271484375, 0.03681280136108398, 0.03658649444580078, 0.03687833786010742, 0.03654143905639649, 0.036280319213867186, 0.0365404167175293, 0.036547584533691405, 0.03916697692871094, 0.03977830505371094, 
0.04058726501464844, 0.03830579376220703, 0.03714355087280274, 0.039462913513183595, 0.03931955337524414, 0.03928575897216797, 0.039332862854003905, 0.038877185821533204, 0.03928575897216797, 0.03842355346679688, 0.03939430236816406, 0.039360511779785154, 0.03937996673583984, 0.03746713638305664, 0.039382015228271484, 0.036601856231689454, 0.036915199279785156, 0.038694911956787106, 0.036975616455078124, 0.037996543884277346, 0.03949465560913086, 0.0376995849609375, 0.03979673767089844, 0.03694182586669922, 0.03948134231567383, 0.03911475372314453, 0.04102860641479492, 0.04029542541503906, 0.03972403335571289, 0.03943526458740235, 0.039376895904541014, 0.03697151947021484, 0.037784576416015625, 0.03936870574951172, 0.03943423843383789, 0.039349246978759765, 0.03943731307983398, 0.03948339080810547, 0.0370063362121582, 0.03697663879394531, 0.039003135681152344, 0.036988929748535154, 0.03696025466918945, 0.039106559753417966, 0.03932672119140625, 0.03920588684082031, 0.03689267349243164, 0.0370964469909668, 0.03879731369018555, 0.03928268814086914, 0.03695718383789062, 0.039365631103515625, 0.03893862533569336, 0.038504447937011715, 0.040151039123535154, 0.03959091186523438, 0.03950796890258789, 0.039360511779785154, 0.03939328002929687, 0.0390748176574707, 0.03917824172973633, 0.03838054275512695, 0.03916287994384766, 0.037779457092285154, 0.036574207305908206, 0.03671244812011719, 0.036773887634277344, 0.03668172836303711, 0.03673907089233398, 0.036746238708496096, 0.03675545501708984, 0.03919564819335938, 0.04016025543212891, 0.03930316925048828, 0.03930214309692383, 0.03918745422363281, 0.03689471817016601, 0.03837952041625976, 0.03932876968383789, 0.03926425552368164, 0.03942297744750976, 0.03923865509033203, 0.036708351135253905, 0.03678412628173828, 0.036732929229736325, 0.03673907089233398, 0.036566017150878906, 0.03910860824584961, 0.039654399871826174, 0.039757823944091795, 0.03933491134643555, 0.039293952941894535, 0.036939777374267575, 0.036760574340820314, 0.036969470977783206, 0.039204864501953124, 0.037716991424560545, 0.03982745742797852, 0.037141502380371096, 0.03970048141479492, 0.039357440948486325, 0.036760574340820314, 0.036982784271240236, 0.0374015998840332, 0.03952947235107422, 0.039413761138916016, 0.03948953628540039, 0.03940966415405273, 0.03924582290649414, 0.03684966278076172, 0.03681689453125, 0.03697868728637695, 0.03702579116821289, 0.036877311706542966, 0.039975936889648435, 0.04019814300537109, 0.03968511962890625, 0.03947622299194336, 0.03697459030151367, 0.03685171127319336, 0.03833446502685547, 0.038970367431640625, 0.03700121688842774, 0.0393963508605957, 0.036972545623779295, 0.03728998565673828, 0.03780611038208008, 0.0384931526184082, 0.03946086502075195, 0.03970764923095703, 0.03957657623291016, 0.03956326293945313, 0.03941273498535156, 0.03951001739501953, 0.03721830368041992, 0.041014270782470705, 0.039532543182373044, 0.036828159332275394, 0.03693772888183594, 0.03674214553833008, 0.03677695846557617, 0.03654860687255859, 0.03673702239990234, 0.03709337615966797, 0.03711795043945312, 0.036923393249511716, 0.036877311706542966, 0.037746688842773435, 0.03947315216064453, 0.03894169616699219, 0.03856588745117188, 0.036803585052490234, 0.03698483276367188, 0.036760574340820314, 0.037835777282714846, 0.03771187210083008, 0.03934105682373047, 0.039327743530273435, 0.03952230453491211, 0.03689779281616211, 0.036759552001953126, 0.03682099151611328, 0.03812761688232422, 0.039119873046875, 0.036939777374267575, 0.036913150787353514, 0.03664281463623047, 
0.03721932983398438, 0.03681689453125, 0.036819969177246094, 0.03656294250488281, 0.037394432067871096, 0.036923393249511716, 0.03688345718383789, 0.03888947296142578, 0.039398399353027344, 0.03688550567626953, 0.03683225631713867, 0.03684249496459961, 0.03681792068481445, 0.03684966278076172, 0.03664281463623047, 0.037000190734863284, 0.03668582534790039, 0.03796275329589844, 0.03696230316162109, 0.037238784790039066, 0.03923353576660156, 0.03899699020385742, 0.039119873046875, 0.03938816070556641, 0.03924070358276367, 0.03920076751708984, 0.03935334396362305, 0.03685990524291992, 0.03696230316162109, 0.038425632476806644, 0.03991958236694336, 0.03954073715209961, 0.03947520065307617, 0.03703603363037109, 0.03687833786010742, 0.03674828720092774, 0.03673907089233398, 0.038007808685302735, 0.038491134643554685, 0.036898815155029296, 0.036863998413085936, 0.03696025466918945, 0.03744460678100586, 0.03695206451416016, 0.03652096176147461, 0.03819724655151367, 0.037147647857666014, 0.039567359924316405, 0.03940249633789063, 0.04007526397705078, 0.03821977615356445, 0.03911167907714844, 0.03690086364746094, 0.036939777374267575, 0.03683942413330078, 0.03916697692871094, 0.039965694427490234, 0.0395489273071289, 0.03973017501831055, 0.03970150375366211, 0.03938816070556641, 0.041283584594726565, 0.03975884628295898, 0.03951103973388672, 0.0396492805480957, 0.03954278564453125, 0.03948646545410156, 0.038073345184326174, 0.039397377014160156, 0.03949772644042969, 0.04051148986816406, 0.03962060928344727, 0.03939123153686523, 0.03697868728637695, 0.037138431549072266, 0.03728076934814453, 0.03960627365112305, 0.036775936126708986, 0.03687628936767578, 0.036857856750488284, 0.03684864044189453, 0.03735756683349609, 0.040185855865478515, 0.040796161651611325, 0.039910400390625]",tokens/s,26.129881140867386,,,1,64,1,,, -4bit-awq-exllama-v1-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,mistralai/Mistral-7B-v0.1,mistralai/Mistral-7B-v0.1,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - 
self.load_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3907, in from_pretrained - hf_quantizer.postprocess_model(model) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/base.py"", line 195, in postprocess_model - return self._process_model_after_weight_loading(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/quantizer_awq.py"", line 107, in _process_model_after_weight_loading - model = post_init_awq_exllama_modules(model, self.quantization_config.exllama_config) - File ""/usr/local/lib/python3.10/dist-packages/transformers/integrations/awq.py"", line 462, in post_init_awq_exllama_modules - model = exllama_post_init(model) - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllama.py"", line 144, in exllama_post_init - submodule.post_init() - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllama.py"", line 77, in post_init - self.q4 = exl_ext.make_q4( -NameError: name 'exl_ext' is not defined - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,1,64,1,,, -4bit-awq-exllama-v1-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,google/gemma-7b,google/gemma-7b,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 304, in hf_raise_for_status - response.raise_for_status() - File ""/usr/local/lib/python3.10/dist-packages/requests/models.py"", line 1024, in raise_for_status - raise HTTPError(http_error_msg, response=self) -requests.exceptions.HTTPError: 403 Client Error: Forbidden for url: https://huggingface.co/google/gemma-7b/resolve/main/config.json - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1722, in _get_metadata_or_catch_error - metadata = get_hf_file_metadata(url=url, proxies=proxies, timeout=etag_timeout, headers=headers) - File 
""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1645, in get_hf_file_metadata - r = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 372, in _request_wrapper - response = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 396, in _request_wrapper - hf_raise_for_status(response) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 367, in hf_raise_for_status - raise HfHubHTTPError(message, response=response) from e -huggingface_hub.utils._errors.HfHubHTTPError: (Request ID: Root=1-66948155-7fe862463b36ae76360f388e;fe492e76-e99d-488e-a31f-db34d4326761) - -403 Forbidden: Please enable access to public gated repositories in your fine-grained token settings to view this repository.. -Cannot access content at: https://huggingface.co/google/gemma-7b/resolve/main/config.json. -If you are trying to create or update content,make sure you have a token with the `write` role. - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1221, in hf_hub_download - return _hf_hub_download_to_cache_dir( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1325, in _hf_hub_download_to_cache_dir - _raise_on_head_call_error(head_call_error, force_download, local_files_only) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1826, in _raise_on_head_call_error - raise LocalEntryNotFoundError( -huggingface_hub.utils._errors.LocalEntryNotFoundError: An error happened while trying to locate the file on the Hub and we cannot find the requested files in the local cache. Please check your connection and try again or make sure your Internet connection is on. 
- -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 445, in cached_file - raise EnvironmentError( -OSError: We couldn't connect to 'https://huggingface.co' to load this file, couldn't find it in the cached files and it looks like google/gemma-7b is not the path to a directory containing a file named config.json. -Checkout your internet connection or see how to run the library in offline mode at 'https://huggingface.co/docs/transformers/installation#offline-mode'. 
- -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,1,64,1,,, -4bit-awq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,Qwen/Qwen1.5-1.8B,Qwen/Qwen1.5-1.8B,cuda,0,42,,,True,,,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,qwen2,MB,1871.118336,3095.92064,0.0,2466.250752,2401.696256,s,1,8.882888671875,8.882888671875,0.0,8.882888671875,8.882888671875,8.882888671875,8.882888671875,[8.882888671875],,kWh,2.3580262875717987e-05,1.2907887292708803e-05,3.5771417505992e-05,7.225956767441878e-05,,MB,1895.206912,3326.60736,0.0,2680.160256,2582.175744,s,10,2.343885559082031,0.23438855590820312,6.625298674110216e-05,0.23437321472167968,0.23448367462158204,0.23449865036010742,0.23451063095092772,"[0.2345136260986328, 0.2343237762451172, 0.2344803466796875, 0.2342906951904297, 0.23436837768554689, 0.2343705291748047, 0.23433103942871095, 0.2344017333984375, 0.23437590026855468, 0.23442953491210938]",tokens/s,1092.2034952093006,kWh,2.7706452002259954e-06,1.5181686681675372e-06,1.6799270545348558e-05,2.1088084413742088e-05,tokens/kWh,12139556.869052418,MB,1901.842432,3328.704512,0.0,2680.160256,2582.178304,s,10,14.054714355468748,1.4054714355468751,0.008412522232201996,1.4060438842773437,1.4168462402343749,1.4184422241210937,1.4197190112304687,"[1.3965853271484374, 1.4200382080078124, 1.4041143798828124, 1.4082254638671876, 1.3889356689453125, 1.406063232421875, 1.4061092529296875, 1.402126708984375, 1.4164915771484374, 1.4060245361328125]",tokens/s,44.8248170732025,kWh,1.6573700920538287e-05,9.08262591344625e-06,3.3432075195254836e-05,5.9088402029239374e-05,tokens/kWh,1066199.0819928588,,s,630,14.05293461227417,0.022306245416308206,0.0004870779975481084,0.02210406494140625,0.02290698184967041,0.023074457263946534,0.023916380081176757,"[0.022962175369262695, 0.022143999099731446, 0.02246553611755371, 0.02221772766113281, 0.021712896347045898, 0.021744640350341796, 0.022041599273681642, 0.02245427131652832, 0.022179840087890625, 0.022024192810058595, 0.022582271575927734, 0.022803455352783202, 0.022786048889160155, 0.022740991592407226, 0.021951488494873047, 0.021968896865844727, 0.02205388832092285, 0.021979135513305666, 0.02200371170043945, 0.02194534492492676, 0.022146047592163084, 0.021983232498168945, 0.021978111267089845, 0.022237184524536133, 0.022046720504760742, 0.022166528701782227, 0.02204470443725586, 0.022032352447509767, 0.022005760192871093, 0.022235136032104492, 0.022236160278320313, 0.022244352340698242, 0.022271999359130858, 0.02207439994812012, 0.022061023712158203, 0.02205695915222168, 0.022034431457519533, 0.021777408599853516, 0.021808128356933593, 0.02186649513244629, 0.021962751388549806, 0.021967872619628907, 0.022286336898803712, 0.022390783309936522, 0.022013952255249023, 0.02186751937866211, 0.02213580894470215, 0.02224947166442871, 0.02208460807800293, 0.022000640869140626, 0.022183935165405275, 0.021931007385253908, 0.0219289608001709, 0.022155263900756835, 0.02207846450805664, 0.022355968475341798, 0.022143999099731446, 0.021974016189575195, 
0.021970943450927736, 0.022746112823486327, 0.022139904022216796, 0.022793216705322264, 0.022974464416503908, 0.02188800048828125, 0.021770240783691407, 0.02391347122192383, 0.023567359924316408, 0.023207935333251953, 0.023154687881469727, 0.022545408248901368, 0.02253209686279297, 0.022776832580566408, 0.02290790367126465, 0.022849536895751952, 0.02285875129699707, 0.02252288055419922, 0.022009855270385743, 0.02209280014038086, 0.022738943099975584, 0.022166528701782227, 0.02234060859680176, 0.02287513542175293, 0.022580223083496095, 0.022236160278320313, 0.02208051109313965, 0.022129663467407225, 0.022384639739990234, 0.021884927749633788, 0.02204569625854492, 0.023014400482177736, 0.022718463897705078, 0.021953535079956055, 0.02206924819946289, 0.022882303237915038, 0.02288640022277832, 0.022829055786132812, 0.022916095733642578, 0.022782976150512696, 0.02290278434753418, 0.021940223693847655, 0.02249625587463379, 0.022976512908935546, 0.02268057632446289, 0.02268569564819336, 0.02271232032775879, 0.022819839477539062, 0.02208665657043457, 0.021988351821899413, 0.022134815216064453, 0.021817312240600586, 0.02376192092895508, 0.022435840606689454, 0.023053312301635744, 0.02289254379272461, 0.02274406433105469, 0.022830080032348633, 0.022047744750976563, 0.02188800048828125, 0.021972991943359374, 0.021977088928222657, 0.02188800048828125, 0.021948415756225585, 0.02201190376281738, 0.022764543533325195, 0.023217151641845703, 0.02307583999633789, 0.022167552947998048, 0.021986303329467775, 0.021935104370117187, 0.02203545570373535, 0.024457216262817383, 0.023228416442871092, 0.02290073585510254, 0.022809600830078124, 0.02280243110656738, 0.02281881523132324, 0.021960704803466798, 0.022025215148925782, 0.02206719970703125, 0.021958656311035156, 0.021711872100830077, 0.022017023086547852, 0.022005760192871093, 0.02189516830444336, 0.021970943450927736, 0.02250752067565918, 0.021995519638061522, 0.021724159240722657, 0.021980159759521483, 0.02200371170043945, 0.021985279083251954, 0.02205695915222168, 0.021961727142333985, 0.021983232498168945, 0.021957632064819335, 0.021893119812011717, 0.022691839218139647, 0.023203840255737306, 0.02290483283996582, 0.022433792114257813, 0.022805503845214844, 0.02404351997375488, 0.02326016044616699, 0.022238208770751954, 0.021921791076660157, 0.02265497589111328, 0.022761472702026365, 0.02283417510986328, 0.022351871490478514, 0.02210201644897461, 0.02202828788757324, 0.022435840606689454, 0.022879232406616212, 0.02286591911315918, 0.022562816619873048, 0.021801984786987305, 0.021762048721313477, 0.021763071060180664, 0.02185113525390625, 0.022123519897460937, 0.02205900764465332, 0.022040576934814454, 0.022026239395141603, 0.021961727142333985, 0.02187571144104004, 0.02172313690185547, 0.021646335601806642, 0.0218470401763916, 0.02168115234375, 0.02193916893005371, 0.021971967697143553, 0.021987327575683592, 0.022016000747680665, 0.021987327575683592, 0.02229964828491211, 0.022009855270385743, 0.022631423950195313, 0.022189056396484375, 0.021986303329467775, 0.02210508728027344, 0.021857280731201172, 0.02190540885925293, 0.021970943450927736, 0.022588415145874022, 0.022691839218139647, 0.02200579261779785, 0.02215727996826172, 0.021948415756225585, 0.021720064163208007, 0.022112255096435548, 0.021980159759521483, 0.021999616622924805, 0.022063104629516602, 0.021996543884277343, 0.022013952255249023, 0.021978111267089845, 0.022122495651245116, 0.022076416015625, 0.02209280014038086, 0.021958656311035156, 0.02210406494140625, 0.022026239395141603, 
0.0224399356842041, 0.022732799530029296, 0.023051263809204102, 0.022975488662719725, 0.021939199447631837, 0.02222489547729492, 0.022786048889160155, 0.021947391510009767, 0.02205286407470703, 0.022165504455566407, 0.02454630470275879, 0.023842815399169923, 0.022602752685546876, 0.02261299133300781, 0.02289459228515625, 0.0224716796875, 0.022039552688598633, 0.022076416015625, 0.02247270393371582, 0.02282700729370117, 0.0231147518157959, 0.02302566337585449, 0.023476224899291992, 0.022785024642944338, 0.022458368301391602, 0.02210304069519043, 0.02313523292541504, 0.022556671142578123, 0.022138879776000975, 0.0220579833984375, 0.021800960540771484, 0.021622783660888673, 0.02167398452758789, 0.021840896606445313, 0.022122495651245116, 0.021789695739746092, 0.02307276725769043, 0.02309119987487793, 0.022101024627685546, 0.022154207229614257, 0.02208768081665039, 0.02202726364135742, 0.021991424560546875, 0.02229452705383301, 0.021729280471801758, 0.021777408599853516, 0.021832704544067383, 0.02164838409423828, 0.021988351821899413, 0.021998592376708984, 0.021934080123901366, 0.02201907157897949, 0.022025215148925782, 0.02206822395324707, 0.021959680557250977, 0.022072320938110353, 0.022345727920532226, 0.02231500816345215, 0.021942272186279296, 0.022205440521240235, 0.022158336639404298, 0.02233344078063965, 0.021971967697143553, 0.022064128875732423, 0.022041599273681642, 0.02168217658996582, 0.021906431198120118, 0.02200371170043945, 0.02189004707336426, 0.021894144058227538, 0.021884927749633788, 0.022016000747680665, 0.021969919204711915, 0.022194175720214843, 0.02208665657043457, 0.021984256744384766, 0.021956607818603514, 0.022022144317626953, 0.021788671493530275, 0.021827583312988282, 0.02162483215332031, 0.021749759674072267, 0.022841344833374022, 0.022001663208007814, 0.021917695999145507, 0.022129663467407225, 0.02210304069519043, 0.02188390350341797, 0.021893119812011717, 0.022545408248901368, 0.022737920761108397, 0.022303743362426756, 0.02182246398925781, 0.02273587226867676, 0.021958656311035156, 0.022395904541015626, 0.02287513542175293, 0.021901311874389647, 0.022064128875732423, 0.02186240005493164, 0.021986303329467775, 0.02209280014038086, 0.02171801567077637, 0.021901311874389647, 0.02208563232421875, 0.022005760192871093, 0.022139904022216796, 0.02200371170043945, 0.02205695915222168, 0.02204569625854492, 0.021949440002441405, 0.022009855270385743, 0.021942272186279296, 0.02203647994995117, 0.022133760452270508, 0.022012928009033202, 0.02220953559875488, 0.021963775634765623, 0.021748767852783204, 0.02189206314086914, 0.021987327575683592, 0.022001663208007814, 0.022006784439086914, 0.021964799880981444, 0.02211840057373047, 0.02184297561645508, 0.021677024841308595, 0.021598207473754884, 0.021769216537475586, 0.022016000747680665, 0.02267750358581543, 0.02346188735961914, 0.024416255950927734, 0.022947839736938477, 0.022975488662719725, 0.022990848541259764, 0.0227061767578125, 0.02307072067260742, 0.022930431365966796, 0.02330112075805664, 0.023362560272216795, 0.022937599182128905, 0.022775808334350587, 0.022806528091430665, 0.02305638313293457, 0.022879232406616212, 0.02310553550720215, 0.02263039970397949, 0.022597631454467772, 0.02169139289855957, 0.021746688842773438, 0.021958656311035156, 0.022000640869140626, 0.021953535079956055, 0.022054912567138672, 0.022132736206054687, 0.022320127487182616, 0.021993471145629884, 0.02187571144104004, 0.022331392288208008, 0.022014976501464844, 0.021767168045043944, 0.022122495651245116, 0.022137855529785155, 
0.02201190376281738, 0.021800960540771484, 0.022976512908935546, 0.022253568649291993, 0.02166783905029297, 0.02247987174987793, 0.02294169616699219, 0.022812671661376953, 0.021929983139038087, 0.0222423038482666, 0.022723583221435546, 0.023224319458007812, 0.022054912567138672, 0.02206719970703125, 0.022010879516601564, 0.022042623519897463, 0.02272051239013672, 0.022355968475341798, 0.021732351303100587, 0.022296575546264647, 0.022575103759765625, 0.02187571144104004, 0.021970943450927736, 0.02267136001586914, 0.021725183486938478, 0.021734399795532225, 0.022025215148925782, 0.021761024475097656, 0.022063104629516602, 0.022074367523193358, 0.02270412826538086, 0.02251571273803711, 0.02220134353637695, 0.0221214714050293, 0.0228351993560791, 0.022561792373657227, 0.022038528442382813, 0.022278144836425783, 0.022592512130737305, 0.02207846450805664, 0.02204364776611328, 0.02200371170043945, 0.02190438461303711, 0.022424575805664062, 0.022840320587158205, 0.023661567687988282, 0.02285055923461914, 0.02229043197631836, 0.0227194881439209, 0.022846464157104493, 0.02282598304748535, 0.02291302490234375, 0.02213478469848633, 0.022435840606689454, 0.02272153663635254, 0.022809600830078124, 0.02207334327697754, 0.021983232498168945, 0.021957632064819335, 0.021810176849365235, 0.021971967697143553, 0.022326271057128907, 0.022922239303588866, 0.022906879425048828, 0.022091775894165038, 0.0224849910736084, 0.02289664077758789, 0.022963199615478515, 0.022231039047241212, 0.022150144577026368, 0.022179840087890625, 0.02206515121459961, 0.021813247680664064, 0.02200371170043945, 0.02206822395324707, 0.022074367523193358, 0.02211020851135254, 0.02189926338195801, 0.022197248458862305, 0.021999616622924805, 0.022227968215942383, 0.02265907287597656, 0.022289407730102538, 0.022082559585571288, 0.021836799621582033, 0.021796863555908205, 0.02255564880371094, 0.022064128875732423, 0.021948415756225585, 0.022008832931518556, 0.022774784088134766, 0.022822912216186524, 0.022535167694091796, 0.02247987174987793, 0.02231808090209961, 0.022139904022216796, 0.022160383224487306, 0.022041599273681642, 0.02206822395324707, 0.022922239303588866, 0.022758399963378906, 0.02274406433105469, 0.022839296340942384, 0.022635520935058592, 0.02206822395324707, 0.022022144317626953, 0.021746688842773438, 0.021998592376708984, 0.02204979133605957, 0.021940223693847655, 0.022537216186523438, 0.022830080032348633, 0.021963775634765623, 0.022017023086547852, 0.02206822395324707, 0.021997568130493163, 0.022033407211303712, 0.021979135513305666, 0.022025215148925782, 0.02209587287902832, 0.022847488403320314, 0.02187059211730957, 0.021935104370117187, 0.02189516830444336, 0.02229350471496582, 0.022603776931762694, 0.02210918426513672, 0.02208870315551758, 0.022830080032348633, 0.023177215576171875, 0.02294988822937012, 0.022771711349487304, 0.02523750305175781, 0.024203264236450195, 0.022993919372558593, 0.022777856826782225, 0.021876735687255858, 0.021953535079956055, 0.021979135513305666, 0.021994495391845705, 0.022559743881225586, 0.022895616531372072, 0.022874111175537108, 0.022760448455810548, 0.02330624008178711, 0.022861824035644532, 0.021925888061523437, 0.021953535079956055, 0.021740543365478517, 0.022749183654785156, 0.022176767349243166, 0.021843967437744142, 0.021790719985961913, 0.021734399795532225, 0.02191564750671387, 0.022131711959838866, 0.02185116767883301, 0.021965791702270508, 0.022124544143676757, 0.02290073585510254, 0.023571456909179687, 0.02391756820678711, 0.02244607925415039, 0.022115327835083007, 
0.02188595199584961, 0.022337535858154296, 0.022253568649291993, 0.022013952255249023, 0.02207846450805664, 0.02224847984313965, 0.02211427116394043, 0.022200319290161134, 0.02206617546081543, 0.02225868797302246, 0.02264374351501465, 0.023469024658203125, 0.022837247848510742, 0.022922239303588866, 0.02287615966796875, 0.02272153663635254, 0.022764543533325195, 0.02226483154296875, 0.022054912567138672, 0.02210304069519043, 0.021997568130493163, 0.021967872619628907, 0.021996543884277343, 0.021944320678710938, 0.02210406494140625, 0.022350847244262697, 0.022805503845214844, 0.021990400314331054, 0.02205183982849121, 0.021949440002441405, 0.02230067253112793, 0.022939647674560547, 0.02209280014038086, 0.021982208251953125, 0.022561792373657227, 0.0228853759765625, 0.022822912216186524, 0.023038976669311522, 0.022017023086547852, 0.022286336898803712, 0.022253568649291993, 0.02209791946411133, 0.022838272094726563, 0.02281881523132324, 0.022145023345947267, 0.02209382438659668, 0.022433792114257813, 0.022409215927124023, 0.022560768127441407, 0.022724607467651366, 0.02290278434753418, 0.02285158348083496, 0.022800384521484376, 0.022237184524536133, 0.021707775115966797, 0.02165452766418457, 0.021809152603149414, 0.022123519897460937, 0.02285772705078125, 0.023004159927368165, 0.02285875129699707, 0.022502399444580077, 0.02287615966796875, 0.02269491195678711, 0.021939199447631837, 0.02289356803894043, 0.02206208038330078, 0.022013952255249023, 0.02167807960510254, 0.022162431716918944, 0.021747711181640626, 0.02188287925720215, 0.022076416015625, 0.022023168563842774, 0.022996992111206056, 0.022793216705322264, 0.022218751907348632, 0.02169343948364258, 0.022252544403076172, 0.02163199996948242]",tokens/s,44.83049394179511,,,1,64,1,,, -4bit-awq-exllama-v1-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,meta-llama/Meta-Llama-3-8B,meta-llama/Meta-Llama-3-8B,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - self.load_model_from_pretrained() - File 
""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3907, in from_pretrained - hf_quantizer.postprocess_model(model) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/base.py"", line 195, in postprocess_model - return self._process_model_after_weight_loading(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/quantizer_awq.py"", line 107, in _process_model_after_weight_loading - model = post_init_awq_exllama_modules(model, self.quantization_config.exllama_config) - File ""/usr/local/lib/python3.10/dist-packages/transformers/integrations/awq.py"", line 462, in post_init_awq_exllama_modules - model = exllama_post_init(model) - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllama.py"", line 144, in exllama_post_init - submodule.post_init() - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllama.py"", line 77, in post_init - self.q4 = exl_ext.make_q4( -NameError: name 'exl_ext' is not defined - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,1,64,1,,, -4bit-awq-exllama-v1-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,microsoft/phi-1_5,microsoft/phi-1_5,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - self.load_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File 
""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3907, in from_pretrained - hf_quantizer.postprocess_model(model) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/base.py"", line 195, in postprocess_model - return self._process_model_after_weight_loading(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/quantizer_awq.py"", line 107, in _process_model_after_weight_loading - model = post_init_awq_exllama_modules(model, self.quantization_config.exllama_config) - File ""/usr/local/lib/python3.10/dist-packages/transformers/integrations/awq.py"", line 462, in post_init_awq_exllama_modules - model = exllama_post_init(model) - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllama.py"", line 144, in exllama_post_init - submodule.post_init() - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllama.py"", line 77, in post_init - self.q4 = exl_ext.make_q4( -NameError: name 'exl_ext' is not defined - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,1,64,1,,, -4bit-awq-exllama-v1-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,togethercomputer/RedPajama-INCITE-Base-3B-v1,togethercomputer/RedPajama-INCITE-Base-3B-v1,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - self.load_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3907, in from_pretrained - hf_quantizer.postprocess_model(model) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/base.py"", line 195, in postprocess_model - return 
self._process_model_after_weight_loading(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/quantizer_awq.py"", line 107, in _process_model_after_weight_loading - model = post_init_awq_exllama_modules(model, self.quantization_config.exllama_config) - File ""/usr/local/lib/python3.10/dist-packages/transformers/integrations/awq.py"", line 462, in post_init_awq_exllama_modules - model = exllama_post_init(model) - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllama.py"", line 144, in exllama_post_init - submodule.post_init() - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllama.py"", line 77, in post_init - self.q4 = exl_ext.make_q4( -NameError: name 'exl_ext' is not defined - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,1,64,1,,, -4bit-awq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,Qwen/Qwen2-beta-72B,Qwen/Qwen2-beta-72B,cuda,0,42,,,True,,,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run - report = scenario.run(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run - self.run_model_loading_tracking(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking - backend.load() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load - self.load_transformers_model() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model - self.load_transformers_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights - self.load_transformers_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained - self.pretrained_model = self.automodel_loader.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4034, in from_pretrained - dispatch_model(model, **device_map_kwargs) - File 
""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 494, in dispatch_model - model.to(device) - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 2905, in to - return super().to(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1174, in to - return self._apply(convert) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply - module._apply(fn) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply - module._apply(fn) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply - module._apply(fn) - [Previous line repeated 2 more times] - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 854, in _apply - self._buffers[key] = fn(buf) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1160, in convert - return t.to( -torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 96.00 MiB. GPU 0 has a total capacity of 22.18 GiB of which 68.50 MiB is free. Process 123792 has 22.11 GiB memory in use. Of the allocated memory 21.86 GiB is allocated by PyTorch, and 10.74 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) - -",qwen2,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,1,64,1,,, -4bit-awq-exllama-v1-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,meta-llama/Llama-2-70b-hf,meta-llama/Llama-2-70b-hf,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - self.load_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = 
self.automodel_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3904, in from_pretrained - dispatch_model(model, **device_map_kwargs) - File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 489, in dispatch_model - model.to(device) - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 2796, in to - return super().to(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1173, in to - return self._apply(convert) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 779, in _apply - module._apply(fn) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 779, in _apply - module._apply(fn) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 779, in _apply - module._apply(fn) - [Previous line repeated 2 more times] - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 853, in _apply - self._buffers[key] = fn(buf) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1159, in convert - return t.to( -torch.cuda.OutOfMemoryError: CUDA out of memory. Tried to allocate 112.00 MiB. GPU - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,1,64,1,,, -4bit-awq-exllama-v1-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,meta-llama/Llama-2-7b-hf,meta-llama/Llama-2-7b-hf,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - self.load_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3907, in from_pretrained - hf_quantizer.postprocess_model(model) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/base.py"", line 195, in postprocess_model - return self._process_model_after_weight_loading(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/quantizer_awq.py"", line 107, in _process_model_after_weight_loading - model = post_init_awq_exllama_modules(model, self.quantization_config.exllama_config) - File ""/usr/local/lib/python3.10/dist-packages/transformers/integrations/awq.py"", line 462, in post_init_awq_exllama_modules - model = exllama_post_init(model) - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllama.py"", line 144, in exllama_post_init - submodule.post_init() - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllama.py"", line 77, in post_init - self.q4 = exl_ext.make_q4( -NameError: name 'exl_ext' is not defined - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,1,64,1,,, -4bit-awq-exllama-v1-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,r,r,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 304, in hf_raise_for_status - response.raise_for_status() - File ""/usr/local/lib/python3.10/dist-packages/requests/models.py"", line 1024, in raise_for_status - raise HTTPError(http_error_msg, response=self) -requests.exceptions.HTTPError: 404 Client Error: Not Found for url: https://huggingface.co/r/resolve/main/config.json - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1221, in hf_hub_download - return _hf_hub_download_to_cache_dir( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1325, in _hf_hub_download_to_cache_dir 
- _raise_on_head_call_error(head_call_error, force_download, local_files_only) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1823, in _raise_on_head_call_error - raise head_call_error - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1722, in _get_metadata_or_catch_error - metadata = get_hf_file_metadata(url=url, proxies=proxies, timeout=etag_timeout, headers=headers) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1645, in get_hf_file_metadata - r = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 372, in _request_wrapper - response = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 396, in _request_wrapper - hf_raise_for_status(response) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status - raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-66949103-5bc42b867a3e578b301bdf84;3bd5d13b-68ab-4200-896c-05d44122967b) - -Repository Not Found for url: https://huggingface.co/r/resolve/main/config.json. -Please make sure you specified the correct `repo_id` and `repo_type`. -If you are trying to access a private or gated repo, make sure you are authenticated. - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 425, in cached_file - raise EnvironmentError( -OSError: r is not a local folder and is not a valid model identifier listed on 'https://huggingface.co/models' -If this is a private repository, make sure to pass a token having permission to this repo either by logging in with `huggingface-cli login` or by passing `token=` - 
-",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,1,64,1,,, -4bit-awq-exllama-v1-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,google/recurrentgemma-7b,google/recurrentgemma-7b,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 304, in hf_raise_for_status - response.raise_for_status() - File ""/usr/local/lib/python3.10/dist-packages/requests/models.py"", line 1024, in raise_for_status - raise HTTPError(http_error_msg, response=self) -requests.exceptions.HTTPError: 404 Client Error: Not Found for url: https://huggingface.co/google/recurrentgemma-7b/resolve/main/config.json - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1221, in hf_hub_download - return _hf_hub_download_to_cache_dir( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1325, in _hf_hub_download_to_cache_dir - _raise_on_head_call_error(head_call_error, force_download, local_files_only) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1823, in _raise_on_head_call_error - raise head_call_error - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1722, in _get_metadata_or_catch_error - metadata = get_hf_file_metadata(url=url, proxies=proxies, timeout=etag_timeout, headers=headers) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1645, in get_hf_file_metadata - r = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 372, in _request_wrapper - response = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 396, in _request_wrapper - hf_raise_for_status(response) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 
352, in hf_raise_for_status - raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-66948229-737bac1f473f50945bc8d0b0;117f7788-2c0d-4053-84e3-d75d8bf7d799) - -Repository Not Found for url: https://huggingface.co/google/recurrentgemma-7b/resolve/main/config.json. -Please make sure you specified the correct `repo_id` and `repo_type`. -If you are trying to access a private or gated repo, make sure you are authenticated. - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 425, in cached_file - raise EnvironmentError( -OSError: google/recurrentgemma-7b is not a local folder and is not a valid model identifier listed on 'https://huggingface.co/models' -If this is a private repository, make sure to pass a token having permission to this repo either by logging in with `huggingface-cli login` or by passing `token=` - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,1,64,1,,, -4bit-awq-exllama-v1-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,huggyllama/llama-13b,huggyllama/llama-13b,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File 
""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - self.load_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3907, in from_pretrained - hf_quantizer.postprocess_model(model) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/base.py"", line 195, in postprocess_model - return self._process_model_after_weight_loading(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/quantizer_awq.py"", line 107, in _process_model_after_weight_loading - model = post_init_awq_exllama_modules(model, self.quantization_config.exllama_config) - File ""/usr/local/lib/python3.10/dist-packages/transformers/integrations/awq.py"", line 462, in post_init_awq_exllama_modules - model = exllama_post_init(model) - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllama.py"", line 144, in exllama_post_init - submodule.post_init() - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllama.py"", line 77, in post_init - self.q4 = exl_ext.make_q4( -NameError: name 'exl_ext' is not defined - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,1,64,1,,, -4bit-awq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,Deci/DeciCoder-1b,Deci/DeciCoder-1b,cuda,0,42,,,True,,,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target - report = worker(*worker_args) - File 
""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run - report = scenario.run(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run - self.run_model_loading_tracking(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking - backend.load() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load - self.load_transformers_model() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model - self.load_transformers_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights - self.load_transformers_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained - self.pretrained_model = self.automodel_loader.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 559, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained - model = cls(config, *model_args, **model_kwargs) - File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 248, in __init__ - self.model = DeciCoderModel(config) - File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 215, in __init__ - self.layers = nn.ModuleList([DeciCoderDecoderLayer(config) for _ in range(config.num_hidden_layers)]) - File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 215, in - self.layers = nn.ModuleList([DeciCoderDecoderLayer(config) for _ in range(config.num_hidden_layers)]) - File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 181, in __init__ - self.self_attn = DeciCoderAttention(config=config) - File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 54, in __init__ - self._init_rope() - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1729, in __getattr__ - raise AttributeError(f""'{type(self).__name__}' object has no attribute '{name}'"") -AttributeError: 'DeciCoderAttention' object has no attribute '_init_rope' - -",llama,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,1,64,1,,, -4bit-awq-exllama-v1-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,google/recurrentgemma-2b,google/recurrentgemma-2b,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent 
call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 304, in hf_raise_for_status - response.raise_for_status() - File ""/usr/local/lib/python3.10/dist-packages/requests/models.py"", line 1024, in raise_for_status - raise HTTPError(http_error_msg, response=self) -requests.exceptions.HTTPError: 403 Client Error: Forbidden for url: https://huggingface.co/google/recurrentgemma-2b/resolve/main/config.json - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1722, in _get_metadata_or_catch_error - metadata = get_hf_file_metadata(url=url, proxies=proxies, timeout=etag_timeout, headers=headers) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1645, in get_hf_file_metadata - r = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 372, in _request_wrapper - response = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 396, in _request_wrapper - hf_raise_for_status(response) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 367, in hf_raise_for_status - raise HfHubHTTPError(message, response=response) from e -huggingface_hub.utils._errors.HfHubHTTPError: (Request ID: Root=1-669481bd-5fab551753c2656905cd0985;2c272794-392b-4766-991d-adfa098a50b1) - -403 Forbidden: Please enable access to public gated repositories in your fine-grained token settings to view this repository.. -Cannot access content at: https://huggingface.co/google/recurrentgemma-2b/resolve/main/config.json. -If you are trying to create or update content,make sure you have a token with the `write` role. 
- -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1221, in hf_hub_download - return _hf_hub_download_to_cache_dir( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1325, in _hf_hub_download_to_cache_dir - _raise_on_head_call_error(head_call_error, force_download, local_files_only) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1826, in _raise_on_head_call_error - raise LocalEntryNotFoundError( -huggingface_hub.utils._errors.LocalEntryNotFoundError: An error happened while trying to locate the file on the Hub and we cannot find the requested files in the local cache. Please check your connection and try again or make sure your Internet connection is on. - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 445, in cached_file - raise EnvironmentError( -OSError: We couldn't connect to 'https://huggingface.co' to load this file, couldn't find it in the cached files and it looks like google/recurrentgemma-2b is not the path to a directory containing a file named config.json. -Checkout your internet connection or see how to run the library in offline mode at 'https://huggingface.co/docs/transformers/installation#offline-mode'. 
- -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,1,64,1,,, -4bit-awq-exllama-v1-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,v,v,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 304, in hf_raise_for_status - response.raise_for_status() - File ""/usr/local/lib/python3.10/dist-packages/requests/models.py"", line 1024, in raise_for_status - raise HTTPError(http_error_msg, response=self) -requests.exceptions.HTTPError: 404 Client Error: Not Found for url: https://huggingface.co/v/resolve/main/config.json - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1221, in hf_hub_download - return _hf_hub_download_to_cache_dir( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1325, in _hf_hub_download_to_cache_dir - _raise_on_head_call_error(head_call_error, force_download, local_files_only) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1823, in _raise_on_head_call_error - raise head_call_error - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1722, in _get_metadata_or_catch_error - metadata = get_hf_file_metadata(url=url, proxies=proxies, timeout=etag_timeout, headers=headers) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1645, in get_hf_file_metadata - r = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 372, in _request_wrapper - response = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 396, in _request_wrapper - hf_raise_for_status(response) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status - raise RepositoryNotFoundError(message, 
response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-66949473-61e860600dac1c367dd02b0b;f6f0dede-edd1-4196-ac94-bcff2355b84c) - -Repository Not Found for url: https://huggingface.co/v/resolve/main/config.json. -Please make sure you specified the correct `repo_id` and `repo_type`. -If you are trying to access a private or gated repo, make sure you are authenticated. - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 425, in cached_file - raise EnvironmentError( -OSError: v is not a local folder and is not a valid model identifier listed on 'https://huggingface.co/models' -If this is a private repository, make sure to pass a token having permission to this repo either by logging in with `huggingface-cli login` or by passing `token=<your_token>` - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,1,64,1,,, -4bit-awq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,EleutherAI/pythia-2.7b,EleutherAI/pythia-2.7b,cuda,0,42,,,True,,,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,gpt_neox,MB,2217.578496,3142.057984,0.0,2512.388096,2240.694784,s,1,8.39915234375,8.39915234375,0.0,8.39915234375,8.39915234375,8.39915234375,8.39915234375,[8.39915234375],,kWh,1.7526506480563362e-05,9.589923677237652e-06,2.9772523817972463e-05,5.688895397577347e-05,,MB,2266.292224,3163.029504,0.0,2514.485248,2227.003904,s,10,4.97069253540039,0.49706925354003906,0.0003083185556469878,0.49697511291503904,0.4974484497070312,0.49758712463378907,0.4976980645751953,"[0.49683786010742187, 0.49701901245117186, 
0.496711181640625, 0.4974176330566406, 0.4971011962890625, 0.4973040771484375, 0.4969312133789062, 0.4967309265136719, 0.49772579956054686, 0.49691363525390625]",tokens/s,515.0187789263033,kWh,5.873578829067253e-06,3.2174479704813765e-06,3.314534397657107e-05,4.22363707761197e-05,tokens/kWh,6061126.827325361,MB,2288.140288,3163.029504,0.0,2514.485248,2334.959104,s,10,17.013342163085937,1.7013342163085938,0.016853537273620774,1.702905517578125,1.717168115234375,1.7265423095703125,1.7340416650390627,"[1.67981298828125, 1.684728515625, 1.7120701904296876, 1.69590869140625, 1.6782646484375, 1.73591650390625, 1.7150849609375, 1.700149169921875, 1.705661865234375, 1.70574462890625]",tokens/s,37.02976134618152,kWh,2.0466917612529567e-05,1.1217303127446522e-05,4.3566911837627535e-05,7.525113257760363e-05,tokens/kWh,837196.7017909066,,s,630,17.010580488204955,0.027000921409849134,0.0005176238093801499,0.0268154878616333,0.027628748703002932,0.027772876834869384,0.028768696117401137,"[0.026580991744995116, 0.02656768035888672, 0.02651238441467285, 0.02654515266418457, 0.026600448608398438, 0.02651545524597168, 0.026556415557861326, 0.02671615982055664, 0.027231231689453125, 0.02713804817199707, 0.02652876853942871, 0.026436607360839845, 0.0265164794921875, 0.026508287429809572, 0.02612531280517578, 0.02656358337402344, 0.02649087905883789, 0.02647039985656738, 0.027184127807617187, 0.027410432815551757, 0.027304960250854493, 0.027015167236328123, 0.026550271987915038, 0.026635263442993166, 0.026444799423217775, 0.026497024536132813, 0.026514432907104493, 0.02652364730834961, 0.026587135314941408, 0.027068416595458986, 0.027588607788085938, 0.026797056198120117, 0.02635775947570801, 0.02667519950866699, 0.02657177543640137, 0.02657689666748047, 0.026441728591918946, 0.026574848175048828, 0.0265533447265625, 0.026658815383911134, 0.027181055068969725, 0.02674380874633789, 0.026611711502075194, 0.026690559387207033, 0.026504192352294922, 0.026662912368774414, 0.02676736068725586, 0.027633663177490234, 0.0269803524017334, 0.02635264015197754, 0.026572799682617186, 0.026484735488891603, 0.02628812789916992, 0.026587135314941408, 0.02627686309814453, 0.02655436706542969, 0.026565631866455077, 0.026201087951660155, 0.02656870460510254, 0.026618879318237306, 0.02655436706542969, 0.026650623321533205, 0.026400768280029296, 0.02707967948913574, 0.026587135314941408, 0.026145792007446288, 0.026728448867797853, 0.02768998336791992, 0.027016191482543944, 0.02693734359741211, 0.026976255416870116, 0.02617241668701172, 0.026621952056884765, 0.026210304260253905, 0.02634752082824707, 0.026299392700195313, 0.026394624710083008, 0.02698137664794922, 0.02637107276916504, 0.026382335662841795, 0.026441728591918946, 0.026255359649658205, 0.026549247741699217, 0.026488832473754883, 0.02654207992553711, 0.026532863616943358, 0.027829248428344725, 0.026869760513305665, 0.026670080184936523, 0.026613792419433593, 0.026363872528076173, 0.026597375869750976, 0.02656051254272461, 0.02647756767272949, 0.026679296493530274, 0.026606592178344726, 0.02657177543640137, 0.02659328079223633, 0.02652569580078125, 0.026564607620239256, 0.026643455505371092, 0.026573823928833007, 0.026878976821899415, 0.02672230339050293, 0.02654515266418457, 0.026909696578979493, 0.027496448516845705, 0.027678720474243163, 0.027287551879882813, 0.02652672004699707, 0.026738687515258788, 0.02732441520690918, 0.0273438720703125, 0.026565631866455077, 0.026631168365478516, 0.027011072158813477, 0.0265164794921875, 0.02653183937072754, 0.02736639976501465, 
0.0267827205657959, 0.026641408920288087, 0.026573823928833007, 0.026762239456176756, 0.027632640838623046, 0.02688204765319824, 0.026637311935424804, 0.02814975929260254, 0.027583488464355467, 0.026868736267089844, 0.026662912368774414, 0.027283552169799805, 0.02677238464355469, 0.02653696060180664, 0.029240320205688477, 0.027443199157714843, 0.02730188751220703, 0.02776780891418457, 0.02730188751220703, 0.026570751190185548, 0.02651033592224121, 0.026534912109375, 0.02720358467102051, 0.030059520721435546, 0.028255231857299806, 0.027687936782836913, 0.027584512710571288, 0.026611711502075194, 0.026661888122558593, 0.026590208053588867, 0.02652774429321289, 0.026663936614990235, 0.026625024795532228, 0.026606592178344726, 0.026600448608398438, 0.027655168533325194, 0.027085823059082033, 0.02669158363342285, 0.02699673652648926, 0.027880447387695313, 0.027673599243164062, 0.026639360427856446, 0.02668339157104492, 0.026639360427856446, 0.02700492858886719, 0.027615232467651366, 0.02672947120666504, 0.027223039627075195, 0.027379711151123046, 0.0267325439453125, 0.026625024795532228, 0.027028480529785157, 0.02712166404724121, 0.027694080352783205, 0.027266048431396486, 0.027009023666381835, 0.026787839889526367, 0.027148288726806642, 0.026505216598510743, 0.027390975952148438, 0.027108352661132814, 0.027373567581176757, 0.02734284782409668, 0.027601919174194335, 0.02753945541381836, 0.02753228759765625, 0.02673459243774414, 0.026694656372070313, 0.027354112625122072, 0.02711142349243164, 0.027038719177246092, 0.026627071380615236, 0.02656153678894043, 0.026834943771362304, 0.02671308708190918, 0.027047935485839843, 0.027395072937011718, 0.02751692771911621, 0.02758143997192383, 0.026617855072021485, 0.02654719924926758, 0.02652774429321289, 0.02655539131164551, 0.02652262306213379, 0.027184127807617187, 0.026642431259155275, 0.026610687255859376, 0.02655436706542969, 0.026878976821899415, 0.027622400283813478, 0.026870784759521486, 0.02753433609008789, 0.030519296646118164, 0.02813747215270996, 0.027621376037597657, 0.027122688293457032, 0.026590208053588867, 0.026618879318237306, 0.02682368087768555, 0.027470848083496095, 0.027470848083496095, 0.026793983459472655, 0.02734284782409668, 0.02736742401123047, 0.026885120391845704, 0.02692095947265625, 0.027173887252807616, 0.027150335311889647, 0.026582015991210937, 0.026681343078613282, 0.026606592178344726, 0.02654207992553711, 0.0265533447265625, 0.026558464050292968, 0.026428415298461915, 0.026586111068725587, 0.026611711502075194, 0.026771455764770507, 0.026771455764770507, 0.0265533447265625, 0.02658406448364258, 0.026662912368774414, 0.026633216857910157, 0.026521600723266602, 0.026521600723266602, 0.026662912368774414, 0.02655948829650879, 0.02676121520996094, 0.026680320739746095, 0.02692095947265625, 0.0267458553314209, 0.026602495193481446, 0.02656051254272461, 0.02672537612915039, 0.02680012893676758, 0.02676633644104004, 0.02674278450012207, 0.02660147285461426, 0.02653900718688965, 0.026586111068725587, 0.026620927810668944, 0.026633216857910157, 0.026445823669433592, 0.02632294464111328, 0.026635263442993166, 0.02653696060180664, 0.026582015991210937, 0.02660147285461426, 0.0267458553314209, 0.02627276802062988, 0.026597375869750976, 0.02627174377441406, 0.027256832122802735, 0.026438655853271483, 0.026634239196777345, 0.026627071380615236, 0.026629119873046874, 0.02660966491699219, 0.026652671813964843, 0.026436607360839845, 0.02652876853942871, 0.02720256042480469, 0.027487232208251954, 0.027003904342651368, 
0.02656768035888672, 0.02652876853942871, 0.026653696060180664, 0.026608640670776368, 0.02656768035888672, 0.026537984848022462, 0.026492927551269533, 0.026813440322875977, 0.02740940856933594, 0.02714112091064453, 0.026719232559204102, 0.02650931167602539, 0.026587135314941408, 0.026572799682617186, 0.02653388786315918, 0.02659328079223633, 0.026688512802124024, 0.02653900718688965, 0.02656051254272461, 0.0265482234954834, 0.02657177543640137, 0.026595327377319337, 0.026506240844726563, 0.02655539131164551, 0.026673152923583986, 0.026446847915649413, 0.02679193687438965, 0.02616422462463379, 0.026390527725219725, 0.02646937561035156, 0.0265482234954834, 0.026590208053588867, 0.027510784149169923, 0.027628543853759766, 0.027436031341552734, 0.02753023910522461, 0.027444223403930663, 0.02756710433959961, 0.02752409553527832, 0.02758246421813965, 0.027527168273925783, 0.02757734489440918, 0.027411455154418944, 0.027639808654785155, 0.027320320129394532, 0.027510784149169923, 0.027449344635009764, 0.027576320648193358, 0.02752511978149414, 0.027622400283813478, 0.027554815292358398, 0.02751590347290039, 0.027777023315429687, 0.027452415466308593, 0.027674623489379883, 0.029025279998779296, 0.028231679916381838, 0.027570175170898437, 0.027638784408569338, 0.02750464057922363, 0.027653120040893556, 0.027628543853759766, 0.027304960250854493, 0.027623424530029295, 0.027906047821044923, 0.027627519607543945, 0.027509759902954102, 0.027618303298950195, 0.027568128585815428, 0.02734694480895996, 0.02754969596862793, 0.027819007873535157, 0.027640832901000976, 0.02755788803100586, 0.02711142349243164, 0.02770227241516113, 0.027822080612182616, 0.028956672668457032, 0.02792550468444824, 0.027621376037597657, 0.028049407958984376, 0.026788864135742187, 0.02657177543640137, 0.02694144058227539, 0.02652876853942871, 0.0265850887298584, 0.026663936614990235, 0.027297792434692384, 0.02773606491088867, 0.027475967407226562, 0.027442176818847655, 0.02776678466796875, 0.027615232467651366, 0.027707391738891602, 0.027652095794677735, 0.027069440841674806, 0.026703872680664063, 0.026780672073364258, 0.027473920822143554, 0.027364351272583007, 0.026619903564453123, 0.026817535400390623, 0.02689945602416992, 0.027165695190429686, 0.02657177543640137, 0.026778623580932616, 0.027228160858154295, 0.0275732479095459, 0.026398719787597655, 0.026951679229736326, 0.026604543685913085, 0.026629119873046874, 0.027166719436645507, 0.028234752655029297, 0.027546623229980468, 0.026669055938720702, 0.026651647567749022, 0.02656051254272461, 0.028104703903198244, 0.02757427215576172, 0.027482112884521483, 0.026960895538330077, 0.02675712013244629, 0.026597375869750976, 0.026570751190185548, 0.027853824615478515, 0.02889727973937988, 0.027778047561645508, 0.02773811149597168, 0.02771251106262207, 0.027278335571289062, 0.027495424270629884, 0.02695782470703125, 0.026608640670776368, 0.027219968795776366, 0.027066368103027344, 0.027000831604003905, 0.02669875144958496, 0.02693529510498047, 0.02671513557434082, 0.026648576736450196, 0.026858495712280273, 0.027249664306640626, 0.027407360076904298, 0.026993663787841796, 0.026811391830444335, 0.02735206413269043, 0.026885120391845704, 0.027656192779541015, 0.02757529640197754, 0.028026880264282225, 0.027480064392089845, 0.026990591049194337, 0.027609088897705077, 0.027792383193969726, 0.028926975250244142, 0.028265472412109374, 0.027624448776245116, 0.027142143249511717, 0.026776575088500978, 0.026670080184936523, 0.02666700744628906, 0.026643455505371092, 0.02654515266418457, 
0.02734182357788086, 0.02750771141052246, 0.026795007705688476, 0.02699776077270508, 0.02674483108520508, 0.026680320739746095, 0.027312128067016602, 0.027896831512451172, 0.0273623046875, 0.027603967666625977, 0.026995712280273438, 0.027288576126098633, 0.026599424362182617, 0.026662912368774414, 0.02676940727233887, 0.027425792694091795, 0.02675302314758301, 0.0267325439453125, 0.02697420883178711, 0.026763263702392577, 0.02716057586669922, 0.026670080184936523, 0.026875904083251953, 0.026663936614990235, 0.026696704864501954, 0.027464704513549806, 0.02715238380432129, 0.026580991744995116, 0.026657791137695314, 0.026754047393798826, 0.02679193687438965, 0.02666803169250488, 0.026686464309692383, 0.027227136611938478, 0.026927104949951174, 0.026754047393798826, 0.02658406448364258, 0.02731724739074707, 0.027082752227783204, 0.02657587242126465, 0.026652671813964843, 0.026878976821899415, 0.02656358337402344, 0.026664960861206056, 0.027299840927124022, 0.02750464057922363, 0.027632640838623046, 0.02739200019836426, 0.027364351272583007, 0.02673151969909668, 0.026594303131103517, 0.026887168884277345, 0.027085823059082033, 0.027074560165405274, 0.027290624618530275, 0.027378688812255858, 0.02796134376525879, 0.02692095947265625, 0.028453887939453124, 0.027671552658081053, 0.027422719955444336, 0.027190271377563476, 0.02656358337402344, 0.027459583282470702, 0.02753638458251953, 0.027426816940307616, 0.027475967407226562, 0.027599872589111327, 0.027411455154418944, 0.027422719955444336, 0.02720358467102051, 0.026656768798828126, 0.027072511672973632, 0.02650931167602539, 0.0271646728515625, 0.026728448867797853, 0.0265482234954834, 0.026762239456176756, 0.02678374481201172, 0.026570751190185548, 0.026845184326171875, 0.02717900848388672, 0.027476991653442383, 0.02674995231628418, 0.02637926483154297, 0.02688102340698242, 0.026562559127807618, 0.026672128677368165, 0.02672435188293457, 0.026643455505371092, 0.02731827163696289, 0.027675647735595704, 0.027049983978271484, 0.027083776473999024, 0.026608640670776368, 0.02674892807006836, 0.02715648078918457, 0.027493375778198242, 0.02715238380432129, 0.026819583892822265, 0.02753126335144043, 0.02770534324645996, 0.027666431427001953, 0.028075008392333983, 0.027295743942260742, 0.026660863876342773, 0.02671718406677246, 0.027115520477294923, 0.02715648078918457, 0.02675302314758301, 0.026858495712280273, 0.02707148742675781, 0.026788864135742187, 0.026648576736450196, 0.02667519950866699, 0.027245567321777343, 0.027073535919189453, 0.026687488555908204, 0.027023359298706053, 0.026874879837036132, 0.026994688034057617, 0.026797056198120117, 0.027847679138183593, 0.027395072937011718, 0.026636287689208983, 0.02691993522644043, 0.027058176040649414, 0.02680729675292969, 0.026647552490234375, 0.026893312454223633, 0.0267325439453125, 0.02668339157104492, 0.026591232299804687, 0.02693222427368164, 0.027427839279174804, 0.027419647216796874, 0.02709503936767578, 0.027364351272583007, 0.026704896926879884, 0.026789888381958008, 0.027448320388793947, 0.02771865653991699, 0.02674995231628418, 0.026793983459472655, 0.027239423751831054, 0.027244543075561522, 0.026776575088500978, 0.02676736068725586, 0.027321344375610353, 0.027455488204956056, 0.0268984317779541, 0.02674073600769043, 0.027438079833984375, 0.027001855850219726, 0.026817535400390623, 0.026944511413574217, 0.027037696838378908, 0.02718720054626465, 0.02756608009338379, 0.02749235153198242, 0.027675647735595704, 0.027417600631713866, 0.026916864395141602, 0.02697420883178711, 
0.026586111068725587, 0.026622976303100586, 0.027457536697387694, 0.027616256713867186, 0.027438079833984375, 0.02737664031982422, 0.027630592346191408, 0.027046911239624022, 0.027106304168701172, 0.027185152053833008, 0.027662336349487306, 0.02778316879272461, 0.026514432907104493, 0.027023359298706053, 0.026818559646606444, 0.026610687255859376, 0.026582015991210937, 0.02652262306213379, 0.02652262306213379]",tokens/s,37.03577314347612,,,1,64,1,,, -4bit-awq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,Qwen/Qwen-7B,Qwen/Qwen-7B,cuda,0,42,,,True,,,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run - report = scenario.run(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run - self.run_model_loading_tracking(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking - backend.load() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load - self.load_transformers_model() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model - self.load_transformers_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights - self.load_transformers_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained - self.pretrained_model = self.automodel_loader.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 551, in from_pretrained - model_class = get_class_from_dynamic_module( - File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 502, in get_class_from_dynamic_module - final_module = get_cached_module_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 327, in get_cached_module_file - modules_needed = check_imports(resolved_module_file) - File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 182, in check_imports - raise ImportError( -ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. 
Run `pip install transformers_stream_generator` - -",qwen,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,1,64,1,,, -4bit-awq-exllama-v1-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,microsoft/rho-math-1b-v0.1,microsoft/rho-math-1b-v0.1,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - self.load_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3907, in from_pretrained - hf_quantizer.postprocess_model(model) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/base.py"", line 195, in postprocess_model - return self._process_model_after_weight_loading(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/quantizer_awq.py"", line 107, in _process_model_after_weight_loading - model = post_init_awq_exllama_modules(model, self.quantization_config.exllama_config) - File ""/usr/local/lib/python3.10/dist-packages/transformers/integrations/awq.py"", line 462, in post_init_awq_exllama_modules - model = exllama_post_init(model) - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllama.py"", line 144, in exllama_post_init - submodule.post_init() - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllama.py"", line 77, in post_init - self.q4 = exl_ext.make_q4( -NameError: name 'exl_ext' is not defined - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,1,64,1,,, 
-4bit-awq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,01-ai/Yi-6B,01-ai/Yi-6B,cuda,0,42,,,True,,,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,llama,MB,3569.516544,5404.884992,0.0,4775.215104,4427.072512,s,1,10.18662109375,10.18662109375,0.0,10.18662109375,10.18662109375,10.18662109375,10.18662109375,[10.18662109375],,kWh,4.0658227272222705e-05,2.2267676394069845e-05,7.205589097800027e-05,0.0001349817946442928,,MB,1676.746752,5440.536576,0.0,4794.089472,4101.022208,s,10,10.500593505859374,1.0500593505859375,7.875154645095101e-05,1.0500662841796875,1.0501588134765625,1.050159912109375,1.050160791015625,"[1.0499320068359375, 1.0501585693359374, 1.0500716552734375, 1.049978271484375, 1.0501610107421875, 1.049951171875, 1.05010693359375, 1.0500609130859375, 1.050044921875, 1.0501280517578124]",tokens/s,243.7957434093139,kWh,1.2410170630416619e-05,6.800246660055093e-06,7.091197339620008e-05,9.012239068667178e-05,tokens/kWh,2840581.5474872873,MB,1712.197632,5451.022336,0.0,4802.47808,4101.024768,s,10,17.675414184570315,1.767541418457031,0.021422518567536353,1.7689312744140624,1.7939232666015625,1.796118005371094,1.7978737963867188,"[1.7693355712890626, 1.74630712890625, 1.7348173828125, 1.753927734375, 1.783694580078125, 1.798312744140625, 1.793435546875, 1.7685269775390624, 1.78552197265625, 1.7415345458984375]",tokens/s,35.642729127669114,kWh,2.0440588685277894e-05,1.1203139735433922e-05,5.5151849676999916e-05,8.679557809771173e-05,tokens/kWh,725843.4286718683,,s,630,17.673438211441024,0.02805307652609689,0.0006212121192990897,0.028282368659973144,0.02865879001617432,0.028840141010284422,0.029514373435974125,"[0.028317695617675782, 0.027290624618530275, 0.027299840927124022, 0.027265024185180665, 0.027479040145874024, 0.028753919601440428, 0.029019136428833008, 0.03000831985473633, 0.028729343414306642, 0.028429311752319338, 0.028702720642089844, 0.02775449562072754, 0.02711756706237793, 0.0272988166809082, 0.027681791305541992, 0.028844032287597656, 0.028435455322265626, 0.02838015937805176, 0.028018688201904295, 0.027414527893066407, 0.027415552139282227, 0.027841535568237305, 0.028496896743774414, 0.028556287765502928, 0.028368896484375, 0.028110847473144532, 0.028037120819091797, 0.028413951873779295, 0.02731622314453125, 0.027296768188476563, 0.027321344375610353, 0.027637760162353517, 0.027487232208251954, 0.0285614070892334, 0.028635135650634767, 0.02859519958496094, 0.02837606430053711, 0.02838528060913086, 0.02840985679626465, 0.02840575981140137, 0.02735103988647461, 0.027357183456420898, 0.030456832885742188, 0.029403135299682616, 0.0285614070892334, 0.028482559204101563, 0.02836172866821289, 0.027510784149169923, 0.027364351272583007, 0.027421728134155273, 0.027170783996582033, 0.027226112365722657, 0.02711859130859375, 0.02713599967956543, 0.02853887939453125, 0.02856857681274414, 0.028281856536865234, 0.02854400062561035, 0.02835251235961914, 0.02819891166687012, 0.027305984497070314, 0.028395519256591797, 0.028120063781738282, 0.027873279571533204, 0.027325439453125, 
0.027494400024414063, 0.027395072937011718, 0.02732646369934082, 0.027275264739990233, 0.027267072677612306, 0.02730291175842285, 0.02726092720031738, 0.0271278076171875, 0.027253759384155272, 0.027219968795776366, 0.027232255935668945, 0.02857062339782715, 0.028412927627563478, 0.027433984756469725, 0.0273305606842041, 0.02735923194885254, 0.02735308837890625, 0.027427839279174804, 0.028274688720703125, 0.028290048599243164, 0.028145696640014647, 0.02862179183959961, 0.02796031951904297, 0.02736742401123047, 0.02876927947998047, 0.02876518440246582, 0.02835148811340332, 0.027054079055786134, 0.02728447914123535, 0.02752921676635742, 0.027417600631713866, 0.027356159210205077, 0.028120063781738282, 0.028451839447021485, 0.02837606430053711, 0.02840575981140137, 0.029256704330444337, 0.029061119079589845, 0.02778112030029297, 0.02749235153198242, 0.027454463958740235, 0.0273438720703125, 0.02730803108215332, 0.027348991394042968, 0.027193344116210938, 0.027418624877929686, 0.027652095794677735, 0.027222015380859374, 0.028028928756713867, 0.02837196731567383, 0.027741184234619142, 0.027979776382446288, 0.02831974411010742, 0.02750876808166504, 0.027253728866577148, 0.027881471633911133, 0.027312128067016602, 0.02736844825744629, 0.027249664306640626, 0.027467775344848632, 0.02730803108215332, 0.028030975341796875, 0.027231231689453125, 0.027122688293457032, 0.027238399505615234, 0.027114496231079102, 0.02721075248718262, 0.026933248519897462, 0.027247615814208984, 0.027184127807617187, 0.02712063980102539, 0.027122688293457032, 0.027133951187133788, 0.02831155204772949, 0.028188671112060547, 0.027058176040649414, 0.02727731132507324, 0.027191295623779296, 0.027467775344848632, 0.027193344116210938, 0.027646976470947264, 0.028688383102416993, 0.028482559204101563, 0.028016639709472657, 0.027312128067016602, 0.027248640060424805, 0.02714931106567383, 0.02718720054626465, 0.0271278076171875, 0.027200511932373047, 0.027201536178588868, 0.027197439193725585, 0.027370496749877928, 0.027199487686157226, 0.02718003273010254, 0.02731724739074707, 0.02739302444458008, 0.02716364860534668, 0.026801151275634767, 0.02716160011291504, 0.027280384063720704, 0.027223039627075195, 0.02717695999145508, 0.027265024185180665, 0.02718720054626465, 0.027224063873291016, 0.02720256042480469, 0.027272192001342774, 0.027428863525390625, 0.027468799591064453, 0.028216320037841795, 0.02816307258605957, 0.028257280349731444, 0.028209152221679686, 0.028206079483032227, 0.02817228889465332, 0.027998207092285156, 0.02719539260864258, 0.028817407608032225, 0.027664384841918944, 0.029099008560180665, 0.028299264907836914, 0.028072959899902345, 0.02809343910217285, 0.028264448165893553, 0.0271278076171875, 0.027261951446533202, 0.027835391998291017, 0.02995814323425293, 0.028630016326904296, 0.02853068733215332, 0.02859212875366211, 0.02840166473388672, 0.028277759552001954, 0.02834739112854004, 0.02832383918762207, 0.02833612823486328, 0.028887039184570314, 0.027631616592407225, 0.027423744201660157, 0.02735001564025879, 0.027440128326416017, 0.027389951705932617, 0.02738380813598633, 0.027439104080200196, 0.027372543334960937, 0.027347967147827147, 0.027454463958740235, 0.027290624618530275, 0.02733158493041992, 0.02735923194885254, 0.02739200019836426, 0.028823551177978517, 0.028404735565185548, 0.02831974411010742, 0.028598272323608398, 0.027493375778198242, 0.027460607528686523, 0.027626495361328125, 0.02753023910522461, 0.027443199157714843, 0.02739302444458008, 0.027371519088745116, 0.027867136001586915, 
0.027883520126342775, 0.027478015899658204, 0.02898944091796875, 0.028275711059570312, 0.027610111236572265, 0.02736742401123047, 0.02740019226074219, 0.02756710433959961, 0.027406335830688477, 0.02733670425415039, 0.0273756160736084, 0.027390975952148438, 0.027437055587768554, 0.027444223403930663, 0.027420671463012695, 0.02753945541381836, 0.027411455154418944, 0.02736639976501465, 0.02814668846130371, 0.028983295440673826, 0.028366847991943358, 0.028492799758911135, 0.028329984664916992, 0.02856345558166504, 0.02759782409667969, 0.027453439712524414, 0.027851776123046876, 0.02838835144042969, 0.028322816848754883, 0.028229631423950196, 0.028527616500854492, 0.028437503814697264, 0.02832076835632324, 0.02772787284851074, 0.02728550338745117, 0.02733670425415039, 0.027224063873291016, 0.027404287338256835, 0.027472896575927733, 0.027283456802368163, 0.02736332893371582, 0.027426816940307616, 0.027448320388793947, 0.02736844825744629, 0.02772889518737793, 0.028370943069458008, 0.028511232376098632, 0.02835558319091797, 0.028358655929565428, 0.028215295791625978, 0.030488576889038086, 0.02899456024169922, 0.02858700752258301, 0.028508159637451173, 0.028639232635498047, 0.028611583709716795, 0.02850918388366699, 0.02878054428100586, 0.028831743240356447, 0.028539903640747072, 0.028472320556640625, 0.02838528060913086, 0.02872422409057617, 0.029245439529418944, 0.028489728927612305, 0.02876108741760254, 0.02858393669128418, 0.02854400062561035, 0.02856959915161133, 0.028445695877075194, 0.028648448944091798, 0.028443647384643556, 0.02855219268798828, 0.028499967575073244, 0.02852659225463867, 0.028464128494262695, 0.02858393669128418, 0.028478464126586913, 0.028639232635498047, 0.02853887939453125, 0.028452863693237306, 0.028477439880371092, 0.028450815200805665, 0.028506111145019532, 0.028436479568481447, 0.028511232376098632, 0.029138944625854493, 0.028659711837768553, 0.028464128494262695, 0.028476415634155275, 0.02856038475036621, 0.02857164764404297, 0.02925056076049805, 0.028475391387939454, 0.028644351959228515, 0.028589056015014647, 0.028480512619018555, 0.028484607696533205, 0.028539903640747072, 0.028519424438476562, 0.028454912185668944, 0.02859212875366211, 0.028486656188964843, 0.02837196731567383, 0.02854707145690918, 0.028493824005126952, 0.028531711578369142, 0.028519424438476562, 0.028490751266479493, 0.02850201606750488, 0.02850099182128906, 0.028470272064208983, 0.028513280868530274, 0.028633087158203126, 0.028481536865234375, 0.028438528060913085, 0.028423168182373046, 0.028548095703125, 0.028648448944091798, 0.02858598327636719, 0.028647424697875977, 0.028725248336791992, 0.028480512619018555, 0.028515327453613282, 0.028481536865234375, 0.029108224868774416, 0.02920243263244629, 0.028675104141235353, 0.02886038398742676, 0.028642303466796876, 0.028406784057617186, 0.028383232116699218, 0.02837708854675293, 0.02857062339782715, 0.028444671630859376, 0.02831974411010742, 0.02837811279296875, 0.02837401580810547, 0.028443647384643556, 0.02832486343383789, 0.028338176727294922, 0.02838425636291504, 0.02836787223815918, 0.028459007263183594, 0.02836787223815918, 0.028404735565185548, 0.028519424438476562, 0.028455936431884765, 0.028366847991943358, 0.0288143367767334, 0.02836275291442871, 0.028429311752319338, 0.028709888458251953, 0.02852659225463867, 0.028449792861938477, 0.028598272323608398, 0.02852454376220703, 0.028453887939453124, 0.028452863693237306, 0.029046783447265623, 0.02855423927307129, 0.02855014419555664, 0.028431360244750976, 0.028452863693237306, 
0.02860032081604004, 0.02858598327636719, 0.028427263259887696, 0.028462080001831053, 0.028487680435180664, 0.028465152740478516, 0.028673023223876954, 0.028512256622314453, 0.028717056274414062, 0.028487680435180664, 0.028442623138427735, 0.028512256622314453, 0.028459007263183594, 0.028476415634155275, 0.02855219268798828, 0.028399616241455077, 0.02855423927307129, 0.02859212875366211, 0.028498943328857423, 0.02862387275695801, 0.02878361511230469, 0.028564479827880858, 0.028556287765502928, 0.028658687591552736, 0.028645376205444335, 0.028632064819335938, 0.02854911994934082, 0.02880102348327637, 0.028637184143066406, 0.02862387275695801, 0.028834815979003905, 0.0286167049407959, 0.02871603202819824, 0.028837888717651368, 0.028521472930908204, 0.028470272064208983, 0.028461055755615236, 0.02834636878967285, 0.028456960678100586, 0.028439552307128906, 0.028503040313720703, 0.028685312271118164, 0.028042240142822264, 0.027669504165649415, 0.027371519088745116, 0.02714419174194336, 0.027403263092041014, 0.02738380813598633, 0.029018112182617187, 0.028504064559936523, 0.028397600173950197, 0.028499935150146486, 0.02855936050415039, 0.028642303466796876, 0.02850918388366699, 0.02859929656982422, 0.02876620864868164, 0.028545024871826172, 0.0285296630859375, 0.02837299156188965, 0.028286975860595705, 0.028406784057617186, 0.028395519256591797, 0.028841983795166014, 0.0296048641204834, 0.028438528060913085, 0.028346431732177733, 0.027853759765625, 0.028258304595947265, 0.02837708854675293, 0.028282880783081055, 0.02831155204772949, 0.028177408218383788, 0.028221439361572266, 0.028306432723999023, 0.02836582374572754, 0.028265472412109374, 0.02832383918762207, 0.02836070442199707, 0.028479488372802734, 0.02779955291748047, 0.027987968444824218, 0.028094463348388672, 0.027930624008178712, 0.028206079483032227, 0.0279685115814209, 0.02797260856628418, 0.02793471908569336, 0.027886592864990234, 0.0277258243560791, 0.026942464828491212, 0.026927104949951174, 0.02725894355773926, 0.026914751052856446, 0.026852352142333984, 0.02695577621459961, 0.02696499252319336, 0.026832895278930666, 0.02680012893676758, 0.027938816070556642, 0.028892160415649414, 0.02831974411010742, 0.02801568031311035, 0.02806368064880371, 0.027816032409667967, 0.027839391708374024, 0.027894784927368164, 0.027846656799316406, 0.02777292823791504, 0.02774015998840332, 0.027365375518798828, 0.028622848510742187, 0.028030975341796875, 0.028033023834228517, 0.027688991546630858, 0.027981792449951172, 0.028024831771850587, 0.028045312881469726, 0.02815385627746582, 0.028110847473144532, 0.02811801528930664, 0.028075136184692383, 0.028078975677490233, 0.02805971145629883, 0.027910079956054688, 0.02789990425109863, 0.028209152221679686, 0.028124160766601562, 0.02796134376525879, 0.027666431427001953, 0.028165119171142578, 0.028030975341796875, 0.028296192169189452, 0.029082624435424805, 0.028661760330200195, 0.02834943962097168, 0.028651519775390624, 0.02835148811340332, 0.0283371524810791, 0.028480512619018555, 0.028453887939453124, 0.028471296310424804, 0.02859519958496094, 0.02922700881958008, 0.028448768615722656, 0.02874367904663086, 0.02996019172668457, 0.028786687850952147, 0.02838015937805176, 0.02839347267150879, 0.028456960678100586, 0.028387327194213868, 0.028309503555297853, 0.027840511322021484, 0.027554815292358398, 0.028396543502807618, 0.02855833625793457, 0.029108224868774416, 0.028762111663818358, 0.0285614070892334, 0.028454912185668944, 0.028725248336791992, 0.028452863693237306, 0.02832076835632324, 
0.02816716766357422, 0.028224512100219725, 0.028427263259887696, 0.028283903121948242, 0.028516351699829103, 0.028302335739135744, 0.02837708854675293, 0.028210176467895507, 0.028260351181030274, 0.028010496139526365, 0.0295598087310791, 0.02899558448791504, 0.027461631774902344, 0.027370496749877928, 0.02727734375, 0.027269088745117187, 0.027182079315185546, 0.027262975692749023, 0.02694041633605957, 0.027250688552856447, 0.027268096923828124, 0.02727628707885742, 0.02719539260864258, 0.027219968795776366, 0.02732339286804199, 0.02736025619506836, 0.027240447998046875, 0.027219968795776366, 0.027346975326538087, 0.027267040252685545, 0.02734489631652832, 0.027665407180786132, 0.02836479949951172, 0.028478464126586913, 0.02834636878967285, 0.028313600540161132, 0.02832383918762207, 0.02831155204772949, 0.028635135650634767, 0.02919424057006836, 0.028519424438476562, 0.028594175338745118, 0.028244991302490235, 0.027339775085449217, 0.027543552398681642, 0.0274913272857666, 0.02731520080566406, 0.027328512191772462, 0.02733260726928711, 0.027417600631713866, 0.027201536178588868, 0.027247615814208984, 0.027404287338256835, 0.027403263092041014, 0.027270143508911132, 0.02735513687133789, 0.027406335830688477, 0.027159551620483398, 0.027322368621826174, 0.027274240493774415, 0.027313152313232423, 0.027257856369018556, 0.027140096664428712, 0.027386880874633788, 0.027430912017822266, 0.027395072937011718, 0.02736639976501465, 0.027406335830688477, 0.02860851287841797, 0.028399616241455077, 0.028395519256591797, 0.027443199157714843, 0.02736128044128418]",tokens/s,35.646714151645064,,,1,64,1,,, -4bit-awq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,EleutherAI/gpt-neo-2.7B,EleutherAI/gpt-neo-2.7B,cuda,0,42,,,True,,,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,gpt_neo,MB,2219.687936,2726.821888,0.0,2097.152,1986.693632,s,1,9.26169921875,9.26169921875,0.0,9.26169921875,9.26169921875,9.26169921875,9.26169921875,[9.26169921875],,kWh,2.706480979028293e-05,1.48173279287855e-05,4.378336836000085e-05,8.566550607906929e-05,,MB,2322.460672,2743.599104,0.0,2097.152,1862.37952,s,10,4.974516479492188,0.49745164794921876,0.00046529207892693976,0.49750669860839847,0.49800158996582033,0.49807562408447265,0.49813485137939456,"[0.49665179443359375, 0.49787545776367187, 0.4971648254394531, 0.49798513793945315, 0.4973188171386719, 0.49748269653320315, 0.49757720947265627, 0.498149658203125, 0.49678018188476564, 0.49753070068359373]",tokens/s,514.6228805460369,kWh,5.874464907407234e-06,3.2189486893854145e-06,3.24537825767624e-05,4.154719617355505e-05,tokens/kWh,6161667.298332516,MB,2333.814784,2745.696256,0.0,2097.152,1946.953216,s,10,13.879918701171876,1.3879918701171876,0.01728989083711722,1.3877505493164062,1.4011555053710938,1.4146900573730468,1.4255176989746094,"[1.3929793701171875, 1.393319091796875, 1.3621142578125, 1.38217626953125, 1.3918331298828126, 1.3981478271484375, 1.38366796875, 1.367791015625, 1.3796651611328126, 
1.428224609375]",tokens/s,45.389314848566755,kWh,1.6425683343565035e-05,9.001863565408014e-06,3.748646781983439e-05,6.291401472880746e-05,tokens/kWh,1001366.7109238408,,s,630,13.875648504257196,0.02202483889564635,0.0005221798212614926,0.021752320289611816,0.022794547843933106,0.02298695640563965,0.02359890899658204,"[0.022380544662475587, 0.022575103759765625, 0.022500352859497072, 0.02285260772705078, 0.02206208038330078, 0.021609472274780273, 0.021577728271484374, 0.021839872360229492, 0.021840896606445313, 0.022131711959838866, 0.02164531135559082, 0.0217262077331543, 0.0216944637298584, 0.021977088928222657, 0.021790719985961913, 0.021765119552612306, 0.021715967178344727, 0.021958656311035156, 0.02230886459350586, 0.022199296951293947, 0.021964799880981444, 0.02273689651489258, 0.022008832931518556, 0.02184601593017578, 0.021679103851318358, 0.021602304458618164, 0.02167296028137207, 0.021627904891967774, 0.021707775115966797, 0.022289407730102538, 0.021963775634765623, 0.022635520935058592, 0.02239897537231445, 0.022518783569335937, 0.022486015319824217, 0.02265907287597656, 0.022674432754516603, 0.02244812774658203, 0.022510591506958007, 0.022771711349487304, 0.02262118339538574, 0.021744640350341796, 0.02173030471801758, 0.0214517765045166, 0.022000640869140626, 0.021619712829589844, 0.022008832931518556, 0.02269900894165039, 0.022890495300292968, 0.023004159927368165, 0.022576128005981445, 0.022601728439331056, 0.022518783569335937, 0.02206924819946289, 0.022458368301391602, 0.022518783569335937, 0.021617664337158202, 0.02170572853088379, 0.021963775634765623, 0.02164735984802246, 0.02166886329650879, 0.021573631286621094, 0.02165555191040039, 0.02222489547729492, 0.0225167350769043, 0.022611967086791994, 0.0224849910736084, 0.022444032669067384, 0.022542335510253905, 0.022467584609985353, 0.022609920501708985, 0.022861824035644532, 0.022714368820190428, 0.022452224731445314, 0.02167398452758789, 0.021597183227539063, 0.02167398452758789, 0.021534719467163087, 0.021719039916992186, 0.021638143539428712, 0.02164019203186035, 0.02168217658996582, 0.02166681671142578, 0.021699583053588867, 0.023259136199951173, 0.02323865509033203, 0.023414783477783203, 0.02275328063964844, 0.022549503326416014, 0.02251366424560547, 0.022556671142578123, 0.023085056304931642, 0.022731775283813475, 0.021707775115966797, 0.021794815063476563, 0.02162380790710449, 0.02164838409423828, 0.021570560455322265, 0.021584896087646483, 0.02170163154602051, 0.021531648635864258, 0.02212761688232422, 0.02187980842590332, 0.021618688583374023, 0.021317632675170898, 0.021534719467163087, 0.021592063903808592, 0.021608448028564452, 0.02146406364440918, 0.021304319381713867, 0.02142207908630371, 0.021595136642456055, 0.021570560455322265, 0.021590015411376954, 0.021642240524291992, 0.021583871841430666, 0.022071296691894532, 0.022821887969970703, 0.023406591415405274, 0.02268671989440918, 0.022589439392089843, 0.022441984176635742, 0.02267136001586914, 0.02249830436706543, 0.022353919982910156, 0.021593088150024413, 0.021987327575683592, 0.021588991165161133, 0.02131865692138672, 0.02130534362792969, 0.02131046485900879, 0.021646335601806642, 0.02147430419921875, 0.02171494483947754, 0.021577728271484374, 0.021227519989013673, 0.021363712310791014, 0.021336063385009766, 0.02128998374938965, 0.021644287109375, 0.021779455184936524, 0.021562368392944335, 0.021390335083007812, 0.021626880645751953, 0.021609472274780273, 0.02165760040283203, 0.021565439224243164, 0.022288383483886717, 0.021629951477050782, 
0.021751808166503905, 0.021787647247314454, 0.021582847595214845, 0.021598207473754884, 0.02165760040283203, 0.021562368392944335, 0.021590015411376954, 0.021617664337158202, 0.021734399795532225, 0.021634048461914062, 0.021712896347045898, 0.021710847854614256, 0.021581823348999024, 0.02225766372680664, 0.022108160018920898, 0.022192127227783204, 0.02171392059326172, 0.021352447509765626, 0.021576704025268553, 0.021696512222290038, 0.021350400924682617, 0.021178367614746094, 0.021343231201171875, 0.021601280212402343, 0.021758975982666014, 0.02164019203186035, 0.021582847595214845, 0.021600255966186522, 0.021609472274780273, 0.021586944580078125, 0.021621759414672852, 0.021548032760620117, 0.02169343948364258, 0.021526527404785157, 0.021583871841430666, 0.021618688583374023, 0.02164121627807617, 0.021745664596557617, 0.021700607299804688, 0.021571584701538086, 0.02210918426513672, 0.02173030471801758, 0.02165247917175293, 0.02164735984802246, 0.021601280212402343, 0.02249932861328125, 0.022527999877929687, 0.022622207641601562, 0.022477823257446287, 0.022500352859497072, 0.022483968734741212, 0.022435840606689454, 0.022495231628417968, 0.02250649642944336, 0.02247987174987793, 0.022543359756469726, 0.022428672790527345, 0.02244915199279785, 0.022569984436035157, 0.022683647155761717, 0.022583295822143554, 0.022594560623168947, 0.022487039566040038, 0.022098943710327147, 0.02142310333251953, 0.0216494083404541, 0.0216944637298584, 0.021481472015380858, 0.0216627197265625, 0.02189926338195801, 0.021755903244018555, 0.021719039916992186, 0.021630975723266603, 0.021594112396240234, 0.021614591598510743, 0.021621759414672852, 0.02162892723083496, 0.02169753646850586, 0.021525503158569336, 0.02147430419921875, 0.021791744232177734, 0.021588991165161133, 0.02171494483947754, 0.021578752517700195, 0.02168320083618164, 0.021873664855957032, 0.02242252731323242, 0.022143999099731446, 0.021577728271484374, 0.021530624389648437, 0.021510143280029297, 0.02171801567077637, 0.021598207473754884, 0.021736448287963867, 0.021572608947753907, 0.021659648895263672, 0.021606399536132814, 0.021544960021972655, 0.021530624389648437, 0.021574655532836915, 0.022133760452270508, 0.022040576934814454, 0.02146099281311035, 0.02170163154602051, 0.022181888580322266, 0.02188697624206543, 0.021602304458618164, 0.02163609504699707, 0.02163609504699707, 0.02169856071472168, 0.02215116882324219, 0.022367231369018553, 0.02169036865234375, 0.021584896087646483, 0.021552127838134767, 0.021612543106079102, 0.022148096084594726, 0.02332262420654297, 0.023202816009521485, 0.022533119201660155, 0.02265292739868164, 0.022525951385498046, 0.02243071937561035, 0.022424575805664062, 0.02165247917175293, 0.021545984268188476, 0.021734399795532225, 0.022452224731445314, 0.02266726493835449, 0.022508544921875, 0.02189004707336426, 0.02163609504699707, 0.021629951477050782, 0.021708799362182618, 0.022182912826538087, 0.022626304626464845, 0.02225971221923828, 0.02168217658996582, 0.022388736724853517, 0.022494207382202147, 0.022492160797119142, 0.022392831802368163, 0.021719039916992186, 0.021535743713378908, 0.021541887283325196, 0.021548032760620117, 0.02170163154602051, 0.022164480209350586, 0.021793792724609375, 0.022435840606689454, 0.022533119201660155, 0.02233241653442383, 0.02244915199279785, 0.022384639739990234, 0.02244915199279785, 0.02251263999938965, 0.02187775993347168, 0.02207027244567871, 0.02226278305053711, 0.021590015411376954, 0.021593088150024413, 0.021606399536132814, 0.021775360107421874, 0.022386688232421875, 
0.022402048110961914, 0.022387712478637696, 0.021784576416015625, 0.021598207473754884, 0.02191360092163086, 0.02269491195678711, 0.022635520935058592, 0.022429695129394533, 0.02244607925415039, 0.022605823516845702, 0.02209280014038086, 0.022023168563842774, 0.022534143447875975, 0.02249625587463379, 0.023998464584350586, 0.022666240692138673, 0.02249728012084961, 0.022996992111206056, 0.023120895385742187, 0.02251468849182129, 0.02249318313598633, 0.022477823257446287, 0.022210559844970702, 0.022429695129394533, 0.022421503067016603, 0.022673408508300782, 0.02248089599609375, 0.022406143188476564, 0.022145023345947267, 0.02149580764770508, 0.021644287109375, 0.02271027183532715, 0.022322175979614257, 0.021703680038452147, 0.02163711929321289, 0.021580799102783203, 0.021588991165161133, 0.021619712829589844, 0.021562368392944335, 0.021979135513305666, 0.022354944229125977, 0.022509567260742186, 0.02269593620300293, 0.0224849910736084, 0.021947391510009767, 0.022806528091430665, 0.023229440689086913, 0.02344550323486328, 0.022687744140625, 0.022441984176635742, 0.021711872100830077, 0.021592063903808592, 0.021603328704833984, 0.021562368392944335, 0.021574655532836915, 0.021591039657592775, 0.021584896087646483, 0.021704704284667968, 0.021562368392944335, 0.021533695220947266, 0.021586944580078125, 0.021541887283325196, 0.021638143539428712, 0.021522432327270507, 0.02205081558227539, 0.02212761688232422, 0.021752832412719726, 0.021747711181640626, 0.02168320083618164, 0.021572608947753907, 0.02162892723083496, 0.021574655532836915, 0.02170675277709961, 0.02182963180541992, 0.022370304107666016, 0.021540864944458008, 0.023051263809204102, 0.023793664932250977, 0.023008256912231444, 0.022631423950195313, 0.022543359756469726, 0.021644287109375, 0.021626880645751953, 0.02165043258666992, 0.021695487976074217, 0.02164531135559082, 0.021582847595214845, 0.02168115234375, 0.022063104629516602, 0.02229964828491211, 0.021751808166503905, 0.02171801567077637, 0.02163302421569824, 0.021602304458618164, 0.021625856399536132, 0.02164019203186035, 0.021765119552612306, 0.02168627166748047, 0.021566463470458985, 0.022322175979614257, 0.022533119201660155, 0.022403072357177735, 0.022427648544311524, 0.022445056915283205, 0.02202726364135742, 0.02162380790710449, 0.021643264770507813, 0.021700607299804688, 0.02164121627807617, 0.021587968826293946, 0.02185215950012207, 0.021720064163208007, 0.021754880905151368, 0.02163711929321289, 0.021811199188232423, 0.022486015319824217, 0.02244607925415039, 0.022261760711669923, 0.021588991165161133, 0.021580799102783203, 0.021702655792236326, 0.02148863983154297, 0.02165862464904785, 0.02230784034729004, 0.0224768009185791, 0.02244812774658203, 0.022494207382202147, 0.02187980842590332, 0.021976064682006836, 0.022041599273681642, 0.021724159240722657, 0.02165452766418457, 0.021612543106079102, 0.02206003189086914, 0.021711872100830077, 0.0216944637298584, 0.021922815322875978, 0.02168934440612793, 0.02168627166748047, 0.02165555191040039, 0.021587968826293946, 0.021626880645751953, 0.02170675277709961, 0.021961727142333985, 0.02164838409423828, 0.021567487716674806, 0.021622783660888673, 0.021585920333862304, 0.021627904891967774, 0.021591039657592775, 0.021582847595214845, 0.0216944637298584, 0.021604352951049805, 0.021599231719970705, 0.021625856399536132, 0.021534719467163087, 0.02168832015991211, 0.021602304458618164, 0.0219238395690918, 0.02182246398925781, 0.02224844741821289, 0.02206105613708496, 0.021634048461914062, 0.021594112396240234, 
0.02163302421569824, 0.021766143798828123, 0.02147737693786621, 0.021696512222290038, 0.02191360092163086, 0.022034431457519533, 0.02163302421569824, 0.021621759414672852, 0.021597183227539063, 0.021567487716674806, 0.02163199996948242, 0.021742591857910155, 0.021611520767211914, 0.02164121627807617, 0.021594112396240234, 0.02184601593017578, 0.021833728790283204, 0.021635072708129883, 0.021577728271484374, 0.021703680038452147, 0.021578752517700195, 0.021551103591918946, 0.021564416885375977, 0.021593088150024413, 0.021583871841430666, 0.021545984268188476, 0.02146713638305664, 0.022026239395141603, 0.021964799880981444, 0.02163711929321289, 0.02164838409423828, 0.02165043258666992, 0.022171648025512695, 0.022112255096435548, 0.02173030471801758, 0.021638143539428712, 0.021696512222290038, 0.021597183227539063, 0.021736448287963867, 0.021746688842773438, 0.02167193603515625, 0.02164019203186035, 0.021763071060180664, 0.02188287925720215, 0.02273689651489258, 0.023249919891357423, 0.021999616622924805, 0.022534143447875975, 0.022155263900756835, 0.021582847595214845, 0.022280191421508787, 0.022280191421508787, 0.02167807960510254, 0.021833728790283204, 0.02168012809753418, 0.021767168045043944, 0.02167091178894043, 0.021983232498168945, 0.022342655181884767, 0.021720064163208007, 0.021531648635864258, 0.021602304458618164, 0.02165452766418457, 0.02165555191040039, 0.02185625648498535, 0.022178815841674804, 0.02187264060974121, 0.02164531135559082, 0.022492160797119142, 0.021969919204711915, 0.02166067123413086, 0.021708799362182618, 0.021525503158569336, 0.022364160537719727, 0.0222423038482666, 0.021619712829589844, 0.021599231719970705, 0.02169753646850586, 0.02167398452758789, 0.022318111419677735, 0.021882848739624025, 0.021581823348999024, 0.022122495651245116, 0.021548032760620117, 0.022157312393188477, 0.021839872360229492, 0.02220134353637695, 0.02204569625854492, 0.02164121627807617, 0.02164121627807617, 0.021785600662231445, 0.021793792724609375, 0.022090751647949217, 0.022363136291503907, 0.021740543365478517, 0.021541887283325196, 0.021696512222290038, 0.02186649513244629, 0.02165657615661621, 0.022208511352539064, 0.022466560363769532, 0.02171494483947754, 0.021639167785644533, 0.021610496520996093, 0.02173030471801758, 0.021741567611694337, 0.022076416015625, 0.02223308753967285, 0.02191974449157715, 0.02166374397277832, 0.021731327056884766, 0.023828479766845705, 0.023900159835815428, 0.023661567687988282, 0.022962175369262695, 0.022984703063964843, 0.02290892791748047, 0.022952959060668944, 0.022793216705322264, 0.023044095993041993, 0.022948863983154297, 0.022989824295043947, 0.02290176010131836, 0.02291097640991211, 0.02290483283996582, 0.022855680465698244, 0.02290892791748047, 0.023047168731689452, 0.02295910453796387, 0.02293452835083008, 0.023029760360717775, 0.02391142463684082, 0.023746559143066406, 0.02323148727416992, 0.022984703063964843, 0.022981632232666017, 0.02290892791748047, 0.02305638313293457, 0.022989824295043947, 0.0228351993560791, 0.02287308883666992, 0.022984703063964843, 0.022988800048828126, 0.0229171199798584, 0.022951936721801756, 0.022977535247802734, 0.022970367431640625, 0.022905855178833007, 0.022964223861694336, 0.02290995216369629, 0.022930431365966796, 0.02308403205871582, 0.02305638313293457]",tokens/s,45.40328329927853,,,1,64,1,,, 
-4bit-awq-exllama-v1-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,facebook/opt-66b,facebook/opt-66b,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - self.load_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3904, in from_pretrained - dispatch_model(model, **device_map_kwargs) - File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 489, in dispatch_model - model.to(device) - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 2796, in to - return super().to(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1173, in to - return self._apply(convert) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 779, in _apply - module._apply(fn) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 779, in _apply - module._apply(fn) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 779, in _apply - module._apply(fn) - [Previous line repeated 2 more times] - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 853, in _apply - self._buffers[key] = fn(buf) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1159, in convert - return t.to( -torch.cuda.OutOfMemoryError: CUDA out of memory. Tried to allocate 162.00 MiB. 
GPU - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,1,64,1,,, -4bit-awq-exllama-v1-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,0,0,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 304, in hf_raise_for_status - response.raise_for_status() - File ""/usr/local/lib/python3.10/dist-packages/requests/models.py"", line 1024, in raise_for_status - raise HTTPError(http_error_msg, response=self) -requests.exceptions.HTTPError: 404 Client Error: Not Found for url: https://huggingface.co/0/resolve/main/config.json - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1221, in hf_hub_download - return _hf_hub_download_to_cache_dir( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1325, in _hf_hub_download_to_cache_dir - _raise_on_head_call_error(head_call_error, force_download, local_files_only) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1823, in _raise_on_head_call_error - raise head_call_error - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1722, in _get_metadata_or_catch_error - metadata = get_hf_file_metadata(url=url, proxies=proxies, timeout=etag_timeout, headers=headers) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1645, in get_hf_file_metadata - r = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 372, in _request_wrapper - response = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 396, in _request_wrapper - hf_raise_for_status(response) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status - raise 
RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-669494d3-6af856cf1c8afc57242df494;6c6317f3-b7d5-4453-86b3-85f9cdc1364e) - -Repository Not Found for url: https://huggingface.co/0/resolve/main/config.json. -Please make sure you specified the correct `repo_id` and `repo_type`. -If you are trying to access a private or gated repo, make sure you are authenticated. - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 425, in cached_file - raise EnvironmentError( -OSError: 0 is not a local folder and is not a valid model identifier listed on 'https://huggingface.co/models' -If this is a private repository, make sure to pass a token having permission to this repo either by logging in with `huggingface-cli login` or by passing `token=` - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,1,64,1,,, -4bit-awq-exllama-v1-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,facebook/xglm-4.5B,facebook/xglm-4.5B,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) 
-ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - self.load_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3907, in from_pretrained - hf_quantizer.postprocess_model(model) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/base.py"", line 195, in postprocess_model - return self._process_model_after_weight_loading(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/quantizer_awq.py"", line 107, in _process_model_after_weight_loading - model = post_init_awq_exllama_modules(model, self.quantization_config.exllama_config) - File ""/usr/local/lib/python3.10/dist-packages/transformers/integrations/awq.py"", line 462, in post_init_awq_exllama_modules - model = exllama_post_init(model) - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllama.py"", line 144, in exllama_post_init - submodule.post_init() - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllama.py"", line 77, in post_init - self.q4 = exl_ext.make_q4( -NameError: name 'exl_ext' is not defined - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,1,64,1,,, -4bit-awq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,TencentARC/Mistral_Pro_8B_v0.1,TencentARC/Mistral_Pro_8B_v0.1,cuda,0,42,,,True,,,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,,mistral,MB,5167.280128,6469.189632,0.0,5832.179712,5645.103616,s,1,12.10156640625,12.10156640625,0.0,12.10156640625,12.10156640625,12.10156640625,12.10156640625,[12.10156640625],,kWh,6.200802607915787e-05,3.396645383666979e-05,0.00011119897784800381,0.00020717345776383146,,MB,1792.458752,6527.909888,0.0,5874.122752,5159.561216,s,10,16.552156860351563,1.6552156860351563,0.00011134605196456247,1.6551574096679689,1.6553685180664064,1.6553723083496095,1.655375340576172,"[1.6553577880859376, 1.6551451416015626, 1.6551116943359374, 1.65515771484375, 1.6551571044921876, 1.6550809326171876, 1.6551182861328124, 1.6553760986328125, 1.655284423828125, 
1.65536767578125]",tokens/s,154.66262322175857,kWh,1.955550849229225e-05,1.071555966292178e-05,0.00011502389757459818,0.00014529496572981222,tokens/kWh,1761933.0354229393,MB,1803.337728,6548.881408,0.0,5892.99712,5159.563776,s,10,25.5156572265625,2.55156572265625,0.013510257276341366,2.5500773925781246,2.5704140380859375,2.574718371582031,2.578161838378906,"[2.55011767578125, 2.550037109375, 2.55752685546875, 2.530854736328125, 2.54418798828125, 2.56945751953125, 2.5526171875, 2.541088623046875, 2.540746826171875, 2.579022705078125]",tokens/s,24.690722030242384,kWh,3.0585989778193914e-05,1.6764438087177494e-05,8.392673380800409e-05,0.00013127716167337544,tokens/kWh,479900.6864327807,,s,630,25.513270267486586,0.04049725439283584,0.000644322978768631,0.040262144088745115,0.04126648178100586,0.04156605319976807,0.04299605007171632,"[0.04174131011962891, 0.04096409606933594, 0.04159078216552734, 0.04048793411254883, 0.04020940780639649, 0.04212223815917969, 0.040548351287841795, 0.04126924896240235, 0.04018483352661133, 0.04009267044067383, 0.040941566467285154, 0.041562110900878906, 0.04033740615844727, 0.04009164810180664, 0.04036505508422852, 0.04047769546508789, 0.04012646484375, 0.040120319366455076, 0.040627201080322264, 0.041224193572998044, 0.04113612747192383, 0.040231937408447264, 0.040400894165039065, 0.04023807907104492, 0.040403968811035154, 0.040769535064697264, 0.04087807846069336, 0.04090777587890625, 0.04007219314575195, 0.04016537475585937, 0.040420352935791014, 0.04062003326416016, 0.04038246536254883, 0.04008038330078125, 0.0405401611328125, 0.04077260971069336, 0.04048384094238281, 0.04108492660522461, 0.04043673706054687, 0.04018483352661133, 0.040397823333740236, 0.04039680099487305, 0.039926784515380856, 0.03997491073608399, 0.04053606414794922, 0.04069478225708008, 0.040268798828125, 0.04022886276245117, 0.04007628631591797, 0.039965694427490234, 0.03942911911010742, 0.039861248016357424, 0.040215553283691405, 0.03992268753051758, 0.03996979141235352, 0.04021657562255859, 0.040205310821533204, 0.040207359313964845, 0.0399441909790039, 0.03995033645629883, 0.04181196975708008, 0.040455169677734375, 0.03996160125732422, 0.04103168106079102, 0.040414207458496096, 0.040866817474365234, 0.040005630493164065, 0.039787521362304686, 0.04061695861816406, 0.04166758346557617, 0.04064767837524414, 0.03998720169067383, 0.04074700927734375, 0.04150067138671875, 0.040275966644287106, 0.04023091125488281, 0.040612865447998046, 0.040446975708007815, 0.04137984085083008, 0.04006604766845703, 0.04077363204956055, 0.040400894165039065, 0.04053606414794922, 0.04009369659423828, 0.04003839874267578, 0.039897087097167966, 0.039943168640136716, 0.03950592041015625, 0.04033126449584961, 0.04011520004272461, 0.040151039123535154, 0.040248321533203124, 0.04042342376708984, 0.04127231979370117, 0.0407347183227539, 0.04059648132324219, 0.040888320922851565, 0.04004761505126953, 0.04032716751098633, 0.04014796829223633, 0.0400373764038086, 0.040025089263916014, 0.041016319274902346, 0.0406036491394043, 0.040190975189208986, 0.040205310821533204, 0.040217601776123046, 0.04031590270996094, 0.04013363265991211, 0.04118527984619141, 0.043763713836669924, 0.04151705551147461, 0.040600574493408204, 0.04013363265991211, 0.040025089263916014, 0.04006399917602539, 0.04006092834472656, 0.040089599609375, 0.04150886535644531, 0.040323070526123043, 0.0400261116027832, 0.039975936889648435, 0.04049407958984375, 0.040062976837158204, 0.04026572799682617, 0.04022476959228516, 0.041875457763671874, 
0.042447872161865234, 0.04092620849609375, 0.0412149772644043, 0.040932350158691407, 0.041442302703857424, 0.039977985382080077, 0.04077568054199219, 0.04065075302124024, 0.042270721435546874, 0.04475392150878906, 0.04007526397705078, 0.03994112014770508, 0.03990528106689453, 0.040035327911376956, 0.03972608184814453, 0.03999948883056641, 0.03988889694213867, 0.04005683135986328, 0.04007014465332031, 0.040008705139160154, 0.039876609802246096, 0.04023603057861328, 0.04048281478881836, 0.04142489624023438, 0.040738815307617186, 0.04114636611938476, 0.04040499114990234, 0.04009574508666992, 0.0409804801940918, 0.039949310302734374, 0.03993088150024414, 0.03984384155273438, 0.03996876907348633, 0.04010496139526367, 0.04526387023925781, 0.04141363143920898, 0.04011212921142578, 0.04006092834472656, 0.04020633697509766, 0.040049663543701174, 0.03995238494873047, 0.03999846267700195, 0.04003635025024414, 0.04104191970825195, 0.04037836837768555, 0.04008553695678711, 0.040253406524658204, 0.04000460815429688, 0.04024524688720703, 0.04078387069702148, 0.04111360168457031, 0.04046745681762695, 0.03998003387451172, 0.040114177703857425, 0.04021247863769531, 0.039977985382080077, 0.04017868804931641, 0.04009574508666992, 0.04269465637207031, 0.040220672607421876, 0.04018175888061523, 0.03999846267700195, 0.041218048095703126, 0.04010086441040039, 0.03996057510375976, 0.04006195068359375, 0.040156158447265625, 0.04026777648925781, 0.0402083854675293, 0.04023603057861328, 0.04008652877807617, 0.04048179244995117, 0.039913471221923826, 0.03995852661132813, 0.04001792144775391, 0.03990425491333008, 0.04013568115234375, 0.04021964645385742, 0.040041473388671874, 0.040164352416992184, 0.040027137756347655, 0.040030208587646485, 0.0401715202331543, 0.04021657562255859, 0.04024729537963867, 0.04023807907104492, 0.040022014617919925, 0.04037529754638672, 0.03999641418457031, 0.04006195068359375, 0.040018943786621096, 0.04008038330078125, 0.04016844940185547, 0.04026163101196289, 0.040089599609375, 0.04011929702758789, 0.040130561828613284, 0.04014387130737305, 0.04005478286743164, 0.04003430557250977, 0.04014284896850586, 0.04033740615844727, 0.04004761505126953, 0.03961548614501953, 0.04020019149780273, 0.039678974151611326, 0.04006604766845703, 0.040578048706054685, 0.04069375991821289, 0.04047052764892578, 0.04007731246948242, 0.04048793411254883, 0.040030208587646485, 0.03999641418457031, 0.03999027252197265, 0.03990630340576172, 0.04038451385498047, 0.040825855255126955, 0.040653823852539066, 0.040741886138916016, 0.04007014465332031, 0.03998003387451172, 0.040005630493164065, 0.04001792144775391, 0.040019966125488284, 0.041312255859375, 0.04104191970825195, 0.04051660919189453, 0.03996672058105469, 0.04008652877807617, 0.04011724853515625, 0.040033279418945314, 0.04023091125488281, 0.03997491073608399, 0.03998515319824219, 0.04008755111694336, 0.039880702972412106, 0.04104499053955078, 0.04193075180053711, 0.04074086380004883, 0.040318977355957034, 0.04016332626342774, 0.03989811325073242, 0.04016025543212891, 0.040046592712402344, 0.04038246536254883, 0.039975936889648435, 0.04000665664672851, 0.040123390197753905, 0.04003942489624023, 0.04043775939941406, 0.04118937683105469, 0.04023295974731445, 0.03994319915771485, 0.04003631973266602, 0.03991142272949219, 0.04002816009521484, 0.03992166519165039, 0.039989246368408206, 0.039975936889648435, 0.03969843292236328, 0.04003123092651367, 0.0401080322265625, 0.04012236785888672, 0.040103935241699216, 0.04034457778930664, 0.04084735870361328, 
0.04326604843139648, 0.04213759994506836, 0.040581119537353515, 0.040048641204833986, 0.04033126449584961, 0.040275966644287106, 0.04067225646972656, 0.04134195327758789, 0.041240577697753904, 0.04132454299926758, 0.04007628631591797, 0.04010598373413086, 0.04010905456542969, 0.040210430145263674, 0.04006911849975586, 0.04015411376953125, 0.040010753631591796, 0.040225791931152347, 0.04019404983520508, 0.04014796829223633, 0.04045004653930664, 0.04156927871704102, 0.04098457717895508, 0.04078387069702148, 0.04123545455932617, 0.04118527984619141, 0.04132556915283203, 0.041545726776123046, 0.04156108856201172, 0.04125491333007812, 0.04106137466430664, 0.04119039916992188, 0.0411596794128418, 0.04041113662719727, 0.03976192092895508, 0.04013260650634766, 0.040622081756591794, 0.04002918243408203, 0.04010700988769531, 0.040169471740722655, 0.04006399917602539, 0.04051865768432617, 0.04097536087036133, 0.04043062210083008, 0.040664031982421876, 0.040804351806640625, 0.04084428787231445, 0.04099993515014649, 0.04104294586181641, 0.04114739227294922, 0.04105830383300781, 0.04071219253540039, 0.041250816345214845, 0.04055449676513672, 0.04087807846069336, 0.040513534545898434, 0.040776702880859376, 0.040592384338378903, 0.04086067199707031, 0.04064767837524414, 0.04173926544189453, 0.04077772903442383, 0.04031488037109375, 0.0403394546508789, 0.04080230331420898, 0.040602622985839845, 0.04106752014160156, 0.04106137466430664, 0.04143718338012695, 0.04095897674560547, 0.04055859375, 0.040289279937744144, 0.04026163101196289, 0.03990425491333008, 0.040986625671386716, 0.04043673706054687, 0.04063846588134765, 0.04080640029907227, 0.041111553192138675, 0.04092313766479492, 0.040374271392822264, 0.04095590209960937, 0.04084735870361328, 0.04061798477172852, 0.04097536087036133, 0.04010905456542969, 0.040022014617919925, 0.04013568115234375, 0.040499198913574216, 0.04025446319580078, 0.040243198394775394, 0.040392704010009765, 0.04003123092651367, 0.04066611099243164, 0.040292350769042966, 0.040046592712402344, 0.040205310821533204, 0.04009164810180664, 0.040103935241699216, 0.04105011367797851, 0.04063334274291992, 0.041003009796142575, 0.0411412467956543, 0.04083200073242187, 0.04151295852661133, 0.043104255676269534, 0.042761215209960936, 0.04158771133422851, 0.040546302795410154, 0.040427520751953126, 0.040343551635742186, 0.04029849624633789, 0.0400373764038086, 0.04043468856811523, 0.040041473388671874, 0.04028211212158203, 0.04004044723510742, 0.040118270874023435, 0.04048896026611328, 0.04059033584594727, 0.04017561721801758, 0.04019302368164063, 0.04022476959228516, 0.04067532730102539, 0.041134078979492186, 0.04080230331420898, 0.04034764862060547, 0.04012953567504883, 0.04028313446044922, 0.04067225646972656, 0.04023295974731445, 0.04039475250244141, 0.04037017440795899, 0.04009676742553711, 0.039989246368408206, 0.04158771133422851, 0.04222259140014648, 0.040389633178710936, 0.04033126449584961, 0.04022988891601562, 0.040158206939697266, 0.040118270874023435, 0.03986431884765625, 0.04046540832519531, 0.03998003387451172, 0.04002304077148437, 0.03996160125732422, 0.04070502471923828, 0.040215553283691405, 0.04000771331787109, 0.03986940765380859, 0.04026675033569336, 0.04013875198364258, 0.040030208587646485, 0.03995238494873047, 0.03995238494873047, 0.04071731185913086, 0.04100198364257813, 0.04163481521606445, 0.04309196853637695, 0.04062515258789062, 0.04024729537963867, 0.039995391845703124, 0.04040703964233398, 0.04018483352661133, 0.04016128158569336, 0.04017356872558594, 
0.04013158416748047, 0.04006092834472656, 0.040150016784667966, 0.04019814300537109, 0.04006399917602539, 0.04017868804931641, 0.040392704010009765, 0.04005990219116211, 0.039926784515380856, 0.040425472259521485, 0.03987558364868164, 0.03983871841430664, 0.03982643127441406, 0.03977318572998047, 0.04174848175048828, 0.040822784423828126, 0.04020121765136719, 0.0401162223815918, 0.03981414413452149, 0.04026265716552734, 0.041524223327636715, 0.0407347183227539, 0.04000665664672851, 0.04005683135986328, 0.04019814300537109, 0.04028931045532227, 0.04020323181152344, 0.040114177703857425, 0.040048641204833986, 0.040180736541748044, 0.04005376052856445, 0.04010598373413086, 0.04074700927734375, 0.040258560180664066, 0.04034560012817383, 0.040233985900878906, 0.04009881591796875, 0.039874561309814455, 0.03984588623046875, 0.0402606086730957, 0.040576000213623044, 0.04152012634277344, 0.040354816436767575, 0.041322494506835936, 0.040235008239746094, 0.04010291290283203, 0.040027137756347655, 0.04038451385498047, 0.04029439926147461, 0.04028006362915039, 0.040787967681884765, 0.040258560180664066, 0.040264705657958984, 0.040260639190673825, 0.04007113647460937, 0.04009267044067383, 0.03998207855224609, 0.04011520004272461, 0.040256511688232424, 0.040084480285644535, 0.04030156707763672, 0.040118270874023435, 0.04018483352661133, 0.04043366241455078, 0.040651775360107424, 0.040180736541748044, 0.04021452713012695, 0.04096409606933594, 0.04028518295288086, 0.039640064239501956, 0.04000460815429688, 0.03998617553710938, 0.04048793411254883, 0.04108697509765625, 0.04052275085449219, 0.03999641418457031, 0.040187904357910156, 0.0400261116027832, 0.0400373764038086, 0.03998720169067383, 0.0400076789855957, 0.040025089263916014, 0.04014899063110351, 0.039951358795166016, 0.03998003387451172, 0.04004761505126953, 0.0402083854675293, 0.039981056213378906, 0.03996876907348633, 0.04022988891601562, 0.04031692886352539, 0.040025089263916014, 0.0404398078918457, 0.040068096160888675, 0.03998003387451172, 0.03991961669921875, 0.0400076789855957, 0.04036198425292969, 0.041253887176513675, 0.040443904876708986, 0.04148940658569336, 0.04017663955688477, 0.040150016784667966, 0.04031999969482422, 0.04246220779418945, 0.04222873687744141, 0.04177407836914063, 0.04080230331420898, 0.041171966552734376, 0.0410880012512207, 0.041319423675537106, 0.04070809555053711, 0.04024115371704102, 0.04036403274536133, 0.0400076789855957, 0.04023295974731445, 0.04057395172119141, 0.04111974334716797, 0.04120985412597656, 0.040871936798095705, 0.04115865707397461, 0.04080537414550781, 0.040233985900878906, 0.04046950531005859, 0.04026367950439453, 0.040755199432373046, 0.040302593231201174, 0.04102963256835938, 0.040899585723876954, 0.041270271301269534, 0.04225228881835937, 0.041057281494140625, 0.04132659149169922, 0.040970241546630856, 0.04087705612182617, 0.041166881561279296, 0.04061590576171875, 0.04022476959228516, 0.040235008239746094, 0.040271873474121096, 0.04016128158569336, 0.040205310821533204, 0.04053401565551758, 0.041229312896728515, 0.04128460693359375, 0.041350143432617184, 0.04121395111083984, 0.04129075241088867, 0.040237056732177735, 0.04126617431640625, 0.04087398529052735, 0.040338432312011716, 0.04050841522216797, 0.04121395111083984, 0.04050227355957031, 0.04151910400390625, 0.041057281494140625, 0.04129075241088867, 0.04366438293457031, 0.04202700805664063, 0.04121395111083984, 0.041306110382080076, 0.04076339340209961, 0.04087091064453125, 0.04170444869995117, 0.041106433868408204, 0.041250816345214845, 
0.04083609771728516, 0.04032716751098633]",tokens/s,24.69303203371992,,,1,64,1,,, -4bit-awq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,Salesforce/codegen-6B-nl,Salesforce/codegen-6B-nl,cuda,0,42,,,True,,,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,,codegen,MB,4280.1152,6116.868096,0.0,5479.858176,5102.547456,s,1,10.1764970703125,10.1764970703125,0.0,10.1764970703125,10.1764970703125,10.1764970703125,10.1764970703125,[10.1764970703125],,kWh,3.921220716597166e-05,2.147211784264571e-05,7.426061496401681e-05,0.0001349449399726342,,MB,1631.834112,6152.51968,0.0,5498.732544,4675.92192,s,10,12.416454833984375,1.2416454833984374,0.00020170473654342622,1.241615234375,1.2418873779296875,1.2419485717773437,1.2419975268554688,"[1.242009765625, 1.241873779296875, 1.241426025390625, 1.241505126953125, 1.241805908203125, 1.24144384765625, 1.24159326171875, 1.241384521484375, 1.24163720703125, 1.241775390625]",tokens/s,206.1780141134303,kWh,1.4669930254930505e-05,8.03842999775567e-06,8.713131970499998e-05,0.00010983967995768615,tokens/kWh,2330669.573132584,MB,1664.712704,6154.616832,0.0,5498.732544,4809.526784,s,10,22.78700048828125,2.2787000488281253,0.015166007906271253,2.2781634521484375,2.28762275390625,2.302819921875,2.31497765625,"[2.2756484375, 2.26557958984375, 2.263264892578125, 2.278427001953125, 2.281048583984375, 2.261636962890625, 2.28424560546875, 2.27789990234375, 2.281232421875, 2.31801708984375]",tokens/s,27.64734219073687,kWh,2.7553775445903797e-05,1.50992923204953e-05,7.21151410253973e-05,0.00011476820879179643,tokens/kWh,548932.5019813606,,s,630,22.78466358566286,0.036166132675655306,0.0006298834846007237,0.035930112838745115,0.03711826095581055,0.03741337642669678,0.038458849754333495,"[0.036446208953857424, 0.03579904174804688, 0.035846145629882815, 0.03624345779418945, 0.036674560546875, 0.036329471588134765, 0.03609190368652344, 0.0366827507019043, 0.03593318557739258, 0.03604991912841797, 0.035866622924804685, 0.035922943115234376, 0.035775489807128906, 0.035883007049560545, 0.035884033203125, 0.03595775985717774, 0.03588915252685547, 0.03881881713867188, 0.0366376953125, 0.03603148651123047, 0.03585638427734375, 0.03590655899047852, 0.03600281524658203, 0.0359813117980957, 0.0358656005859375, 0.03592704010009766, 0.03593830490112305, 0.03602534484863281, 0.035810302734375, 0.03591680145263672, 0.035947521209716796, 0.035966976165771485, 0.035915775299072264, 0.035937278747558594, 0.03576627349853516, 0.035920894622802735, 0.03600998306274414, 0.03596083068847656, 0.03586764907836914, 0.03592396926879883, 0.03587788772583008, 0.035846145629882815, 0.03580928039550781, 0.0360079345703125, 0.03585433578491211, 0.03591167831420899, 0.03586150360107422, 0.03598950576782227, 0.03594035339355469, 0.035934207916259765, 0.035958782196044925, 0.03604275131225586, 0.03806617736816406, 0.03726335906982422, 0.03645849609375, 0.03719987106323242, 0.03591884613037109, 0.036029441833496094, 0.03592192077636719, 0.03586457443237305, 0.036311038970947264, 0.03624652862548828, 0.035899391174316404, 
0.036209663391113284, 0.035829761505126956, 0.03573555374145508, 0.03590963363647461, 0.03606630325317383, 0.036337665557861325, 0.03585638427734375, 0.03601100921630859, 0.03592806243896484, 0.03585433578491211, 0.036239360809326174, 0.035929088592529294, 0.03582566452026367, 0.035530750274658206, 0.035688449859619144, 0.0359813117980957, 0.03666636657714844, 0.03628236770629883, 0.03623219299316406, 0.036942848205566405, 0.03586969757080078, 0.0359813117980957, 0.03577446365356445, 0.036070400238037106, 0.035794944763183595, 0.03583078384399414, 0.035784702301025394, 0.035901439666748046, 0.03585331344604492, 0.036192256927490236, 0.035860481262207033, 0.036157440185546875, 0.03587583923339844, 0.03585331344604492, 0.0358922233581543, 0.03617587280273438, 0.03587276840209961, 0.0359741439819336, 0.036222976684570314, 0.0359659538269043, 0.03591987228393555, 0.03594649505615234, 0.03552153778076172, 0.036004863739013675, 0.035950592041015625, 0.035942401885986325, 0.03609088134765625, 0.035980289459228515, 0.035883007049560545, 0.03598643112182617, 0.03588710403442383, 0.03585126495361328, 0.03587891387939453, 0.03578572845458984, 0.03596492767333984, 0.03586969757080078, 0.0357386245727539, 0.035862529754638675, 0.035844097137451174, 0.03590655899047852, 0.03586150360107422, 0.035860481262207033, 0.03585638427734375, 0.0361420783996582, 0.0359444465637207, 0.03546726226806641, 0.0358287353515625, 0.036013057708740234, 0.03599052810668945, 0.035860481262207033, 0.03606835174560547, 0.035810302734375, 0.035920894622802735, 0.03574476623535156, 0.03598950576782227, 0.035800064086914066, 0.03584102249145508, 0.03572531127929687, 0.035833854675292966, 0.03577241516113281, 0.03582259368896484, 0.0357314567565918, 0.03587583923339844, 0.03567718505859375, 0.0358922233581543, 0.03576422500610352, 0.0359024658203125, 0.035975166320800785, 0.035833854675292966, 0.035813377380371096, 0.03583283233642578, 0.0358656005859375, 0.03591987228393555, 0.035945472717285154, 0.0359659538269043, 0.03622604751586914, 0.035955711364746096, 0.03581542587280274, 0.03603046417236328, 0.035999744415283204, 0.035776512145996094, 0.03584819030761719, 0.0359659538269043, 0.035958782196044925, 0.03589529418945313, 0.035781631469726564, 0.035920894622802735, 0.03591884613037109, 0.035939327239990236, 0.03621478271484375, 0.03599871826171875, 0.03585638427734375, 0.03595980834960937, 0.03575807952880859, 0.03599769592285156, 0.0363612174987793, 0.03600998306274414, 0.03599155044555664, 0.03602841567993164, 0.03595468902587891, 0.03580825424194336, 0.0358656005859375, 0.03589734268188476, 0.03698995208740234, 0.03592806243896484, 0.03580825424194336, 0.036119552612304685, 0.03597619247436523, 0.035778560638427735, 0.03791667175292969, 0.036972545623779295, 0.0359444465637207, 0.03598745727539063, 0.03596083068847656, 0.035914752960205076, 0.03601715087890625, 0.03596492767333984, 0.035884033203125, 0.03578265762329102, 0.03577139282226562, 0.03589836883544922, 0.036094974517822266, 0.03580825424194336, 0.03586764907836914, 0.036944896697998046, 0.036675582885742186, 0.035904510498046875, 0.03604787063598633, 0.03594035339355469, 0.035950592041015625, 0.03590655899047852, 0.035811328887939455, 0.03585638427734375, 0.035800064086914066, 0.0358656005859375, 0.03592396926879883, 0.03587583923339844, 0.036001792907714845, 0.03589120101928711, 0.03584819030761719, 0.035800064086914066, 0.035767295837402346, 0.03614720153808594, 0.038340606689453126, 0.038319103240966795, 0.036380672454833986, 0.03768115234375, 
0.03606937789916992, 0.03585945510864258, 0.03585843276977539, 0.035814399719238284, 0.03583590316772461, 0.03699507141113281, 0.03604991912841797, 0.03591987228393555, 0.0357498893737793, 0.03584921646118164, 0.03739648056030274, 0.03664486312866211, 0.035814399719238284, 0.035846145629882815, 0.035862529754638675, 0.03591167831420899, 0.035931137084960936, 0.03589120101928711, 0.03579084777832031, 0.03589120101928711, 0.036192256927490236, 0.036653057098388675, 0.03609395217895508, 0.03592704010009766, 0.03573657608032227, 0.03602431869506836, 0.03734527969360352, 0.036721664428710936, 0.03715379333496094, 0.03780198287963867, 0.037599231719970705, 0.03654655838012695, 0.03580928039550781, 0.03580825424194336, 0.03575500869750976, 0.03582156753540039, 0.03629260635375976, 0.03645542526245117, 0.035931137084960936, 0.035929088592529294, 0.03584921646118164, 0.03590348815917969, 0.03581235122680664, 0.03583283233642578, 0.035833854675292966, 0.036067329406738284, 0.036013057708740234, 0.035922943115234376, 0.035901439666748046, 0.03589734268188476, 0.03584000015258789, 0.03589324951171875, 0.03589734268188476, 0.03678515243530273, 0.0358656005859375, 0.03578879928588867, 0.03594342422485351, 0.035915775299072264, 0.03589017486572266, 0.036111358642578126, 0.03581235122680664, 0.0359444465637207, 0.035931137084960936, 0.035922943115234376, 0.03666124725341797, 0.03605299377441406, 0.035811328887939455, 0.03598233413696289, 0.036544513702392575, 0.03602841567993164, 0.0358389778137207, 0.037427200317382815, 0.03733708953857422, 0.03663359832763672, 0.03578572845458984, 0.035904510498046875, 0.03586150360107422, 0.03638272094726563, 0.0358205451965332, 0.03583078384399414, 0.037612545013427735, 0.03731660842895508, 0.03602841567993164, 0.035798015594482424, 0.03683225631713867, 0.036154369354248046, 0.03577446365356445, 0.03583692932128906, 0.03616563034057617, 0.03587788772583008, 0.035767295837402346, 0.035844097137451174, 0.03595161437988281, 0.035983360290527344, 0.03586764907836914, 0.035806209564208984, 0.03585433578491211, 0.03590963363647461, 0.03582668685913086, 0.03587481689453125, 0.035917823791503906, 0.035862529754638675, 0.036908031463623044, 0.0359741439819336, 0.035934207916259765, 0.03568537521362305, 0.035961856842041014, 0.03577446365356445, 0.03598950576782227, 0.035776512145996094, 0.03574169540405273, 0.03551641464233399, 0.03587788772583008, 0.0357283821105957, 0.03596799850463867, 0.03585433578491211, 0.03680665588378906, 0.035729408264160156, 0.03578879928588867, 0.035844097137451174, 0.03575091171264649, 0.03623833465576172, 0.036560897827148435, 0.03612160110473633, 0.035939327239990236, 0.035827713012695314, 0.0353331184387207, 0.03561062240600586, 0.03608473587036133, 0.03592704010009766, 0.0358656005859375, 0.03576627349853516, 0.035942401885986325, 0.03589529418945313, 0.03580313491821289, 0.035813377380371096, 0.03580313491821289, 0.03581542587280274, 0.03584307098388672, 0.035465217590332034, 0.035931137084960936, 0.03594137573242188, 0.0359813117980957, 0.035896320343017575, 0.03583795166015625, 0.03582259368896484, 0.03571507263183594, 0.03574272155761719, 0.03619123077392578, 0.03605913543701172, 0.03585945510864258, 0.036239360809326174, 0.035827713012695314, 0.03590963363647461, 0.03585331344604492, 0.0361267204284668, 0.03601203155517578, 0.035884033203125, 0.035942401885986325, 0.0358287353515625, 0.03583590316772461, 0.035912704467773435, 0.03590655899047852, 0.03638784027099609, 0.036446208953857424, 0.036001792907714845, 0.035552257537841796, 
0.03625062561035156, 0.03581235122680664, 0.036350975036621096, 0.04022988891601562, 0.0385904655456543, 0.03668377685546875, 0.03693363189697266, 0.03662540817260742, 0.03591167831420899, 0.03589529418945313, 0.03617792129516602, 0.036391937255859375, 0.036838401794433595, 0.035862529754638675, 0.0359024658203125, 0.03595673751831055, 0.03594956970214844, 0.035860481262207033, 0.0358922233581543, 0.03599564743041992, 0.035932159423828124, 0.0358922233581543, 0.03602329635620117, 0.036203521728515625, 0.036209663391113284, 0.03586457443237305, 0.035827713012695314, 0.03583795166015625, 0.036029441833496094, 0.03579289627075195, 0.03574784088134766, 0.03668377685546875, 0.03613798522949219, 0.03595161437988281, 0.03596083068847656, 0.036168704986572264, 0.03719987106323242, 0.03876249694824219, 0.03698688125610351, 0.03664896011352539, 0.035896320343017575, 0.03579596710205078, 0.03650867080688477, 0.03605811309814453, 0.03671142578125, 0.03604787063598633, 0.03590963363647461, 0.035855358123779296, 0.03577446365356445, 0.03593523025512695, 0.03592396926879883, 0.036195327758789066, 0.03611340713500977, 0.03599052810668945, 0.03589324951171875, 0.036324352264404294, 0.03587788772583008, 0.03597619247436523, 0.03602841567993164, 0.03617484664916992, 0.037501953125, 0.03594137573242188, 0.035866622924804685, 0.036432895660400394, 0.03598643112182617, 0.03597926330566406, 0.035915775299072264, 0.03625267028808594, 0.03589734268188476, 0.03583795166015625, 0.03588915252685547, 0.0367718391418457, 0.036596736907958984, 0.03611443328857422, 0.03562188720703125, 0.037084159851074217, 0.03671244812011719, 0.03593830490112305, 0.03841740798950195, 0.036749313354492184, 0.035896320343017575, 0.035942401885986325, 0.035855358123779296, 0.035901439666748046, 0.03592396926879883, 0.03591680145263672, 0.03577446365356445, 0.03589734268188476, 0.037026817321777344, 0.03606528091430664, 0.035833854675292966, 0.036329471588134765, 0.03594137573242188, 0.036162559509277346, 0.035829761505126956, 0.03587583923339844, 0.035871742248535156, 0.035983360290527344, 0.03596799850463867, 0.036503551483154296, 0.03711590576171875, 0.036036609649658206, 0.035920894622802735, 0.03599769592285156, 0.03581542587280274, 0.03593830490112305, 0.03609702301025391, 0.036073471069335936, 0.03591884613037109, 0.036337665557861325, 0.03610214233398437, 0.03581644821166992, 0.03622604751586914, 0.03584819030761719, 0.03606630325317383, 0.035850238800048825, 0.03587686538696289, 0.035776512145996094, 0.03587788772583008, 0.035796993255615236, 0.0359741439819336, 0.037238784790039066, 0.036087806701660154, 0.035980289459228515, 0.03593523025512695, 0.036154369354248046, 0.03568537521362305, 0.035784702301025394, 0.03598438262939453, 0.0357918701171875, 0.03651686477661133, 0.03627724838256836, 0.035759105682373046, 0.035942401885986325, 0.03599564743041992, 0.035885055541992186, 0.036953086853027346, 0.03618304061889648, 0.03625983810424805, 0.03847577667236328, 0.03763302230834961, 0.03618406295776367, 0.03601510238647461, 0.036016128540039063, 0.035310592651367184, 0.035827713012695314, 0.03599769592285156, 0.035901439666748046, 0.03586457443237305, 0.03701862335205078, 0.037177345275878904, 0.0358922233581543, 0.03578777694702148, 0.03582463836669922, 0.03584921646118164, 0.03584204864501953, 0.036560897827148435, 0.03593523025512695, 0.03584204864501953, 0.03572326278686523, 0.035681278228759765, 0.03584921646118164, 0.03586457443237305, 0.03744255828857422, 0.03734527969360352, 0.037266433715820314, 0.03719168090820312, 
0.036659198760986327, 0.03727052688598633, 0.03579904174804688, 0.03746099090576172, 0.037250049591064455, 0.03718348693847656, 0.037233665466308595, 0.03722854232788086, 0.03784806442260742, 0.03833446502685547, 0.03848396682739258, 0.037684223175048825, 0.037510143280029294, 0.03580313491821289, 0.03589529418945313, 0.03624755096435547, 0.036139007568359374, 0.03567923355102539, 0.036926464080810545, 0.03728076934814453, 0.03724492645263672, 0.037372928619384765, 0.03733606338500976, 0.03709747314453125, 0.03727155303955078, 0.03714355087280274, 0.037353473663330077, 0.03707699203491211, 0.03713945770263672, 0.03735551834106445, 0.03720601654052735, 0.03583795166015625, 0.03588198471069336, 0.035781631469726564, 0.035920894622802735, 0.03565260696411133, 0.03709952163696289, 0.03721318435668945, 0.03751833724975586, 0.037713920593261716, 0.03752755355834961, 0.03748659133911133, 0.0382033920288086, 0.03803852844238281, 0.04169625473022461, 0.03827199935913086, 0.037364734649658206, 0.03728998565673828, 0.03609292984008789, 0.03573555374145508, 0.035862529754638675, 0.03571814346313477, 0.03581542587280274, 0.0357283821105957, 0.03570483016967774, 0.03565875244140625, 0.03581542587280274, 0.03592806243896484, 0.03588710403442383, 0.03581644821166992, 0.03576934432983398, 0.035625984191894534, 0.03536383819580078, 0.03572633743286133, 0.03563008117675781, 0.03560755157470703]",tokens/s,27.65017783262005,,,1,64,1,,, -4bit-awq-exllama-v1-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,facebook/opt-125m,facebook/opt-125m,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - self.load_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File 
""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3907, in from_pretrained - hf_quantizer.postprocess_model(model) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/base.py"", line 195, in postprocess_model - return self._process_model_after_weight_loading(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/quantizer_awq.py"", line 107, in _process_model_after_weight_loading - model = post_init_awq_exllama_modules(model, self.quantization_config.exllama_config) - File ""/usr/local/lib/python3.10/dist-packages/transformers/integrations/awq.py"", line 462, in post_init_awq_exllama_modules - model = exllama_post_init(model) - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllama.py"", line 144, in exllama_post_init - submodule.post_init() - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllama.py"", line 77, in post_init - self.q4 = exl_ext.make_q4( -NameError: name 'exl_ext' is not defined - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,1,64,1,,, -4bit-awq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,Qwen/Qwen1.5-110B,Qwen/Qwen1.5-110B,cuda,0,42,,,True,,,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run - report = scenario.run(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run - self.run_model_loading_tracking(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking - backend.load() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load - self.load_transformers_model() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model - self.load_transformers_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights - self.load_transformers_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained - self.pretrained_model = self.automodel_loader.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, 
in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4034, in from_pretrained - dispatch_model(model, **device_map_kwargs) - File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 494, in dispatch_model - model.to(device) - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 2905, in to - return super().to(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1174, in to - return self._apply(convert) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply - module._apply(fn) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply - module._apply(fn) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply - module._apply(fn) - [Previous line repeated 2 more times] - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 854, in _apply - self._buffers[key] = fn(buf) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1160, in convert - return t.to( -torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 32.00 MiB. GPU 0 has a total capacity of 22.18 GiB of which 8.50 MiB is free. Process 85715 has 22.17 GiB memory in use. Of the allocated memory 21.91 GiB is allocated by PyTorch, and 17.87 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) - -",qwen2,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,1,64,1,,, -4bit-awq-exllama-v1-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,B,B,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 304, in hf_raise_for_status - response.raise_for_status() - File ""/usr/local/lib/python3.10/dist-packages/requests/models.py"", line 1024, in raise_for_status - raise HTTPError(http_error_msg, response=self) -requests.exceptions.HTTPError: 404 Client Error: Not Found for url: https://huggingface.co/B/resolve/main/config.json - -The above exception was the direct cause of the following exception: - 
-Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1221, in hf_hub_download - return _hf_hub_download_to_cache_dir( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1325, in _hf_hub_download_to_cache_dir - _raise_on_head_call_error(head_call_error, force_download, local_files_only) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1823, in _raise_on_head_call_error - raise head_call_error - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1722, in _get_metadata_or_catch_error - metadata = get_hf_file_metadata(url=url, proxies=proxies, timeout=etag_timeout, headers=headers) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1645, in get_hf_file_metadata - r = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 372, in _request_wrapper - response = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 396, in _request_wrapper - hf_raise_for_status(response) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status - raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-669493bc-3ee307762ecae6cf22723ec7;4f7372f1-7bff-440c-8066-9015ee974461) - -Repository Not Found for url: https://huggingface.co/B/resolve/main/config.json. -Please make sure you specified the correct `repo_id` and `repo_type`. -If you are trying to access a private or gated repo, make sure you are authenticated. 
- -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 425, in cached_file - raise EnvironmentError( -OSError: B is not a local folder and is not a valid model identifier listed on 'https://huggingface.co/models' -If this is a private repository, make sure to pass a token having permission to this repo either by logging in with `huggingface-cli login` or by passing `token=` - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,1,64,1,,, -4bit-awq-exllama-v1-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,t,t,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 304, in hf_raise_for_status - response.raise_for_status() - File ""/usr/local/lib/python3.10/dist-packages/requests/models.py"", line 1024, in raise_for_status - raise HTTPError(http_error_msg, response=self) -requests.exceptions.HTTPError: 404 Client Error: Not Found for url: https://huggingface.co/t/resolve/main/config.json - -The above exception was the 
direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1221, in hf_hub_download - return _hf_hub_download_to_cache_dir( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1325, in _hf_hub_download_to_cache_dir - _raise_on_head_call_error(head_call_error, force_download, local_files_only) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1823, in _raise_on_head_call_error - raise head_call_error - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1722, in _get_metadata_or_catch_error - metadata = get_hf_file_metadata(url=url, proxies=proxies, timeout=etag_timeout, headers=headers) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1645, in get_hf_file_metadata - r = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 372, in _request_wrapper - response = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 396, in _request_wrapper - hf_raise_for_status(response) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status - raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-669490b2-45ae11576d865af9256c6d61;e4c2efda-3968-4a76-9362-193274524683) - -Repository Not Found for url: https://huggingface.co/t/resolve/main/config.json. -Please make sure you specified the correct `repo_id` and `repo_type`. -If you are trying to access a private or gated repo, make sure you are authenticated. 
- -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 425, in cached_file - raise EnvironmentError( -OSError: t is not a local folder and is not a valid model identifier listed on 'https://huggingface.co/models' -If this is a private repository, make sure to pass a token having permission to this repo either by logging in with `huggingface-cli login` or by passing `token=` - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,1,64,1,,, -4bit-awq-exllama-v1-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,stabilityai/stablelm-base-alpha-3b,stabilityai/stablelm-base-alpha-3b,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File 
""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - self.load_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3907, in from_pretrained - hf_quantizer.postprocess_model(model) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/base.py"", line 195, in postprocess_model - return self._process_model_after_weight_loading(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/quantizer_awq.py"", line 107, in _process_model_after_weight_loading - model = post_init_awq_exllama_modules(model, self.quantization_config.exllama_config) - File ""/usr/local/lib/python3.10/dist-packages/transformers/integrations/awq.py"", line 462, in post_init_awq_exllama_modules - model = exllama_post_init(model) - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllama.py"", line 144, in exllama_post_init - submodule.post_init() - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllama.py"", line 77, in post_init - self.q4 = exl_ext.make_q4( -NameError: name 'exl_ext' is not defined - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,1,64,1,,, -4bit-awq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,Salesforce/codegen-16B-nl,Salesforce/codegen-16B-nl,cuda,0,42,,,True,,,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,,codegen,MB,8799.870976,12418.809856,0.0,11781.799936,10922.070528,s,1,12.3437568359375,12.3437568359375,0.0,12.3437568359375,12.3437568359375,12.3437568359375,12.3437568359375,[12.3437568359375],,kWh,6.525307695833805e-05,3.572210912671106e-05,0.00013837316625400775,0.00023934835233905687,,MB,1893.433344,12452.364288,0.0,11798.577152,9734.047744,s,10,28.0681962890625,2.8068196289062497,0.0004396040702134905,2.8069051513671877,2.8071917480468747,2.807361376953125,2.807497080078125,"[2.806646240234375, 2.806382080078125, 2.807091064453125, 2.806726318359375, 2.807083984375, 2.805973388671875, 2.807531005859375, 2.80646923828125, 2.807154052734375, 2.807138916015625]",tokens/s,91.20643071024733,kWh,3.314130040583419e-05,1.816237138648416e-05,0.00020168585579299902,0.00025298952758531735,tokens/kWh,1011899.5930124712,MB,1906.74944,12454.46144,0.0,11798.577152,9980.698112,s,10,24.391792968750003,2.4391792968750003,0.034540387714247056,2.4387574462890624,2.4762410400390626,2.490054479980469,2.501105231933594,"[2.503867919921875, 2.4083642578125, 2.385995849609375, 2.441389892578125, 2.436125, 2.403375244140625, 2.47317138671875, 2.45348193359375, 2.46833056640625, 
2.41769091796875]",tokens/s,25.82835959648953,kWh,2.8772486506249188e-05,1.576984864654034e-05,0.00010827616995420233,0.00015281850510699186,tokens/kWh,412253.7382229476,,s,630,24.389378036499032,0.03871329847063337,0.000990986278628844,0.03824025726318359,0.0398101432800293,0.040077414512634274,0.040872252578735356,"[0.038286334991455076, 0.04129792022705078, 0.0403691520690918, 0.04043775939941406, 0.03986841583251953, 0.03975372695922851, 0.03952435302734375, 0.039801856994628904, 0.03958169555664062, 0.03956121444702149, 0.03968000030517578, 0.039534591674804685, 0.039406593322753904, 0.04000665664672851, 0.039739391326904294, 0.03975065612792969, 0.039602176666259765, 0.03966566467285156, 0.03952947235107422, 0.03964416122436523, 0.03951513671875, 0.03959603118896484, 0.03960319900512695, 0.03970560073852539, 0.03955712127685547, 0.03954483032226563, 0.03930931091308594, 0.039504894256591795, 0.03965030288696289, 0.04103379058837891, 0.040279998779296874, 0.040033279418945314, 0.03959603118896484, 0.03957452774047852, 0.03945676803588867, 0.039537662506103514, 0.039577598571777346, 0.03953664016723633, 0.039613441467285154, 0.040755199432373046, 0.03994521713256836, 0.039728126525878905, 0.03979776000976563, 0.039599166870117185, 0.03966048049926758, 0.03968511962890625, 0.039600128173828124, 0.03980492782592773, 0.039613441467285154, 0.03967488098144531, 0.03966054534912109, 0.03970560073852539, 0.039708671569824217, 0.0396308479309082, 0.03987968063354492, 0.039800830841064457, 0.039790592193603515, 0.03956531143188476, 0.03968819046020508, 0.03996364974975586, 0.039803905487060545, 0.03968204879760742, 0.03961958312988281, 0.038196224212646485, 0.03781324768066406, 0.03781324768066406, 0.03781017684936523, 0.037746688842773435, 0.03784396743774414, 0.03789311981201172, 0.03791462326049805, 0.03733299255371094, 0.037874687194824216, 0.03850035095214844, 0.03798732757568359, 0.037736446380615234, 0.03772518539428711, 0.037889022827148434, 0.037937152862548826, 0.03782144165039063, 0.037833728790283204, 0.03786137771606445, 0.037795841217041014, 0.03749273681640625, 0.03744870376586914, 0.037820415496826174, 0.0381102066040039, 0.037884929656982425, 0.03741183853149414, 0.037765121459960936, 0.037935104370117184, 0.037796928405761716, 0.03771078491210938, 0.03774259185791016, 0.03802828979492188, 0.03790848159790039, 0.03786444854736328, 0.03778559875488281, 0.0378419189453125, 0.03787161636352539, 0.03826176071166992, 0.0380497932434082, 0.03800371170043945, 0.037795841217041014, 0.03789209747314453, 0.03778252792358398, 0.03783475112915039, 0.037850112915039064, 0.0378081283569336, 0.03775590515136719, 0.03801599884033203, 0.039175167083740234, 0.040065025329589846, 0.040574977874755856, 0.039810047149658204, 0.03947110366821289, 0.03978854370117187, 0.03952844619750977, 0.03975987243652344, 0.039572479248046875, 0.03787673568725586, 0.03937484741210937, 0.03969740676879883, 0.03963187026977539, 0.03921100616455078, 0.03779379272460937, 0.0382217903137207, 0.03781631851196289, 0.03785728073120117, 0.03774259185791016, 0.03774259185791016, 0.037792766571044925, 0.037797889709472655, 0.037894142150878905, 0.03790335845947266, 0.038324222564697266, 0.03789209747314453, 0.03767193603515625, 0.037749759674072264, 0.037819393157958986, 0.037822463989257815, 0.03791155242919922, 0.03776921463012695, 0.039613441467285154, 0.03959807968139648, 0.037967872619628903, 0.03775795364379883, 0.03779072189331055, 0.03769241714477539, 0.037889022827148434, 0.03772313690185547, 0.03776409530639648, 
0.037763072967529294, 0.037814273834228515, 0.03783475112915039, 0.037424129486083986, 0.03778662490844727, 0.03783679962158203, 0.037754878997802735, 0.037422080993652344, 0.03764326477050781, 0.03777433776855469, 0.03856486511230469, 0.037814273834228515, 0.03785420989990235, 0.0378869743347168, 0.0378152961730957, 0.03790233612060547, 0.03765555191040039, 0.037976062774658204, 0.03785932922363281, 0.037784576416015625, 0.03780198287963867, 0.03792281723022461, 0.037754878997802735, 0.037738494873046875, 0.0377077751159668, 0.0377262077331543, 0.037789695739746096, 0.03768729782104492, 0.037792766571044925, 0.03808256149291992, 0.03775692749023438, 0.037765121459960936, 0.037703678131103514, 0.03773235321044922, 0.03775283050537109, 0.03761971282958984, 0.03770163345336914, 0.03811635208129883, 0.03774054336547852, 0.03764223861694336, 0.03761663818359375, 0.03765862274169922, 0.03760947036743164, 0.037694465637207034, 0.03789926528930664, 0.03833446502685547, 0.037792766571044925, 0.0376545295715332, 0.03770982360839844, 0.03804774475097656, 0.04108492660522461, 0.039618560791015625, 0.03958681488037109, 0.039392257690429686, 0.039462913513183595, 0.039411712646484375, 0.039400447845458986, 0.03945062255859375, 0.03954073715209961, 0.0377149429321289, 0.037826591491699216, 0.037776351928710934, 0.03774259185791016, 0.037743614196777346, 0.037684223175048825, 0.037716991424560545, 0.03766886520385742, 0.03761663818359375, 0.03783270263671875, 0.037705726623535156, 0.03772723388671875, 0.037591041564941405, 0.03833651351928711, 0.03789004898071289, 0.03778047943115234, 0.039000064849853515, 0.03977830505371094, 0.0397209587097168, 0.039523326873779296, 0.039394367218017576, 0.03935327911376953, 0.039532543182373044, 0.03944755172729492, 0.03942604827880859, 0.039567359924316405, 0.03948646545410156, 0.03950697708129883, 0.03940041732788086, 0.03954483032226563, 0.03947520065307617, 0.03946700668334961, 0.039523326873779296, 0.03957350540161133, 0.03954585647583008, 0.039648319244384764, 0.039685054779052736, 0.03952742385864258, 0.03949363327026367, 0.04003129577636719, 0.03967891311645508, 0.038191104888916014, 0.0377968635559082, 0.03785932922363281, 0.03773235321044922, 0.03869696044921875, 0.04008857727050781, 0.04030156707763672, 0.03964211273193359, 0.039656448364257815, 0.039479297637939455, 0.039847934722900394, 0.038844417572021485, 0.03966054534912109, 0.039539710998535156, 0.03965849685668945, 0.04029644775390625, 0.03976396942138672, 0.03971379089355469, 0.039567359924316405, 0.039547904968261716, 0.039610366821289066, 0.03954995346069336, 0.03946495819091797, 0.03964825439453125, 0.03967385482788086, 0.03782860946655273, 0.03787059020996094, 0.03786444854736328, 0.03797094345092773, 0.037920768737792966, 0.037978111267089845, 0.03829145431518555, 0.03849216079711914, 0.03811532974243164, 0.037773311614990236, 0.03821158218383789, 0.03781222534179687, 0.038525951385498046, 0.03798425674438476, 0.03800883102416992, 0.03788288116455078, 0.037872638702392575, 0.03786956787109375, 0.03791974258422852, 0.037443584442138675, 0.03783679962158203, 0.03788390350341797, 0.03794944000244141, 0.037820415496826174, 0.03783782577514649, 0.037768192291259765, 0.03791263961791992, 0.03783366394042969, 0.0377968635559082, 0.037884929656982425, 0.03807743835449219, 0.037953536987304685, 0.038809600830078124, 0.03966873550415039, 0.04020633697509766, 0.039967742919921875, 0.039616512298583983, 0.03959500885009765, 0.03800371170043945, 0.03784505462646484, 0.03779987335205078, 
0.037848094940185546, 0.03864982223510742, 0.03788800048828125, 0.0377784309387207, 0.03794432067871094, 0.03783168029785156, 0.03782144165039063, 0.03782860946655273, 0.037846080780029295, 0.03779884719848633, 0.037766143798828124, 0.03781836700439453, 0.03814912033081055, 0.0396308479309082, 0.039607295989990236, 0.03971788787841797, 0.03954995346069336, 0.03957145690917969, 0.039669761657714846, 0.03957964706420898, 0.03983564758300781, 0.039580673217773435, 0.04010294342041015, 0.03803849411010742, 0.03789311981201172, 0.03777433776855469, 0.037901313781738284, 0.03788601684570313, 0.038521793365478514, 0.03789823913574219, 0.037872638702392575, 0.03772723388671875, 0.03790028762817383, 0.03773750305175781, 0.03784195327758789, 0.03783468627929688, 0.03788390350341797, 0.03806208038330078, 0.037749759674072264, 0.03769548797607422, 0.037746688842773435, 0.03778252792358398, 0.037800960540771485, 0.037759998321533206, 0.03787673568725586, 0.03778355026245117, 0.037689342498779296, 0.03762483215332031, 0.037748737335205076, 0.03769343948364258, 0.03769139099121094, 0.03769651031494141, 0.03782451248168945, 0.037746688842773435, 0.03769753646850586, 0.03813273620605469, 0.03795251083374023, 0.037768192291259765, 0.03776409530639648, 0.03765760040283203, 0.03825356674194336, 0.037814273834228515, 0.038419456481933595, 0.04008755111694336, 0.04453478240966797, 0.03949264144897461, 0.03945366287231445, 0.039588863372802735, 0.03940966415405273, 0.03801804733276367, 0.037781505584716796, 0.03771289443969727, 0.03784703826904297, 0.03761663818359375, 0.03779993438720703, 0.037822463989257815, 0.03873484802246094, 0.039330814361572264, 0.038670337677001954, 0.04070502471923828, 0.039501823425292966, 0.03947520065307617, 0.03953155136108399, 0.039612384796142576, 0.03975270462036133, 0.037759998321533206, 0.038424575805664066, 0.03963904190063477, 0.039608318328857424, 0.040237056732177735, 0.03793612670898437, 0.0380497932434082, 0.039564289093017575, 0.03952742385864258, 0.039398399353027344, 0.03951718521118164, 0.03778355026245117, 0.03950284957885742, 0.03957145690917969, 0.03963391876220703, 0.03953561782836914, 0.039521278381347655, 0.03951103973388672, 0.038599681854248044, 0.03939328002929687, 0.040148033142089846, 0.039573440551757814, 0.03958681488037109, 0.039397377014160156, 0.039398399353027344, 0.03942195129394531, 0.039669761657714846, 0.03954483032226563, 0.03959807968139648, 0.03935027313232422, 0.040180736541748044, 0.03985414505004883, 0.03981100845336914, 0.039436286926269534, 0.03959807968139648, 0.03952025604248047, 0.03969945526123047, 0.03945062255859375, 0.03877171325683594, 0.03804876708984375, 0.0377784309387207, 0.03775078582763672, 0.03768115234375, 0.03775385665893555, 0.03960319900512695, 0.039702529907226565, 0.03950284957885742, 0.03954995346069336, 0.03951103973388672, 0.03952742385864258, 0.03880038452148438, 0.03970048141479492, 0.039865375518798825, 0.039734241485595706, 0.039754753112792966, 0.03951103973388672, 0.03760639953613281, 0.037698558807373043, 0.03773952102661133, 0.03786444854736328, 0.03752140808105469, 0.037705726623535156, 0.037738494873046875, 0.037795841217041014, 0.03824025726318359, 0.03825151824951172, 0.037789695739746096, 0.03778559875488281, 0.03779993438720703, 0.03793920135498047, 0.0378081283569336, 0.03760639953613281, 0.037768192291259765, 0.03782860946655273, 0.03782963180541992, 0.03786342239379883, 0.03906252670288086, 0.03981619262695312, 0.03964825439453125, 0.04000153732299805, 0.039672832489013675, 0.038866943359375, 
0.039684097290039064, 0.03982643127441406, 0.04011008071899414, 0.03967078399658203, 0.03960627365112305, 0.03987865447998047, 0.039465984344482424, 0.0399738883972168, 0.039300094604492186, 0.040521728515625, 0.03988172912597656, 0.0402606086730957, 0.04092006301879883, 0.03992063903808594, 0.039894016265869144, 0.04011110305786133, 0.03942297744750976, 0.03950796890258789, 0.03949260711669922, 0.038432769775390625, 0.03972403335571289, 0.0395489273071289, 0.039413761138916016, 0.04312678527832031, 0.040525825500488284, 0.03971891021728516, 0.04044595336914063, 0.039777278900146484, 0.03961958312988281, 0.0398551025390625, 0.03770880126953125, 0.03763507080078125, 0.037779457092285154, 0.03794841766357422, 0.03790028762817383, 0.03899903869628906, 0.03947520065307617, 0.03976192092895508, 0.039691360473632815, 0.03958160018920898, 0.03984076690673828, 0.03960934448242188, 0.03968000030517578, 0.041164798736572264, 0.03949363327026367, 0.03949260711669922, 0.03964723205566406, 0.039623680114746096, 0.03951308822631836, 0.039523326873779296, 0.040275966644287106, 0.039616512298583983, 0.04005686569213867, 0.03785833740234375, 0.037867454528808596, 0.03790643310546875, 0.037917697906494144, 0.03787366485595703, 0.03790335845947266, 0.03894169616699219, 0.03949465560913086, 0.03947520065307617, 0.039570430755615234, 0.039616512298583983, 0.037835777282714846, 0.03783270263671875, 0.03805081558227539, 0.037797889709472655, 0.03790950393676758, 0.03832115173339844, 0.038529022216796875, 0.039721023559570315, 0.03968096160888672, 0.03925708770751953, 0.03944755172729492, 0.03984896087646484, 0.039597057342529295, 0.037749759674072264, 0.0380579833984375, 0.04029747009277344, 0.0398326416015625, 0.039693248748779296, 0.038160385131835936, 0.037833728790283204, 0.03782758331298828, 0.03779993438720703, 0.0378869743347168, 0.0377262077331543, 0.03787161636352539, 0.03788185501098633, 0.03787571334838867, 0.037787647247314454, 0.03789516830444336, 0.03792588806152344, 0.037803009033203126, 0.037751808166503906, 0.0373831672668457, 0.03787673568725586, 0.03781119918823242, 0.03772825622558594, 0.03778355026245117, 0.03788288116455078, 0.03762688064575195, 0.038400001525878906, 0.03824025726318359, 0.03803551864624023, 0.037813182830810546, 0.03788288116455078, 0.037814273834228515, 0.037792766571044925, 0.037700607299804685, 0.03781631851196289, 0.037748737335205076, 0.03795251083374023, 0.03775897598266602, 0.03781232070922851, 0.03783055877685547, 0.037789695739746096, 0.03773952102661133, 0.03786547088623047, 0.03775795364379883, 0.03740467071533203, 0.03782451248168945, 0.03771398544311524, 0.03795142364501953, 0.03997183990478516, 0.04040806579589844, 0.039708671569824217, 0.039839744567871094, 0.04006092834472656, 0.04011929702758789, 0.03996160125732422, 0.03944857788085938, 0.03971686553955078, 0.0395417594909668, 0.03952230453491211, 0.03775283050537109, 0.037930015563964845, 0.03942601776123047, 0.03941273498535156, 0.03950899124145508, 0.0395335693359375, 0.03961958312988281, 0.039626750946044925, 0.03958476638793945]",tokens/s,25.830917010560775,,,1,64,1,,, 
-4bit-awq-exllama-v1-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,facebook/opt-6.7b,facebook/opt-6.7b,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - self.load_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3907, in from_pretrained - hf_quantizer.postprocess_model(model) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/base.py"", line 195, in postprocess_model - return self._process_model_after_weight_loading(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/quantizer_awq.py"", line 107, in _process_model_after_weight_loading - model = post_init_awq_exllama_modules(model, self.quantization_config.exllama_config) - File ""/usr/local/lib/python3.10/dist-packages/transformers/integrations/awq.py"", line 462, in post_init_awq_exllama_modules - model = exllama_post_init(model) - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllama.py"", line 144, in exllama_post_init - submodule.post_init() - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllama.py"", line 77, in post_init - self.q4 = exl_ext.make_q4( -NameError: name 'exl_ext' is not defined - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,1,64,1,,, 
-4bit-awq-gemm-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,.,.,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 373, in cached_file - raise EnvironmentError( -OSError: . does not appear to have a file named config.json. Checkout 'https://huggingface.co/./tree/None' for available files. 
- -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemm-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,l,l,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 304, in hf_raise_for_status - response.raise_for_status() - File ""/usr/local/lib/python3.10/dist-packages/requests/models.py"", line 1024, in raise_for_status - raise HTTPError(http_error_msg, response=self) -requests.exceptions.HTTPError: 404 Client Error: Not Found for url: https://huggingface.co/l/resolve/main/config.json - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1221, in hf_hub_download - return _hf_hub_download_to_cache_dir( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1325, in _hf_hub_download_to_cache_dir - _raise_on_head_call_error(head_call_error, force_download, local_files_only) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1823, in _raise_on_head_call_error - raise head_call_error - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1722, in _get_metadata_or_catch_error - metadata = get_hf_file_metadata(url=url, proxies=proxies, timeout=etag_timeout, headers=headers) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1645, in get_hf_file_metadata - r = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 372, in _request_wrapper - response = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 396, in _request_wrapper - hf_raise_for_status(response) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status - raise 
RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-669491d4-22858b3357f2964948d23c91;9564856a-ceae-48eb-af48-1e4aa36b295e) - -Repository Not Found for url: https://huggingface.co/l/resolve/main/config.json. -Please make sure you specified the correct `repo_id` and `repo_type`. -If you are trying to access a private or gated repo, make sure you are authenticated. - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 425, in cached_file - raise EnvironmentError( -OSError: l is not a local folder and is not a valid model identifier listed on 'https://huggingface.co/models' -If this is a private repository, make sure to pass a token having permission to this repo either by logging in with `huggingface-cli login` or by passing `token=` - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemm-flash_attention_2,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,Qwen/Qwen1.5-32B,Qwen/Qwen1.5-32B,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.0,,0.30.1,,,,1.19.2,,,,0.11.1,,,,,,,,,,,,,,,,,,,,,,,,,,,MB,4397.162496,24111.480832,0.0,23465.033728,21690.932224,s,10,26.23831787109375,2.623831787109375,0.003031033246657479,2.6228023681640624,2.627721069335937,2.628540100097656,2.629195324707031,"[2.629359130859375, 2.621765380859375, 2.6223037109375, 2.621092041015625, 2.623301025390625, 2.620621826171875, 2.62457861328125, 2.6205380859375, 2.627218994140625, 
2.6275390625]",tokens/s,97.56723020801203,kWh,3.09448529779911e-05,1.6958929474258183e-05,0.00014797261837800056,0.00019587640083024984,tokens/kWh,1306946.619985398,MB,4397.162496,24111.480832,0.0,23465.033728,21890.217984,s,10,1555.758796875,155.5758796875,0.01362531366110068,155.577890625,155.5897375,155.594603125,155.598495625,"[155.55940625, 155.57425, 155.583734375, 155.59946875, 155.58153125, 155.58503125, 155.568875, 155.58865625, 155.564453125, 155.553390625]",tokens/s,0.4049470915835152,kWh,0.0018369476799004608,0.0010068097738273171,0.008895798588855384,0.011739556042583163,tokens/kWh,5366.4721026484,,s,629,1576.8986267089822,2.50699304723209,0.31148250553470674,2.46929931640625,2.47089541015625,2.471475439453125,5.08945892578125,"[2.468798583984375, 2.46915185546875, 2.46936669921875, 2.469295166015625, 2.469130126953125, 2.468601806640625, 2.46879638671875, 2.46974560546875, 2.468950927734375, 2.468516845703125, 2.468890625, 2.469065673828125, 2.468599853515625, 2.46870947265625, 2.468912109375, 2.469274658203125, 2.468938720703125, 2.46896337890625, 2.4693310546875, 2.46970068359375, 2.469392333984375, 2.46949072265625, 2.47088330078125, 2.4697119140625, 2.469168212890625, 2.468552734375, 2.4692490234375, 2.4690791015625, 2.47006005859375, 2.46929931640625, 2.46951123046875, 2.46925, 2.469129150390625, 2.468915283203125, 2.46999560546875, 2.4695, 2.46917724609375, 2.4696279296875, 2.469708740234375, 2.469084228515625, 2.46938427734375, 2.468663330078125, 2.46932275390625, 2.468631591796875, 2.468556884765625, 2.46858447265625, 2.46909326171875, 2.46888232421875, 2.46854248046875, 2.469202880859375, 2.469517333984375, 2.468483154296875, 2.468864013671875, 2.46881494140625, 2.468998046875, 2.469455810546875, 2.469496826171875, 2.469419921875, 2.4694599609375, 2.469094482421875, 2.47101025390625, 2.468877197265625, 5.09138330078125, 2.468682861328125, 2.468284423828125, 2.471290771484375, 2.471075927734375, 2.47086279296875, 2.470519775390625, 2.471779296875, 2.4712724609375, 2.470220703125, 2.469166015625, 2.47142919921875, 2.469087158203125, 2.470717529296875, 2.469718994140625, 2.4686796875, 2.468135986328125, 2.469396484375, 2.47069482421875, 2.469185546875, 2.46943359375, 2.4694208984375, 2.469435302734375, 2.472340576171875, 2.46964404296875, 2.4692294921875, 2.4692080078125, 2.4687861328125, 2.4688701171875, 2.468831298828125, 2.468864013671875, 2.468359130859375, 2.468737060546875, 2.4681728515625, 2.46879345703125, 2.47047265625, 2.471103515625, 2.4709150390625, 2.469086181640625, 2.467948486328125, 2.46850244140625, 2.46847998046875, 2.468877197265625, 2.47016845703125, 2.472048583984375, 2.471004150390625, 2.46881689453125, 2.46902783203125, 2.469129150390625, 2.468443115234375, 2.468276123046875, 2.46843603515625, 2.468770751953125, 2.46885888671875, 2.468476806640625, 2.46837255859375, 2.46976318359375, 2.4682578125, 2.468187255859375, 2.467946533203125, 2.4700517578125, 2.468588623046875, 2.468370361328125, 5.09084375, 2.46865625, 2.4686162109375, 2.471500732421875, 2.47128173828125, 2.47047265625, 2.470948974609375, 2.46904638671875, 2.46915283203125, 2.468949951171875, 2.469103515625, 2.469283935546875, 2.46955615234375, 2.46936669921875, 2.46929931640625, 2.469792724609375, 2.46934521484375, 2.468339599609375, 2.469074951171875, 2.46917724609375, 2.46955322265625, 2.469060546875, 2.471446533203125, 2.471505859375, 2.471520263671875, 2.46911376953125, 2.470220703125, 2.46964111328125, 2.46999658203125, 2.469699462890625, 2.469473388671875, 2.4692724609375, 
2.469899169921875, 2.469782470703125, 2.47012158203125, 2.469866455078125, 2.4691630859375, 2.46879345703125, 2.469590087890625, 2.47096533203125, 2.471996337890625, 2.46934326171875, 2.468957275390625, 2.468894775390625, 2.468242431640625, 2.470201416015625, 2.46946923828125, 2.469783447265625, 2.469652587890625, 2.469385009765625, 2.4691455078125, 2.469442626953125, 2.46881494140625, 2.470005859375, 2.469909423828125, 2.46930224609375, 2.469780517578125, 2.46929296875, 2.468728759765625, 2.46875146484375, 2.468708251953125, 2.469340087890625, 2.468431884765625, 5.08925537109375, 2.469267333984375, 2.4689326171875, 2.468461669921875, 2.470912841796875, 2.47052294921875, 2.47040625, 2.4696259765625, 2.470639404296875, 2.470507568359375, 2.469961669921875, 2.4698583984375, 2.470289306640625, 2.470327392578125, 2.470380615234375, 2.470289306640625, 2.47063037109375, 2.4716962890625, 2.4711904296875, 2.469096435546875, 2.46898583984375, 2.469234619140625, 2.46925927734375, 2.469075927734375, 2.468787109375, 2.470892578125, 2.468845458984375, 2.470190185546875, 2.470021240234375, 2.46980908203125, 2.469390380859375, 2.469427001953125, 2.469474365234375, 2.46984912109375, 2.468821044921875, 2.46932470703125, 2.46972412109375, 2.470828125, 2.470804443359375, 2.47084228515625, 2.471381103515625, 2.471739501953125, 2.4696513671875, 2.469551025390625, 2.46862744140625, 2.468588623046875, 2.468927490234375, 2.469970947265625, 2.469509033203125, 2.468726806640625, 2.470922119140625, 2.47033642578125, 2.470152099609375, 2.47086181640625, 2.469655517578125, 2.469350341796875, 2.469897216796875, 2.4691435546875, 2.468494384765625, 2.469423095703125, 2.470116455078125, 2.470024169921875, 2.4697353515625, 5.09418994140625, 2.47079833984375, 2.471482421875, 2.470703125, 2.4698798828125, 2.469234619140625, 2.469583984375, 2.469041259765625, 2.469078125, 2.468781982421875, 2.469856201171875, 2.470139892578125, 2.4699013671875, 2.46999853515625, 2.469337158203125, 2.4686396484375, 2.46900732421875, 2.46865625, 2.4688505859375, 2.46949365234375, 2.468842529296875, 2.469308349609375, 2.469699462890625, 2.469474365234375, 2.46980810546875, 2.470485107421875, 2.470642578125, 2.470287353515625, 2.46951220703125, 2.46904931640625, 2.47022900390625, 2.470436767578125, 2.46926025390625, 2.469675048828125, 2.47014306640625, 2.46932275390625, 2.468830322265625, 2.469003173828125, 2.469866455078125, 2.46951123046875, 2.468826171875, 2.469062744140625, 2.469338134765625, 2.469421142578125, 2.468864013671875, 2.4689970703125, 2.469686279296875, 2.468821044921875, 2.468958251953125, 2.46921533203125, 2.4694580078125, 2.468634521484375, 2.46913232421875, 2.4693955078125, 2.469814208984375, 2.468588623046875, 2.469433349609375, 2.470032470703125, 2.470792236328125, 2.469285888671875, 2.469370849609375, 2.469603271484375, 2.46991357421875, 5.0895380859375, 2.472522705078125, 2.472004638671875, 2.469856201171875, 2.468981689453125, 2.469172119140625, 2.46925, 2.4692265625, 2.469295166015625, 2.46936376953125, 2.47034765625, 2.468842529296875, 2.470485107421875, 2.471478271484375, 2.469750732421875, 2.468664306640625, 2.471371826171875, 2.468883544921875, 2.471439453125, 2.469444580078125, 2.46917626953125, 2.469329833984375, 2.469687255859375, 2.46869091796875, 2.469650390625, 2.468767822265625, 2.46934619140625, 2.46850048828125, 2.46826806640625, 2.46904443359375, 2.4693974609375, 2.469318603515625, 2.47453076171875, 2.469267333984375, 2.470331298828125, 2.4707685546875, 2.469550048828125, 2.46943017578125, 
2.469969970703125, 2.469962646484375, 2.46923486328125, 2.468137939453125, 2.468229248046875, 2.46898779296875, 2.468890625, 2.468874267578125, 2.469168212890625, 2.470299560546875, 2.47147119140625, 2.469370849609375, 2.469350341796875, 2.470095947265625, 2.469555419921875, 2.46888232421875, 2.4688681640625, 2.469992431640625, 2.468869140625, 2.469044189453125, 2.469062744140625, 2.469525390625, 2.468809814453125, 2.46928076171875, 2.468957275390625, 5.09382470703125, 2.469845947265625, 2.4695244140625, 2.469098388671875, 2.46944677734375, 2.469396484375, 2.470770751953125, 2.469032958984375, 2.469140380859375, 2.468855712890625, 2.469170166015625, 2.469623779296875, 2.469095458984375, 2.4704697265625, 2.47050244140625, 2.4702392578125, 2.469874755859375, 2.4695224609375, 2.469214111328125, 2.469747802734375, 2.46868896484375, 2.469425048828125, 2.468915283203125, 2.4693955078125, 2.468820068359375, 2.46910986328125, 2.468548583984375, 2.4704716796875, 2.469411865234375, 2.4691220703125, 2.468193359375, 2.468788330078125, 2.4683017578125, 2.46884033203125, 2.470220703125, 2.4702197265625, 2.468990966796875, 2.468908935546875, 2.4688486328125, 2.4704501953125, 2.470517822265625, 2.470140869140625, 2.469308349609375, 2.470096923828125, 2.469972900390625, 2.4689111328125, 2.468103271484375, 2.468663330078125, 2.468252685546875, 2.468338623046875, 2.467991455078125, 2.468906982421875, 2.46822802734375, 2.468630615234375, 2.469042236328125, 2.469347412109375, 2.47062939453125, 2.471343017578125, 2.467967041015625, 2.468644775390625, 2.46843701171875, 2.4686162109375, 2.473323486328125, 5.0889111328125, 2.468672607421875, 2.4683642578125, 2.468341796875, 2.468705322265625, 2.468413330078125, 2.46852197265625, 2.468662353515625, 2.468912109375, 2.46987158203125, 2.470073486328125, 2.46920068359375, 2.469920654296875, 2.46879638671875, 2.4687412109375, 2.4697333984375, 2.4695244140625, 2.4696298828125, 2.4695244140625, 2.470005859375, 2.468862060546875, 2.47003125, 2.4691630859375, 2.468483154296875, 2.4710625, 2.469022705078125, 2.46885888671875, 2.4692705078125, 2.469971923828125, 2.47102783203125, 2.47071435546875, 2.4704482421875, 2.471227294921875, 2.470994873046875, 2.469182373046875, 2.46873095703125, 2.47003125, 2.468622314453125, 2.4721171875, 2.468535400390625, 2.47090673828125, 2.470928466796875, 2.470781982421875, 2.470194091796875, 2.470340576171875, 2.468509765625, 2.468907958984375, 2.469888916015625, 2.469969970703125, 2.47013671875, 2.472005859375, 2.47183740234375, 2.47176708984375, 2.471166015625, 2.471228515625, 2.4702783203125, 2.47012451171875, 2.468314208984375, 2.46890087890625, 2.46814208984375, 2.46915283203125, 2.467857421875, 2.46899609375, 5.0967275390625, 2.468865966796875, 2.47231494140625, 2.4689765625, 2.469561279296875, 2.469706787109375, 2.468801513671875, 2.468005859375, 2.46970068359375, 2.469357666015625, 2.470153076171875, 2.47166455078125, 2.4716962890625, 2.47134716796875, 2.4716728515625, 2.469540771484375, 2.46961865234375, 2.46875439453125, 2.468744140625, 2.469203857421875, 2.46841748046875, 2.469667724609375, 2.4690810546875, 2.46848828125, 2.468600830078125, 2.469570556640625, 2.46897265625, 2.468644775390625, 2.46887109375, 2.46993310546875, 2.469518310546875, 2.468893798828125, 2.469458740234375, 2.4689755859375, 2.469214111328125, 2.468822998046875, 2.468765625, 2.469544921875, 2.46893359375, 2.46794140625, 2.46793115234375, 2.468474853515625, 2.468509765625, 2.469032958984375, 2.468798583984375, 2.46907177734375, 2.469205078125, 2.46849853515625, 
2.468729736328125, 2.469506103515625, 2.4690390625, 2.4686376953125, 2.4710419921875, 2.469719970703125, 2.4685322265625, 2.468116455078125, 2.468727783203125, 2.4698369140625, 2.46875244140625, 2.46940869140625, 2.469123046875, 2.47010205078125, 2.468192138671875, 5.09570947265625, 2.469843994140625, 2.469245849609375, 2.46862939453125, 2.469123046875, 2.470595703125, 2.46889990234375, 2.468484130859375, 2.468884521484375, 2.469498779296875, 2.46938720703125, 2.468440185546875, 2.4687841796875, 2.46934326171875, 2.468601806640625, 2.469178466796875, 2.46820654296875, 2.469718017578125, 2.469128173828125, 2.468865966796875, 2.468513671875, 2.46963525390625, 2.468724609375, 2.4697353515625, 2.468211669921875, 2.468957275390625, 2.4694794921875, 2.46911083984375, 2.4683447265625, 2.4698837890625, 2.468577392578125, 2.468810791015625, 2.468404296875, 2.46858251953125, 2.469440673828125, 2.469866455078125, 2.468959228515625, 2.4695625, 2.46875732421875, 2.46845849609375, 2.469051513671875, 2.46925830078125, 2.470119384765625, 2.468737060546875, 2.468697998046875, 2.469822509765625, 2.469697509765625, 2.4686376953125, 2.4682802734375, 2.468843505859375, 2.471318603515625, 2.4700693359375, 2.469341064453125, 2.469017578125, 2.469211181640625, 2.469179443359375, 2.468263916015625, 2.46847998046875, 2.4693955078125, 2.468968505859375, 2.469316650390625, 2.46883740234375, 2.46984814453125]",tokens/s,0.39888423348603846,,,,,,,, -4bit-awq-gemm-flash_attention_2,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,openai-community/gpt2,openai-community/gpt2,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.0,,0.30.1,,,,1.19.2,,,,0.11.1,,,,,,,,,,,,,,,,,,,,,,,,,,,MB,1232.244736,1005.060096,0.0,358.612992,318.913024,s,24,0.17188252639770507,0.007161771933237711,0.0003243953015149955,0.0070352799892425535,0.007393788623809815,0.007410750222206116,0.00821779634475708,"[0.008458335876464844, 0.007218272209167481, 0.007377503871917724, 0.007035391807556152, 0.006997471809387207, 0.006964863777160645, 0.006896895885467529, 0.006932159900665283, 0.006920639991760254, 0.006907519817352295, 0.0068839678764343265, 0.006967360019683838, 0.007303135871887207, 0.007367551803588867, 0.007091263771057129, 0.007035168170928955, 0.0072576642036437985, 0.006943456172943115, 0.007412511825561524, 0.007331488132476807, 0.006977695941925049, 0.007400767803192139, 0.00699567985534668, 0.007205760002136231]",tokens/s,35745.34380408103,kWh,8.272282266742969e-08,4.5327846553754305e-08,1.771287987213526e-07,3.051794679425366e-07,tokens/kWh,838850666.2191416,MB,1232.539648,1005.060096,0.0,358.612992,328.804864,s,24,10.164368652343752,0.4235153605143229,0.013402359087444363,0.4229962463378906,0.43468231201171875,0.43530513000488286,0.46083279296875,"[0.4684530029296875, 0.4353216552734375, 0.431840576171875, 0.4128219909667969, 0.41338916015625, 0.4116455383300781, 0.41182235717773436, 0.41141156005859375, 0.41097488403320315, 0.4117151794433594, 0.41143212890625, 0.4306710510253906, 0.4334475708007812, 0.43521148681640626, 0.41144964599609374, 
0.4190294189453125, 0.4116963806152344, 0.4119750061035156, 0.43146246337890626, 0.4310239562988281, 0.42696307373046877, 0.4310359191894531, 0.43171932983398437, 0.42785531616210937]",tokens/s,148.7549351775386,kWh,4.88881335726806e-06,2.6788370271125138e-06,8.551130498695033e-06,1.611878088307561e-05,tokens/kWh,3908484.1748887296,,s,1511,10.320183299064633,0.006830035274033512,0.0009334458342907997,0.006691840171813965,0.006923264026641846,0.007306751966476441,0.013799629211425782,"[0.007993343830108643, 0.008039423942565918, 0.007952383995056152, 0.007895040035247802, 0.007833600044250488, 0.007610367774963379, 0.007690271854400635, 0.007604191780090332, 0.007493631839752197, 0.007529471874237061, 0.007613440036773681, 0.007756800174713135, 0.007661568164825439, 0.007654399871826172, 0.0076687679290771485, 0.007647200107574463, 0.007772160053253174, 0.007723008155822754, 0.00773632001876831, 0.007501823902130127, 0.0075970559120178225, 0.007692287921905518, 0.007699456214904785, 0.007568384170532226, 0.0075038719177246095, 0.0075335679054260255, 0.007614528179168701, 0.007336895942687988, 0.007300096035003662, 0.007535615921020508, 0.00753868818283081, 0.007528448104858398, 0.007377920150756836, 0.0075038719177246095, 0.007502848148345947, 0.007266304016113281, 0.007370751857757568, 0.007285759925842285, 0.007271423816680909, 0.007333888053894043, 0.007404543876647949, 0.007350272178649903, 0.007293951988220215, 0.0070860800743103025, 0.007158783912658692, 0.007108607769012451, 0.007090176105499267, 0.007136256217956543, 0.0072202239036560055, 0.007285759925842285, 0.007223296165466309, 0.007193600177764893, 0.007156735897064209, 0.007122943878173828, 0.007145472049713135, 0.007048223972320557, 0.006976480007171631, 0.0068577280044555666, 0.006855679988861084, 0.007054336071014404, 0.006946815967559815, 0.0066406397819519045, 0.01427455997467041, 0.0069928960800170895, 0.007123968124389648, 0.0074291200637817386, 0.007486495971679687, 0.007579616069793701, 0.007607327938079834, 0.007285791873931885, 0.007080895900726318, 0.006916096210479736, 0.00690176010131836, 0.006837247848510742, 0.006814720153808594, 0.006848512172698974, 0.006859776020050049, 0.006823935985565186, 0.006833151817321777, 0.006855679988861084, 0.006862847805023193, 0.006806528091430664, 0.006830080032348633, 0.0068351998329162595, 0.006816768169403077, 0.00682700777053833, 0.006862847805023193, 0.006846528053283691, 0.006804416179656983, 0.006821887969970703, 0.006904831886291504, 0.0068351998329162595, 0.006896639823913574, 0.006830080032348633, 0.006900735855102539, 0.006841343879699707, 0.00683622407913208, 0.006845439910888672, 0.006834176063537598, 0.006814720153808594, 0.006823935985565186, 0.00682700777053833, 0.006820864200592041, 0.006845439910888672, 0.00684441614151001, 0.006830080032348633, 0.006867968082427979, 0.006841343879699707, 0.006922239780426025, 0.006859776020050049, 0.006872064113616944, 0.006855679988861084, 0.006818816184997558, 0.006988800048828125, 0.006820864200592041, 0.006837247848510742, 0.006816768169403077, 0.006854656219482422, 0.00682700777053833, 0.0068055038452148435, 0.006838272094726563, 0.006840320110321045, 0.006822912216186523, 0.0068351998329162595, 0.006842368125915528, 0.014609408378601075, 0.006852608203887939, 0.006833151817321777, 0.006881279945373535, 0.006899712085723877, 0.006879231929779053, 0.006865920066833496, 0.006904831886291504, 0.006851583957672119, 0.006899712085723877, 0.006859776020050049, 0.006854656219482422, 0.006852608203887939, 0.006855679988861084, 
0.0068618240356445315, 0.0068249602317810056, 0.006856768131256103, 0.006852543830871582, 0.006809599876403808, 0.0068280320167541505, 0.006817791938781738, 0.006834176063537598, 0.006813695907592773, 0.006920191764831543, 0.0068915200233459475, 0.0068280320167541505, 0.006845439910888672, 0.00683622407913208, 0.0068618240356445315, 0.0068351998329162595, 0.006834176063537598, 0.0068076162338256835, 0.006836160182952881, 0.006854656219482422, 0.006830080032348633, 0.006843391895294189, 0.006821887969970703, 0.0068689918518066405, 0.00683622407913208, 0.006937600135803222, 0.00690176010131836, 0.00684441614151001, 0.006833151817321777, 0.006840320110321045, 0.0069324798583984375, 0.006849535942077637, 0.0068280320167541505, 0.006894591808319092, 0.006841343879699707, 0.006843391895294189, 0.006812672138214112, 0.006821887969970703, 0.00682700777053833, 0.006804480075836182, 0.006867968082427979, 0.006823935985565186, 0.006815743923187256, 0.006767615795135498, 0.006802432060241699, 0.0069324798583984375, 0.006825984001159668, 0.006831103801727295, 0.006831103801727295, 0.013900799751281738, 0.00653004789352417, 0.006533120155334473, 0.006556672096252441, 0.006542335987091064, 0.00651478385925293, 0.0065207362174987795, 0.006524928092956543, 0.006504447937011719, 0.0065157442092895505, 0.0065392317771911625, 0.0065146880149841305, 0.006575104236602783, 0.0065136637687683106, 0.006525951862335205, 0.006525951862335205, 0.006590464115142822, 0.006534143924713135, 0.006563839912414551, 0.006557695865631104, 0.006511616230010986, 0.006556672096252441, 0.006681600093841553, 0.0066375679969787596, 0.006707200050354004, 0.006534143924713135, 0.006488096237182617, 0.0064992961883544925, 0.006550528049468994, 0.006586368083953857, 0.0065075201988220215, 0.006549503803253174, 0.006560768127441406, 0.006505472183227539, 0.00653107213973999, 0.00653107213973999, 0.0065136637687683106, 0.006508543968200684, 0.006520832061767578, 0.006533120155334473, 0.0065146880149841305, 0.006519807815551758, 0.006543360233306885, 0.006706175804138184, 0.006556672096252441, 0.006560768127441406, 0.006526976108551025, 0.00652185583114624, 0.0065146880149841305, 0.0065443840026855465, 0.006526976108551025, 0.006536191940307618, 0.006510591983795166, 0.006647808074951172, 0.0066109437942504885, 0.006515711784362793, 0.00653107213973999, 0.006625279903411865, 0.006593535900115967, 0.006519807815551758, 0.006575104236602783, 0.0065474557876586915, 0.006552576065063476, 0.013965312004089356, 0.006725632190704346, 0.006696959972381592, 0.006689792156219483, 0.006518784046173095, 0.006554624080657959, 0.006492159843444824, 0.0065484800338745115, 0.006560768127441406, 0.006615039825439453, 0.0065443840026855465, 0.006516736030578613, 0.0066344962120056155, 0.006715392112731934, 0.00669593620300293, 0.00672051191329956, 0.006722591876983643, 0.006665184020996094, 0.00653004789352417, 0.0065413122177124024, 0.006516736030578613, 0.0065484800338745115, 0.006526976108551025, 0.006519807815551758, 0.0065710082054138185, 0.006595583915710449, 0.006533120155334473, 0.006511616230010986, 0.006498303890228272, 0.006542335987091064, 0.006518784046173095, 0.006569983959197998, 0.006553599834442139, 0.006550528049468994, 0.00653004789352417, 0.0065075201988220215, 0.0065136637687683106, 0.006527999877929688, 0.0065136637687683106, 0.006504447937011719, 0.006568960189819336, 0.006516736030578613, 0.006509568214416504, 0.006617087841033936, 0.006519807815551758, 0.0065382399559021, 0.006511616230010986, 0.006516736030578613, 
0.006520832061767578, 0.0065443840026855465, 0.0065075201988220215, 0.006515711784362793, 0.0065064959526062015, 0.00652185583114624, 0.006533120155334473, 0.0065146880149841305, 0.006527999877929688, 0.006569983959197998, 0.006503424167633057, 0.006500351905822754, 0.006516736030578613, 0.006496255874633789, 0.006520832061767578, 0.013800448417663574, 0.0065484800338745115, 0.006523903846740723, 0.006492159843444824, 0.006493184089660644, 0.006494207859039307, 0.006560768127441406, 0.006505472183227539, 0.006500351905822754, 0.006540287971496582, 0.006510591983795166, 0.006511616230010986, 0.00652185583114624, 0.006496255874633789, 0.0065710082054138185, 0.006529024124145508, 0.006532095909118653, 0.0066119680404663084, 0.006691840171813965, 0.006487040042877197, 0.006536191940307618, 0.00648089599609375, 0.00653107213973999, 0.0066078720092773435, 0.006512639999389648, 0.006558720111846924, 0.006624256134033203, 0.006502463817596436, 0.006521791934967041, 0.006515711784362793, 0.006665215969085693, 0.006515711784362793, 0.0065064959526062015, 0.006520832061767578, 0.006500351905822754, 0.006518784046173095, 0.006551551818847656, 0.00653004789352417, 0.006560768127441406, 0.0065259838104248045, 0.006515679836273193, 0.006488096237182617, 0.006514656066894531, 0.006523903846740723, 0.006515711784362793, 0.0065075201988220215, 0.006520832061767578, 0.006516736030578613, 0.006495232105255127, 0.006559743881225586, 0.006502399921417237, 0.006503424167633057, 0.006516736030578613, 0.00652185583114624, 0.006478879928588867, 0.0065075201988220215, 0.0064992961883544925, 0.006602752208709717, 0.006545407772064209, 0.006527999877929688, 0.006510591983795166, 0.006556672096252441, 0.006497280120849609, 0.013763584136962891, 0.006534143924713135, 0.006526976108551025, 0.006529024124145508, 0.0065064959526062015, 0.00652288007736206, 0.006519807815551758, 0.006511616230010986, 0.006551551818847656, 0.006550528049468994, 0.006493184089660644, 0.00652185583114624, 0.006499328136444092, 0.0065669121742248536, 0.006535168170928955, 0.006510591983795166, 0.006533120155334473, 0.0065484800338745115, 0.006524928092956543, 0.00657203197479248, 0.006549503803253174, 0.0065146880149841305, 0.006523903846740723, 0.006582272052764892, 0.006560768127441406, 0.006545407772064209, 0.006491136074066162, 0.006533120155334473, 0.006532095909118653, 0.006486015796661377, 0.006552576065063476, 0.006518784046173095, 0.006516736030578613, 0.006542335987091064, 0.0065064959526062015, 0.006583295822143555, 0.006563839912414551, 0.006527999877929688, 0.0065484800338745115, 0.006523903846740723, 0.006543360233306885, 0.0065413122177124024, 0.00653926420211792, 0.0065177597999572755, 0.00653107213973999, 0.006545407772064209, 0.00653107213973999, 0.006542335987091064, 0.0065064959526062015, 0.006540287971496582, 0.0065484800338745115, 0.006519807815551758, 0.006502399921417237, 0.006603775978088379, 0.006535168170928955, 0.0065669121742248536, 0.006512639999389648, 0.00652185583114624, 0.006523903846740723, 0.006495232105255127, 0.00653107213973999, 0.006523903846740723, 0.006518784046173095, 0.013785087585449218, 0.006524928092956543, 0.006550528049468994, 0.006527999877929688, 0.006502399921417237, 0.006593535900115967, 0.006502399921417237, 0.006545407772064209, 0.0065474557876586915, 0.006515711784362793, 0.006556672096252441, 0.0065495681762695315, 0.006510528087615966, 0.006535168170928955, 0.006501376152038574, 0.0065413122177124024, 0.006576128005981445, 0.006512639999389648, 0.0065177597999572755, 
0.006511616230010986, 0.006504479885101318, 0.006548448085784912, 0.006603775978088379, 0.0065146880149841305, 0.006516736030578613, 0.006497280120849609, 0.00653004789352417, 0.006505472183227539, 0.006496255874633789, 0.006533152103424073, 0.006500319957733154, 0.006509568214416504, 0.00652185583114624, 0.006504447937011719, 0.006512639999389648, 0.006512639999389648, 0.006487040042877197, 0.00652185583114624, 0.006524928092956543, 0.006500351905822754, 0.006516736030578613, 0.006619135856628418, 0.006508543968200684, 0.006534143924713135, 0.0065413122177124024, 0.0065382399559021, 0.0065382399559021, 0.006491136074066162, 0.006525951862335205, 0.00652288007736206, 0.006512639999389648, 0.006525951862335205, 0.006505536079406738, 0.006543295860290527, 0.00653926420211792, 0.006500351905822754, 0.006529024124145508, 0.006589439868927002, 0.0065146880149841305, 0.0065064959526062015, 0.0065147199630737306, 0.006502367973327637, 0.006497280120849609, 0.013766655921936035, 0.006527999877929688, 0.00652185583114624, 0.006518784046173095, 0.006512639999389648, 0.006503424167633057, 0.00652185583114624, 0.00652185583114624, 0.006498303890228272, 0.006508543968200684, 0.006535168170928955, 0.0065064959526062015, 0.006542399883270263, 0.006519743919372559, 0.006504447937011719, 0.006533120155334473, 0.00653107213973999, 0.006560768127441406, 0.006490111827850342, 0.0065146880149841305, 0.006509568214416504, 0.006500351905822754, 0.0065177597999572755, 0.0065413122177124024, 0.0065064959526062015, 0.006504447937011719, 0.006474751949310303, 0.00652185583114624, 0.006520864009857178, 0.006483935832977295, 0.006499328136444092, 0.006504447937011719, 0.006525951862335205, 0.0065177597999572755, 0.00652185583114624, 0.0065177597999572755, 0.006525951862335205, 0.006589439868927002, 0.0064880638122558594, 0.006551551818847656, 0.006508543968200684, 0.0065443840026855465, 0.006529024124145508, 0.006518784046173095, 0.006505472183227539, 0.006511616230010986, 0.006496255874633789, 0.006481919765472412, 0.006491136074066162, 0.006533120155334473, 0.00653107213973999, 0.006512639999389648, 0.00652288007736206, 0.0065075201988220215, 0.006501376152038574, 0.006582272052764892, 0.006552576065063476, 0.006540287971496582, 0.00652185583114624, 0.006492159843444824, 0.0065372161865234375, 0.006520895957946777, 0.006529983997344971, 0.013715456008911133, 0.006525951862335205, 0.006534143924713135, 0.006516736030578613, 0.006515711784362793, 0.00652288007736206, 0.006479872226715088, 0.006540287971496582, 0.006534143924713135, 0.006509568214416504, 0.006516736030578613, 0.006504447937011719, 0.006498303890228272, 0.00652185583114624, 0.0065064959526062015, 0.00653004789352417, 0.0065781760215759275, 0.0065075201988220215, 0.0065443840026855465, 0.006538271903991699, 0.006487008094787598, 0.006527999877929688, 0.006557695865631104, 0.006527999877929688, 0.006519807815551758, 0.006497280120849609, 0.006681600093841553, 0.0065669121742248536, 0.006529024124145508, 0.006519807815551758, 0.006519807815551758, 0.0065075201988220215, 0.00653926420211792, 0.006518784046173095, 0.00658739185333252, 0.0065075201988220215, 0.006497280120849609, 0.006516736030578613, 0.006512639999389648, 0.006494207859039307, 0.006525951862335205, 0.00653107213973999, 0.006519807815551758, 0.006552576065063476, 0.0065474557876586915, 0.006553599834442139, 0.0065474557876586915, 0.006501376152038574, 0.006543360233306885, 0.0065443840026855465, 0.0065064959526062015, 0.006554624080657959, 0.006540287971496582, 0.0065413122177124024, 
0.0065372161865234375, 0.006603775978088379, 0.006525951862335205, 0.006563839912414551, 0.006503424167633057, 0.006545407772064209, 0.006515711784362793, 0.006534143924713135, 0.006552576065063476, 0.013682687759399414, 0.006536191940307618, 0.0065270400047302245, 0.006512576103210449, 0.006529024124145508, 0.006494207859039307, 0.00653004789352417, 0.006556672096252441, 0.006519807815551758, 0.006551551818847656, 0.0065474557876586915, 0.0065372161865234375, 0.006563839912414551, 0.006561791896820069, 0.006527999877929688, 0.006533120155334473, 0.00653004789352417, 0.006553599834442139, 0.006523903846740723, 0.006510591983795166, 0.006527999877929688, 0.00653107213973999, 0.006479936122894287, 0.0065289602279663085, 0.006498303890228272, 0.00653926420211792, 0.00672051191329956, 0.006555647850036621, 0.006533120155334473, 0.0065382399559021, 0.006508543968200684, 0.00653004789352417, 0.0065382399559021, 0.006503424167633057, 0.006499328136444092, 0.006526976108551025, 0.0065064959526062015, 0.0065146880149841305, 0.006526976108551025, 0.006509568214416504, 0.006527999877929688, 0.006505472183227539, 0.006520832061767578, 0.006573056221008301, 0.006516736030578613, 0.0065075201988220215, 0.006510591983795166, 0.006519807815551758, 0.006508543968200684, 0.006491136074066162, 0.00652288007736206, 0.006520832061767578, 0.0065064959526062015, 0.006519807815551758, 0.006508543968200684, 0.006501376152038574, 0.006516736030578613, 0.0065146880149841305, 0.006532095909118653, 0.006524928092956543, 0.006520832061767578, 0.006516736030578613, 0.006501376152038574, 0.013792256355285644, 0.0065710082054138185, 0.00652288007736206, 0.006676544189453125, 0.007540671825408935, 0.007066624164581299, 0.0068392958641052244, 0.006879231929779053, 0.006876160144805908, 0.006834176063537598, 0.006880256175994873, 0.006866943836212158, 0.006873087882995605, 0.006825984001159668, 0.006841343879699707, 0.0068249602317810056, 0.006807551860809326, 0.006838272094726563, 0.006795263767242431, 0.006808576107025147, 0.0068055038452148435, 0.006918144226074219, 0.0068280320167541505, 0.006778880119323731, 0.006833151817321777, 0.006814720153808594, 0.006817791938781738, 0.006815743923187256, 0.006845439910888672, 0.006854656219482422, 0.00682092809677124, 0.0068156800270080566, 0.0067983360290527345, 0.0068249602317810056, 0.006822912216186523, 0.006814720153808594, 0.006783999919891357, 0.006845439910888672, 0.006808576107025147, 0.006819839954376221, 0.006809599876403808, 0.006944767951965332, 0.006852608203887939, 0.006819839954376221, 0.006840320110321045, 0.00683622407913208, 0.00681990385055542, 0.006798272132873535, 0.006862847805023193, 0.006880320072174072, 0.006816703796386719, 0.0068055038452148435, 0.006820864200592041, 0.006815743923187256, 0.006790143966674805, 0.0068577280044555666, 0.006776832103729248, 0.006856704235076904, 0.006820864200592041, 0.006874112129211426, 0.006807551860809326, 0.006838272094726563, 0.006872064113616944, 0.01447321605682373, 0.0068392958641052244, 0.006914048194885254, 0.006894591808319092, 0.006840320110321045, 0.006819839954376221, 0.006793216228485107, 0.006817791938781738, 0.0068392958641052244, 0.006800384044647217, 0.006812672138214112, 0.006815743923187256, 0.00682700777053833, 0.0067983360290527345, 0.0068055038452148435, 0.0068392958641052244, 0.006866943836212158, 0.006837247848510742, 0.006831103801727295, 0.0068392958641052244, 0.00678604793548584, 0.006795263767242431, 0.006829055786132812, 0.006802432060241699, 0.006811647891998291, 0.006818816184997558, 
0.006819839954376221, 0.007336959838867187, 0.007061503887176514, 0.006850560188293457, 0.006797311782836914, 0.0069816322326660156, 0.006795263767242431, 0.006820864200592041, 0.006807551860809326, 0.006803455829620361, 0.006829055786132812, 0.006796288013458252, 0.006837247848510742, 0.006790143966674805, 0.006817791938781738, 0.006819839954376221, 0.006802432060241699, 0.00679423999786377, 0.006817791938781738, 0.006881279945373535, 0.0067696962356567384, 0.006842336177825928, 0.006814720153808594, 0.00678604793548584, 0.006843391895294189, 0.0073134078979492185, 0.00707583999633789, 0.0068884482383728025, 0.007676928043365478, 0.007090176105499267, 0.0069212160110473635, 0.006817791938781738, 0.006964223861694336, 0.00703385591506958, 0.0068392958641052244, 0.00682700777053833, 0.006873087882995605, 0.01455513572692871, 0.006838272094726563, 0.006873087882995605, 0.0068280320167541505, 0.006850560188293457, 0.006773759841918945, 0.006806528091430664, 0.006809599876403808, 0.006825984001159668, 0.006817791938781738, 0.006823935985565186, 0.0071198720932006835, 0.006949888229370117, 0.007005184173583984, 0.008019968032836914, 0.007243775844573975, 0.006923264026641846, 0.00713318395614624, 0.007066624164581299, 0.006996992111206054, 0.006957056045532227, 0.006958079814910889, 0.006940671920776367, 0.006837247848510742, 0.006847487926483154, 0.006825984001159668, 0.007000063896179199, 0.007004159927368164, 0.007004159927368164, 0.0069027838706970214, 0.006880256175994873, 0.006895679950714112, 0.006774720191955567, 0.0068392958641052244, 0.006806528091430664, 0.006803455829620361, 0.006842368125915528, 0.006960127830505371, 0.006965248107910156, 0.0068618240356445315, 0.006854656219482422, 0.006937600135803222, 0.00684441614151001, 0.006890495777130127, 0.006914048194885254, 0.006811647891998291, 0.006814720153808594, 0.006846464157104492, 0.0068280320167541505, 0.006800384044647217, 0.006813695907592773, 0.006837247848510742, 0.006803455829620361, 0.00679423999786377, 0.0068392958641052244, 0.006813695907592773, 0.006801407814025879, 0.006912000179290771, 0.006808576107025147, 0.006809599876403808, 0.006806528091430664, 0.006800384044647217, 0.007064576148986816, 0.013950976371765136, 0.006559743881225586, 0.006540351867675781, 0.006517695903778076, 0.006505472183227539, 0.006495232105255127, 0.006523903846740723, 0.006510591983795166, 0.00659660816192627, 0.006633471965789795, 0.006639616012573242, 0.006500351905822754, 0.006551551818847656, 0.0065372161865234375, 0.00653926420211792, 0.006533120155334473, 0.006512639999389648, 0.006516736030578613, 0.006492159843444824, 0.006520832061767578, 0.006509568214416504, 0.00653107213973999, 0.006512639999389648, 0.006523903846740723, 0.006508543968200684, 0.006549503803253174, 0.006542335987091064, 0.0064839677810668945, 0.006533120155334473, 0.006533120155334473, 0.006512639999389648, 0.0065075201988220215, 0.006558720111846924, 0.0064839677810668945, 0.006508543968200684, 0.006649856090545654, 0.006551551818847656, 0.0065146880149841305, 0.006499360084533692, 0.006588384151458741, 0.00653107213973999, 0.006508543968200684, 0.006524991989135742, 0.006502336025238037, 0.006525951862335205, 0.006500351905822754, 0.006506559848785401, 0.0065586562156677245, 0.0065146880149841305, 0.006532095909118653, 0.0065382399559021, 0.00653004789352417, 0.006519807815551758, 0.006494207859039307, 0.006496255874633789, 0.006491136074066162, 0.0064880638122558594, 0.0065064959526062015, 0.006510623931884766, 0.0065361599922180175, 0.006493184089660644, 
0.0065075201988220215, 0.0064839677810668945, 0.013864959716796875, 0.006532095909118653, 0.00653926420211792, 0.006496255874633789, 0.006491136074066162, 0.006524928092956543, 0.00652288007736206, 0.006497280120849609, 0.006524928092956543, 0.006515711784362793, 0.0064849920272827145, 0.0065474557876586915, 0.006524928092956543, 0.006529024124145508, 0.006545407772064209, 0.006533120155334473, 0.0065064959526062015, 0.006496255874633789, 0.006502399921417237, 0.006509568214416504, 0.006509568214416504, 0.006526976108551025, 0.006535168170928955, 0.006474751949310303, 0.006523903846740723, 0.0065075201988220215, 0.006489088058471679, 0.00653107213973999, 0.0066447358131408694, 0.0065136637687683106, 0.006546432018280029, 0.006527999877929688, 0.006535200119018555, 0.0065586881637573246, 0.006535168170928955, 0.006546432018280029, 0.006505472183227539, 0.006532095909118653, 0.00653107213973999, 0.006519807815551758, 0.006527999877929688, 0.006552576065063476, 0.006491136074066162, 0.006564864158630371, 0.0065075201988220215, 0.006528031826019287, 0.006522848129272461, 0.006523903846740723, 0.0065064959526062015, 0.00743936014175415, 0.008608799934387207, 0.0074587841033935546, 0.007060480117797851, 0.006941696166992187, 0.0068280320167541505, 0.006870016098022461, 0.006840320110321045, 0.00690176010131836, 0.006821887969970703, 0.006807551860809326, 0.0068351998329162595, 0.006849535942077637, 0.0068280320167541505, 0.014097408294677734, 0.006561791896820069, 0.006553599834442139, 0.00653004789352417, 0.006556672096252441, 0.006526976108551025, 0.0065177597999572755, 0.006543360233306885, 0.00652185583114624, 0.006516736030578613, 0.006516736030578613, 0.006520832061767578, 0.006527999877929688, 0.006533120155334473, 0.006500351905822754, 0.0065372161865234375, 0.006552576065063476, 0.006498303890228272, 0.00653004789352417, 0.006487040042877197, 0.00653107213973999, 0.006527999877929688, 0.006502399921417237, 0.00653004789352417, 0.006520832061767578, 0.0064839677810668945, 0.006499328136444092, 0.006510591983795166, 0.006546432018280029, 0.006532095909118653, 0.006508543968200684, 0.006543360233306885, 0.0065413122177124024, 0.00653004789352417, 0.0066109437942504885, 0.006540287971496582, 0.00652288007736206, 0.006557695865631104, 0.006511616230010986, 0.0066406397819519045, 0.0065372161865234375, 0.006504447937011719, 0.006589439868927002, 0.006603775978088379, 0.0064839677810668945, 0.006511616230010986, 0.00653004789352417, 0.006542335987091064, 0.006554624080657959, 0.006504447937011719, 0.006520832061767578, 0.006527999877929688, 0.006511616230010986, 0.006516736030578613, 0.006532095909118653, 0.006487040042877197, 0.006592544078826904, 0.006533120155334473, 0.0065361599922180175, 0.0065064959526062015, 0.006512639999389648, 0.006505472183227539, 0.006549503803253174, 0.013765631675720215, 0.006526976108551025, 0.0065382399559021, 0.0065484800338745115, 0.006520832061767578, 0.006501376152038574, 0.006524928092956543, 0.006593535900115967, 0.006523903846740723, 0.006598656177520752, 0.006499328136444092, 0.006542335987091064, 0.006536191940307618, 0.00653004789352417, 0.0065136637687683106, 0.00653926420211792, 0.006533120155334473, 0.006525951862335205, 0.006525951862335205, 0.006510591983795166, 0.006510591983795166, 0.006495232105255127, 0.006550528049468994, 0.006529024124145508, 0.00648089599609375, 0.006518784046173095, 0.006536191940307618, 0.0065146880149841305, 0.0065064959526062015, 0.006486015796661377, 0.0066979842185974124, 0.006559743881225586, 0.006510591983795166, 
0.006525951862335205, 0.006543360233306885, 0.006523903846740723, 0.006526976108551025, 0.0065413122177124024, 0.00653004789352417, 0.006499328136444092, 0.006524928092956543, 0.00653004789352417, 0.006540287971496582, 0.006515711784362793, 0.006534143924713135, 0.006490111827850342, 0.006504447937011719, 0.006503424167633057, 0.006680575847625733, 0.006489088058471679, 0.006542335987091064, 0.00648089599609375, 0.0065484800338745115, 0.006479872226715088, 0.006515711784362793, 0.006545407772064209, 0.006503424167633057, 0.006809599876403808, 0.0065669121742248536, 0.006527999877929688, 0.006543360233306885, 0.006518784046173095, 0.0065413122177124024, 0.014844927787780762, 0.006867968082427979, 0.007014400005340577, 0.006895616054534912, 0.006855679988861084, 0.006817791938781738, 0.0067983360290527345, 0.0068280320167541505, 0.006812672138214112, 0.0068280320167541505, 0.006944767951965332, 0.006852608203887939, 0.006920191764831543, 0.006803455829620361, 0.006838272094726563, 0.006853631973266602, 0.006814720153808594, 0.006803455829620361, 0.006814720153808594, 0.006807551860809326, 0.006799359798431396, 0.00679423999786377, 0.006817791938781738, 0.006821887969970703, 0.00683622407913208, 0.0068249602317810056, 0.006795263767242431, 0.006770688056945801, 0.006814720153808594, 0.006854656219482422, 0.006831103801727295, 0.006865920066833496, 0.006854656219482422, 0.0068351998329162595, 0.0068351998329162595, 0.006831103801727295, 0.006831103801727295, 0.006838272094726563, 0.0068392958641052244, 0.006834176063537598, 0.0068249602317810056, 0.006810624122619629, 0.006847487926483154, 0.006812672138214112, 0.007004159927368164, 0.006865920066833496, 0.006843391895294189, 0.006852608203887939, 0.006821887969970703, 0.006858751773834228, 0.006813695907592773, 0.006807551860809326, 0.006823935985565186, 0.006848512172698974, 0.006816768169403077, 0.006859776020050049, 0.006837247848510742, 0.006851583957672119, 0.006810624122619629, 0.006855679988861084, 0.006823935985565186, 0.006811647891998291, 0.006850592136383057, 0.014511072158813476, 0.006811647891998291, 0.006855679988861084, 0.006822912216186523, 0.006848512172698974, 0.006813695907592773, 0.006862847805023193, 0.006793216228485107, 0.006822912216186523, 0.006864895820617676, 0.006845439910888672, 0.006826015949249267, 0.006809567928314209, 0.006840320110321045, 0.006837247848510742, 0.006806528091430664, 0.006859776020050049, 0.006813695907592773, 0.006849599838256836, 0.006807487964630127, 0.006851583957672119, 0.006814720153808594, 0.006834176063537598, 0.006840320110321045, 0.006829055786132812, 0.006846464157104492, 0.006884352207183838, 0.006825984001159668, 0.006845439910888672, 0.006814720153808594, 0.006846464157104492, 0.0068618240356445315, 0.00677785587310791, 0.006863872051239014, 0.006830080032348633, 0.0068280320167541505, 0.006810624122619629, 0.006864895820617676, 0.0067983360290527345, 0.006937600135803222, 0.006856704235076904, 0.00683622407913208, 0.006834176063537598, 0.006809599876403808, 0.006858751773834228, 0.006840320110321045, 0.006808640003204346, 0.0068607358932495114, 0.006880256175994873, 0.006843391895294189, 0.00681171178817749, 0.006853568077087402, 0.006813695907592773, 0.006850560188293457, 0.006812672138214112, 0.006823935985565186, 0.0068249602317810056, 0.0069027838706970214, 0.006849535942077637, 0.00682700777053833, 0.006840320110321045, 0.006843391895294189, 0.006808576107025147, 0.013846528053283692, 0.006520832061767578, 0.006509568214416504, 0.006534143924713135, 0.006516736030578613, 
0.006527008056640625, 0.006534111976623535, 0.006515711784362793, 0.006516736030578613, 0.006543360233306885, 0.006533120155334473, 0.006516736030578613, 0.006518784046173095, 0.006505472183227539, 0.006532095909118653, 0.0065443840026855465, 0.006529024124145508, 0.006532095909118653, 0.006526976108551025, 0.006515711784362793, 0.006546432018280029, 0.006532095909118653, 0.006558720111846924, 0.006774784088134766, 0.006849535942077637, 0.006837247848510742, 0.006949888229370117, 0.007171072006225586, 0.0071311678886413574, 0.00744547176361084, 0.007161856174468994, 0.0074414081573486324, 0.007116799831390381, 0.0068986878395080565, 0.0068618240356445315, 0.006876160144805908, 0.00684441614151001, 0.00683622407913208, 0.0068628802299499515, 0.0068576960563659665, 0.006879231929779053, 0.006804480075836182, 0.006847487926483154, 0.006843391895294189, 0.006845439910888672, 0.006879231929779053, 0.006862847805023193, 0.006822912216186523, 0.006791168212890625, 0.007130112171173096, 0.006842368125915528, 0.006853631973266602, 0.006847487926483154, 0.006822912216186523, 0.006852608203887939, 0.006838272094726563, 0.006830080032348633, 0.0068321280479431154, 0.006811647891998291, 0.006845439910888672, 0.006819839954376221, 0.00684441614151001, 0.0068321280479431154, 0.014599167823791503, 0.006838272094726563, 0.006821887969970703, 0.0068321280479431154, 0.006842368125915528, 0.006843391895294189, 0.00684441614151001, 0.0068249602317810056, 0.006851615905761718, 0.006811615943908692, 0.006864927768707276, 0.0068269758224487305, 0.006820864200592041, 0.006862847805023193, 0.006830080032348633, 0.0068884482383728025, 0.006837247848510742, 0.006840320110321045, 0.006854656219482422, 0.006791232109069824, 0.006830016136169434, 0.006804480075836182, 0.006800384044647217, 0.006840320110321045, 0.0068280320167541505, 0.006809599876403808, 0.00676966381072998, 0.006829055786132812, 0.006815743923187256, 0.00679423999786377, 0.006838272094726563, 0.006829055786132812, 0.006801407814025879, 0.006876160144805908, 0.006797311782836914, 0.006789120197296142, 0.00678604793548584, 0.006810624122619629, 0.006821887969970703, 0.00682700777053833, 0.006837247848510742, 0.006804480075836182, 0.006830080032348633, 0.006811647891998291, 0.006975488185882568, 0.006833151817321777, 0.006806528091430664, 0.006866943836212158, 0.00692633581161499, 0.006884352207183838, 0.006833151817321777, 0.006858751773834228, 0.006841343879699707, 0.006834176063537598, 0.006867968082427979, 0.0068618240356445315, 0.006867968082427979, 0.006899712085723877, 0.006859776020050049, 0.0068249602317810056, 0.0068321280479431154, 0.0068577280044555666, 0.006837247848510742, 0.013855744361877441, 0.006562880039215088, 0.00689247989654541, 0.006939712047576904, 0.006982592105865479, 0.006859776020050049, 0.006833151817321777, 0.006833151817321777, 0.006905888080596924, 0.006889440059661865, 0.006778880119323731, 0.0068351998329162595, 0.0068249602317810056, 0.0069621758460998535, 0.0068055357933044435, 0.0068873920440673825, 0.0068618240356445315, 0.006809599876403808, 0.006841343879699707, 0.006985727787017822, 0.006808576107025147, 0.006840320110321045, 0.006838272094726563, 0.006924287796020508, 0.006774784088134766, 0.006858751773834228, 0.006848512172698974, 0.006866943836212158, 0.006783999919891357, 0.006855679988861084, 0.006812672138214112, 0.006789120197296142, 0.006813695907592773, 0.006794271945953369, 0.006850527763366699, 0.0068055038452148435, 0.007599103927612305, 0.0069632000923156735, 0.006883327960968018, 
0.006840320110321045, 0.006845439910888672, 0.006845439910888672, 0.00679423999786377, 0.006895616054534912, 0.006833151817321777, 0.006796288013458252, 0.006781951904296875, 0.006810624122619629, 0.006819839954376221, 0.0067983360290527345, 0.006820864200592041, 0.006903903961181641, 0.006820767879486084, 0.006845439910888672, 0.006820864200592041, 0.006811647891998291, 0.006808576107025147, 0.00684441614151001, 0.006811647891998291, 0.006816768169403077, 0.006833151817321777, 0.0068392958641052244, 0.006854656219482422, 0.014112768173217773, 0.006528031826019287, 0.006518752098083496, 0.006535168170928955, 0.0066826238632202144, 0.006511616230010986, 0.006540287971496582, 0.006558720111846924, 0.0065146880149841305, 0.006554624080657959, 0.006533120155334473, 0.006774784088134766, 0.006858751773834228, 0.006843391895294189, 0.006834176063537598, 0.006846464157104492, 0.006812672138214112, 0.006800384044647217, 0.006847487926483154, 0.006813695907592773, 0.006833151817321777, 0.006821887969970703, 0.006822912216186523, 0.006803455829620361, 0.006812672138214112, 0.006829055786132812, 0.006812672138214112, 0.006818816184997558, 0.0068321280479431154, 0.00682700777053833, 0.0068249602317810056, 0.006820864200592041, 0.0068351998329162595, 0.00683622407913208, 0.006767615795135498, 0.006837247848510742, 0.006819839954376221, 0.006820864200592041, 0.006815743923187256, 0.006843391895294189, 0.006804480075836182, 0.006821887969970703, 0.00683523178100586, 0.006829023838043213, 0.006842368125915528, 0.0068321280479431154, 0.006810624122619629, 0.0068392958641052244, 0.006783999919891357, 0.007003136157989502, 0.006883327960968018, 0.006802432060241699, 0.006848512172698974, 0.00683622407913208, 0.006863872051239014, 0.006843391895294189, 0.006842368125915528, 0.006903808116912841, 0.006806528091430664, 0.006872064113616944, 0.006909952163696289, 0.006910975933074951, 0.006916096210479736]",tokens/s,146.412128177699,,,,,,,, -4bit-awq-gemm-flash_attention_2,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,Qwen/Qwen1.5-4B,Qwen/Qwen1.5-4B,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.40.2,,0.30.1,,,,1.19.2,,,,0.11.1,,,,,,,,,,,,,,,,,,,,,,,,,,,MB,3193.102336,5128.060928,0.0,4481.613824,4276.256768,s,10,3.2413734130859373,0.32413734130859373,0.0016825840816240875,0.3241157684326172,0.32623468627929686,0.3264099456787109,0.32655015319824215,"[0.326585205078125, 0.3253639221191406, 0.3222522888183594, 0.32284228515625, 0.32339013671875, 0.32129388427734373, 0.32326702880859376, 0.3248414001464844, 0.32534152221679685, 0.3261957397460937]",tokens/s,789.7886709580806,kWh,3.8053463575326738e-06,2.0845240635480878e-06,1.689038677618604e-05,2.27802571972668e-05,tokens/kWh,11237801.126789523,MB,3193.102336,5128.060928,0.0,4481.613824,4465.661952,s,10,189.078173828125,18.9078173828125,0.012273201694566093,18.9097021484375,18.920707812499998,18.9233919921875,18.9255393359375,"[18.91315625, 18.920111328125, 18.9024375, 18.901080078125, 18.88138671875, 18.897064453125, 18.91123828125, 18.908166015625, 18.926076171875, 
18.91745703125]",tokens/s,3.331955176236681,kWh,0.00022324078258437416,0.00012235523933478362,0.0009709997090908196,0.0013165957310099774,tokens/kWh,47850.67922988926,,s,629,191.68752508544924,0.3047496424251975,0.038502073286161766,0.300015625,0.3008729064941406,0.3014152465820313,0.6232092919921874,"[0.3012270202636719, 0.30114816284179685, 0.3007774658203125, 0.30110003662109375, 0.29941351318359377, 0.29971661376953124, 0.2996531066894531, 0.2997452697753906, 0.30033203125, 0.3001241455078125, 0.30014566040039065, 0.3000565795898438, 0.3007344665527344, 0.30101809692382814, 0.2998548583984375, 0.2999808044433594, 0.3005767822265625, 0.29991937255859374, 0.3002378234863281, 0.3010672607421875, 0.30078155517578126, 0.30025625610351564, 0.29994802856445313, 0.3000330505371094, 0.3002951354980469, 0.30040884399414064, 0.30003506469726565, 0.30066790771484375, 0.30030438232421874, 0.30015179443359374, 0.30028594970703126, 0.3002818603515625, 0.30056243896484375, 0.3007068786621094, 0.3003309326171875, 0.29966232299804685, 0.2997370910644531, 0.2999521179199219, 0.29981594848632814, 0.2995947570800781, 0.29999102783203124, 0.29974835205078126, 0.299620361328125, 0.2994493408203125, 0.2994759826660156, 0.29976165771484375, 0.30030642700195315, 0.3000166320800781, 0.2999060363769531, 0.30026956176757813, 0.2995128173828125, 0.29971661376953124, 0.2999552001953125, 0.3005082092285156, 0.30011181640625, 0.2995394592285156, 0.3000186767578125, 0.30043032836914063, 0.29998388671875, 0.29997259521484376, 0.299610107421875, 0.29887283325195313, 0.6254059448242187, 0.3006320495605469, 0.3013908386230469, 0.30085733032226564, 0.3013273620605469, 0.29998489379882814, 0.2996244506835937, 0.3003135986328125, 0.299826171875, 0.2998814697265625, 0.300400634765625, 0.29994085693359374, 0.3004631042480469, 0.3001395263671875, 0.2999992370605469, 0.299926513671875, 0.300759033203125, 0.300590087890625, 0.29988455200195313, 0.29991729736328127, 0.30021221923828123, 0.30040472412109376, 0.3002593383789062, 0.30088909912109374, 0.3011358642578125, 0.2998763427734375, 0.30023577880859376, 0.30020709228515624, 0.3008174133300781, 0.3003627624511719, 0.300759033203125, 0.3007027587890625, 0.30042620849609375, 0.30063821411132813, 0.2999624328613281, 0.29996844482421875, 0.30086349487304687, 0.30146969604492185, 0.29992141723632815, 0.3000361022949219, 0.3000586242675781, 0.29992755126953125, 0.2999654541015625, 0.2998486938476562, 0.30020095825195314, 0.3000299377441406, 0.30309478759765623, 0.3002388610839844, 0.2999859313964844, 0.3003576354980469, 0.30067608642578125, 0.3001446533203125, 0.30003302001953125, 0.3000166320800781, 0.2999244689941406, 0.3001692199707031, 0.29990194702148437, 0.3005317077636719, 0.30013031005859375, 0.2998405151367188, 0.29998797607421873, 0.2999336853027344, 0.30006988525390627, 0.622824462890625, 0.30008013916015625, 0.29997259521484376, 0.2997729187011719, 0.29949029541015626, 0.300337158203125, 0.30040985107421875, 0.30018765258789065, 0.2999029846191406, 0.29995724487304687, 0.30025112915039065, 0.30034637451171875, 0.29992959594726565, 0.30029824829101565, 0.3003873291015625, 0.2998978576660156, 0.30032589721679687, 0.3002255249023438, 0.3004590454101562, 0.3001773681640625, 0.2999449462890625, 0.2996777038574219, 0.2997125244140625, 0.3017687072753906, 0.2998691711425781, 0.29983026123046874, 0.2998783874511719, 0.30037503051757813, 0.3002183532714844, 0.29920254516601563, 0.29975244140625, 0.2997073974609375, 0.29997671508789064, 0.2997862548828125, 0.30004327392578123, 
0.2995845031738281, 0.29983743286132813, 0.29989376831054687, 0.3005511779785156, 0.3004989318847656, 0.3004620666503906, 0.2998620300292969, 0.2996592712402344, 0.2998609924316406, 0.29997567749023435, 0.2994964599609375, 0.3000780944824219, 0.30188134765625, 0.29991015625, 0.30026956176757813, 0.30005453491210937, 0.29941964721679687, 0.3000770568847656, 0.2994923400878906, 0.29899981689453126, 0.299219970703125, 0.29973809814453123, 0.29967666625976563, 0.30005966186523436, 0.3000115661621094, 0.3005040283203125, 0.2999951477050781, 0.30062490844726564, 0.6233589477539062, 0.29990194702148437, 0.3002234802246094, 0.2998190002441406, 0.30008734130859377, 0.30004833984375, 0.2998200378417969, 0.3000770568847656, 0.300015625, 0.3000606689453125, 0.29941146850585937, 0.29921484375, 0.299578369140625, 0.2996756591796875, 0.2996449279785156, 0.30001971435546876, 0.3000995788574219, 0.29999002075195313, 0.3003607177734375, 0.30082763671875, 0.3000340576171875, 0.300611572265625, 0.30002584838867186, 0.299694091796875, 0.29959066772460935, 0.30004019165039064, 0.2999818115234375, 0.30008218383789065, 0.29999002075195313, 0.3006935119628906, 0.2997913513183594, 0.29910015869140627, 0.2994124755859375, 0.29980160522460936, 0.2999715881347656, 0.3000504455566406, 0.3000074157714844, 0.2999378051757812, 0.2999920654296875, 0.3000719299316406, 0.2999992370605469, 0.30074981689453123, 0.30103448486328127, 0.2998681640625, 0.3038136291503906, 0.3005091857910156, 0.3002746887207031, 0.2998763427734375, 0.299767822265625, 0.29981594848632814, 0.2999541625976562, 0.29982720947265623, 0.299641845703125, 0.29993267822265623, 0.29993472290039064, 0.2999715881347656, 0.2998343811035156, 0.29893017578125, 0.29962136840820314, 0.30002072143554687, 0.2994237365722656, 0.299831298828125, 0.30021630859375, 0.623921142578125, 0.3000299377441406, 0.30005966186523436, 0.299926513671875, 0.299947021484375, 0.300115966796875, 0.29993572998046875, 0.30002072143554687, 0.30004531860351563, 0.29974118041992187, 0.30062188720703126, 0.29995513916015626, 0.29942477416992186, 0.2993663940429688, 0.29983026123046874, 0.2999603271484375, 0.2999285888671875, 0.3015209045410156, 0.29998797607421873, 0.30004327392578123, 0.30011700439453126, 0.2999449462890625, 0.30022860717773436, 0.3003965759277344, 0.30081021118164064, 0.30038836669921876, 0.30042520141601564, 0.30013336181640626, 0.3006033935546875, 0.29989376831054687, 0.29989273071289063, 0.30014874267578123, 0.3004436340332031, 0.299747314453125, 0.2998476867675781, 0.3000094604492187, 0.29924453735351564, 0.29881036376953124, 0.29879910278320315, 0.2987581481933594, 0.29875506591796874, 0.29884518432617185, 0.298787841796875, 0.2988482666015625, 0.29977191162109373, 0.3000657958984375, 0.2989967346191406, 0.2990592041015625, 0.2991626281738281, 0.2990673828125, 0.29893017578125, 0.2991349792480469, 0.299323486328125, 0.29853277587890625, 0.29891278076171873, 0.29906942749023435, 0.2987386779785156, 0.3028009033203125, 0.29910748291015626, 0.2988552551269531, 0.2989045715332031, 0.299146240234375, 0.29900595092773435, 0.621075439453125, 0.2991349792480469, 0.2998179931640625, 0.29896807861328123, 0.2991811828613281, 0.2986914672851563, 0.29862911987304686, 0.29889239501953124, 0.2989116516113281, 0.2987960205078125, 0.29937152099609377, 0.29994189453125, 0.29974630737304686, 0.30000537109375, 0.300537841796875, 0.29991015625, 0.30004837036132814, 0.300326904296875, 0.3002030029296875, 0.3000575866699219, 0.3000770568847656, 0.3004405822753906, 0.2999531555175781, 
0.30005148315429686, 0.3000484008789063, 0.3000626525878906, 0.30036172485351564, 0.3001272277832031, 0.30000640869140627, 0.2996879272460938, 0.29982925415039063, 0.29986611938476565, 0.3000391540527344, 0.2998128662109375, 0.29970330810546875, 0.30041189575195315, 0.3001200561523438, 0.29997772216796875, 0.3035576171875, 0.30046923828125, 0.29996749877929685, 0.3007201232910156, 0.3004538879394531, 0.29997671508789064, 0.30013543701171874, 0.30009344482421874, 0.3003351135253906, 0.3000340576171875, 0.300626953125, 0.29982925415039063, 0.29992959594726565, 0.299978759765625, 0.3000924072265625, 0.30002484130859375, 0.29995724487304687, 0.2998896789550781, 0.299720703125, 0.2996705322265625, 0.2998804626464844, 0.2997698669433594, 0.3005307006835938, 0.29985382080078127, 0.30008731079101564, 0.6239016723632812, 0.3004989318847656, 0.3000094604492187, 0.3003781127929688, 0.3024650268554688, 0.30008941650390625, 0.30021728515625, 0.29997671508789064, 0.3000985717773437, 0.2997452697753906, 0.3001559143066406, 0.299789306640625, 0.299578369140625, 0.3000637512207031, 0.3000022888183594, 0.30041189575195315, 0.300179443359375, 0.30062490844726564, 0.3001978759765625, 0.29980978393554686, 0.30084915161132814, 0.3001692199707031, 0.30007400512695315, 0.3024322204589844, 0.3004334106445313, 0.29990194702148437, 0.30002584838867186, 0.2999787902832031, 0.3001108093261719, 0.3005962219238281, 0.3003105163574219, 0.30027059936523437, 0.299931640625, 0.2999029846191406, 0.3000309753417969, 0.30007601928710936, 0.2999808044433594, 0.299936767578125, 0.3001610107421875, 0.29992755126953125, 0.29989376831054687, 0.29997567749023435, 0.3000391540527344, 0.3000443115234375, 0.3002992248535156, 0.2999715881347656, 0.2998241577148438, 0.2998548278808594, 0.3000862731933594, 0.300653564453125, 0.3015086059570313, 0.29992242431640626, 0.299863037109375, 0.29971966552734375, 0.29996954345703125, 0.2999613342285156, 0.3001968688964844, 0.2997841796875, 0.2999797668457031, 0.29949029541015626, 0.2995916748046875, 0.29981695556640625, 0.30072625732421876, 0.6255585327148437, 0.30017230224609376, 0.299831298828125, 0.3000555419921875, 0.30022042846679686, 0.30029925537109375, 0.30140109252929687, 0.30094949340820315, 0.2998886413574219, 0.29979647827148437, 0.30036376953125, 0.29990194702148437, 0.30001458740234377, 0.300010498046875, 0.29990707397460936, 0.29992959594726565, 0.3013775329589844, 0.2998958129882813, 0.3009157104492188, 0.3004344177246094, 0.2997698974609375, 0.299400146484375, 0.3000780944824219, 0.300410888671875, 0.3009054870605469, 0.3005409240722656, 0.301384765625, 0.30065350341796876, 0.30062591552734375, 0.30096075439453124, 0.3004375, 0.30160894775390623, 0.3008819274902344, 0.3004241943359375, 0.29964389038085937, 0.2996091003417969, 0.2996643981933594, 0.29964901733398436, 0.2998384704589844, 0.29929779052734373, 0.29952517700195314, 0.30001556396484375, 0.29967666625976563, 0.29992242431640626, 0.2999613342285156, 0.299926513671875, 0.2998753356933594, 0.2995947570800781, 0.2999183349609375, 0.2996172790527344, 0.29993267822265623, 0.30100991821289064, 0.30017843627929686, 0.30001254272460937, 0.29992141723632815, 0.29997055053710936, 0.300147705078125, 0.3002347412109375, 0.29967462158203123, 0.299504638671875, 0.299514892578125, 0.29878680419921877, 0.2995189819335938, 0.6255062866210938, 0.30063821411132813, 0.30005966186523436, 0.30045697021484374, 0.3000965270996094, 0.2999285888671875, 0.300015625, 0.300263427734375, 0.299894775390625, 0.29981695556640625, 0.30063821411132813, 
0.300000244140625, 0.29996954345703125, 0.29998797607421873, 0.30009344482421874, 0.2999808044433594, 0.3002378234863281, 0.30191104125976564, 0.3011993713378906, 0.30066586303710935, 0.3013355407714844, 0.30207794189453124, 0.302529541015625, 0.30110720825195314, 0.3007958984375, 0.3010785217285156, 0.30087783813476565, 0.2998056945800781, 0.30067608642578125, 0.3011614685058594, 0.3007979431152344, 0.30112152099609374, 0.3007201232910156, 0.3007068176269531, 0.30179122924804686, 0.3001978759765625, 0.3004272766113281, 0.2999080810546875, 0.3004764099121094, 0.30043954467773437, 0.3001835632324219, 0.3006484375, 0.3004712829589844, 0.3014246826171875, 0.3003248291015625, 0.30087884521484376, 0.3002808227539063, 0.299840576171875, 0.3000575256347656, 0.30002383422851564, 0.2996745910644531, 0.29950567626953123, 0.2997145690917969, 0.29997567749023435, 0.30001458740234377, 0.30037503051757813, 0.30008114624023435, 0.2989066162109375, 0.30013543701171874, 0.2999111633300781, 0.29975347900390625, 0.2997350463867188, 0.30005453491210937, 0.6268078002929688, 0.30009548950195314, 0.29986407470703125, 0.30065869140625, 0.3011307373046875, 0.30074368286132813, 0.3008604125976562, 0.30035250854492185, 0.30087167358398437, 0.29988760375976564, 0.30013644409179685, 0.3006300048828125, 0.3005399169921875, 0.30181991577148437, 0.30090756225585935, 0.29990191650390624, 0.30007601928710936, 0.30002688598632815, 0.2999869384765625, 0.3001077880859375, 0.30047845458984374, 0.29997567749023435, 0.3000227966308594, 0.3018362731933594, 0.30386279296875, 0.30291455078125, 0.303236083984375, 0.3023329162597656, 0.30000436401367186, 0.2989014892578125, 0.2988851318359375, 0.29931622314453127, 0.2989588623046875, 0.29878271484375, 0.2992056274414063, 0.30051431274414064, 0.3006761169433594, 0.29950460815429686, 0.29902847290039064, 0.2990940246582031, 0.29902545166015626, 0.29914212036132815, 0.29988455200195313, 0.299652099609375, 0.30047232055664064, 0.2996326293945312, 0.30089727783203124, 0.29991729736328127, 0.3000965270996094, 0.30000234985351565, 0.29993365478515627, 0.3000115051269531, 0.2999603881835938, 0.29999508666992186, 0.29998284912109374, 0.2999869384765625, 0.30106521606445313, 0.3007928466796875, 0.300732421875, 0.29975653076171876, 0.29992755126953125, 0.30012313842773436, 0.29981491088867185]",tokens/s,3.281382029005844,,,,,,,, -4bit-awq-gemm-flash_attention_2,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,Qwen/Qwen1.5-0.5B,Qwen/Qwen1.5-0.5B,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.40.2,,0.30.1,,,,1.19.2,,,,0.11.1,,,,,,,,,,,,,,,,,,,,,,,,,,,MB,1440.489472,1709.703168,0.0,1063.256064,942.605312,s,10,0.8710727081298828,0.08710727081298827,0.0020526611434407644,0.0869946403503418,0.08830768203735351,0.09043918418884277,0.09214438591003418,"[0.09257068634033203, 0.08689478302001953, 0.08709449768066406, 0.08734588623046875, 0.0876402587890625, 0.08528950500488282, 0.08547952270507812, 0.08543309020996094, 0.08549046325683594, 
0.08783401489257812]",tokens/s,2938.9050719957663,kWh,1.0307077509193605e-06,5.647778186555472e-07,2.6262726324173835e-06,4.221758201992291e-06,tokens/kWh,60638243.06166823,MB,1440.817152,1709.703168,0.0,1063.256064,942.607872,s,10,54.10364501953125,5.410364501953125,0.04442393039305028,5.402393798828125,5.47523671875,5.4771017578125,5.478593789062501,"[5.402685546875, 5.4450302734375, 5.474822265625, 5.478966796875, 5.39001171875, 5.366296875, 5.3479423828125, 5.3592861328125, 5.4365009765625, 5.40210205078125]",tokens/s,11.64431712082563,kWh,6.414297001805283e-05,3.5154539095709204e-05,0.00015373626912418487,0.00025303377823794696,tokens/kWh,248978.61636779693,,s,629,54.788874206542985,0.08710472846827182,0.01042761753839409,0.08542822265625,0.08754749298095703,0.08829050750732421,0.17079922241210937,"[0.08673382568359375, 0.08457215881347656, 0.08464179229736328, 0.08488550567626953, 0.08813362884521485, 0.08856473541259766, 0.08671949005126953, 0.08648601531982422, 0.08687923431396484, 0.08737894439697266, 0.08506470489501954, 0.0844554214477539, 0.0844400634765625, 0.08486911773681641, 0.08474524688720703, 0.08481378936767578, 0.08675328063964843, 0.08531455993652344, 0.08472268676757813, 0.08468172454833985, 0.08468172454833985, 0.0845322265625, 0.08479436492919921, 0.08471552276611329, 0.08475545501708984, 0.08474521636962891, 0.08502886199951172, 0.0846899871826172, 0.0848424301147461, 0.084569091796875, 0.08475955200195312, 0.08495104217529297, 0.08461721801757813, 0.08463571166992187, 0.08458643341064453, 0.08476876831054687, 0.08473804473876953, 0.08473190307617187, 0.0844554214477539, 0.08442777252197266, 0.08484761810302735, 0.08466124725341796, 0.08562483215332031, 0.08599142456054687, 0.08450764465332031, 0.0840970230102539, 0.08538521575927735, 0.08838041687011719, 0.0868485107421875, 0.08694374084472656, 0.08736255645751953, 0.08686080169677735, 0.08700518035888671, 0.08699289703369141, 0.08753254699707032, 0.0868485107421875, 0.08696627044677735, 0.08728985595703125, 0.08770150756835937, 0.08642969512939454, 0.08510566711425781, 0.08681779479980468, 0.1740953674316406, 0.08652799987792968, 0.08564736175537109, 0.0842209243774414, 0.08706150054931641, 0.08500326538085938, 0.08664473724365235, 0.08596275329589843, 0.0853391342163086, 0.08664268493652344, 0.08708096313476563, 0.08690073394775391, 0.08696934509277343, 0.08694783782958984, 0.08664575958251954, 0.08597196960449219, 0.08684748840332031, 0.08683110046386719, 0.08509439849853516, 0.08638566589355469, 0.08632217407226563, 0.08716287994384765, 0.087119873046875, 0.08713215637207031, 0.08506470489501954, 0.08651168060302734, 0.08632109069824219, 0.08708403015136719, 0.08612662506103516, 0.08650748443603516, 0.08707481384277344, 0.08641535949707031, 0.08600678253173828, 0.08589107513427735, 0.08514252471923828, 0.08699494171142579, 0.08681676483154296, 0.08650956726074219, 0.0871383056640625, 0.08712806701660156, 0.08630374145507813, 0.08593612670898437, 0.08639590454101563, 0.08506681823730469, 0.08687712097167968, 0.08704512023925781, 0.08696115112304688, 0.08838451385498047, 0.08482816314697265, 0.08464895629882813, 0.08431206512451171, 0.08453427124023437, 0.08795238494873046, 0.08794931030273437, 0.08684134674072265, 0.08686592102050782, 0.08699801635742188, 0.08754073333740234, 0.087947265625, 0.08687513732910156, 0.08657100677490234, 0.08599350738525391, 0.08582653045654297, 0.17401548767089844, 0.0865423355102539, 0.08694374084472656, 0.08704204559326172, 0.08744652557373046, 0.08659967803955078, 
0.08703897857666015, 0.08700109100341796, 0.08715776062011718, 0.08681574249267578, 0.08705433654785157, 0.08619929504394531, 0.08701548767089844, 0.086857666015625, 0.08725811004638671, 0.08656588745117187, 0.08733184051513672, 0.0871731185913086, 0.08719257354736328, 0.08709120178222657, 0.0869713897705078, 0.0868485107421875, 0.08620236968994141, 0.08700415802001953, 0.08701337432861328, 0.0868331527709961, 0.08713420867919922, 0.08696627044677735, 0.08672972869873047, 0.08645836639404297, 0.08661094665527344, 0.08728678131103515, 0.08693452453613282, 0.08721206665039062, 0.08688022613525391, 0.08789299011230468, 0.08672870635986328, 0.08694477081298828, 0.087731201171875, 0.08747007751464844, 0.08717005157470703, 0.08711270141601563, 0.08729190063476562, 0.08698271942138672, 0.08669484710693359, 0.08654745483398438, 0.08669286346435547, 0.08840806579589844, 0.08711475372314453, 0.08676044464111328, 0.08631705474853515, 0.08654956817626953, 0.08599648284912109, 0.08647679901123047, 0.08694989013671875, 0.08696524810791016, 0.08698060607910156, 0.08548556518554687, 0.08556953430175782, 0.0868823013305664, 0.08658534240722657, 0.08709939575195312, 0.08710451507568359, 0.17466163635253906, 0.08715673828125, 0.08728371429443359, 0.08680345916748047, 0.08702361297607422, 0.0872099838256836, 0.08695299530029296, 0.0865228500366211, 0.08702054595947266, 0.08711270141601563, 0.08681267547607421, 0.0862402572631836, 0.08722022247314454, 0.08714342498779297, 0.08641433715820312, 0.08614297485351563, 0.08621363067626953, 0.08743526458740235, 0.0874260482788086, 0.08645123291015624, 0.08728675079345703, 0.08692940521240235, 0.08707891082763672, 0.08719055938720703, 0.08709014129638672, 0.08714854431152344, 0.08714035034179687, 0.08686592102050782, 0.0871751708984375, 0.08751411437988281, 0.08707379150390625, 0.08701952362060547, 0.08471449279785156, 0.08613683319091797, 0.08813772583007813, 0.08714240264892578, 0.08702566528320313, 0.0872959976196289, 0.08773222351074218, 0.08883302307128907, 0.08757759857177734, 0.08561151885986328, 0.08676659393310547, 0.08834764862060547, 0.08476467132568359, 0.08778854370117188, 0.08729497528076172, 0.08688441467285156, 0.08712287902832032, 0.0869744644165039, 0.0868136978149414, 0.08646246337890626, 0.08621875, 0.08678707122802734, 0.08692428588867188, 0.08732262420654296, 0.08703692626953125, 0.08800665283203125, 0.08684236907958984, 0.08649215698242188, 0.0869048309326172, 0.08684236907958984, 0.08680857849121094, 0.1740042266845703, 0.08717619323730469, 0.0870635528564453, 0.0868219223022461, 0.08698159790039063, 0.08682291412353516, 0.08556953430175782, 0.08729804992675781, 0.08690278625488282, 0.08721305847167969, 0.08680857849121094, 0.08707686614990234, 0.08708812713623047, 0.0862003173828125, 0.08698265838623047, 0.08703385925292968, 0.08684031677246094, 0.08691814422607422, 0.08724281311035156, 0.08704608154296875, 0.08688742065429687, 0.08603648376464844, 0.08850739288330078, 0.08509849548339844, 0.08658636474609376, 0.08696934509277343, 0.08461007690429688, 0.08453526306152344, 0.08452095794677734, 0.08469811248779296, 0.084959228515625, 0.08464383697509766, 0.08490086364746094, 0.08458649444580078, 0.08620851135253907, 0.08480665588378906, 0.08471449279785156, 0.08450867462158203, 0.08490086364746094, 0.0845486068725586, 0.08454962921142578, 0.08446873474121094, 0.0846909408569336, 0.0845506591796875, 0.08456806182861328, 0.08460185241699218, 0.08472268676757813, 0.08459366607666016, 0.08461414337158203, 0.0848005142211914, 0.08458854675292969, 
0.08446669006347657, 0.08458854675292969, 0.08470323181152344, 0.08453427124023437, 0.08449433898925782, 0.08460192108154296, 0.08460384368896484, 0.08456192016601563, 0.08444825744628906, 0.08477184295654297, 0.0844421157836914, 0.08439193725585938, 0.17255935668945313, 0.08706764984130859, 0.0871720962524414, 0.0869908447265625, 0.08572518157958985, 0.08475033569335938, 0.0848353271484375, 0.08454962921142578, 0.08473804473876953, 0.08482406616210937, 0.08465305328369141, 0.08468479919433594, 0.08473395538330078, 0.08460902404785156, 0.08453529357910156, 0.08463053131103515, 0.0845455322265625, 0.08432128143310547, 0.08473190307617187, 0.08471449279785156, 0.084578369140625, 0.08457823944091797, 0.08466944122314453, 0.08652288055419922, 0.08498278045654296, 0.08480255889892578, 0.08473702239990234, 0.08468787384033204, 0.08633650970458985, 0.0850851821899414, 0.08458751678466797, 0.08468991851806641, 0.08474726104736328, 0.0846909408569336, 0.0885032958984375, 0.08480870056152344, 0.0858071060180664, 0.08694374084472656, 0.08694989013671875, 0.08657920074462891, 0.08678604888916015, 0.08691814422607422, 0.084853759765625, 0.08535858917236328, 0.08599654388427734, 0.08458137512207031, 0.08471347045898438, 0.084495361328125, 0.08461516571044922, 0.0845322265625, 0.08443698883056641, 0.0841891860961914, 0.08461619567871094, 0.0845137939453125, 0.0844031982421875, 0.08491827392578125, 0.08468889617919922, 0.08446873474121094, 0.08431718444824218, 0.0844595489501953, 0.08526640319824219, 0.08521421051025391, 0.0846397476196289, 0.1709055938720703, 0.08506777954101563, 0.08515379333496094, 0.08487731170654297, 0.08472576141357421, 0.08464383697509766, 0.08466124725341796, 0.08488448333740234, 0.08489778900146484, 0.08490598297119141, 0.08465203094482422, 0.0848189468383789, 0.08532991790771484, 0.08477286529541016, 0.08459468841552735, 0.08518246459960938, 0.08490803527832032, 0.0846561279296875, 0.08462028503417969, 0.08444313812255859, 0.0862208023071289, 0.08462643432617188, 0.08517222595214843, 0.08487423706054688, 0.08629145812988281, 0.08506470489501954, 0.0851937255859375, 0.08485785675048828, 0.08452505493164063, 0.0856289291381836, 0.08493059539794921, 0.08512406158447265, 0.08508620452880859, 0.08450457763671874, 0.084515869140625, 0.08459465789794922, 0.08446975708007813, 0.08475852966308593, 0.08493772888183594, 0.08463161468505859, 0.08454649353027344, 0.0845322265625, 0.08463667297363281, 0.08475753784179688, 0.08451376342773438, 0.08450662231445312, 0.08457318115234375, 0.08449842834472657, 0.08456294250488282, 0.08457523345947265, 0.08489676666259766, 0.08472576141357421, 0.087193603515625, 0.08506368255615235, 0.08472064208984376, 0.08463565063476562, 0.08460390472412109, 0.08499712371826172, 0.08570368194580077, 0.08466124725341796, 0.08457625579833984, 0.08450355529785156, 0.08474214172363281, 0.17052569580078125, 0.0847298583984375, 0.08458444976806641, 0.08468275451660157, 0.08454041290283203, 0.08447081756591797, 0.08448406219482422, 0.08457523345947265, 0.0845301742553711, 0.0844247055053711, 0.08477286529541016, 0.08456502532958984, 0.08428745269775391, 0.08627097320556641, 0.08500633239746094, 0.08606105804443359, 0.08684646606445312, 0.08513126373291016, 0.08685874938964844, 0.08501042938232421, 0.08475545501708984, 0.0846346206665039, 0.08462438201904297, 0.08665497589111328, 0.08509542083740235, 0.08484454345703125, 0.08662322998046874, 0.08476467132568359, 0.08460492706298828, 0.0845998077392578, 0.08459878540039062, 0.08473702239990234, 0.08481484985351563, 
0.08530022430419922, 0.08468275451660157, 0.08468685150146485, 0.08445645141601563, 0.08475548553466797, 0.08516194915771484, 0.08527359771728515, 0.08474931335449219, 0.08479542541503907, 0.08579888153076172, 0.08668876647949218, 0.08599346923828124, 0.08607129669189453, 0.08541696166992188, 0.08659865570068359, 0.08641024017333984, 0.08462847900390626, 0.08492339324951172, 0.0848875503540039, 0.084674560546875, 0.08466534423828125, 0.08485273742675781, 0.08462643432617188, 0.08465408325195313, 0.08468479919433594, 0.08457011413574218, 0.0847984619140625, 0.08467558288574219, 0.08471142578125, 0.08483334350585937, 0.1702665557861328, 0.08738406372070312, 0.08923648071289063, 0.08826573181152343, 0.08826470184326173, 0.08866099548339844, 0.08817049407958985, 0.08779468536376953, 0.08849612426757812, 0.08879206085205078, 0.08808345794677734, 0.08809164428710937, 0.0881233901977539, 0.08886784362792968, 0.08884019470214843, 0.08862515258789062, 0.08821043395996093, 0.08844905853271484, 0.08820121765136718, 0.08817353820800782, 0.08852275085449218, 0.08854227447509766, 0.08476563262939453, 0.0872243194580078, 0.08712806701660156, 0.08852275085449218, 0.08827391815185547, 0.08493363189697266, 0.08474214172363281, 0.08446873474121094, 0.08490598297119141, 0.08460697937011719, 0.08487014770507813, 0.08471858978271485, 0.08479743957519531, 0.0852490234375, 0.08817971038818359, 0.0883599395751953, 0.08766566467285156, 0.0848189468383789, 0.08454348754882812, 0.08476876831054687, 0.08462950134277344, 0.08455782318115235, 0.0845998077392578, 0.08457523345947265, 0.08428851318359375, 0.08459468841552735, 0.08465920257568359, 0.08506470489501954, 0.08472268676757813, 0.08413491058349609, 0.08476467132568359, 0.08465715026855469, 0.08452915191650391, 0.08448102569580078, 0.0847267837524414, 0.08448614501953125, 0.08460594940185547, 0.08444416046142578, 0.08624127960205077, 0.08508006286621093, 0.08459878540039062, 0.1760184326171875, 0.08723353576660156, 0.08769331359863282, 0.08806092834472656, 0.08757453155517578, 0.08732978820800781, 0.08919449615478516, 0.08813977813720703, 0.08797286224365235, 0.08746189117431641, 0.08733798217773438, 0.08667750549316407, 0.08574054718017578, 0.08622182464599609, 0.08452095794677734, 0.08488345336914062, 0.08462950134277344, 0.08516403198242188, 0.08760736083984375, 0.08629344177246094, 0.08519782257080079, 0.08494694519042968, 0.08468275451660157, 0.08495308685302734, 0.08589212799072266, 0.08511488342285156, 0.08629449462890625, 0.08508927917480469, 0.08481075286865235, 0.08436326599121094, 0.08498995208740234, 0.08503705596923829, 0.08542822265625, 0.08469503784179687, 0.08443392181396485, 0.08501554870605468, 0.0846376953125, 0.08496742248535157, 0.08612351989746093, 0.08490290832519531, 0.08461516571044922, 0.08477081298828125, 0.0848005142211914, 0.08621977233886718, 0.08505651092529297, 0.08459571075439454, 0.08459878540039062, 0.08486918640136719, 0.08474515533447266, 0.08450969696044922, 0.08503501129150391, 0.0847267837524414, 0.08468889617919922, 0.08508927917480469, 0.0886118392944336, 0.08830156707763671, 0.08738508605957031, 0.08487321472167969, 0.0855920639038086, 0.08490496063232422, 0.08478208160400391, 0.08481689453125, 0.08506368255615235]",tokens/s,11.480433009606973,,,,,,,, 
-4bit-awq-gemm-flash_attention_2,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,EleutherAI/pythia-1.3b,EleutherAI/pythia-1.3b,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.40.2,,0.30.1,,,,1.19.2,,,,0.11.0,,,,,,,,,,,,,,,,,,,,,,,,,,,MB,1610.354688,2254.962688,0.0,1608.515584,1463.6928,s,10,1.209205665588379,0.1209205665588379,0.0011005379634326184,0.1205662727355957,0.1215480598449707,0.12274063606262207,0.12369469703674317,"[0.12393321228027344, 0.12128304290771484, 0.12008969879150391, 0.12004278564453125, 0.12018284606933594, 0.12025001525878906, 0.12034620666503906, 0.12078633880615235, 0.12113209533691406, 0.121159423828125]",tokens/s,2117.0922969124094,kWh,1.4171419990441157e-06,7.762810317477228e-07,6.305967346357218e-06,8.499390377149056e-06,tokens/kWh,30119807.261502665,MB,1610.952704,2254.962688,0.0,1608.515584,1560.974848,s,10,70.14807763671874,7.0148077636718735,0.00472223733563277,7.012995361328125,7.0220806640625,7.0226084472656245,7.023030673828125,"[7.017875, 7.0128837890625, 7.0119638671875, 7.01192919921875, 7.01310693359375, 7.0105234375, 7.0078984375, 7.02196337890625, 7.01679736328125, 7.02313623046875]",tokens/s,8.981001635748731,kWh,8.273579535030184e-05,4.5345370896494765e-05,0.0003660393860852451,0.0004941205523320418,tokens/kWh,127499.25034015773,,s,629,71.1210855636597,0.11307008833650187,0.01439647456429658,0.11126681518554687,0.11167272796630859,0.11198136138916015,0.23187720581054685,"[0.11166719818115234, 0.11130470275878906, 0.11149517059326172, 0.11213107299804688, 0.11133849334716797, 0.11115827178955077, 0.11096371459960938, 0.11128832244873046, 0.11117977905273438, 0.11118182373046875, 0.11122688293457031, 0.1112647705078125, 0.11119513702392578, 0.11100774383544922, 0.111087646484375, 0.11108963012695312, 0.11133439636230469, 0.11160678100585937, 0.11127910614013672, 0.1123737564086914, 0.1114972152709961, 0.11121971130371094, 0.11113676452636718, 0.1111009292602539, 0.11132621002197265, 0.11129344177246094, 0.11116851043701172, 0.11124531555175782, 0.1112442855834961, 0.11126681518554687, 0.11126579284667969, 0.11136102294921875, 0.11114291381835938, 0.11164262390136719, 0.1114081268310547, 0.11137843322753906, 0.11116134643554687, 0.11157810974121093, 0.1113333740234375, 0.11127091217041016, 0.11125965118408203, 0.11129241943359375, 0.11120745849609374, 0.11137942504882813, 0.11125247955322265, 0.11137229156494141, 0.11129138946533203, 0.11126988983154297, 0.11214335632324218, 0.11184333038330078, 0.11213414764404298, 0.11166515350341796, 0.11144300842285157, 0.11137529754638673, 0.11148185729980468, 0.11138457489013671, 0.11156582641601563, 0.1114777603149414, 0.1114081268310547, 0.11125965118408203, 0.11127603149414063, 0.11128425598144531, 0.2332733154296875, 0.11126271820068359, 0.11134259033203125, 0.1112136001586914, 0.11113475036621094, 0.1112872314453125, 0.11119516754150391, 0.11118179321289062, 0.11123712158203125, 0.11110195159912109, 0.11118694305419922, 0.11108454132080078, 0.11117158508300781, 0.1111357421875, 0.1112248306274414, 0.11135078430175781, 
0.1113016357421875, 0.11121049499511719, 0.11146649932861329, 0.11143270111083985, 0.11139379119873047, 0.11125862121582031, 0.11111116790771484, 0.11113369750976562, 0.11115213012695313, 0.11124736022949219, 0.11156070709228516, 0.11131289672851563, 0.11127193450927735, 0.11115929412841796, 0.11136819458007813, 0.11125145721435546, 0.11122073364257813, 0.11129753875732422, 0.11136614227294922, 0.11128217315673829, 0.11125145721435546, 0.11142047882080078, 0.11116230773925781, 0.11125759887695312, 0.1110487060546875, 0.11122994995117187, 0.11132518768310547, 0.1113026885986328, 0.11126576232910156, 0.11120947265625, 0.11332198333740234, 0.11140608215332032, 0.11135590362548828, 0.11126579284667969, 0.11120845031738281, 0.11143065643310547, 0.1113917465209961, 0.11126988983154297, 0.11125350189208984, 0.11131187438964844, 0.11131903839111328, 0.11127091217041016, 0.11129446411132812, 0.11129446411132812, 0.11138969421386719, 0.1114603500366211, 0.11127808380126954, 0.23181004333496094, 0.11102105712890625, 0.1111910400390625, 0.11175424194335938, 0.11164672088623047, 0.11138969421386719, 0.11125247955322265, 0.11111219024658203, 0.11101798248291016, 0.11089920043945313, 0.11102413177490235, 0.11136723327636719, 0.1116630401611328, 0.11121561431884766, 0.11134259033203125, 0.11122585296630859, 0.11111116790771484, 0.11116339111328125, 0.11112857818603515, 0.11112242889404297, 0.11111014556884766, 0.11112966156005859, 0.11107218933105469, 0.11174297332763672, 0.11127603149414063, 0.11119821166992187, 0.11121663665771485, 0.11118080139160157, 0.11127808380126954, 0.1111583023071289, 0.11111011505126953, 0.11111014556884766, 0.11128832244873046, 0.11131903839111328, 0.1111562271118164, 0.11147058868408204, 0.11126681518554687, 0.1112279052734375, 0.11108051300048828, 0.11122681427001953, 0.11114086151123047, 0.11119923400878906, 0.11140201568603515, 0.11115004730224609, 0.1115494384765625, 0.11138969421386719, 0.11126886749267578, 0.11192524719238281, 0.111351806640625, 0.11120127868652344, 0.11122994995117187, 0.11163238525390624, 0.11214540863037109, 0.11136921691894532, 0.11118592071533204, 0.11130879974365235, 0.11120845031738281, 0.11130470275878906, 0.1112442855834961, 0.11167743682861328, 0.11148799896240234, 0.11126988983154297, 0.11132012939453124, 0.23181817626953125, 0.11116236877441406, 0.11126585388183594, 0.11114182281494141, 0.11116748809814453, 0.1111695327758789, 0.11111014556884766, 0.11164057922363281, 0.11159449768066407, 0.11120127868652344, 0.11121766662597657, 0.11119206237792968, 0.11146649932861329, 0.11107635498046875, 0.11124121856689453, 0.11109375762939454, 0.11119718170166015, 0.11117465972900391, 0.11109478759765624, 0.11113471984863281, 0.11132415771484375, 0.11108249664306641, 0.11122585296630859, 0.11124838256835938, 0.11113267517089843, 0.11119821166992187, 0.11118386840820313, 0.11122380828857421, 0.11127808380126954, 0.11117056274414062, 0.11127500915527344, 0.111388671875, 0.11147878265380859, 0.111246337890625, 0.11121868896484376, 0.1112616958618164, 0.1112811508178711, 0.11174400329589844, 0.11160575866699218, 0.11130368041992188, 0.11123001861572265, 0.11139884948730469, 0.11166207885742188, 0.11131187438964844, 0.11219455718994141, 0.11126377868652344, 0.11129955291748046, 0.11126886749267578, 0.11122278594970703, 0.11126886749267578, 0.1113169937133789, 0.11135282897949218, 0.11117362976074219, 0.1112442855834961, 0.11130060577392578, 0.11121766662597657, 0.11142655944824219, 0.111246337890625, 0.11133132934570313, 0.11140409851074219, 
0.11129542541503906, 0.11128422546386718, 0.11133952331542969, 0.23190016174316405, 0.11121459197998047, 0.11119718170166015, 0.11123302459716797, 0.11124736022949219, 0.1111551971435547, 0.1117624282836914, 0.11134259033203125, 0.11116851043701172, 0.11123609924316406, 0.11112960052490234, 0.11113983917236328, 0.11123097229003906, 0.11135692596435547, 0.11133030700683594, 0.1111756820678711, 0.11099750518798829, 0.11165388488769531, 0.11119923400878906, 0.11110399627685547, 0.11134361267089844, 0.11165695953369141, 0.11130470275878906, 0.11117874908447266, 0.11137843322753906, 0.11113881683349609, 0.11112754821777343, 0.11104460906982422, 0.11110195159912109, 0.1110456314086914, 0.11133542633056641, 0.11121971130371094, 0.11121459197998047, 0.11156275177001954, 0.11139590454101563, 0.11135379028320312, 0.11121868896484376, 0.11118592071533204, 0.11130982208251954, 0.11158528137207031, 0.11178291320800782, 0.11122176361083984, 0.11130470275878906, 0.11126681518554687, 0.111246337890625, 0.11125452423095702, 0.11126374053955078, 0.11133030700683594, 0.11145932769775391, 0.11136000061035156, 0.11132621002197265, 0.11127705383300782, 0.11152998352050782, 0.1112442855834961, 0.11130265808105469, 0.11128729248046874, 0.11133952331542969, 0.11251200103759766, 0.11131084442138672, 0.1113221435546875, 0.11128931427001953, 0.11129036712646484, 0.11130879974365235, 0.2321817626953125, 0.11112652587890624, 0.11125247955322265, 0.11120230102539062, 0.11118284606933594, 0.11110912322998047, 0.11104768371582031, 0.11113471984863281, 0.11123814392089844, 0.11114495849609375, 0.11111321258544922, 0.11121971130371094, 0.11117874908447266, 0.11118592071533204, 0.11127603149414063, 0.11113369750976562, 0.11118796539306641, 0.11104364776611328, 0.11120121765136719, 0.11109683227539062, 0.11134873962402343, 0.11129241943359375, 0.11124531555175782, 0.11129138946533203, 0.1111695327758789, 0.11111936187744141, 0.11120230102539062, 0.11098214721679688, 0.11109478759765624, 0.11121772766113282, 0.11126573181152344, 0.11151564788818359, 0.11180134582519531, 0.1111551971435547, 0.11124018859863281, 0.11106508636474609, 0.11120845031738281, 0.11125452423095702, 0.11243007659912109, 0.11165286254882813, 0.11120947265625, 0.11119308471679687, 0.11124224090576172, 0.11133747100830078, 0.1112647705078125, 0.11123200225830078, 0.11110912322998047, 0.11123097229003906, 0.11112140655517579, 0.11116134643554687, 0.11120230102539062, 0.11124838256835938, 0.11131597137451171, 0.11115929412841796, 0.11126681518554687, 0.11124326324462891, 0.11124940490722657, 0.11136819458007813, 0.1113364486694336, 0.11219558715820313, 0.11167436981201172, 0.11133542633056641, 0.11132006072998046, 0.23260365295410157, 0.1113016357421875, 0.11109069061279297, 0.11130675506591797, 0.11117772674560547, 0.11111116790771484, 0.11099852752685548, 0.11096575927734376, 0.11118592071533204, 0.11106201934814453, 0.11124940490722657, 0.1109964828491211, 0.11128729248046874, 0.11114086151123047, 0.11112041473388672, 0.11092476654052734, 0.11116646575927734, 0.11122585296630859, 0.11110297393798828, 0.11115929412841796, 0.1110282211303711, 0.11134361267089844, 0.11134054565429688, 0.11161497497558594, 0.11118796539306641, 0.11125145721435546, 0.1112965087890625, 0.11122898864746093, 0.11125344085693359, 0.11107839965820313, 0.1114081268310547, 0.11136511993408203, 0.11113164520263671, 0.11111936187744141, 0.11107635498046875, 0.11111628723144532, 0.1109964828491211, 0.11101900482177735, 0.11115827178955077, 0.11171942138671875, 0.11166413116455078, 
0.11109580993652343, 0.11116649627685547, 0.11109782409667969, 0.11119821166992187, 0.11115110778808594, 0.11119206237792968, 0.11109580993652343, 0.1113333740234375, 0.111388671875, 0.11111014556884766, 0.11112242889404297, 0.11119926452636719, 0.11117769622802734, 0.11110707092285156, 0.11125350189208984, 0.111246337890625, 0.11176140594482421, 0.11179315185546874, 0.11137229156494141, 0.1111234588623047, 0.1113917465209961, 0.11129138946533203, 0.23285554504394532, 0.11228876495361328, 0.1119969253540039, 0.11132012939453124, 0.1113087387084961, 0.1110118408203125, 0.11107635498046875, 0.11119821166992187, 0.11114905548095703, 0.11114291381835938, 0.11154227447509765, 0.111283203125, 0.11145011138916015, 0.11127091217041016, 0.11112140655517579, 0.11115110778808594, 0.11166719818115234, 0.11149517059326172, 0.11157708740234375, 0.11116441345214843, 0.11162931060791016, 0.11222220611572266, 0.11127398681640625, 0.11119721221923828, 0.11121660614013672, 0.11111833953857422, 0.11153817749023437, 0.11167436981201172, 0.11123817443847656, 0.11156681823730469, 0.11157299041748046, 0.11123200225830078, 0.11113267517089843, 0.11120339202880859, 0.11119302368164062, 0.11129446411132812, 0.11130265808105469, 0.11117874908447266, 0.11140914916992188, 0.11178803253173829, 0.11143987274169923, 0.111388671875, 0.11134054565429688, 0.11126271820068359, 0.11172557067871093, 0.11178189086914063, 0.11163648223876953, 0.11228876495361328, 0.11165491485595704, 0.11137843322753906, 0.11123712158203125, 0.11235433959960937, 0.11163645172119141, 0.11138969421386719, 0.11162419128417969, 0.11138253021240234, 0.11193548583984375, 0.11135289764404296, 0.11133433532714844, 0.11140198516845704, 0.11156582641601563, 0.11123814392089844, 0.11139584350585938, 0.23310745239257813, 0.11154329681396484, 0.11122585296630859, 0.11103334045410156, 0.11112242889404297, 0.11111321258544922, 0.11110502624511719, 0.11125759887695312, 0.11132006072998046, 0.11105689239501954, 0.11160371398925781, 0.11119308471679687, 0.11138662719726562, 0.11129039764404297, 0.11160777282714844, 0.1111203842163086, 0.11147058868408204, 0.11210137939453126, 0.11113471984863281, 0.11226214599609376, 0.11134054565429688, 0.11112960052490234, 0.11109683227539062, 0.11101696014404297, 0.11132927703857422, 0.11150438690185546, 0.11192934417724609, 0.11167231750488281, 0.11156684875488282, 0.11118489837646485, 0.11123814392089844, 0.11113683319091797, 0.11117254638671875, 0.11120333099365234, 0.11124940490722657, 0.11127398681640625, 0.11138355255126953, 0.11188531494140624, 0.11153510284423829, 0.11116134643554687, 0.11122892761230468, 0.11117874908447266, 0.11126892852783203, 0.11124422454833985, 0.11132723236083984, 0.111246337890625, 0.1125038070678711, 0.11145523071289062, 0.1111695327758789, 0.11121663665771485, 0.11127193450927735, 0.11120537567138672, 0.11170098876953125, 0.11128729248046874, 0.11114701080322266, 0.112110595703125, 0.11148185729980468, 0.11123916625976563, 0.1111910400390625, 0.11130879974365235, 0.11141222381591796, 0.11127603149414063, 0.11140608215332032, 0.23328665161132814, 0.11162009429931641, 0.11125350189208984, 0.11111116790771484, 0.11114086151123047, 0.1111234588623047, 0.11114803314208985, 0.11112454223632813, 0.11118073272705079, 0.11184435272216797, 0.11149311828613281, 0.1110835189819336, 0.11113676452636718, 0.11192729949951172, 0.11148902130126953, 0.1111900177001953, 0.1112074203491211, 0.11114393615722656, 0.11168256378173828, 0.11129446411132812, 0.11139584350585938, 0.11125043487548827, 
0.11120435333251953, 0.11126886749267578, 0.11185561370849609, 0.11130675506591797, 0.11252838134765625, 0.11186688232421875, 0.11209420776367188, 0.11134873962402343, 0.11136819458007813, 0.11132006072998046, 0.11117056274414062, 0.1115688934326172, 0.111425537109375, 0.11140096282958985, 0.11195801544189453, 0.11133439636230469, 0.11169280242919923, 0.11158016204833984, 0.11150745391845703, 0.11149209594726563, 0.11174915313720703, 0.11140297698974609, 0.11168153381347656, 0.11251609802246093, 0.11149823760986328, 0.1112125473022461, 0.11124947357177735, 0.11128211212158202, 0.11113369750976562, 0.11132518768310547, 0.11279666900634766, 0.11159654235839844, 0.11184844970703126, 0.11136102294921875, 0.11125350189208984, 0.11140096282958985, 0.11135078430175781, 0.11131084442138672, 0.11126579284667969, 0.11146348571777344, 0.111246337890625]",tokens/s,8.844071979708314,,,,,,,, -4bit-awq-gemm-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,Qwen/Qwen1.5-72B,Qwen/Qwen1.5-72B,cuda,0,42,,,True,,,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run - report = scenario.run(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run - self.run_model_loading_tracking(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking - backend.load() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load - self.load_transformers_model() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model - self.load_transformers_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights - self.load_transformers_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained - self.pretrained_model = self.automodel_loader.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4034, in from_pretrained - dispatch_model(model, **device_map_kwargs) - File 
""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 494, in dispatch_model - model.to(device) - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 2905, in to - return super().to(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1174, in to - return self._apply(convert) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply - module._apply(fn) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply - module._apply(fn) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply - module._apply(fn) - [Previous line repeated 2 more times] - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 854, in _apply - self._buffers[key] = fn(buf) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1160, in convert - return t.to( -torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 96.00 MiB. GPU 0 has a total capacity of 22.18 GiB of which 68.50 MiB is free. Process 81847 has 22.11 GiB memory in use. Of the allocated memory 21.86 GiB is allocated by PyTorch, and 10.74 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) - -",qwen2,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemm-flash_attention_2,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,EleutherAI/pythia-1.4b,EleutherAI/pythia-1.4b,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.40.2,,0.30.1,,,,1.19.2,,,,0.11.0,,,,,,,,,,,,,,,,,,,,,,,,,,,MB,1625.034752,2254.962688,0.0,1608.515584,1463.6928,s,10,1.2100343170166015,0.12100343170166014,0.001022141281647672,0.12073452758789063,0.12208388671874999,0.1227883918762207,0.12335199600219726,"[0.1234928970336914, 0.12192733001708984, 0.12002118682861328, 0.12029216003417968, 0.12038220977783202, 0.1198985595703125, 0.1206871337890625, 0.12078192138671875, 0.12138086700439453, 0.12117005157470703]",tokens/s,2115.6424772413106,kWh,1.4169729851856437e-06,7.764314618920512e-07,6.3009111253815796e-06,8.494315572459274e-06,tokens/kWh,30137801.90013389,MB,1625.034752,2254.962688,0.0,1608.515584,1560.974848,s,10,70.266875,7.0266874999999995,0.015802312461115554,7.021189208984375,7.04873154296875,7.0518982421875,7.0544316015624995,"[7.05506494140625, 7.04802783203125, 7.0200771484375, 7.04010400390625, 7.03416455078125, 7.01334814453125, 7.0082919921875, 7.02230126953125, 7.0167861328125, 
7.008708984375]",tokens/s,8.965817819562917,kWh,8.271868514320838e-05,4.533574606695635e-05,0.00036366391262401533,0.0004917183438341799,tokens/kWh,128122.12680282927,,s,629,71.24009573364258,0.11325929369418533,0.01441184935436087,0.111351806640625,0.11215667266845704,0.11239075622558593,0.2323562939453125,"[0.11357389068603516, 0.11349612426757813, 0.11200812530517579, 0.11255910491943359, 0.1121167984008789, 0.11198560333251953, 0.11219558715820313, 0.11200819396972657, 0.11196825408935547, 0.11275468444824219, 0.11222630310058594, 0.11180441284179687, 0.11134464263916016, 0.11147980499267578, 0.11213721466064454, 0.11178495788574219, 0.11166719818115234, 0.11174092864990234, 0.11117977905273438, 0.1110456314086914, 0.11137638092041016, 0.11204710388183593, 0.11143679809570313, 0.11129449462890625, 0.11121353912353515, 0.1112074203491211, 0.11121561431884766, 0.11156582641601563, 0.11124736022949219, 0.1111910400390625, 0.1112965087890625, 0.11185664367675781, 0.11158732604980469, 0.1118187484741211, 0.11172659301757812, 0.11204096221923829, 0.11242803192138671, 0.11206655883789063, 0.11170611572265625, 0.1124925765991211, 0.11240444946289062, 0.11224883270263672, 0.11220172882080078, 0.11224473571777344, 0.11245168304443359, 0.11209001922607421, 0.11190579223632813, 0.11188838195800781, 0.11227750396728516, 0.11232051086425782, 0.11256114959716797, 0.11237478637695313, 0.11200921630859376, 0.11215462493896484, 0.1119549789428711, 0.11200611114501953, 0.11207987213134765, 0.11194371032714844, 0.11197846221923828, 0.11205836486816406, 0.11220787048339843, 0.11210034942626954, 0.2345594940185547, 0.1117286376953125, 0.11186585235595703, 0.11204914855957031, 0.11177677154541016, 0.1115832290649414, 0.11116851043701172, 0.1114071044921875, 0.11131391906738282, 0.11137741088867187, 0.11209728240966797, 0.11197542572021485, 0.11190681457519532, 0.11151052856445312, 0.11113471984863281, 0.11245875549316406, 0.11208191680908203, 0.11218540954589844, 0.11216889953613281, 0.11212287902832031, 0.11191302490234376, 0.11190470123291016, 0.11167948913574219, 0.11192729949951172, 0.11203174591064453, 0.11200819396972657, 0.11200409698486329, 0.11204198455810546, 0.11203584289550782, 0.11207577514648437, 0.11203174591064453, 0.11206861114501954, 0.11190476989746094, 0.11196518707275391, 0.11201945495605468, 0.11220070648193359, 0.1121239013671875, 0.11213619232177735, 0.11214745330810547, 0.11210034942626954, 0.11117158508300781, 0.11123609924316406, 0.11129138946533203, 0.11250688171386719, 0.11131903839111328, 0.11113369750976562, 0.11244338989257813, 0.11134259033203125, 0.11169075012207032, 0.11205427551269531, 0.11214745330810547, 0.11191705322265624, 0.11207884979248046, 0.11197849273681641, 0.11211673736572265, 0.11211161804199218, 0.11191500854492188, 0.11222118377685547, 0.11239730834960937, 0.11132415771484375, 0.11126681518554687, 0.1112985610961914, 0.11182182312011718, 0.23266201782226562, 0.11202662658691406, 0.11120025634765625, 0.11109683227539062, 0.11149005126953125, 0.11119513702392578, 0.11112754821777343, 0.11123916625976563, 0.11105177307128906, 0.1111900177001953, 0.11129036712646484, 0.11116851043701172, 0.11114803314208985, 0.11196927642822266, 0.11116134643554687, 0.11125965118408203, 0.11103539276123046, 0.11127398681640625, 0.11154434967041016, 0.11195798492431641, 0.11191193389892579, 0.1111377944946289, 0.1111562271118164, 0.11188735961914062, 0.11112754821777343, 0.11104972839355469, 0.11122278594970703, 0.11126271820068359, 0.11140402984619141, 0.11229183959960938, 
0.11202559661865234, 0.11156588745117188, 0.1111817626953125, 0.11196927642822266, 0.11145932769775391, 0.11130470275878906, 0.11111219024658203, 0.11142041778564453, 0.11112140655517579, 0.11115110778808594, 0.11208601379394531, 0.11188428497314454, 0.11182284545898437, 0.11141017913818359, 0.11125043487548827, 0.11123404693603516, 0.11116646575927734, 0.11186994934082031, 0.11114189147949219, 0.11129446411132812, 0.11111833953857422, 0.1112442855834961, 0.11120845031738281, 0.11120435333251953, 0.11193344116210938, 0.11145116424560547, 0.11117052459716797, 0.11132723236083984, 0.11132109069824218, 0.1112074203491211, 0.11136307525634766, 0.11232870483398437, 0.11170201873779297, 0.2324981689453125, 0.11132723236083984, 0.11152384185791016, 0.11143577575683594, 0.11334963226318359, 0.11229798126220703, 0.11186176300048828, 0.11187916564941407, 0.11197337341308594, 0.11203993225097657, 0.11198976135253906, 0.11205836486816406, 0.11139788818359375, 0.11115929412841796, 0.11112754821777343, 0.11136921691894532, 0.11119513702392578, 0.11117056274414062, 0.11119206237792968, 0.11117465972900391, 0.11126067352294922, 0.11122380828857421, 0.11115827178955077, 0.11134873962402343, 0.1123768310546875, 0.11169586944580077, 0.11113676452636718, 0.11131903839111328, 0.11172249603271485, 0.11171635437011719, 0.11152793884277344, 0.11154022216796874, 0.11121971130371094, 0.11125247955322265, 0.11109174346923828, 0.11109065246582031, 0.111283203125, 0.11159961700439452, 0.11181670379638672, 0.11218841552734375, 0.11207065582275391, 0.11193856048583985, 0.11212083435058594, 0.11199385833740234, 0.11282943725585938, 0.11240140533447265, 0.11203072357177735, 0.11216486358642579, 0.11200102233886719, 0.11202969360351563, 0.112, 0.11213005065917969, 0.11189555358886719, 0.11197647857666015, 0.11237065887451173, 0.11224269104003906, 0.11207270050048829, 0.11202355194091797, 0.11200204467773438, 0.11213520050048828, 0.1117081298828125, 0.11131187438964844, 0.11138969421386719, 0.23215206909179686, 0.11141426849365234, 0.11262156677246093, 0.11177779388427735, 0.11186585235595703, 0.11191295623779297, 0.11129036712646484, 0.11202355194091797, 0.11225702667236329, 0.11216793823242187, 0.11205120086669922, 0.11202252960205078, 0.11183513641357422, 0.11183513641357422, 0.1118361587524414, 0.11228672027587891, 0.11196723175048828, 0.11265023803710937, 0.11201439666748046, 0.11204192352294921, 0.11198668670654296, 0.11204812622070312, 0.1128499526977539, 0.11216585540771484, 0.11173174285888672, 0.11194057464599609, 0.11185971069335937, 0.11190889739990234, 0.11204195404052734, 0.11242700958251953, 0.1118361587524414, 0.11156070709228516, 0.11167436981201172, 0.11125555419921875, 0.11129446411132812, 0.11137535858154297, 0.1112125473022461, 0.11132723236083984, 0.11112652587890624, 0.11121356964111329, 0.11136511993408203, 0.11127808380126954, 0.11119308471679687, 0.11122278594970703, 0.11130265808105469, 0.11126374053955078, 0.11110604858398437, 0.11118386840820313, 0.11117158508300781, 0.11144499206542968, 0.11132316589355469, 0.11124323272705078, 0.11118899536132812, 0.11127705383300782, 0.11124531555175782, 0.11131084442138672, 0.11114189147949219, 0.11251712036132812, 0.11123916625976563, 0.1111562271118164, 0.11102105712890625, 0.111172607421875, 0.11132518768310547, 0.23168409729003905, 0.11126067352294922, 0.11120333099365234, 0.11120333099365234, 0.11132112121582032, 0.11132003021240235, 0.1112279052734375, 0.11117772674560547, 0.11110399627685547, 0.1111541748046875, 0.1111695327758789, 
0.11118796539306641, 0.11124940490722657, 0.11112960052490234, 0.11125452423095702, 0.11125759887695312, 0.11122585296630859, 0.11124224090576172, 0.11116646575927734, 0.11114598083496094, 0.11131494140625, 0.11128832244873046, 0.11116134643554687, 0.11131391906738282, 0.11123404693603516, 0.11126681518554687, 0.11123916625976563, 0.1112985610961914, 0.11178495788574219, 0.11137741088867187, 0.11124121856689453, 0.11152281951904297, 0.11121766662597657, 0.11117362976074219, 0.1112125473022461, 0.11121868896484376, 0.111098876953125, 0.1111900177001953, 0.11227442932128906, 0.11120025634765625, 0.11103846740722656, 0.11157708740234375, 0.11146240234375, 0.11182284545898437, 0.11149517059326172, 0.111499267578125, 0.11131289672851563, 0.11149209594726563, 0.11197853088378906, 0.11136713409423828, 0.11115724945068359, 0.11126067352294922, 0.11121151733398438, 0.11126067352294922, 0.11109478759765624, 0.11183001708984375, 0.11150643157958984, 0.11150745391845703, 0.11111833953857422, 0.11116544342041015, 0.11122994995117187, 0.11125452423095702, 0.1113088607788086, 0.23280429077148437, 0.11111014556884766, 0.11133030700683594, 0.11111119842529296, 0.11112957000732422, 0.11102105712890625, 0.11110809326171875, 0.11106201934814453, 0.11103334045410156, 0.11108870697021485, 0.11110905456542969, 0.11238092803955078, 0.11154227447509765, 0.11123506927490234, 0.11109273529052735, 0.11120333099365234, 0.11103129577636718, 0.11100672149658203, 0.11094937896728516, 0.1110487060546875, 0.11100672149658203, 0.1109534683227539, 0.11100569915771484, 0.11172767639160157, 0.11108550262451172, 0.11122688293457031, 0.11107020568847656, 0.11119721221923828, 0.11113673400878907, 0.11125043487548827, 0.1111562271118164, 0.11116236877441406, 0.1111551971435547, 0.11128012847900391, 0.11118694305419922, 0.11127091217041016, 0.11107123565673828, 0.11122994995117187, 0.11122380828857421, 0.1111551971435547, 0.11100466918945312, 0.11141836547851562, 0.11136511993408203, 0.11123814392089844, 0.11123302459716797, 0.11117670440673828, 0.11112960052490234, 0.11146444702148438, 0.11135078430175781, 0.11130470275878906, 0.11135897827148437, 0.11127808380126954, 0.11142451477050781, 0.11136716461181641, 0.1120901107788086, 0.11140300750732422, 0.11124326324462891, 0.11130985260009765, 0.111257568359375, 0.1111551971435547, 0.1111756820678711, 0.11127295684814453, 0.11109580993652343, 0.23243571472167968, 0.11107839965820313, 0.11138361358642578, 0.11138758087158203, 0.111172607421875, 0.11116441345214843, 0.11112754821777343, 0.11145728302001953, 0.11120230102539062, 0.11218739318847656, 0.11158425903320313, 0.11142348480224609, 0.11124326324462891, 0.11134566497802735, 0.11134770965576171, 0.11127603149414063, 0.11175628662109376, 0.11245772552490234, 0.11182899475097656, 0.11139379119873047, 0.11116031646728515, 0.11131187438964844, 0.11135897827148437, 0.11138253021240234, 0.11129446411132812, 0.11140914916992188, 0.1112995834350586, 0.11167334747314453, 0.11146444702148438, 0.11127398681640625, 0.1114286117553711, 0.11160883331298828, 0.111388671875, 0.11126783752441406, 0.111283203125, 0.11141529846191406, 0.11138355255126953, 0.11169382476806641, 0.111425537109375, 0.11148297882080078, 0.11134454345703125, 0.11160371398925781, 0.11167334747314453, 0.11133235168457031, 0.11129036712646484, 0.11148089599609375, 0.11127085113525391, 0.11113471984863281, 0.11119821166992187, 0.11146444702148438, 0.1114419174194336, 0.11144499206542968, 0.11137741088867187, 0.11137126159667969, 0.11174604797363281, 0.11149619293212891, 
0.11148902130126953, 0.1114419174194336, 0.1115525131225586, 0.11180646514892578, 0.11210854339599609, 0.11200819396972657, 0.11173580932617187, 0.2335253143310547, 0.11135481262207031, 0.11140300750732422, 0.11150540924072265, 0.11128627014160156, 0.11110707092285156, 0.11116851043701172, 0.11114393615722656, 0.1112995834350586, 0.11127808380126954, 0.11109273529052735, 0.11120953369140625, 0.1113087387084961, 0.11121971130371094, 0.11118284606933594, 0.11124736022949219, 0.1122508773803711, 0.11217715454101562, 0.1113733139038086, 0.11129344177246094, 0.11147058868408204, 0.11150335693359376, 0.111351806640625, 0.11135897827148437, 0.11148492431640625, 0.11133952331542969, 0.11130675506591797, 0.11139481353759766, 0.11144703674316406, 0.11154124450683593, 0.11132825469970703, 0.11111219024658203, 0.11122073364257813, 0.11118386840820313, 0.11125971221923828, 0.1125447006225586, 0.11146546936035157, 0.11134361267089844, 0.11123097229003906, 0.11145728302001953, 0.11127091217041016, 0.1112074203491211, 0.1114224624633789, 0.11159859466552734, 0.11146854400634766, 0.11118284606933594, 0.11170816040039062, 0.11130777740478516, 0.11138662719726562, 0.11138253021240234, 0.11114701080322266, 0.11128729248046874, 0.11132109069824218, 0.11136102294921875, 0.11118284606933594, 0.11117881774902344, 0.11109677124023437, 0.11118796539306641, 0.11113267517089843, 0.11119411468505859, 0.111246337890625, 0.1112279052734375, 0.11181465911865235, 0.23307980346679688, 0.11142041778564453, 0.1113538589477539, 0.11121561431884766, 0.11131903839111328, 0.11121561431884766, 0.11105689239501954, 0.111246337890625, 0.11114291381835938, 0.1110835189819336, 0.1110241928100586, 0.11124934387207032, 0.11121561431884766, 0.11143475341796875, 0.11115007781982422, 0.11116031646728515, 0.11110912322998047, 0.11113881683349609, 0.11112754821777343, 0.11146137237548828, 0.11116646575927734, 0.1111173095703125, 0.11114291381835938, 0.11114495849609375, 0.11101900482177735, 0.11122176361083984, 0.11194265747070313, 0.11108147430419922, 0.11138662719726562, 0.11130368041992188, 0.11116748809814453, 0.11115110778808594, 0.11116649627685547, 0.11112751770019531, 0.1111695327758789, 0.11108454132080078, 0.11120127868652344, 0.11118796539306641, 0.11125555419921875, 0.11124018859863281, 0.11112140655517579, 0.11123712158203125, 0.11195391845703125, 0.1116231689453125, 0.111388671875, 0.11123404693603516, 0.11115110778808594, 0.11121459197998047, 0.1111562271118164, 0.11116748809814453, 0.11117772674560547, 0.11126374053955078, 0.1111910400390625, 0.11120333099365234, 0.11108454132080078, 0.11108761596679688, 0.1111203842163086, 0.1111357421875, 0.11103231811523437, 0.11127603149414063, 0.11121868896484376, 0.11217817687988281, 0.11138457489013671]",tokens/s,8.829297511779728,,,,,,,, -4bit-awq-gemm-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,facebook/xglm-7.5B,facebook/xglm-7.5B,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File 
""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - self.load_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3704, in from_pretrained - config = cls._autoset_attn_implementation( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1481, in _autoset_attn_implementation - cls._check_and_enable_flash_attn_2( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1572, in _check_and_enable_flash_attn_2 - raise ValueError( -ValueError: XGLMForCausalLM does not support Flash Attention 2.0 yet. 
Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmppi9ax6e5/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemm-flash_attention_2,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,huggyllama/llama-30b,huggyllama/llama-30b,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.0,,0.30.1,,,,1.19.2,,,,0.11.1,,,,,,,,,,,,,,,,,,,,,,,,,,,MB,6386.765824,20902.838272,0.0,20256.391168,19273.711616,s,10,26.8813134765625,2.68813134765625,0.0041309885323175775,2.6865931396484375,2.691104760742187,2.6949006469726564,2.6979373559570314,"[2.68579150390625, 2.698696533203125, 2.6865712890625, 2.6859423828125, 2.686614990234375, 2.68409033203125, 2.68391357421875, 2.689296875, 2.690134765625, 2.69026123046875]",tokens/s,95.23344170782555,kWh,3.169923381672966e-05,1.7372348204735315e-05,0.00014952039739400956,0.00019859197941547455,tokens/kWh,1289075.222239575,MB,6390.304768,20902.838272,0.0,20256.391168,19862.692352,s,10,1586.219609375,158.62196093749998,0.021140987905695616,158.62285937500002,158.64970156249998,158.65553046875,158.66019359375,"[158.621765625, 158.64840625, 158.661359375, 158.62828125, 158.623953125, 158.628453125, 158.605140625, 158.5859375, 158.5989375, 158.617375]",tokens/s,0.39717072987657226,kWh,0.0018725793671773541,0.001026339254797931,0.00885864822580179,0.011757566847777077,tokens/kWh,5358.251483121356,,s,629,1607.8922929687508,2.55626755638911,0.31948633074222244,2.517612548828125,2.519399658203125,2.519920654296875,5.205208671875,"[2.518139892578125, 2.51872265625, 2.517171142578125, 2.517675048828125, 2.517256103515625, 2.51740869140625, 2.51825048828125, 2.5169521484375, 2.517097412109375, 2.516339599609375, 2.5180908203125, 2.51845947265625, 2.51869189453125, 2.518560791015625, 2.519083984375, 2.517611572265625, 2.517076904296875, 2.5172490234375, 2.518533203125, 2.518801513671875, 2.5181748046875, 2.51648193359375, 2.517393310546875, 2.516537353515625, 2.517125, 2.516431884765625, 2.518256591796875, 2.516644775390625, 2.5168720703125, 2.516345947265625, 2.51714453125, 2.516572265625, 2.516453369140625, 2.516675537109375, 2.517611572265625, 2.516471923828125, 2.516367431640625, 2.517232666015625, 2.517244873046875, 2.51826904296875, 2.517273681640625, 2.51704638671875, 2.5178828125, 2.51706884765625, 2.51754296875, 2.518129638671875, 2.519542724609375, 2.5176494140625, 2.517353515625, 2.51711279296875, 2.519594970703125, 2.52010693359375, 2.51871435546875, 2.519090087890625, 2.519048095703125, 2.519532470703125, 2.51997802734375, 2.518859619140625, 2.51943017578125, 2.520383544921875, 2.517666748046875, 2.51677392578125, 5.2204013671875, 2.5192919921875, 2.51740771484375, 2.516737060546875, 2.5188486328125, 2.5170791015625, 2.516949951171875, 2.517423095703125, 2.517424072265625, 2.51706787109375, 2.517665771484375, 2.519553955078125, 2.51833642578125, 
2.519075927734375, 2.519300048828125, 2.51761962890625, 2.519573486328125, 2.518274169921875, 2.517041259765625, 2.5171435546875, 2.516961181640625, 2.517271484375, 2.516989013671875, 2.52028515625, 2.518149169921875, 2.5176474609375, 2.517328857421875, 2.51751220703125, 2.51903271484375, 2.51806005859375, 2.51780810546875, 2.51837548828125, 2.519371826171875, 2.517011474609375, 2.519920654296875, 2.5178798828125, 2.517094482421875, 2.51761669921875, 2.51815625, 2.51747216796875, 2.516791259765625, 2.5166181640625, 2.517612548828125, 2.51765576171875, 2.51951513671875, 2.516619384765625, 2.519645263671875, 2.518657958984375, 2.516588623046875, 2.519201904296875, 2.51913720703125, 2.518130615234375, 2.517403564453125, 2.5184736328125, 2.519552001953125, 2.523171875, 2.51789013671875, 2.516390869140625, 2.5189345703125, 2.518498291015625, 2.519341064453125, 2.5183642578125, 2.519233642578125, 5.20647900390625, 2.5165556640625, 2.518474853515625, 2.51786962890625, 2.51740478515625, 2.517843994140625, 2.51732177734375, 2.51726123046875, 2.516327392578125, 2.517376953125, 2.518192138671875, 2.519117919921875, 2.518830078125, 2.516476806640625, 2.51763916015625, 2.517055419921875, 2.517540771484375, 2.51723681640625, 2.519719970703125, 2.51848095703125, 2.517832763671875, 2.516706298828125, 2.51780517578125, 2.518971435546875, 2.5205986328125, 2.51799853515625, 2.51905029296875, 2.52092919921875, 2.517274658203125, 2.51875634765625, 2.5179462890625, 2.51730126953125, 2.51972802734375, 2.519468017578125, 2.518427734375, 2.5177353515625, 2.517422119140625, 2.5163857421875, 2.51761767578125, 2.517665771484375, 2.52023291015625, 2.51808154296875, 2.517843017578125, 2.52073974609375, 2.519414794921875, 2.51922216796875, 2.518572021484375, 2.519636962890625, 2.520416259765625, 2.518666259765625, 2.522156005859375, 2.518053955078125, 2.519404541015625, 2.518847412109375, 2.517646240234375, 2.51810205078125, 2.51926123046875, 2.519097412109375, 2.521001953125, 2.518910888671875, 2.5173779296875, 2.517789794921875, 2.520220703125, 5.20475830078125, 2.51871240234375, 2.5201982421875, 2.520436767578125, 2.51780908203125, 2.516747314453125, 2.51753271484375, 2.516557861328125, 2.51795458984375, 2.51775390625, 2.518048828125, 2.517761962890625, 2.518373291015625, 2.51766064453125, 2.517230712890625, 2.516822021484375, 2.51799755859375, 2.5179033203125, 2.5168720703125, 2.518266845703125, 2.517353515625, 2.5180712890625, 2.518619140625, 2.516643798828125, 2.517336181640625, 2.517548095703125, 2.517536865234375, 2.517360595703125, 2.517929931640625, 2.517650390625, 2.517623779296875, 2.518117431640625, 2.51952734375, 2.51759521484375, 2.517037109375, 2.517159912109375, 2.51675537109375, 2.518679443359375, 2.517864501953125, 2.51694189453125, 2.517684326171875, 2.519645263671875, 2.518003662109375, 2.517291015625, 2.516994140625, 2.51759912109375, 2.5174609375, 2.51873388671875, 2.517940185546875, 2.51837841796875, 2.51715185546875, 2.5170146484375, 2.51955517578125, 2.518741943359375, 2.5189775390625, 2.5173525390625, 2.517677978515625, 2.51769140625, 2.51719580078125, 2.51702783203125, 2.5181552734375, 2.519466064453125, 2.519775146484375, 5.2051865234375, 2.517904296875, 2.51915869140625, 2.51858935546875, 2.519103515625, 2.519520263671875, 2.517877685546875, 2.517642333984375, 2.518906982421875, 2.51755419921875, 2.517623779296875, 2.517487548828125, 2.518580322265625, 2.518183837890625, 2.517307373046875, 2.516926513671875, 2.51662353515625, 2.519920654296875, 2.516429931640625, 2.518091796875, 
2.5164921875, 2.5171865234375, 2.51658251953125, 2.517351318359375, 2.516623291015625, 2.517256103515625, 2.517856201171875, 2.517544921875, 2.516708251953125, 2.519246826171875, 2.518055908203125, 2.518032470703125, 2.516893798828125, 2.519097412109375, 2.517005126953125, 2.517876708984375, 2.5175634765625, 2.518494140625, 2.5167412109375, 2.5177138671875, 2.518095947265625, 2.51822998046875, 2.518499267578125, 2.516704345703125, 2.520115234375, 2.517539794921875, 2.5169130859375, 2.517727294921875, 2.517357666015625, 2.5173330078125, 2.518453369140625, 2.520203369140625, 2.517231689453125, 2.517478515625, 2.517189697265625, 2.517262451171875, 2.517794921875, 2.519332763671875, 2.517783447265625, 2.517359619140625, 2.51770458984375, 2.517918701171875, 2.5172275390625, 5.20521728515625, 2.518489013671875, 2.51708203125, 2.5163857421875, 2.51787060546875, 2.5188076171875, 2.518327392578125, 2.517561279296875, 2.519435302734375, 2.52038037109375, 2.517274658203125, 2.517645263671875, 2.517359619140625, 2.517127197265625, 2.516441162109375, 2.516781982421875, 2.518454345703125, 2.517274658203125, 2.51644921875, 2.51639306640625, 2.5190185546875, 2.5164248046875, 2.516654052734375, 2.5163837890625, 2.51848388671875, 2.516992919921875, 2.5170400390625, 2.51709033203125, 2.517877685546875, 2.516539306640625, 2.516367431640625, 2.51928173828125, 2.519300048828125, 2.51730029296875, 2.516928466796875, 2.516885498046875, 2.518531982421875, 2.5179638671875, 2.518365234375, 2.521257080078125, 2.5186845703125, 2.51814697265625, 2.518978515625, 2.518739013671875, 2.51789208984375, 2.517880859375, 2.5175458984375, 2.51723779296875, 2.520966064453125, 2.5173125, 2.517146728515625, 2.517482421875, 2.51751123046875, 2.518137939453125, 2.517230712890625, 2.51829150390625, 2.51913720703125, 2.51747216796875, 2.518295654296875, 2.51835693359375, 2.5180498046875, 2.51679443359375, 2.516978759765625, 5.20618505859375, 2.516756591796875, 2.516822021484375, 2.516474853515625, 2.517072998046875, 2.51698583984375, 2.517000244140625, 2.517266357421875, 2.51782666015625, 2.51772119140625, 2.518513671875, 2.518520751953125, 2.517814208984375, 2.516486083984375, 2.516232177734375, 2.51664892578125, 2.517416015625, 2.51725, 2.517421142578125, 2.51761767578125, 2.517550048828125, 2.517233642578125, 2.51706982421875, 2.517667724609375, 2.518662109375, 2.517381103515625, 2.5167001953125, 2.517783447265625, 2.51818603515625, 2.51659375, 2.51644921875, 2.516968505859375, 2.51725927734375, 2.517319580078125, 2.51740673828125, 2.517291015625, 2.51770068359375, 2.516655029296875, 2.516579345703125, 2.518679443359375, 2.51792578125, 2.517526611328125, 2.518116455078125, 2.51873291015625, 2.517396484375, 2.517900390625, 2.51812255859375, 2.5177353515625, 2.5179853515625, 2.516726806640625, 2.5171015625, 2.517467041015625, 2.518310791015625, 2.51728173828125, 2.516833251953125, 2.51797412109375, 2.517671875, 2.518767578125, 2.51833251953125, 2.518369384765625, 2.519214111328125, 2.5175, 2.517960693359375, 5.207421875, 2.516853759765625, 2.517536865234375, 2.51687841796875, 2.516634521484375, 2.516380615234375, 2.51723681640625, 2.517261474609375, 2.51818798828125, 2.518327392578125, 2.518880126953125, 2.518677490234375, 2.517012451171875, 2.516658203125, 2.5166181640625, 2.51793505859375, 2.517747802734375, 2.519637939453125, 2.5187236328125, 2.518320068359375, 2.517434326171875, 2.516303955078125, 2.516716552734375, 2.516737060546875, 2.516611083984375, 2.516365234375, 2.51681591796875, 2.516877197265625, 2.517065673828125, 
2.516994140625, 2.517845947265625, 2.518003662109375, 2.517454833984375, 2.51677685546875, 2.517212158203125, 2.517548095703125, 2.51734228515625, 2.51691015625, 2.5173115234375, 2.51702587890625, 2.51687109375, 2.517181396484375, 2.517095458984375, 2.517434326171875, 2.518919189453125, 2.51658447265625, 2.516896728515625, 2.51681787109375, 2.51647900390625, 2.5165732421875, 2.516769775390625, 2.51728271484375, 2.516929443359375, 2.516444091796875, 2.5165341796875, 2.517580810546875, 2.51643798828125, 2.51633984375, 2.51785302734375, 2.517928955078125, 2.516727783203125, 2.51658544921875, 2.516779052734375, 5.207884765625, 2.51725, 2.51802001953125, 2.517275634765625, 2.516849609375, 2.516008056640625, 2.516381591796875, 2.51746826171875, 2.51627734375, 2.516135986328125, 2.5164912109375, 2.5164912109375, 2.516265869140625, 2.5161103515625, 2.516736083984375, 2.51685693359375, 2.51656396484375, 2.51710986328125, 2.517779541015625, 2.51814697265625, 2.517348388671875, 2.516961181640625, 2.518197265625, 2.51780810546875, 2.518084716796875, 2.518426513671875, 2.517813232421875, 2.517560302734375, 2.51685693359375, 2.51721728515625, 2.516894775390625, 2.517078125, 2.516939697265625, 2.516675537109375, 2.516822021484375, 2.516694091796875, 2.51648828125, 2.516349853515625, 2.51686083984375, 2.51789404296875, 2.516864013671875, 2.516809814453125, 2.516926513671875, 2.51690185546875, 2.51774462890625, 2.518223876953125, 2.51867236328125, 2.520320068359375, 2.518391845703125, 2.518958984375, 2.5195068359375, 2.51765771484375, 2.51833251953125, 2.519100341796875, 2.51811328125, 2.5185771484375, 2.51877587890625, 2.5190849609375, 2.51871337890625, 2.517376953125, 2.516873291015625, 2.516703125, 2.517409912109375, 5.20814892578125, 2.516905029296875, 2.517455810546875, 2.51675341796875, 2.5166611328125, 2.5164248046875, 2.51766064453125, 2.516621337890625, 2.516347900390625, 2.51616455078125, 2.5168916015625, 2.516533203125, 2.51630078125, 2.51683935546875, 2.5170791015625, 2.5168505859375, 2.51635498046875, 2.516509765625, 2.517519287109375, 2.517980224609375, 2.517612548828125, 2.51685888671875, 2.5184287109375, 2.517878662109375, 2.51812548828125, 2.51820849609375, 2.519699462890625, 2.51765771484375, 2.517099609375, 2.517208984375, 2.51761962890625, 2.5169130859375, 2.51656591796875, 2.5162998046875, 2.51715185546875, 2.517620849609375, 2.516823974609375, 2.516893798828125, 2.517906494140625, 2.517166259765625, 2.5167236328125, 2.51692236328125, 2.51753271484375, 2.51745166015625, 2.516619384765625, 2.517034912109375, 2.51966259765625, 2.5194423828125, 2.519153564453125, 2.519391357421875, 2.519334716796875, 2.520791015625, 2.5193984375, 2.51885986328125, 2.518571044921875, 2.519627685546875, 2.519412841796875, 2.5196328125, 2.5182587890625, 2.516928466796875, 2.517813232421875, 2.518243408203125, 2.5184306640625]",tokens/s,0.3911953572702552,,,,,,,, -4bit-awq-gemm-flash_attention_2,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,openai-community/gpt2-large,openai-community/gpt2-large,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA 
A10G'],1,24146608128,0.2.1,,4.41.1,,0.30.1,,,,1.19.2,,,,0.11.1,,,,,,,,,,,,,,,,,,,,,,,,,,,MB,1255.170048,2645.03296,0.0,1998.585856,1692.285952,s,10,0.19596438217163084,0.019596438217163083,0.0005758753327798738,0.019594847679138182,0.020116428565979005,0.020397430515289305,0.020622232074737547,"[0.02067843246459961, 0.01995683288574219, 0.018657087326049804, 0.019397823333740235, 0.01875289535522461, 0.019354879379272463, 0.019658687591552735, 0.019531007766723632, 0.020053983688354492, 0.019922752380371094]",tokens/s,13063.598454120523,kWh,2.196003245524462e-07,1.203308225381941e-07,6.683390311598506e-07,1.008270178250491e-06,tokens/kWh,253900200.08744156,MB,1255.46496,2645.03296,0.0,1998.585856,1740.085248,s,10,11.909100097656252,1.1909100097656249,0.015963798664011486,1.1926223754882814,1.1998400390625,1.2087030395507812,1.215793439941406,"[1.2175660400390624, 1.1913038330078125, 1.18787646484375, 1.1978704833984375, 1.1491290283203126, 1.195384033203125, 1.1938314208984375, 1.19254345703125, 1.19089404296875, 1.1927012939453125]",tokens/s,52.90072254275419,kWh,1.3774184907753079e-05,7.5477682720333215e-06,2.8730712265842306e-05,5.005266544562869e-05,tokens/kWh,1258674.227218444,,s,629,12.064169954299933,0.019179920436088915,0.0023597437619934914,0.018893823623657227,0.019228466796875003,0.019791871643066405,0.038067117614746133,"[0.02008166313171387, 0.01979801559448242, 0.0196177921295166, 0.01978265571594238, 0.019717119216918946, 0.019862527847290038, 0.019771392822265626, 0.01966592025756836, 0.01981439971923828, 0.019919872283935547, 0.019991552352905274, 0.0198154239654541, 0.01984102439880371, 0.019713024139404296, 0.019945472717285157, 0.019693599700927735, 0.019734495162963866, 0.019999744415283204, 0.019870719909667968, 0.019916799545288084, 0.019698688507080078, 0.01944063949584961, 0.019574783325195313, 0.019809280395507813, 0.019941375732421874, 0.019547136306762695, 0.019590143203735352, 0.019150848388671874, 0.018918399810791017, 0.019148799896240236, 0.01879756736755371, 0.018762752532958983, 0.01876479911804199, 0.018760704040527345, 0.018949119567871094, 0.01901158332824707, 0.018997247695922852, 0.018856960296630858, 0.018832384109497072, 0.018873344421386717, 0.018898944854736328, 0.018821151733398437, 0.01887945556640625, 0.018886655807495118, 0.018907136917114258, 0.01886412811279297, 0.01882521629333496, 0.01883033561706543, 0.01877299118041992, 0.01884569549560547, 0.01886310386657715, 0.018886655807495118, 0.01883340835571289, 0.01906790351867676, 0.01885081672668457, 0.018737152099609376, 0.018815999984741212, 0.01884774398803711, 0.019143680572509765, 0.018949119567871094, 0.020289535522460937, 0.01920512008666992, 0.03925708770751953, 0.018815999984741212, 0.01889587211608887, 0.018832384109497072, 0.018815999984741212, 0.018876415252685547, 0.01883647918701172, 0.018874368667602538, 0.018893823623657227, 0.018908159255981445, 0.018892799377441406, 0.01882624053955078, 0.018942975997924806, 0.018923519134521484, 0.018984960556030273, 0.01907711982727051, 0.018955263137817382, 0.018844671249389648, 0.018907136917114258, 0.018997247695922852, 0.0189040641784668, 0.018984960556030273, 0.01885081672668457, 0.01881705665588379, 0.018884576797485352, 0.01886720085144043, 0.018889728546142577, 0.01884774398803711, 0.018916351318359375, 0.018886655807495118, 0.018868223190307617, 0.018915327072143554, 0.01883852767944336, 0.018860031127929687, 0.018909183502197266, 0.018886655807495118, 0.018922496795654296, 0.018905088424682616, 0.018777088165283205, 
0.01884364891052246, 0.018903039932250978, 0.018914304733276367, 0.018931711196899414, 0.018917375564575196, 0.018893823623657227, 0.018744319915771485, 0.019489791870117186, 0.01881497573852539, 0.018911231994628908, 0.018894847869873048, 0.018968576431274413, 0.01905971145629883, 0.018966527938842775, 0.01887027168273926, 0.018861055374145508, 0.018988031387329102, 0.018940959930419922, 0.01884156799316406, 0.018929664611816405, 0.018964479446411133, 0.018922496795654296, 0.01888467216491699, 0.018768831253051756, 0.03698995208740234, 0.018000896453857423, 0.01803878402709961, 0.01861529541015625, 0.01846886444091797, 0.018893823623657227, 0.01886207962036133, 0.018841632843017576, 0.018986976623535157, 0.01903001594543457, 0.018913280487060546, 0.018872352600097658, 0.018723808288574218, 0.01885593605041504, 0.018965503692626954, 0.018915327072143554, 0.018923519134521484, 0.01879347229003906, 0.018799615859985352, 0.01884876823425293, 0.01886310386657715, 0.018957311630249024, 0.0188723201751709, 0.01919385528564453, 0.018920480728149416, 0.018839519500732423, 0.018932735443115235, 0.01881395149230957, 0.018926591873168946, 0.019110912322998046, 0.01883647918701172, 0.018770944595336913, 0.018880512237548826, 0.01885081672668457, 0.018919424057006837, 0.018876415252685547, 0.018939903259277344, 0.01885081672668457, 0.018897920608520507, 0.018840576171875, 0.018856960296630858, 0.01884876823425293, 0.01887539291381836, 0.01882009506225586, 0.018813983917236328, 0.018836448669433594, 0.018860031127929687, 0.019740671157836915, 0.019281919479370118, 0.018899967193603515, 0.018890783309936522, 0.018863071441650392, 0.01886412811279297, 0.01881804847717285, 0.018817024230957033, 0.018861055374145508, 0.01885081672668457, 0.018868223190307617, 0.018972671508789063, 0.018760704040527345, 0.018917375564575196, 0.018985984802246093, 0.01883647918701172, 0.03859558486938477, 0.018811904907226562, 0.019163135528564454, 0.02009702491760254, 0.019518463134765626, 0.018994176864624023, 0.01887948799133301, 0.019116031646728517, 0.01887539291381836, 0.018940927505493164, 0.01884364891052246, 0.01884364891052246, 0.01904435157775879, 0.018973695755004884, 0.018964479446411133, 0.019170303344726563, 0.01942630386352539, 0.018782207489013672, 0.018939903259277344, 0.018861055374145508, 0.018964479446411133, 0.018869247436523438, 0.018767871856689454, 0.01895427131652832, 0.018802656173706054, 0.018971647262573242, 0.018899967193603515, 0.01889587211608887, 0.01884876823425293, 0.019386367797851564, 0.018991104125976564, 0.019079168319702147, 0.018916351318359375, 0.018882560729980468, 0.01925632095336914, 0.018929664611816405, 0.018899967193603515, 0.018811935424804686, 0.01888355255126953, 0.018973695755004884, 0.018752511978149415, 0.0189040641784668, 0.019373056411743163, 0.02027008056640625, 0.020695039749145508, 0.01957683181762695, 0.019105791091918945, 0.01883852767944336, 0.018929664611816405, 0.018824192047119142, 0.01904025650024414, 0.01886412811279297, 0.01887539291381836, 0.01883852767944336, 0.01881292724609375, 0.018732032775878905, 0.018886655807495118, 0.018578432083129884, 0.01839926338195801, 0.018844640731811524, 0.018790399551391602, 0.01878937530517578, 0.018752511978149415, 0.03698483276367188, 0.017928192138671875, 0.01796505546569824, 0.01789952087402344, 0.0178657283782959, 0.017949695587158202, 0.017992704391479493, 0.017977344512939454, 0.017977344512939454, 0.018036735534667968, 0.017976320266723633, 0.017991680145263672, 0.017904640197753906, 0.01803468894958496, 
0.01804902458190918, 0.018043903350830077, 0.017953792572021485, 0.01813811111450195, 0.01799782371520996, 0.018018304824829103, 0.017966079711914062, 0.018051071166992186, 0.017977344512939454, 0.01800704002380371, 0.01799679946899414, 0.01804083251953125, 0.017994752883911135, 0.018061311721801757, 0.018028543472290038, 0.018044927597045898, 0.017864704132080078, 0.017921024322509766, 0.017951744079589844, 0.017963008880615236, 0.017930240631103517, 0.01801420783996582, 0.017941503524780272, 0.018001920700073244, 0.017939456939697264, 0.017915903091430666, 0.017966079711914062, 0.017979391098022462, 0.017955839157104494, 0.01800294494628906, 0.017899551391601563, 0.017976287841796876, 0.01799679946899414, 0.01827123260498047, 0.01843097686767578, 0.018742271423339844, 0.01887948799133301, 0.018784255981445314, 0.018868223190307617, 0.01879859161376953, 0.01882009506225586, 0.01982771110534668, 0.02038377571105957, 0.020016096115112306, 0.01902079963684082, 0.018759679794311524, 0.018523136138916017, 0.018774015426635742, 0.018930688858032226, 0.03848601531982422, 0.019079168319702147, 0.018916351318359375, 0.018967552185058592, 0.01886207962036133, 0.01879347229003906, 0.018959360122680666, 0.01920921516418457, 0.019406848907470704, 0.018989055633544923, 0.018952192306518553, 0.019162111282348633, 0.018876415252685547, 0.01877299118041992, 0.01900748825073242, 0.018959360122680666, 0.019094528198242186, 0.01884364891052246, 0.018897920608520507, 0.018896896362304686, 0.01880473518371582, 0.018931711196899414, 0.018745344161987306, 0.01838591957092285, 0.01842995262145996, 0.01898089599609375, 0.01894806480407715, 0.018896896362304686, 0.018777151107788086, 0.018982847213745116, 0.01923583984375, 0.018766847610473633, 0.01887846374511719, 0.018757631301879883, 0.018769920349121092, 0.019090431213378906, 0.019116031646728517, 0.01899929618835449, 0.019524608612060547, 0.02007961654663086, 0.01907711982727051, 0.01902284812927246, 0.018975744247436522, 0.019171327590942384, 0.018874368667602538, 0.018787328720092773, 0.01904844856262207, 0.018750463485717773, 0.01922662353515625, 0.018884607315063476, 0.018779136657714843, 0.01897881507873535, 0.019174400329589843, 0.019117055892944337, 0.01881907272338867, 0.01883955192565918, 0.019125247955322267, 0.01887846374511719, 0.018976768493652343, 0.018964479446411133, 0.018942975997924806, 0.01901260757446289, 0.019120128631591796, 0.03889766311645508, 0.018888704299926756, 0.01882624053955078, 0.01916316795349121, 0.01887331199645996, 0.01903104019165039, 0.018936832427978514, 0.018971647262573242, 0.018852863311767578, 0.018949119567871094, 0.018922496795654296, 0.018955263137817382, 0.0188221435546875, 0.018824192047119142, 0.018950143814086915, 0.018929664611816405, 0.018951168060302736, 0.01887129592895508, 0.019127296447753905, 0.019130367279052735, 0.018890752792358398, 0.018958335876464845, 0.0188723201751709, 0.019166208267211913, 0.01922150421142578, 0.018877439498901367, 0.018980863571166993, 0.018884607315063476, 0.0188590087890625, 0.018983936309814452, 0.019647487640380858, 0.018966527938842775, 0.018917375564575196, 0.01881907272338867, 0.018967552185058592, 0.018912256240844725, 0.018912256240844725, 0.01900032043457031, 0.018948095321655273, 0.01884979248046875, 0.018891775131225585, 0.018865152359008788, 0.018935808181762694, 0.018865152359008788, 0.018948095321655273, 0.018892799377441406, 0.018840576171875, 0.01882009506225586, 0.018924543380737305, 0.01885798454284668, 0.018974752426147462, 0.018999263763427733, 
0.018900991439819336, 0.018874368667602538, 0.018922496795654296, 0.018955263137817382, 0.018931711196899414, 0.01878937530517578, 0.018860031127929687, 0.018972671508789063, 0.018959360122680666, 0.019132448196411134, 0.018904031753540038, 0.03866828918457031, 0.01882521629333496, 0.01878118324279785, 0.018948095321655273, 0.018792448043823243, 0.018868223190307617, 0.018778112411499022, 0.018958368301391602, 0.01888559913635254, 0.0188272647857666, 0.018973695755004884, 0.01886720085144043, 0.018776063919067384, 0.01901055908203125, 0.018993152618408202, 0.018917375564575196, 0.019162111282348633, 0.018950143814086915, 0.018990079879760743, 0.018930688858032226, 0.018890752792358398, 0.018946048736572265, 0.018886655807495118, 0.018832384109497072, 0.018940927505493164, 0.018965503692626954, 0.01929113578796387, 0.018768896102905275, 0.018882560729980468, 0.018877439498901367, 0.01883340835571289, 0.018912256240844725, 0.018927648544311525, 0.01891529655456543, 0.018811904907226562, 0.018910207748413087, 0.01882009506225586, 0.018910207748413087, 0.018745344161987306, 0.018956287384033203, 0.018868223190307617, 0.018768896102905275, 0.01880575942993164, 0.018736127853393555, 0.018824192047119142, 0.018956287384033203, 0.01884979248046875, 0.018948095321655273, 0.01881907272338867, 0.01880985641479492, 0.01883955192565918, 0.018891775131225585, 0.01883340835571289, 0.01884979248046875, 0.01884569549560547, 0.018924543380737305, 0.018920448303222655, 0.018898944854736328, 0.018893823623657227, 0.019284992218017577, 0.01971609687805176, 0.01968639945983887, 0.01919692802429199, 0.03932364654541016, 0.018924543380737305, 0.019127296447753905, 0.01901875114440918, 0.018907136917114258, 0.018896896362304686, 0.01885798454284668, 0.01881292724609375, 0.01900441551208496, 0.018884607315063476, 0.018885631561279297, 0.01883955192565918, 0.018886688232421876, 0.018969663619995115, 0.018922399520874024, 0.018976768493652343, 0.018914304733276367, 0.01907302474975586, 0.019050495147705078, 0.018923519134521484, 0.018939903259277344, 0.0189040641784668, 0.018877439498901367, 0.01883852767944336, 0.018897920608520507, 0.018779136657714843, 0.018916351318359375, 0.018919424057006837, 0.018974720001220705, 0.018869247436523438, 0.01886617660522461, 0.018947071075439453, 0.018897920608520507, 0.018967552185058592, 0.018948095321655273, 0.018880512237548826, 0.01887846374511719, 0.018902015686035157, 0.01886310386657715, 0.01887846374511719, 0.018799615859985352, 0.018912256240844725, 0.018832384109497072, 0.01883852767944336, 0.018920448303222655, 0.019014656066894533, 0.0188272647857666, 0.018974720001220705, 0.01883852767944336, 0.018874368667602538, 0.018527231216430663, 0.018898944854736328, 0.01881292724609375, 0.01879756736755371, 0.018902015686035157, 0.018908159255981445, 0.018832384109497072, 0.018918399810791017, 0.018792448043823243, 0.018888704299926756, 0.01884880065917969, 0.018927583694458006, 0.018953216552734374, 0.039212032318115236, 0.018920448303222655, 0.018912256240844725, 0.018958335876464845, 0.018861055374145508, 0.018928640365600585, 0.01884774398803711, 0.01884569549560547, 0.01886617660522461, 0.018934783935546876, 0.018942975997924806, 0.01904332733154297, 0.018899967193603515, 0.018918399810791017, 0.01901670455932617, 0.018998271942138673, 0.019309568405151366, 0.02033459281921387, 0.01902899169921875, 0.018954240798950195, 0.019001344680786132, 0.018902015686035157, 0.018910207748413087, 0.018935808181762694, 0.01882828712463379, 0.0188590087890625, 0.019208192825317383, 
0.019014656066894533, 0.018882560729980468, 0.019042303085327148, 0.019025951385498046, 0.019065824508666993, 0.018945024490356444, 0.018880512237548826, 0.018861055374145508, 0.018893823623657227, 0.01887539291381836, 0.018876415252685547, 0.01903104019165039, 0.018890752792358398, 0.01881497573852539, 0.01882009506225586, 0.01887948799133301, 0.01878835105895996, 0.018860031127929687, 0.018661376953125, 0.018603008270263673, 0.01880473518371582, 0.018817024230957033, 0.0188723201751709, 0.018905088424682616, 0.018860031127929687, 0.018917375564575196, 0.018925567626953126, 0.018750463485717773, 0.01882009506225586, 0.018754560470581053, 0.0188538875579834, 0.01883443260192871, 0.01882931137084961, 0.01881088066101074, 0.018900991439819336, 0.018924543380737305]",tokens/s,52.13785966068979,,,,,,,, -4bit-awq-gemm-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,tiiuae/falcon-180B,tiiuae/falcon-180B,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 304, in hf_raise_for_status - response.raise_for_status() - File ""/usr/local/lib/python3.10/dist-packages/requests/models.py"", line 1024, in raise_for_status - raise HTTPError(http_error_msg, response=self) -requests.exceptions.HTTPError: 403 Client Error: Forbidden for url: https://huggingface.co/tiiuae/falcon-180B/resolve/main/config.json - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1722, in _get_metadata_or_catch_error - metadata = get_hf_file_metadata(url=url, proxies=proxies, timeout=etag_timeout, headers=headers) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1645, in get_hf_file_metadata - r = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 372, in _request_wrapper - response = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 396, in _request_wrapper - hf_raise_for_status(response) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 367, in hf_raise_for_status - raise HfHubHTTPError(message, response=response) from e 
-huggingface_hub.utils._errors.HfHubHTTPError: (Request ID: Root=1-66949941-15319ee233ca581836f6074e;b3d9eb1e-b318-4073-a1d6-3dbbb7c11305) - -403 Forbidden: Please enable access to public gated repositories in your fine-grained token settings to view this repository.. -Cannot access content at: https://huggingface.co/tiiuae/falcon-180B/resolve/main/config.json. -If you are trying to create or update content,make sure you have a token with the `write` role. - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1221, in hf_hub_download - return _hf_hub_download_to_cache_dir( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1325, in _hf_hub_download_to_cache_dir - _raise_on_head_call_error(head_call_error, force_download, local_files_only) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1826, in _raise_on_head_call_error - raise LocalEntryNotFoundError( -huggingface_hub.utils._errors.LocalEntryNotFoundError: An error happened while trying to locate the file on the Hub and we cannot find the requested files in the local cache. Please check your connection and try again or make sure your Internet connection is on. - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 445, in cached_file - raise EnvironmentError( -OSError: We couldn't connect to 'https://huggingface.co' to load this file, couldn't find it in the cached files and it looks like tiiuae/falcon-180B is not the path to a directory containing a file named config.json. 
-Checkout your internet connection or see how to run the library in offline mode at 'https://huggingface.co/docs/transformers/installation#offline-mode'. - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemm-flash_attention_2,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,Deci/DeciLM-7B,,cuda,0,42,,,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.0,217063f5c507ed7cc255df7e1f64c4333a0b4dfe,4.40.2,,0.30.1,,,,1.19.2,,,,0.10.0,,,,,,,,,,,,,,,,,,,,,,,,,,,MB,1662.808064,5516.034048,0.0,4869.586944,4743.593472,s,10,6.1237835083007806,0.6123783508300781,0.001253925145814982,0.6120383911132812,0.6140244384765625,0.6141366088867187,0.6142263452148438,"[0.6138787841796876, 0.614248779296875, 0.6107066650390625, 0.61129736328125, 0.6111465454101562, 0.6114146728515625, 0.6116507568359375, 0.612426025390625, 0.61399951171875, 0.613014404296875]",tokens/s,418.0422114089963,kWh,7.21943411562178e-06,3.95425479798766e-06,3.477342651153299e-05,4.594711542514243e-05,tokens/kWh,5571622.889299289,MB,1662.808064,5516.034048,0.0,4869.586944,4769.651712,s,10,360.87087890624997,36.087087890625,0.01748383820965051,36.082986328125,36.11191953125,36.119573046875,36.125695859375,"[36.11021875, 36.1272265625, 36.08849609375, 36.07920703125, 36.0845625, 36.08481640625, 36.08141015625, 36.0650625, 36.07158203125, 36.078296875]",tokens/s,1.7457767773045123,kWh,0.00042595356252458363,0.00023346088450391426,0.0019820382702698735,0.0026414527172983716,tokens/kWh,23850.51210170259,,s,629,365.80599053955063,0.5815675525271077,0.07276675093795772,0.5726207885742187,0.5739550537109376,0.5744299926757812,1.1846730029296875,"[0.5736171264648438, 0.573517822265625, 0.5738854370117188, 0.57415576171875, 0.5742704467773437, 0.5731041259765625, 0.57312255859375, 0.5737461547851562, 0.5739632568359375, 0.5725850219726563, 0.5728665161132812, 0.5735147705078125, 0.5726494750976563, 0.573000732421875, 0.57260546875, 0.5726546020507812, 0.572073974609375, 0.5728809204101563, 0.572885986328125, 0.5725767822265625, 0.5731676025390625, 0.5745838012695312, 0.5729105834960937, 0.5730211791992188, 0.5729566650390625, 0.5723678588867187, 0.5724866333007812, 0.5726197509765625, 0.5735526123046875, 0.5740676879882812, 0.5729720458984375, 0.5732095947265625, 0.5726760864257813, 0.5724497680664062, 0.5724518432617187, 0.572790771484375, 0.5723668823242187, 0.5727159423828125, 0.5731563720703124, 0.5741424560546875, 0.5723576049804687, 0.5724334106445312, 0.5723883666992188, 0.5724548950195313, 0.5725368041992187, 0.5722142944335937, 0.5739407348632812, 0.5740206298828125, 0.572927001953125, 0.573212646484375, 0.5730181274414062, 0.5728184204101563, 0.5730816040039063, 0.5726177368164063, 0.572484619140625, 0.5727293701171875, 0.5740543823242188, 0.5732669677734376, 0.57483056640625, 0.574424072265625, 0.5744517211914062, 0.5744885864257813, 1.18734130859375, 0.5729505004882812, 0.5725030517578125, 0.5725654907226563, 0.5738137817382812, 0.572379150390625, 0.5741465454101562, 0.574750732421875, 0.573475830078125, 0.5734666137695312, 
0.5736826782226563, 0.5750292358398438, 0.5742151489257813, 0.5739274291992188, 0.5746913452148438, 0.5744281616210938, 0.5742459106445312, 0.5746903076171875, 0.5750845336914062, 0.5739735107421875, 0.5736888427734375, 0.574476318359375, 0.5740001220703125, 0.573032470703125, 0.5722992553710937, 0.57221630859375, 0.571994140625, 0.573065185546875, 0.573844482421875, 0.5739673461914062, 0.5742745361328125, 0.5743206787109375, 0.5743236694335937, 0.5738577880859375, 0.5742110595703125, 0.5753764038085938, 0.5745797119140625, 0.57402880859375, 0.5725982666015625, 0.5732260131835938, 0.5737594604492188, 0.5739530029296875, 0.5731553344726562, 0.572927978515625, 0.57257470703125, 0.5744312133789062, 0.5755975952148438, 0.5727283325195313, 0.5721149291992188, 0.572199951171875, 0.5723617553710938, 0.572105712890625, 0.57270068359375, 0.5733294067382813, 0.5723873291015625, 0.57191015625, 0.573065185546875, 0.5727866821289063, 0.5724027099609375, 0.5726883544921875, 0.5723258666992187, 0.5720657958984375, 0.5731727294921874, 1.1836395263671875, 0.5721917724609376, 0.5728533325195313, 0.5732484741210937, 0.5725409545898438, 0.5732628784179687, 0.5741773071289062, 0.5730109252929687, 0.5731737670898438, 0.573043701171875, 0.5734686889648437, 0.5732505493164063, 0.5726914672851563, 0.5729924926757812, 0.5729525756835937, 0.5732260131835938, 0.574719970703125, 0.5727836303710937, 0.5730283813476562, 0.5724508056640625, 0.5733519287109375, 0.5728767700195313, 0.5725736694335938, 0.5743431396484375, 0.5745357055664062, 0.573507568359375, 0.572695556640625, 0.5729177856445312, 0.573022216796875, 0.5733980102539062, 0.5731030883789062, 0.5730037841796874, 0.572927978515625, 0.5738895263671875, 0.573137939453125, 0.57310107421875, 0.5718549194335938, 0.5721885986328125, 0.57208935546875, 0.5725255737304688, 0.5722398681640625, 0.5733836669921875, 0.5725736694335938, 0.57238525390625, 0.5722880249023438, 0.5720064086914063, 0.5721456909179687, 0.57385986328125, 0.5721773681640625, 0.5720176391601562, 0.5720484008789063, 0.5730570068359375, 0.5720934448242188, 0.5720852661132813, 0.5719961547851562, 0.5719992065429688, 0.5727928466796876, 0.5721221313476562, 0.5721630859375, 0.572600341796875, 0.5723125610351563, 0.5724631958007812, 0.5727794799804687, 1.1843287353515626, 0.572779541015625, 0.57261669921875, 0.5734041748046875, 0.5739274291992188, 0.57350146484375, 0.57328125, 0.5731553344726562, 0.5729392700195313, 0.5729403076171875, 0.5732833251953126, 0.57289013671875, 0.5743707885742187, 0.5724887084960938, 0.5720043334960937, 0.5718681640625, 0.5722941284179688, 0.5721036987304687, 0.5721763916015625, 0.57200537109375, 0.5717247924804687, 0.572310546875, 0.5729740600585937, 0.5724682006835937, 0.5719664916992188, 0.5723975830078125, 0.5722286376953125, 0.5723289794921875, 0.572221435546875, 0.5728389282226563, 0.5737000732421875, 0.5728737182617187, 0.5730037841796874, 0.572178466796875, 0.5720411376953125, 0.5717974853515625, 0.5721036987304687, 0.5720719604492187, 0.5719695434570312, 0.5730037841796874, 0.5722941284179688, 0.5718763427734375, 0.5725706176757812, 0.5727713012695312, 0.5752719116210937, 0.5729822998046875, 0.5730355224609375, 0.5741107177734375, 0.5727109375, 0.5726207885742187, 0.5727815551757812, 0.5729136352539063, 0.572896240234375, 0.572600341796875, 0.5730191650390625, 0.5724456787109375, 0.573222900390625, 0.57379736328125, 0.5722828979492187, 0.572052490234375, 0.5719766845703125, 0.5719429321289062, 0.5717380981445312, 1.1848990478515624, 0.5739724731445313, 
0.5733632202148438, 0.5733693237304688, 0.5734880981445313, 0.5738117065429688, 0.5729403076171875, 0.5730672607421875, 0.5729863891601562, 0.5730140380859375, 0.57394482421875, 0.5726064453125, 0.5722265625, 0.5720391845703126, 0.5731246337890625, 0.5720433349609375, 0.57220703125, 0.5742510375976563, 0.5736365966796875, 0.572663818359375, 0.5723699340820313, 0.5724794921875, 0.5720340576171875, 0.5722705688476563, 0.5724047241210938, 0.57236376953125, 0.5725368041992187, 0.5725255737304688, 0.5728256225585937, 0.5726392211914062, 0.5721354370117188, 0.5721804809570312, 0.5721774291992188, 0.5723484497070312, 0.5721793823242187, 0.572990478515625, 0.5730048217773438, 0.5721558837890625, 0.572242919921875, 0.5721978759765625, 0.5722470703125, 0.5720596313476562, 0.5724047241210938, 0.5724436645507812, 0.5722009887695313, 0.5737861328125, 0.57619970703125, 0.5726668701171875, 0.5722962036132813, 0.5730099487304687, 0.5725439453125, 0.5726085205078125, 0.5728368530273438, 0.5743206176757812, 0.5726044311523437, 0.5726791381835937, 0.5723678588867187, 0.5721937866210938, 0.572822509765625, 0.5727109375, 0.5722070922851562, 0.5722654418945312, 0.5724968872070313, 1.1851029052734374, 0.5723607177734376, 0.57244775390625, 0.5727620849609375, 0.5731502075195313, 0.5729341430664062, 0.5733734130859375, 0.5732301025390625, 0.572663818359375, 0.5735577392578125, 0.5724036865234375, 0.5722737426757812, 0.5723555297851562, 0.5722726440429687, 0.5723494262695312, 0.5725409545898438, 0.5730109252929687, 0.57223681640625, 0.5723402099609375, 0.5722613525390625, 0.572516357421875, 0.5727579956054687, 0.5724467163085938, 0.572221435546875, 0.5731195068359375, 0.5724169921875, 0.5723781127929688, 0.5720811767578124, 0.5718343505859375, 0.5718425903320312, 0.57302734375, 0.5730027465820312, 0.5725501708984375, 0.5730252685546875, 0.57335498046875, 0.5724067993164063, 0.5720145874023438, 0.5724354858398437, 0.5753599853515625, 0.572169189453125, 0.5720125732421875, 0.5726105346679687, 0.5732147216796875, 0.57269970703125, 0.5723483276367187, 0.572010498046875, 0.5722890014648437, 0.5727958984375, 0.5723873901367188, 0.57226953125, 0.5728460693359375, 0.57364892578125, 0.5736980590820312, 0.5732618408203125, 0.5729075317382812, 0.5730396118164063, 0.5730027465820312, 0.5737277221679687, 0.5737984008789062, 0.574740478515625, 0.5733519287109375, 0.5725675659179688, 0.572537841796875, 1.184806884765625, 0.5725839233398438, 0.5726617431640625, 0.573106201171875, 0.5745018920898437, 0.5735618286132812, 0.573259765625, 0.5744578857421875, 0.5749258422851562, 0.5728265991210938, 0.5725081787109375, 0.5727150268554687, 0.5735505981445312, 0.573075439453125, 0.5722562255859375, 0.57212109375, 0.5720760498046875, 0.5723033447265625, 0.5736908569335938, 0.5737195434570312, 0.5725010375976562, 0.57371337890625, 0.5728123168945313, 0.5738147583007812, 0.571978759765625, 0.5718763427734375, 0.5724252319335937, 0.5723299560546875, 0.5721712646484375, 0.5726679077148438, 0.57254296875, 0.5723995971679687, 0.5719193725585937, 0.5722777709960938, 0.571779052734375, 0.5716900024414062, 0.5719306030273438, 0.5717493896484375, 0.571821044921875, 0.5734307861328125, 0.5732904663085937, 0.5728604125976563, 0.572748779296875, 0.57318603515625, 0.5727365112304688, 0.5732413330078125, 0.57249072265625, 0.5729822998046875, 0.573169677734375, 0.5721641235351562, 0.5722808227539062, 0.5722900390625, 0.57253173828125, 0.5722101440429688, 0.5723197631835938, 0.5721200561523437, 0.5727498168945313, 0.5729525756835937, 0.5725030517578125, 
0.5723596801757812, 0.5721190185546875, 0.5728788452148438, 0.57232177734375, 1.18618115234375, 0.5729382934570313, 0.5724763793945312, 0.5728051147460937, 0.572410888671875, 0.5722828979492187, 0.5726187744140625, 0.57223681640625, 0.5720278930664062, 0.5722142944335937, 0.5735720825195313, 0.5724139404296875, 0.5722695922851563, 0.5720924072265625, 0.572410888671875, 0.5720698852539062, 0.5738741455078125, 0.5721026611328125, 0.5729136352539063, 0.5728265991210938, 0.5724375, 0.5722224731445312, 0.57202587890625, 0.573212646484375, 0.572484619140625, 0.5720780639648437, 0.57230859375, 0.57369384765625, 0.5728798828125, 0.572020751953125, 0.5721231079101563, 0.5717554931640625, 0.572156982421875, 0.5722654418945312, 0.5718435668945312, 0.5726340942382813, 0.573137939453125, 0.5722542114257813, 0.5726156616210938, 0.572095458984375, 0.572031982421875, 0.5723658447265625, 0.5724303588867188, 0.5722726440429687, 0.5725307006835938, 0.5731655883789063, 0.5721272583007813, 0.5720043334960937, 0.572042236328125, 0.572242919921875, 0.5721856079101563, 0.5724548950195313, 0.5724661865234375, 0.5730764770507812, 0.5732301025390625, 0.5725450439453125, 0.5724866333007812, 0.5721734008789062, 0.5719572143554688, 0.5721549072265625, 0.572000244140625, 0.5720064086914063, 0.5725757446289063, 1.1873709716796874, 0.5724487915039063, 0.5732085571289063, 0.572284912109375, 0.5727989501953125, 0.5728409423828125, 0.5729740600585937, 0.573497314453125, 0.572516357421875, 0.572705810546875, 0.5721886596679687, 0.5721958618164062, 0.5724036865234375, 0.5723381958007813, 0.57270068359375, 0.5734564208984375, 0.5733170776367188, 0.5739089965820312, 0.5724610595703125, 0.5728818969726562, 0.5730293579101563, 0.57270068359375, 0.572495849609375, 0.5729935302734375, 0.5724405517578125, 0.572822509765625, 0.5726105346679687, 0.572315673828125, 0.5717718505859375, 0.5719408569335938, 0.5722603759765625, 0.5722347412109375, 0.5724713134765625, 0.5740123901367188, 0.5722296142578125, 0.5719521484375, 0.572314697265625, 0.572317626953125, 0.5720811767578124, 0.5719705810546875, 0.5717545166015625, 0.5723504638671875, 0.5726515502929688, 0.5724016723632812, 0.5723504638671875, 0.572231689453125, 0.572205078125, 0.5721190185546875, 0.572031982421875, 0.5724467163085938, 0.573043701171875, 0.5730468139648438, 0.5726412353515625, 0.5725030517578125, 0.5725399169921875, 0.5724713134765625, 0.5720606689453125, 0.5723914184570312, 0.5728767700195313, 0.573179931640625, 0.5726597290039063, 0.572295166015625, 0.5719317016601563, 1.1857969970703126, 0.5727313842773437, 0.572368896484375, 0.572347412109375, 0.5732669677734376, 0.5725573120117188, 0.5721907348632812, 0.5723095092773437, 0.5727498168945313, 0.5725081787109375, 0.5725491333007813, 0.5726720581054687, 0.5733457641601563, 0.5733345336914063, 0.5721354370117188, 0.5723043823242188, 0.5719183349609375, 0.5717770385742188, 0.5724968872070313, 0.5726105346679687, 0.573053955078125, 0.5741567993164063, 0.5731102905273437, 0.5720698852539062, 0.5720145874023438, 0.57215185546875, 0.574278564453125, 0.5722521362304688, 0.5720453491210937, 0.5730723266601563, 0.5723924560546875, 0.5727354736328125, 0.572337158203125, 0.5723197631835938, 0.5723668212890625, 0.572221435546875, 0.5729075317382812, 0.5726556396484375, 0.5724456787109375, 0.5738014526367188, 0.5731779174804688, 0.572559326171875, 0.5726812133789062, 0.5728235473632812, 0.572885986328125, 0.5726618041992187, 0.5722745971679688, 0.5731082153320313, 0.5732781982421875, 0.5720135498046875, 0.5721507568359375, 
0.57227880859375, 0.5728726806640625, 0.572346435546875, 0.5726781005859375, 0.5728286743164063, 0.5733345336914063, 0.5729228515625, 0.572737548828125, 0.5724548950195313, 0.5741793212890625, 0.5722265625, 0.572494873046875]",tokens/s,1.7194907034525255,,,,,,main,False,False -4bit-awq-gemm-flash_attention_2,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,facebook/opt-2.7b,facebook/opt-2.7b,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.0,,0.30.1,,,,1.19.2,,,,0.11.1,,,,,,,,,,,,,,,,,,,,,,,,,,,MB,2109.853696,2844.2624,0.0,2197.815296,1927.351296,s,10,2.3800795135498047,0.23800795135498043,0.0010254107368052306,0.23830428314208985,0.23930529022216795,0.23934926986694335,0.23938445358276367,"[0.2383564453125, 0.23939324951171875, 0.23619731140136718, 0.23845465087890624, 0.23723452758789063, 0.23663900756835937, 0.23750480651855468, 0.2382521209716797, 0.2387518768310547, 0.23929551696777343]",tokens/s,1075.594317511624,kWh,2.793223439723023e-06,1.5305452026781454e-06,1.2162554949675677e-05,1.6486323592076845e-05,tokens/kWh,15528022.276781643,MB,2109.853696,2844.2624,0.0,2197.815296,2031.97184,s,10,139.4725029296875,13.94725029296875,0.016539028053013265,13.941937500000002,13.97250703125,13.977812109375,13.982056171875,"[13.953560546875, 13.945388671875, 13.94201171875, 13.9831171875, 13.9309609375, 13.9364580078125, 13.92993359375, 13.94186328125, 13.971328125, 13.937880859375]",tokens/s,4.517019389245513,kWh,0.000164597820511391,9.021296607492203e-05,0.0007178143901425277,0.0009726251767288406,tokens/kWh,64773.153633430826,,s,629,141.39150625610358,0.2247877682926924,0.028297149074530483,0.2211778564453125,0.22228438720703125,0.22277959289550783,0.45843198486328124,"[0.22260429382324218, 0.22169497680664063, 0.2212351989746094, 0.22115533447265626, 0.22176870727539064, 0.22137344360351563, 0.22118502807617188, 0.22141644287109374, 0.22102528381347655, 0.22195199584960937, 0.2219018249511719, 0.22150553894042968, 0.22119935607910157, 0.2213509063720703, 0.22125465393066407, 0.22120550537109376, 0.22150962829589843, 0.2213017578125, 0.22201548767089843, 0.2213632049560547, 0.22165196228027345, 0.22151373291015625, 0.22148095703125, 0.22154649353027345, 0.22147174072265624, 0.22180044555664064, 0.22122802734375, 0.22110105895996093, 0.22116249084472656, 0.221444091796875, 0.22163456726074218, 0.22287872314453125, 0.2216417236328125, 0.222202880859375, 0.2216407012939453, 0.22104165649414062, 0.22102323913574218, 0.22114508056640625, 0.22116044616699218, 0.22107955932617188, 0.2210508728027344, 0.22112460327148437, 0.22108979797363282, 0.2215557098388672, 0.22159564208984375, 0.22146662902832032, 0.2216785888671875, 0.22263194274902343, 0.22135398864746095, 0.22125056457519532, 0.22187930297851563, 0.22126797485351563, 0.2213396453857422, 0.22115225219726561, 0.22118502807617188, 0.22127410888671875, 0.22127104187011717, 0.22109901428222656, 0.22111744689941407, 0.22118911743164063, 0.2214871063232422, 0.22157005310058595, 0.46144613647460936, 0.22118707275390626, 0.2218403778076172, 0.2209945526123047, 0.22095564270019533, 
0.22111538696289063, 0.22113792419433595, 0.22112973022460938, 0.22129766845703125, 0.22095974731445311, 0.22107244873046875, 0.2212720031738281, 0.22100991821289062, 0.22110617065429689, 0.22098739624023436, 0.22138470458984374, 0.22130073547363283, 0.22108773803710938, 0.22093721008300782, 0.2209566650390625, 0.2209310760498047, 0.2208204803466797, 0.22089932250976563, 0.22095155334472658, 0.22101708984375, 0.22186495971679687, 0.22131610107421876, 0.22127513122558592, 0.22142361450195314, 0.22218751525878908, 0.2213519287109375, 0.2209669189453125, 0.22220083618164063, 0.2210744323730469, 0.22159461975097655, 0.22113792419433595, 0.22117990112304686, 0.2222161865234375, 0.22108262634277343, 0.2210293731689453, 0.22102117919921874, 0.22107955932617188, 0.22112562561035157, 0.22216499328613282, 0.22140518188476563, 0.22183526611328125, 0.22283879089355468, 0.22166937255859376, 0.2215045166015625, 0.22186904907226562, 0.22122700500488282, 0.2214256591796875, 0.22139903259277344, 0.2215004119873047, 0.22137753295898438, 0.22115737915039063, 0.22106419372558594, 0.22163967895507813, 0.22111334228515625, 0.22137753295898438, 0.2212833251953125, 0.2219683837890625, 0.22127923583984374, 0.45813043212890625, 0.22103347778320312, 0.2213949432373047, 0.22112973022460938, 0.22099250793457031, 0.22118502807617188, 0.2218219451904297, 0.22100376892089843, 0.221154296875, 0.22091162109375, 0.22253260803222658, 0.22119935607910157, 0.2210529327392578, 0.22278041076660157, 0.22130482482910158, 0.22150553894042968, 0.2213519287109375, 0.22121983337402343, 0.2217584686279297, 0.22111436462402342, 0.22102015686035156, 0.22090444946289062, 0.22078054809570313, 0.2211778564453125, 0.22104576110839844, 0.2211420135498047, 0.22199909973144533, 0.22127308654785155, 0.22181170654296875, 0.2217164764404297, 0.2210918426513672, 0.2213201904296875, 0.22124339294433593, 0.2210150451660156, 0.22111231994628905, 0.22106008911132813, 0.22112051391601562, 0.22103245544433595, 0.22106419372558594, 0.2211778564453125, 0.22102323913574218, 0.22111846923828124, 0.22116146850585938, 0.2212884521484375, 0.22095872497558594, 0.22103347778320312, 0.221048828125, 0.2216294403076172, 0.22115122985839844, 0.22118400573730468, 0.221127685546875, 0.22102117919921874, 0.22113177490234376, 0.22128025817871094, 0.2211031036376953, 0.22143180847167968, 0.22129254150390626, 0.22108773803710938, 0.22222642517089844, 0.22198886108398438, 0.2217205810546875, 0.2217574462890625, 0.22159461975097655, 0.4614256591796875, 0.22283775329589844, 0.22295756530761718, 0.22274969482421875, 0.2225858612060547, 0.22346035766601563, 0.22301695251464843, 0.22341127014160156, 0.22270252990722655, 0.22250291442871092, 0.22281216430664064, 0.22286746215820313, 0.22281011962890626, 0.22280703735351562, 0.22268620300292968, 0.22271487426757813, 0.22270976257324218, 0.22246092224121095, 0.222814208984375, 0.22279373168945313, 0.22285516357421875, 0.222635009765625, 0.2227783660888672, 0.2227271728515625, 0.22253773498535157, 0.22261862182617187, 0.223025146484375, 0.22222540283203124, 0.22242611694335937, 0.22123930358886718, 0.22103450012207032, 0.2209105987548828, 0.22106008911132813, 0.22086451721191405, 0.2209740753173828, 0.22110208129882813, 0.2218260498046875, 0.2210365447998047, 0.2210713653564453, 0.22134988403320313, 0.22149017333984375, 0.22106419372558594, 0.22114309692382814, 0.22111225891113281, 0.2225971221923828, 0.22118400573730468, 0.22139187622070314, 0.22111538696289063, 0.22112153625488282, 0.22092594909667967, 0.22134783935546876, 
0.221127685546875, 0.22192127990722657, 0.2215854034423828, 0.22127308654785155, 0.22121881103515625, 0.2214686737060547, 0.2213396453857422, 0.22123008728027344, 0.22115327453613282, 0.22119218444824218, 0.22103347778320312, 0.22114303588867187, 0.45854925537109376, 0.22104678344726564, 0.22133247375488282, 0.22091571044921876, 0.22102630615234375, 0.2210508728027344, 0.22108364868164063, 0.22100274658203126, 0.22103141784667968, 0.22081741333007812, 0.22105906677246093, 0.22115737915039063, 0.22105702209472655, 0.22100991821289062, 0.22107749938964844, 0.22084402465820313, 0.22094540405273438, 0.22115327453613282, 0.22110719299316406, 0.220980224609375, 0.22154444885253907, 0.22112460327148437, 0.2211031036376953, 0.22097817993164062, 0.2210498504638672, 0.22134375, 0.2211266632080078, 0.22089112854003906, 0.2209566650390625, 0.22103756713867187, 0.2209976348876953, 0.22094540405273438, 0.22101708984375, 0.2214256591796875, 0.22106623840332032, 0.22102528381347655, 0.2213017578125, 0.2209976348876953, 0.221233154296875, 0.22100274658203126, 0.22102220153808594, 0.22098329162597657, 0.2211584014892578, 0.2211092529296875, 0.22123417663574219, 0.22111744689941407, 0.2211235809326172, 0.22127001953125, 0.22116761779785157, 0.22121881103515625, 0.2211031036376953, 0.2210498504638672, 0.221085693359375, 0.22113587951660157, 0.22125978088378906, 0.2211041259765625, 0.221154296875, 0.22228172302246094, 0.22109901428222656, 0.22121267700195313, 0.22124339294433593, 0.2211584014892578, 0.2214686737060547, 0.457933837890625, 0.2211962890625, 0.22194586181640624, 0.22120755004882814, 0.22100889587402345, 0.2212843475341797, 0.22189056396484375, 0.2212884521484375, 0.2211727294921875, 0.22092083740234375, 0.22090956115722657, 0.2211768341064453, 0.2210160675048828, 0.22107545471191406, 0.22154035949707032, 0.22111436462402342, 0.22106521606445312, 0.22105599975585938, 0.22096998596191406, 0.22127615356445313, 0.22098329162597657, 0.22110617065429689, 0.2209003448486328, 0.22102117919921874, 0.22100376892089843, 0.2210365447998047, 0.22115635681152343, 0.2209239044189453, 0.22113996887207032, 0.22109490966796874, 0.22142259216308594, 0.22121983337402343, 0.22105599975585938, 0.2209105987548828, 0.221159423828125, 0.22130482482910158, 0.22137651062011718, 0.22106521606445312, 0.22213119506835938, 0.22088088989257812, 0.2209105987548828, 0.2210150451660156, 0.22098739624023436, 0.22129458618164063, 0.22125363159179687, 0.22113894653320312, 0.22119833374023437, 0.2218956756591797, 0.22099250793457031, 0.22140518188476563, 0.22130892944335936, 0.22184857177734374, 0.22106930541992187, 0.22177484130859376, 0.22129254150390626, 0.22115635681152343, 0.22096485900878907, 0.22116864013671875, 0.22148915100097658, 0.22126797485351563, 0.221384765625, 0.2214911346435547, 0.22094540405273438, 0.45877044677734374, 0.2209187774658203, 0.22111744689941407, 0.22087065124511718, 0.22097509765625, 0.22129971313476562, 0.2210744323730469, 0.2212843475341797, 0.22100991821289062, 0.22094744873046876, 0.2208368682861328, 0.22104473876953126, 0.22097305297851563, 0.2210682830810547, 0.2209914855957031, 0.2208757781982422, 0.2209628143310547, 0.2210426940917969, 0.22097509765625, 0.22097203063964843, 0.22091468811035156, 0.22095872497558594, 0.22083993530273438, 0.22126591491699218, 0.2211420135498047, 0.22217318725585938, 0.22125978088378906, 0.22093618774414062, 0.2212351989746094, 0.22171852111816406, 0.22144717407226563, 0.22102323913574218, 0.221486083984375, 0.220980224609375, 0.2209935302734375, 0.2213939208984375, 
0.22103858947753907, 0.22095462036132812, 0.22107647705078126, 0.22094744873046876, 0.22103858947753907, 0.22127206420898438, 0.22109286499023437, 0.2211461181640625, 0.22218240356445312, 0.22110719299316406, 0.2209812469482422, 0.22119833374023437, 0.22108876037597655, 0.22112870788574218, 0.22101913452148436, 0.22104678344726564, 0.2209669189453125, 0.22112870788574218, 0.2210682830810547, 0.22112051391601562, 0.22107034301757814, 0.22104473876953126, 0.22089421081542968, 0.221011962890625, 0.2210160675048828, 0.2210846710205078, 0.2209628143310547, 0.45941656494140626, 0.2209495086669922, 0.221739013671875, 0.2209863739013672, 0.22102117919921874, 0.221048828125, 0.22226022338867188, 0.22110617065429689, 0.2219622344970703, 0.22180557250976562, 0.22233804321289063, 0.2211041259765625, 0.22118911743164063, 0.22098329162597657, 0.2222192687988281, 0.22136012268066407, 0.22214041137695312, 0.2213079071044922, 0.22128947448730468, 0.22100787353515625, 0.221127685546875, 0.22152703857421874, 0.2220943298339844, 0.2211031036376953, 0.22113792419433595, 0.22100889587402345, 0.22107034301757814, 0.22086758422851563, 0.2210048065185547, 0.22096588134765624, 0.2210846710205078, 0.22087680053710937, 0.221154296875, 0.22088607788085937, 0.22132730102539064, 0.2210529327392578, 0.22137548828125, 0.22103450012207032, 0.22103450012207032, 0.22075392150878906, 0.22111949157714844, 0.2211420135498047, 0.2211461181640625, 0.2211041259765625, 0.22117990112304686, 0.2209812469482422, 0.22081843566894532, 0.22104371643066406, 0.22111949157714844, 0.22109286499023437, 0.22118502807617188, 0.22213119506835938, 0.22129049682617188, 0.22118092346191406, 0.22175027465820313, 0.2221670379638672, 0.22145330810546876, 0.221517822265625, 0.22109490966796874, 0.22186087036132812, 0.2214615020751953, 0.22115122985839844, 0.22144102478027344, 0.46024600219726564, 0.2209976348876953, 0.22151475524902345, 0.22144717407226563, 0.2214246368408203, 0.22156083679199218, 0.2218076171875, 0.22113690185546875, 0.2215905303955078, 0.22111949157714844, 0.2214993896484375, 0.22210354614257813, 0.221412353515625, 0.22127104187011717, 0.22124339294433593, 0.22104165649414062, 0.22167552185058595, 0.22163148498535157, 0.2216048583984375, 0.22217523193359376, 0.22170419311523437, 0.22118092346191406, 0.22107034301757814, 0.22108876037597655, 0.22134988403320313, 0.22161715698242188, 0.22139698791503906, 0.22093209838867187, 0.22118707275390626, 0.22141542053222657, 0.22160383605957032, 0.2212833251953125, 0.22284288024902343, 0.2227220458984375, 0.22272000122070312, 0.22289511108398438, 0.22466764831542968, 0.2222950439453125, 0.22271078491210938, 0.22245887756347657, 0.22309580993652345, 0.2229698486328125, 0.22274867248535157, 0.22293504333496095, 0.22252748107910156, 0.22249267578125, 0.22271795654296875, 0.22294834899902344, 0.2227640380859375, 0.22101913452148436, 0.2211420135498047, 0.22163967895507813, 0.22111949157714844, 0.22116761779785157, 0.2211420135498047, 0.22103347778320312, 0.22134066772460936, 0.22165606689453124, 0.22108979797363282, 0.22110823059082033, 0.2213621826171875, 0.22120448303222656, 0.22122802734375, 0.46055218505859374, 0.22127308654785155, 0.22119218444824218, 0.22088088989257812, 0.22082456970214845, 0.22128128051757812, 0.22141439819335937, 0.22119935607910157, 0.22096383666992186, 0.22105804443359375, 0.22094137573242187, 0.2210590057373047, 0.22101094055175782, 0.22181581115722657, 0.22244248962402344, 0.22087271118164062, 0.22137753295898438, 0.22102732849121093, 0.2222520294189453, 
0.22111129760742188, 0.22082662963867186, 0.22154853820800782, 0.22087986755371095, 0.2212034606933594, 0.22097920227050782, 0.22116659545898437, 0.22161100769042968, 0.22113075256347656, 0.2215413818359375, 0.22253465270996095, 0.22111949157714844, 0.2211102752685547, 0.2211788787841797, 0.22141952514648439, 0.22129766845703125, 0.22112562561035157, 0.22104063415527345, 0.22099046325683594, 0.22102835083007813, 0.22090444946289062, 0.22094438171386718, 0.2210181121826172, 0.22109907531738282, 0.22128121948242188, 0.22107647705078126, 0.22143795776367187, 0.2210846710205078, 0.22113792419433595, 0.22100274658203126, 0.22100889587402345, 0.2211266632080078, 0.22113587951660157, 0.2213580780029297, 0.2212833251953125, 0.221154296875, 0.22113587951660157, 0.2210672607421875, 0.22181581115722657, 0.22194586181640624, 0.22132325744628906, 0.22108572387695313, 0.22134576416015625, 0.22117170715332032]",tokens/s,4.448640633764008,,,,,,,, -4bit-awq-gemm-flash_attention_2,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,huggyllama/llama-7b,huggyllama/llama-7b,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.0,,0.30.1,,,,1.19.2,,,,0.11.1,,,,,,,,,,,,,,,,,,,,,,,,,,,MB,1979.4944,5480.382464,0.0,4833.93536,4503.282688,s,10,5.706418334960937,0.5706418334960938,0.0011422166985642673,0.5705734558105469,0.5721191223144532,0.5723321746826172,0.5725026165771484,"[0.5707071533203125, 0.5725452270507813, 0.570284912109375, 0.5698196411132812, 0.5708470458984375, 0.569266357421875, 0.5704397583007812, 0.571673828125, 0.57207177734375, 0.5687626342773437]",tokens/s,448.6176529182773,kWh,6.72348948356546e-06,3.6841831159411714e-06,3.142670724010705e-05,4.183437983961368e-05,tokens/kWh,6119368.829691346,MB,1979.4944,5480.382464,0.0,4833.93536,4688.699392,s,10,334.87464062500004,33.487464062499996,0.006284503345517177,33.486009765625,33.4966609375,33.498351953125,33.499704765625005,"[33.50004296875, 33.49628515625, 33.487125, 33.48791796875, 33.48489453125, 33.48313671875, 33.490703125, 33.48450390625, 33.48125, 33.47878125]",tokens/s,1.8813010111013093,kWh,0.0003953183352579305,0.00021666870724086642,0.0018133419907958965,0.002425329033294693,tokens/kWh,25975.85693946752,,s,629,339.4783874511717,0.5397112678079045,0.0678655066405644,0.531504150390625,0.5319763916015625,0.5321570190429687,1.1021465185546875,"[0.531127197265625, 0.5313873901367188, 0.5311314086914063, 0.5313760986328125, 0.5314212036132813, 0.5316044921875, 0.5316137084960938, 0.5314949340820313, 0.5322680053710938, 0.531794921875, 0.5314816284179688, 0.5319925537109375, 0.531863525390625, 0.5316638793945313, 0.5320120239257813, 0.5315921630859375, 0.5318819580078125, 0.5321605224609375, 0.53214208984375, 0.531884033203125, 0.5315399780273438, 0.5322833862304688, 0.532052978515625, 0.5314406127929687, 0.5317867431640625, 0.5322066040039063, 0.531673095703125, 0.53195263671875, 0.5317314453125, 0.5317376098632812, 0.5320519409179687, 0.5320724487304688, 0.531541015625, 0.5316557006835938, 0.53180517578125, 0.5318154296875, 0.5315655517578125, 0.5316137084960938, 0.5321820068359375, 
0.531557373046875, 0.5312399291992187, 0.5314426879882812, 0.5312901000976562, 0.53134130859375, 0.531072998046875, 0.5317980346679687, 0.5320304565429688, 0.5319249877929687, 0.5319178466796874, 0.5318133544921875, 0.531989501953125, 0.5314345092773437, 0.531373046875, 0.531641357421875, 0.5318492431640625, 0.5319464721679688, 0.5321226196289063, 0.5319137573242188, 0.5322987670898438, 0.531788818359375, 0.53163623046875, 0.5317109985351562, 1.1043450927734375, 0.531673095703125, 0.5321574096679688, 0.5315389404296875, 0.5321922607421875, 0.531525634765625, 0.5317652587890624, 0.531631103515625, 0.5321697387695312, 0.5315164184570312, 0.5316024169921875, 0.5313341674804688, 0.5321021728515625, 0.5321441040039062, 0.5318215942382812, 0.531609619140625, 0.5315983276367188, 0.5313085327148438, 0.5319906005859375, 0.5317283325195312, 0.5320519409179687, 0.5316751098632813, 0.5315983276367188, 0.5317775268554688, 0.5322587890625, 0.5316044921875, 0.5319588012695312, 0.5312645263671875, 0.5314641723632813, 0.5313402709960937, 0.531599365234375, 0.5311528930664062, 0.5314417114257812, 0.5315245361328125, 0.5315297241210938, 0.531078125, 0.53165771484375, 0.5314437255859376, 0.5313720092773437, 0.53186767578125, 0.531873779296875, 0.5317017822265625, 0.53191064453125, 0.5315491943359375, 0.531904541015625, 0.53142529296875, 0.532937744140625, 0.5315389404296875, 0.5317601318359375, 0.5315614624023437, 0.5316034545898437, 0.5312798461914062, 0.5315686645507812, 0.53125634765625, 0.5316055297851563, 0.5312440185546875, 0.5317672729492188, 0.5314345092773437, 0.5318533325195313, 0.5322587890625, 0.5322587890625, 0.5315594482421875, 0.5315952758789062, 1.10245166015625, 0.5316915283203125, 0.5318369140625, 0.531167236328125, 0.5314037475585938, 0.5311907958984375, 0.531399658203125, 0.5313310546875, 0.5314713745117188, 0.5310955810546875, 0.5314068603515625, 0.5314180908203125, 0.5313095703125, 0.5320089721679687, 0.532147216796875, 0.5318717041015625, 0.5314703369140625, 0.5313587036132813, 0.5315277099609375, 0.5315932006835937, 0.5320427856445312, 0.5314682006835938, 0.5316864013671875, 0.5312532348632812, 0.531578857421875, 0.5313218383789062, 0.5315369262695312, 0.5313751220703125, 0.5314324340820312, 0.5314744262695312, 0.53136181640625, 0.5313638305664062, 0.5315297241210938, 0.5316331787109375, 0.5315000610351562, 0.5318041381835937, 0.5314805908203125, 0.5310812377929688, 0.5316290283203124, 0.531251220703125, 0.5314805908203125, 0.5312870483398437, 0.5313802490234375, 0.5314498291015625, 0.531378173828125, 0.5312542724609375, 0.531356689453125, 0.5313423461914063, 0.5315717163085938, 0.5312665405273438, 0.5319905395507812, 0.5315286865234375, 0.5317857055664063, 0.5316792602539062, 0.5319916381835937, 0.5315020141601563, 0.5320150756835937, 0.531610595703125, 0.5316116333007812, 0.531620849609375, 0.5315635375976563, 0.53176318359375, 0.5320653076171875, 1.1017093505859374, 0.5309368286132813, 0.5314478149414062, 0.531114990234375, 0.5318041381835937, 0.531794921875, 0.5318615112304688, 0.5316116333007812, 0.5316034545898437, 0.5310453491210938, 0.5313955688476563, 0.5313310546875, 0.5316249389648438, 0.5315194702148438, 0.5316045532226562, 0.5311835327148438, 0.5314447631835938, 0.53148876953125, 0.5315830688476563, 0.5314241943359375, 0.5317130126953125, 0.5312276611328125, 0.5314457397460938, 0.5311815795898438, 0.5318450927734375, 0.5317929077148438, 0.5315983276367188, 0.5316392822265625, 0.5314345092773437, 0.5313597412109375, 0.5314928588867187, 0.53110986328125, 0.5317550048828125, 
0.531198974609375, 0.5312501831054688, 0.5313074951171874, 0.5314006958007812, 0.5310812377929688, 0.5314263305664062, 0.53121435546875, 0.5314088745117187, 0.5312266235351563, 0.5319813232421875, 0.5324287719726563, 0.53281494140625, 0.532264892578125, 0.5315143432617188, 0.531272705078125, 0.5317017822265625, 0.5313546142578125, 0.531945556640625, 0.5313545532226562, 0.53178369140625, 0.5314877319335938, 0.5314969482421875, 0.5313597412109375, 0.5315460815429688, 0.5314918212890625, 0.5317130126953125, 0.5316566772460938, 0.5317969970703125, 0.5315768432617187, 0.53257421875, 1.1031951904296875, 0.5310873413085937, 0.531431396484375, 0.5310433349609375, 0.5313966064453125, 0.5312911376953126, 0.5316239624023438, 0.5313822631835937, 0.5315317993164063, 0.53124609375, 0.5313659057617187, 0.5311549682617187, 0.5316239624023438, 0.531420166015625, 0.5314857177734374, 0.531357666015625, 0.5316966552734375, 0.5313771362304688, 0.5317969970703125, 0.5319751586914062, 0.531504150390625, 0.5317877807617187, 0.53207958984375, 0.531504150390625, 0.5317980346679687, 0.5316649169921875, 0.5317243041992188, 0.5313710327148438, 0.53178369140625, 0.5310208129882813, 0.5313812255859375, 0.5311692504882812, 0.5312440185546875, 0.530914306640625, 0.531140625, 0.5312010498046875, 0.5316300659179688, 0.5319229736328125, 0.5317314453125, 0.5311651611328125, 0.5311211547851562, 0.531178466796875, 0.5316658935546875, 0.5312122802734375, 0.5314109497070313, 0.5314529418945313, 0.5315552978515625, 0.531398681640625, 0.5312880859375, 0.5313443603515625, 0.53163623046875, 0.5312163696289063, 0.5315215454101563, 0.5311047973632812, 0.5318932495117188, 0.531894287109375, 0.5316249389648438, 0.5334948120117188, 0.5316904907226563, 0.5312880859375, 0.5315880737304688, 0.5312071533203125, 0.5317755126953125, 1.1012259521484375, 0.5312655639648437, 0.531800048828125, 0.5310771484375, 0.5316597900390625, 0.5314877319335938, 0.5313556518554687, 0.531009521484375, 0.5315706787109375, 0.5319342041015624, 0.5316925659179688, 0.5315604248046875, 0.5314334716796875, 0.5310894165039063, 0.531968994140625, 0.53129931640625, 0.5314898071289063, 0.5309706420898438, 0.5316198120117187, 0.5311743774414063, 0.53146826171875, 0.5311262817382812, 0.5313914794921875, 0.5310750732421875, 0.5313494873046875, 0.53096142578125, 0.5314263916015625, 0.5310238037109375, 0.5317867431640625, 0.5314058227539062, 0.531926025390625, 0.5319659423828125, 0.5314160766601562, 0.531178466796875, 0.5314283447265625, 0.5311897583007813, 0.5313648681640625, 0.5312819213867187, 0.53264794921875, 0.531478515625, 0.5314662475585937, 0.5313802490234375, 0.53146728515625, 0.531188720703125, 0.531399658203125, 0.5311815795898438, 0.5315440673828125, 0.5322147827148438, 0.531962890625, 0.5316351928710937, 0.5318799438476562, 0.5312614135742187, 0.5315430297851562, 0.5312973022460937, 0.531641357421875, 0.5314744262695312, 0.5315491943359375, 0.5312296752929687, 0.531863525390625, 0.5313689575195313, 0.5315000610351562, 0.5313095703125, 0.5315379028320313, 1.1023165283203125, 0.5312081909179688, 0.5320724487304688, 0.5311488037109375, 0.5318819580078125, 0.5312911376953126, 0.5314949340820313, 0.5315706787109375, 0.5317857055664063, 0.5313474731445312, 0.5318563842773437, 0.5311016845703125, 0.5315194702148438, 0.53150927734375, 0.5318553466796875, 0.5311559448242188, 0.5314652099609375, 0.5313792114257813, 0.5314171142578125, 0.5314180908203125, 0.5321830444335938, 0.5318389892578125, 0.5316331787109375, 0.5316177368164062, 0.5313392944335937, 0.5313668823242188, 
0.5315061645507813, 0.5312501831054688, 0.5319915771484375, 0.5316126708984374, 0.5317601318359375, 0.5316557006835938, 0.53146826171875, 0.5313116455078125, 0.5315369262695312, 0.5313966064453125, 0.5317078857421875, 0.5314437255859376, 0.5320560913085938, 0.5326489868164063, 0.5319014282226563, 0.531631103515625, 0.5314703369140625, 0.5317478637695312, 0.5316761474609375, 0.5316065063476563, 0.531609619140625, 0.5315194702148438, 0.5318502197265625, 0.5314826049804687, 0.5319669799804687, 0.5315072021484375, 0.5315419921875, 0.5310453491210938, 0.5313034057617188, 0.5311867065429687, 0.5317181396484375, 0.531683349609375, 0.5318041381835937, 0.5316966552734375, 0.5320130615234375, 0.5314692993164063, 0.5316198120117187, 1.103932373046875, 0.5310914306640625, 0.531578857421875, 0.5308538818359375, 0.5313659057617187, 0.5314221801757812, 0.5314703369140625, 0.5309716186523438, 0.5315850830078125, 0.5310443115234375, 0.5311928100585938, 0.53098291015625, 0.5311600341796875, 0.5316239624023438, 0.531800048828125, 0.531646484375, 0.5318225708007812, 0.5314242553710937, 0.5319229736328125, 0.53176318359375, 0.5316587524414063, 0.53146826171875, 0.531483642578125, 0.5312266235351563, 0.5317498779296875, 0.5314273071289063, 0.5314631958007813, 0.5311232299804688, 0.5317816162109374, 0.5313494873046875, 0.5318584594726562, 0.5314795532226563, 0.5324308471679687, 0.5313074951171874, 0.5318523559570313, 0.5310309448242188, 0.5311488037109375, 0.5311488037109375, 0.5314488525390625, 0.530966552734375, 0.5313894653320312, 0.5310975952148438, 0.5314426879882812, 0.5313054809570312, 0.5314180908203125, 0.5313187866210938, 0.5313914794921875, 0.5315809326171875, 0.5312921752929688, 0.5312911376953126, 0.531684326171875, 0.5320560913085938, 0.5315133666992188, 0.5317980346679687, 0.5318328247070313, 0.5314559936523438, 0.5321195678710937, 0.5318225708007812, 0.5315963134765626, 0.5315829467773437, 0.5318184814453125, 0.531431396484375, 0.5319127197265625, 1.103847412109375, 0.531304443359375, 0.5315451049804687, 0.531357666015625, 0.5316792602539062, 0.5319639282226563, 0.5315829467773437, 0.5313710327148438, 0.5315205078125, 0.5312491455078125, 0.5314611206054688, 0.5316341552734375, 0.5313760986328125, 0.5311549682617187, 0.531583984375, 0.5310637817382813, 0.5312645263671875, 0.53108837890625, 0.5316741333007813, 0.531162109375, 0.5312429809570313, 0.5314140014648437, 0.531314697265625, 0.5313054809570312, 0.531968017578125, 0.531599365234375, 0.5316085815429688, 0.5312634887695312, 0.5314406127929687, 0.53096240234375, 0.5313423461914063, 0.5313720092773437, 0.5316188354492187, 0.5309050903320313, 0.5311948852539062, 0.5310873413085937, 0.5315645141601563, 0.5312706298828125, 0.5313065185546875, 0.5313433837890625, 0.53119384765625, 0.5309767456054687, 0.5315348510742187, 0.532041748046875, 0.5317161254882813, 0.5314119873046875, 0.5313494873046875, 0.5311979370117188, 0.5316075439453125, 0.5313341674804688, 0.5313423461914063, 0.531177490234375, 0.5312553100585937, 0.531019775390625, 0.5315665893554687, 0.5312440185546875, 0.5314283447265625, 0.5314006958007812, 0.5314877319335938, 0.5311262817382812, 0.5316321411132813, 0.5312686157226563, 0.5320028076171875, 1.103824951171875, 0.5310279541015624, 0.53146826171875, 0.5310341186523437, 0.5313740844726562, 0.5310853271484375, 0.5314447631835938, 0.5311447143554687, 0.5314212036132813, 0.531420166015625, 0.5311948852539062, 0.5308630981445313, 0.5313853149414063, 0.5309020385742188, 0.53150830078125, 0.5310576171875, 0.5312839965820313, 
0.5317191772460937, 0.5318482055664062, 0.5312532348632812, 0.5314641723632813, 0.5309531860351563, 0.5312553100585937, 0.5309808349609375, 0.5311590576171875, 0.5310494995117188, 0.531863525390625, 0.5317703857421875, 0.5314180908203125, 0.53118359375, 0.5314396362304687, 0.5311129150390625, 0.5312420043945313, 0.5310259399414062, 0.5311918334960938, 0.5312973022460937, 0.5321564331054688, 0.5315194702148438, 0.5316976928710937, 0.5313034057617188, 0.5316218872070313, 0.531430419921875, 0.5316812744140625, 0.5315625, 0.5316239624023438, 0.5310105590820312, 0.5315389404296875, 0.5312665405273438, 0.5317191772460937, 0.5312142944335938, 0.5314641723632813, 0.53167822265625, 0.5313607788085938, 0.5311918334960938, 0.5314918212890625, 0.53161474609375, 0.5318031616210938, 0.5315338134765625, 0.5315020751953125, 0.5313065185546875, 0.531968994140625, 0.5318994140625, 0.5317734375]",tokens/s,1.8528425468336207,,,,,,,, -4bit-awq-gemm-flash_attention_2,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,facebook/opt-13b,facebook/opt-13b,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.0,,0.30.1,,,,1.19.2,,,,0.11.1,,,,,,,,,,,,,,,,,,,,,,,,,,,MB,3081.781248,9521.594368,0.0,8875.147264,8264.141824,s,10,10.63840625,1.0638406249999999,0.0011581823391520636,1.0637650756835937,1.0652460571289062,1.0654749572753905,1.0656580773925781,"[1.0649481201171875, 1.065703857421875, 1.0634771728515624, 1.0627923583984376, 1.063096435546875, 1.0621585693359374, 1.064052978515625, 1.0644849853515626, 1.0651951904296875, 1.06249658203125]",tokens/s,240.63754850497463,kWh,1.2549749844604067e-05,6.874677577252441e-06,5.877399146358986e-05,7.819841888544636e-05,tokens/kWh,3273723.479946787,MB,3081.781248,9521.594368,0.0,8875.147264,8556.643328,s,10,631.86266796875,63.186266796874996,0.007118498406389631,63.1850546875,63.19375234375,63.198333203125,63.201997890625,"[63.192734375, 63.2029140625, 63.18565625, 63.17730078125, 63.18101953125, 63.1805546875, 63.18579296875, 63.18146875, 63.1907734375, 63.184453125]",tokens/s,0.9970520999844192,kWh,0.0007460630791551536,0.0004089103833046283,0.003496975492022614,0.004651948954482396,tokens/kWh,13542.710940389017,,s,629,640.4335947265614,1.0181774160994632,0.12635276340148643,1.0029219970703125,1.0034655395507812,1.0036467651367187,2.0656750390625,"[1.00284619140625, 1.002883056640625, 1.0025042114257812, 1.0028574829101562, 1.0028175659179688, 1.002925048828125, 1.0030203247070313, 1.0030990600585938, 1.0027622680664063, 1.0032742309570313, 1.0026455078125, 1.003062255859375, 1.0025420532226563, 1.0029680786132813, 1.0027857666015625, 1.00307763671875, 1.0029732055664062, 1.0030858154296876, 1.002630126953125, 1.0030980834960936, 1.0028267822265624, 1.003293701171875, 1.0026495971679688, 1.0032005004882814, 1.0028339233398438, 1.0035537719726562, 1.0033336181640624, 1.0031492919921876, 1.0029854736328125, 1.0029179077148438, 1.0027100219726564, 1.0030469360351562, 1.0027202758789062, 1.0029025268554688, 1.002692626953125, 1.003177978515625, 1.0032864990234376, 1.0032291870117187, 1.0031237182617188, 
1.0033694458007814, 1.0031052856445313, 1.0034606323242188, 1.0029014892578125, 1.0031063232421875, 1.002977294921875, 1.0030796508789062, 1.0032691040039063, 1.003052001953125, 1.003093994140625, 1.0031728515625, 1.0026219482421874, 1.003230224609375, 1.0031892700195313, 1.003472900390625, 1.0031318969726561, 1.0034933471679688, 1.0038251342773437, 1.0035701904296874, 1.0035916748046876, 1.0035220336914064, 1.0029598999023437, 1.0031124267578124, 2.068518798828125, 1.0033325805664062, 1.0031472778320312, 1.0031912841796875, 1.0029844360351563, 1.0031206665039063, 1.0034401245117188, 1.0029373168945312, 1.0032005004882814, 1.00276123046875, 1.003198486328125, 1.0033233642578125, 1.0032588500976563, 1.0031349487304688, 1.0031708374023438, 1.0031646728515624, 1.0034074096679688, 1.0032445068359375, 1.0034769897460938, 1.0028963623046876, 1.0031943969726562, 1.0029393920898437, 1.0032639770507812, 1.0037217407226562, 1.0034319458007812, 1.0026045532226562, 1.003093017578125, 1.0026946411132813, 1.0035282592773438, 1.0035670166015624, 1.0035159301757812, 1.0033766479492188, 1.0034708251953126, 1.0032630004882812, 1.0034298706054687, 1.002693603515625, 1.0028206176757812, 1.0027888793945312, 1.0029660034179688, 1.0028257446289062, 1.0030294799804687, 1.00284619140625, 1.003236328125, 1.0038517456054687, 1.0034739379882813, 1.0031769409179687, 1.003452392578125, 1.0032343139648439, 1.0034002075195312, 1.0033592529296875, 1.0034892578125, 1.003430908203125, 1.0036060180664061, 1.0037442626953126, 1.0036817626953125, 1.003325439453125, 1.0032271118164062, 1.0029475708007813, 1.0032077026367188, 1.0028257446289062, 1.0033950805664062, 1.0032373657226563, 1.0033449096679687, 2.06620166015625, 1.003240478515625, 1.0030346069335938, 1.0028810424804688, 1.0028892211914062, 1.0025840454101562, 1.0026762084960938, 1.0028451538085938, 1.0030079956054687, 1.0030458984375, 1.0029598999023437, 1.0025256958007813, 1.002809326171875, 1.0025021362304687, 1.002545166015625, 1.002588134765625, 1.0025830688476562, 1.0028103637695311, 1.002876953125, 1.0027571411132812, 1.0027847900390625, 1.0027694091796875, 1.0029219970703125, 1.0027468872070313, 1.0030151977539064, 1.002692626953125, 1.0029998168945313, 1.0028185424804688, 1.002977294921875, 1.0030653686523439, 1.0028308715820313, 1.0027254028320312, 1.0027396850585937, 1.002481689453125, 1.0026741943359374, 1.002397705078125, 1.0026148071289063, 1.0031001586914063, 1.003051025390625, 1.0032445678710937, 1.0031226806640625, 1.00276123046875, 1.0029619140625, 1.0027110595703126, 1.00299365234375, 1.0028328857421875, 1.0028206176757812, 1.0037903442382812, 1.0038609619140626, 1.0040064086914062, 1.0035978393554688, 1.0031810302734374, 1.0034503784179687, 1.0034176025390624, 1.0035722045898436, 1.0029711303710938, 1.0032793579101562, 1.003167724609375, 1.0029445190429687, 1.0031759643554687, 1.0028656616210938, 1.002893310546875, 1.0028052368164062, 2.0654345703125, 1.0026424560546876, 1.0026322021484375, 1.0025789184570313, 1.0027899169921874, 1.0024918823242188, 1.0032875366210938, 1.002587158203125, 1.002587158203125, 1.0025154418945312, 1.0026045532226562, 1.0023638916015625, 1.002629150390625, 1.0024959716796875, 1.0028257446289062, 1.0028626098632814, 1.0027550659179687, 1.0028287963867188, 1.0026536865234374, 1.0024857788085937, 1.0027151489257813, 1.0024734497070313, 1.0031738891601563, 1.0028011474609375, 1.003345947265625, 1.002503173828125, 1.0029957275390624, 1.0027591552734374, 1.003062255859375, 1.003240478515625, 1.0029967651367186, 1.002982421875, 
1.0035916748046876, 1.0024970092773438, 1.0031472778320312, 1.0028124389648438, 1.0027591552734374, 1.0026690673828125, 1.0027110595703126, 1.0026762084960938, 1.0028493041992188, 1.0025471801757813, 1.0032220458984376, 1.0027745361328124, 1.0030745849609375, 1.0030172119140626, 1.0028216552734375, 1.002661865234375, 1.0027734985351562, 1.00288818359375, 1.0029906005859375, 1.00273046875, 1.0033080444335938, 1.0029434814453124, 1.0028789672851564, 1.0029700927734375, 1.0029117431640624, 1.0027509765625, 1.002767333984375, 1.0027171630859375, 1.0029578247070312, 1.002545166015625, 1.003062255859375, 2.0657685546875, 1.0028359375, 1.0028124389648438, 1.0024847412109374, 1.0028124389648438, 1.0025441284179688, 1.0027601928710939, 1.0023495483398437, 1.0028533935546875, 1.0025604858398438, 1.0030325927734376, 1.0025574340820314, 1.0029946899414062, 1.0025062255859376, 1.0027683715820312, 1.002640380859375, 1.0027284545898438, 1.0027683715820312, 1.0030612182617187, 1.002598388671875, 1.0030366821289063, 1.0028635864257813, 1.0032056274414063, 1.0027100219726564, 1.0028585205078124, 1.0025328369140625, 1.0029660034179688, 1.0023075561523438, 1.0032752685546875, 1.002946533203125, 1.0032271118164062, 1.0027387084960937, 1.0030264282226562, 1.0025748291015626, 1.0029188842773438, 1.0027284545898438, 1.002767333984375, 1.0028052368164062, 1.0030530395507813, 1.0026967163085938, 1.0031349487304688, 1.0030233764648437, 1.0033059692382813, 1.002514404296875, 1.0027438354492189, 1.0027816772460938, 1.0031032104492188, 1.0027970581054688, 1.0033837890625, 1.0027315063476563, 1.0034063110351563, 1.0029496459960938, 1.0030530395507813, 1.0026383056640624, 1.00305615234375, 1.0027919311523437, 1.0027427978515624, 1.0032557983398438, 1.0037903442382812, 1.0031646728515624, 1.003399169921875, 1.0027448120117188, 1.0029649658203126, 2.064649169921875, 1.0027888793945312, 1.0026015014648437, 1.0022328491210937, 1.0030059814453125, 1.0026045532226562, 1.0029281005859374, 1.0032117919921875, 1.0038660888671875, 1.0030458984375, 1.0028523559570313, 1.00279296875, 1.0029219970703125, 1.00295068359375, 1.0030786743164062, 1.00269775390625, 1.0029230346679687, 1.0029240112304687, 1.0026373291015624, 1.0023751220703125, 1.0028328857421875, 1.0023956298828125, 1.002513427734375, 1.002144775390625, 1.003087890625, 1.0024324951171875, 1.0031769409179687, 1.0026843872070312, 1.0027018432617187, 1.0026710815429687, 1.0026076049804689, 1.0026281127929688, 1.00263525390625, 1.0025758666992188, 1.00335107421875, 1.0025952758789063, 1.0027919311523437, 1.0030172119140626, 1.0029312133789063, 1.0026270751953126, 1.0026127319335938, 1.0024099731445313, 1.0028635864257813, 1.0025287475585938, 1.003124755859375, 1.0027868041992187, 1.0029752197265625, 1.0032691040039063, 1.0028994750976563, 1.0029813842773438, 1.0031022338867188, 1.0025799560546875, 1.0030069580078125, 1.0030786743164062, 1.0034902954101563, 1.00322509765625, 1.0034298706054687, 1.0037340087890625, 1.0030960693359374, 1.00282470703125, 1.0031349487304688, 1.0028359375, 1.0032772827148437, 2.06746826171875, 1.0032711791992188, 1.00282470703125, 1.0029578247070312, 1.0026793212890626, 1.0024990844726562, 1.0027325439453125, 1.0022778930664062, 1.0023464965820312, 1.0023444213867188, 1.0030346069335938, 1.0042449951171875, 1.0037616577148438, 1.0032691040039063, 1.0031185913085938, 1.0024017944335937, 1.0026875, 1.0032916259765625, 1.0031452026367187, 1.0032855224609376, 1.0035599365234376, 1.0035138549804687, 1.0038927612304687, 1.0032435302734375, 
1.002841064453125, 1.0024775390625, 1.0027438354492189, 1.0022512817382812, 1.002450927734375, 1.00248779296875, 1.002692626953125, 1.0029946899414062, 1.0030489501953126, 1.0032496337890624, 1.0034002075195312, 1.003052001953125, 1.0032271118164062, 1.0028707885742187, 1.0030796508789062, 1.0030028686523438, 1.0040872802734375, 1.00299365234375, 1.0028626098632814, 1.00265576171875, 1.002756103515625, 1.002808349609375, 1.0027335815429688, 1.0026813354492188, 1.00265673828125, 1.002919921875, 1.0032476196289062, 1.0029752197265625, 1.0029025268554688, 1.00297216796875, 1.0027991333007813, 1.0026178588867187, 1.002767333984375, 1.0026639404296875, 1.0027540283203125, 1.0026741943359374, 1.0029127807617189, 1.002988525390625, 1.0027018432617187, 2.066872314453125, 1.0026639404296875, 1.00259228515625, 1.0022307739257812, 1.0025277709960938, 1.0030377197265625, 1.0026045532226562, 1.002482666015625, 1.0029168701171876, 1.002771484375, 1.0028124389648438, 1.0024642333984375, 1.0027807006835938, 1.0025420532226563, 1.0025379638671874, 1.002556396484375, 1.002534912109375, 1.0028687133789063, 1.002840087890625, 1.0028328857421875, 1.0028451538085938, 1.0025236206054688, 1.0027632446289063, 1.0025379638671874, 1.0027888793945312, 1.0026751708984376, 1.003261962890625, 1.0027161865234375, 1.0028973999023438, 1.0030325927734376, 1.00279296875, 1.0027479248046876, 1.0025471801757813, 1.0023690185546874, 1.0029291381835939, 1.0029486083984376, 1.0027970581054688, 1.0028840942382813, 1.0029475708007813, 1.0030294799804687, 1.0031657104492187, 1.0029014892578125, 1.003304931640625, 1.0028472290039063, 1.0032578735351563, 1.0034647216796875, 1.0028277587890626, 1.0030632934570312, 1.0030980834960936, 1.00314111328125, 1.0028748779296874, 1.002660888671875, 1.0028431396484374, 1.0028472290039063, 1.0029691162109375, 1.0030172119140626, 1.002925048828125, 1.0033325805664062, 1.0035189819335937, 1.0036541137695312, 1.004179443359375, 1.0036951293945313, 1.002919921875, 2.06862841796875, 1.0036357421875, 1.0038538208007812, 1.0032230224609375, 1.0033796997070312, 1.002618896484375, 1.002708984375, 1.0022072143554688, 1.0026813354492188, 1.0022891235351563, 1.002555419921875, 1.0025963745117188, 1.00295166015625, 1.0026690673828125, 1.002914794921875, 1.003019287109375, 1.0031646728515624, 1.002956787109375, 1.0028861694335938, 1.0026751708984376, 1.003303955078125, 1.0028124389648438, 1.0030386962890625, 1.0028052368164062, 1.00309912109375, 1.0027315063476563, 1.0034688110351562, 1.0020556640625, 1.003325439453125, 1.0033530883789064, 1.003283447265625, 1.0034749145507813, 1.0035108032226563, 1.0035435791015626, 1.0037903442382812, 1.0029168701171876, 1.0031943969726562, 1.0024591064453126, 1.0026526489257812, 1.0026332397460938, 1.0027837524414063, 1.0034381103515626, 1.0032527465820313, 1.0027008056640625, 1.0030745849609375, 1.002945556640625, 1.0031943969726562, 1.0027479248046876, 1.0030663452148438, 1.002672119140625, 1.0029179077148438, 1.0026751708984376, 1.002841064453125, 1.0025738525390624, 1.0032977905273437, 1.0027786254882813, 1.0031134643554687, 1.00257177734375, 1.0031124267578124, 1.0026741943359374, 1.0032947387695312, 1.0030386962890625, 1.0032332763671874, 2.068487060546875, 1.0029629516601561, 1.0030684204101563, 1.0028482666015626, 1.0030899047851562, 1.0025728149414062, 1.0029260864257812, 1.0024949951171875, 1.0026229858398437, 1.0027960205078126, 1.003514892578125, 1.0023598022460938, 1.0027949829101563, 1.0025420532226563, 1.0026260375976563, 1.00259228515625, 1.0028103637695311, 
1.002603515625, 1.00250830078125, 1.002608642578125, 1.0032783203125, 1.0025861206054687, 1.0032824096679687, 1.0027950439453126, 1.0030663452148438, 1.0023987426757812, 1.0027417602539062, 1.0023075561523438, 1.0025850830078125, 1.0028851318359375, 1.003109375, 1.0031339721679688, 1.003430908203125, 1.0028308715820313, 1.00331005859375, 1.0026096801757813, 1.0028687133789063, 1.0031749267578125, 1.003378662109375, 1.0034810791015625, 1.0036776733398438, 1.0027991333007813, 1.0026577758789061, 1.0023618774414063, 1.002555419921875, 1.0025308227539063, 1.00274072265625, 1.0025973510742188, 1.0027908935546874, 1.003040771484375, 1.0038589477539062, 1.002471435546875, 1.003072509765625, 1.0029865112304688, 1.0029404296875, 1.0026116943359376, 1.003293701171875, 1.00347802734375, 1.0035588989257813, 1.00347802734375, 1.0037841796875, 1.0034053344726563, 1.0032077026367188]",tokens/s,0.9821471034300691,,,,,,,, -4bit-awq-gemm-flash_attention_2,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,EleutherAI/pythia-70m,EleutherAI/pythia-70m,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.40.2,,0.30.1,,,,1.19.2,,,,0.11.1,,,,,,,,,,,,,,,,,,,,,,,,,,,MB,1279.725568,872.93952,0.0,226.492416,184.397824,s,12,0.17399129676818847,0.014499274730682373,0.0004405777377392672,0.014355583667755127,0.01475712022781372,0.015286664152145383,0.01576770728111267,"[0.01588796806335449, 0.014419008255004882, 0.014310144424438476, 0.01430396842956543, 0.014377471923828124, 0.014377087593078612, 0.01433407974243164, 0.014250847816467285, 0.014390144348144531, 0.01479468822479248, 0.014266719818115234, 0.014279168128967285]",tokens/s,17656.055544507362,kWh,1.7155437431226257e-07,9.399407219055296e-08,3.2474264005518264e-07,5.902910865579981e-07,tokens/kWh,433684339.52262825,MB,1280.053248,872.93952,0.0,226.492416,197.932544,s,12,10.420531616210937,0.8683776346842448,0.009197306451406238,0.8653596801757812,0.8716784362792969,0.8835345275878906,0.8948784387207032,"[0.8977144165039063, 0.866240966796875, 0.8642362670898438, 0.8648695068359376, 0.8719328002929687, 0.8635298461914063, 0.862948974609375, 0.866379638671875, 0.86938916015625, 0.865849853515625, 0.8638052368164062, 0.8636349487304688]",tokens/s,72.54908174012076,kWh,1.0699516429705703e-05,5.862862695252688e-06,1.9223555826850592e-05,3.578593495180898e-05,tokens/kWh,1760468.186309475,,s,755,10.563054601669307,0.013990800796912995,0.001781085498877292,0.013702143669128418,0.014027775764465332,0.014339276885986327,0.028389335021972658,"[0.015265791893005372, 0.014698495864868164, 0.014765055656433105, 0.014334976196289062, 0.014128128051757812, 0.014334976196289062, 0.014543871879577636, 0.014234623908996581, 0.0144650239944458, 0.01454694366455078, 0.014418944358825684, 0.01439027214050293, 0.01425100803375244, 0.014261247634887696, 0.014215167999267577, 0.014323712348937988, 0.014708736419677734, 0.014633983612060546, 0.01434931182861328, 0.01467801570892334, 0.014370816230773926, 0.014229503631591797, 0.014525440216064453, 0.014231552124023437, 0.014211071968078613, 0.014244864463806153, 
0.014368767738342286, 0.014796799659729003, 0.014531583786010742, 0.014297087669372559, 0.014436384201049805, 0.014063584327697753, 0.014220288276672363, 0.013916159629821777, 0.014133248329162598, 0.014468095779418945, 0.014696479797363281, 0.014910431861877441, 0.014256128311157227, 0.014102527618408203, 0.01407590389251709, 0.014150655746459961, 0.014071807861328126, 0.013922304153442382, 0.013725728034973144, 0.01371132755279541, 0.013733887672424316, 0.013731840133666993, 0.014045184135437011, 0.014156800270080566, 0.014639103889465332, 0.014141440391540527, 0.013728863716125488, 0.013714336395263671, 0.013676544189453126, 0.013729791641235351, 0.013732864379882812, 0.013691904067993164, 0.013711359977722168, 0.013743103981018067, 0.013743103981018067, 0.013660223960876464, 0.0284968318939209, 0.013691904067993164, 0.01368883228302002, 0.013686783790588379, 0.013670399665832519, 0.01399295997619629, 0.013728768348693847, 0.013667327880859375, 0.013691904067993164, 0.01365503978729248, 0.013710335731506347, 0.013666303634643554, 0.013699071884155273, 0.013669376373291015, 0.013689855575561523, 0.013734911918640137, 0.013744128227233888, 0.01374617576599121, 0.01366323184967041, 0.013693951606750488, 0.013799424171447755, 0.013740032196044923, 0.013728768348693847, 0.013709312438964843, 0.013709312438964843, 0.01379532814025879, 0.013731840133666993, 0.013713408470153808, 0.013707263946533203, 0.013776896476745605, 0.01368883228302002, 0.013706239700317382, 0.013716480255126954, 0.013789183616638183, 0.014757887840270996, 0.013946911811828614, 0.013715423583984376, 0.013734911918640137, 0.013693951606750488, 0.01369600009918213, 0.013839360237121581, 0.013735936164855958, 0.013712384223937989, 0.013700096130371094, 0.013719552040100098, 0.013707263946533203, 0.013677568435668945, 0.013913087844848633, 0.013707263946533203, 0.013698047637939453, 0.013667360305786132, 0.013723615646362305, 0.013713408470153808, 0.013699071884155273, 0.013731840133666993, 0.013686783790588379, 0.013784064292907714, 0.01374828815460205, 0.013744064331054687, 0.013783040046691895, 0.013717503547668456, 0.013740032196044923, 0.013710335731506347, 0.028412927627563478, 0.013706239700317382, 0.013686783790588379, 0.013731840133666993, 0.013669376373291015, 0.013661184310913087, 0.013645824432373046, 0.013658111572265624, 0.013669376373291015, 0.013628416061401367, 0.013694975852966309, 0.013636608123779297, 0.013705216407775878, 0.013678591728210449, 0.013858816146850587, 0.01386086368560791, 0.01368883228302002, 0.013693951606750488, 0.013660160064697266, 0.01374518394470215, 0.013757408142089844, 0.013689855575561523, 0.013717503547668456, 0.01369600009918213, 0.013725695610046386, 0.013678591728210449, 0.013732895851135254, 0.013722592353820801, 0.0136878080368042, 0.013707263946533203, 0.013698047637939453, 0.013684736251831055, 0.013669376373291015, 0.013684736251831055, 0.01375641632080078, 0.013708288192749024, 0.013765631675720215, 0.013703167915344238, 0.013721599578857421, 0.013702143669128418, 0.013694975852966309, 0.013763584136962891, 0.01369600009918213, 0.013714431762695312, 0.013794303894042969, 0.013682687759399414, 0.013697024345397948, 0.013739007949829102, 0.013708288192749024, 0.013678591728210449, 0.013742079734802246, 0.013710335731506347, 0.013716480255126954, 0.013678591728210449, 0.013706239700317382, 0.013776896476745605, 0.013827072143554688, 0.013702143669128418, 0.013737983703613281, 0.013683712005615235, 0.013726719856262207, 0.013710335731506347, 0.013788160324096679, 
0.028406784057617186, 0.013711359977722168, 0.013690879821777344, 0.013694975852966309, 0.013714431762695312, 0.013697024345397948, 0.013701120376586913, 0.01367142391204834, 0.013676544189453126, 0.013768704414367675, 0.01366528034210205, 0.013711423873901367, 0.013736895561218262, 0.013741056442260742, 0.01368064022064209, 0.013705216407775878, 0.013719552040100098, 0.013714431762695312, 0.013728768348693847, 0.013700096130371094, 0.01368883228302002, 0.013689855575561523, 0.013683712005615235, 0.013835264205932616, 0.013717503547668456, 0.013843520164489746, 0.013794239997863769, 0.013710335731506347, 0.013718527793884277, 0.013725695610046386, 0.01368064022064209, 0.013674495697021484, 0.013712384223937989, 0.013718527793884277, 0.013752320289611816, 0.013718527793884277, 0.013736960411071777, 0.01367347240447998, 0.013757439613342285, 0.01370419216156006, 0.013725695610046386, 0.013716480255126954, 0.013707263946533203, 0.013740032196044923, 0.013692928314208984, 0.013726719856262207, 0.013685759544372558, 0.013721664428710938, 0.013703104019165038, 0.01367142391204834, 0.013768704414367675, 0.01369600009918213, 0.013686783790588379, 0.013691904067993164, 0.01368883228302002, 0.013734911918640137, 0.013686783790588379, 0.01375334358215332, 0.013711359977722168, 0.013979647636413574, 0.013771776199340821, 0.013735936164855958, 0.013831232070922852, 0.028631999969482423, 0.013729824066162109, 0.013675488471984864, 0.01368172836303711, 0.013717439651489258, 0.013657088279724122, 0.01366528034210205, 0.013697024345397948, 0.013677568435668945, 0.01365503978729248, 0.013760512351989745, 0.013660160064697266, 0.01366220760345459, 0.013717503547668456, 0.013717503547668456, 0.013676544189453126, 0.013710335731506347, 0.013657088279724122, 0.013947903633117676, 0.013619199752807617, 0.013640704154968262, 0.01366528034210205, 0.013693951606750488, 0.01365401554107666, 0.013627391815185547, 0.013701120376586913, 0.013742079734802246, 0.013648896217346192, 0.013675552368164062, 0.013744095802307128, 0.01440665626525879, 0.01619865608215332, 0.016926719665527345, 0.014181376457214356, 0.01386188793182373, 0.013735936164855958, 0.01376153564453125, 0.013664256095886231, 0.013693951606750488, 0.013689855575561523, 0.013661184310913087, 0.013678591728210449, 0.013649920463562011, 0.014139391899108887, 0.013820927619934082, 0.013752320289611816, 0.013699071884155273, 0.014027775764465332, 0.01368064022064209, 0.013682687759399414, 0.013675519943237305, 0.013695039749145509, 0.01367750358581543, 0.01376460838317871, 0.014016511917114258, 0.0136878080368042, 0.013678591728210449, 0.01369600009918213, 0.013848575592041015, 0.013717503547668456, 0.0136878080368042, 0.013729791641235351, 0.013838335990905762, 0.028457984924316407, 0.013737983703613281, 0.013685759544372558, 0.013691904067993164, 0.013822976112365723, 0.013697024345397948, 0.013604864120483399, 0.013649920463562011, 0.013724672317504882, 0.013774847984313965, 0.013691904067993164, 0.01366528034210205, 0.013736960411071777, 0.013702143669128418, 0.0136878080368042, 0.013701120376586913, 0.013822976112365723, 0.013692928314208984, 0.013674495697021484, 0.013799424171447755, 0.013674495697021484, 0.01368166446685791, 0.013725695610046386, 0.013814784049987794, 0.013684736251831055, 0.01368883228302002, 0.013848608016967773, 0.013685728073120117, 0.013669376373291015, 0.013674495697021484, 0.01367961597442627, 0.01367244815826416, 0.013739007949829102, 0.01367961597442627, 0.013712384223937989, 0.013697024345397948, 0.013697024345397948, 
0.013740032196044923, 0.01376460838317871, 0.013657088279724122, 0.013855744361877441, 0.013742079734802246, 0.013683712005615235, 0.013691904067993164, 0.013676544189453126, 0.013682687759399414, 0.013661184310913087, 0.013661184310913087, 0.013647904396057129, 0.013673439979553222, 0.013668352127075196, 0.013643775939941406, 0.013797375679016113, 0.013658111572265624, 0.013664256095886231, 0.013652031898498534, 0.01367033576965332, 0.013643775939941406, 0.013650943756103515, 0.013666303634643554, 0.013641728401184081, 0.01367142391204834, 0.013726719856262207, 0.028403711318969727, 0.013668352127075196, 0.013674495697021484, 0.01363046360015869, 0.013641728401184081, 0.013643775939941406, 0.01365503978729248, 0.01366220760345459, 0.01366220760345459, 0.01368166446685791, 0.013640704154968262, 0.013685759544372558, 0.013637632369995116, 0.013725695610046386, 0.013702143669128418, 0.013674495697021484, 0.013714431762695312, 0.013770751953125, 0.013796352386474609, 0.013739007949829102, 0.013711359977722168, 0.01367244815826416, 0.013684736251831055, 0.013736960411071777, 0.01367244815826416, 0.013675519943237305, 0.01368992042541504, 0.01368569564819336, 0.01368166446685791, 0.01367961597442627, 0.0136878080368042, 0.01367961597442627, 0.013676544189453126, 0.013953023910522461, 0.013818880081176758, 0.01379532814025879, 0.013685759544372558, 0.01367347240447998, 0.013747200012207032, 0.013718527793884277, 0.013677568435668945, 0.013668352127075196, 0.013685759544372558, 0.01365503978729248, 0.013649920463562011, 0.013637632369995116, 0.013633536338806153, 0.013768704414367675, 0.01365401554107666, 0.013760543823242188, 0.013644767761230468, 0.013698047637939453, 0.013644800186157227, 0.013639679908752441, 0.013675519943237305, 0.013670399665832519, 0.013651968002319336, 0.013715456008911133, 0.01376972770690918, 0.013659135818481445, 0.01366528034210205, 0.013699071884155273, 0.013642751693725585, 0.02831974411010742, 0.013700096130371094, 0.013639679908752441, 0.013622271537780761, 0.013687840461730957, 0.013710304260253907, 0.013718527793884277, 0.01368166446685791, 0.01366323184967041, 0.013705216407775878, 0.01368064022064209, 0.013686783790588379, 0.013666303634643554, 0.013668352127075196, 0.013682687759399414, 0.013727744102478028, 0.013724672317504882, 0.013721599578857421, 0.013724672317504882, 0.014102527618408203, 0.01380352020263672, 0.013740032196044923, 0.013737983703613281, 0.013920255661010742, 0.013740032196044923, 0.013717503547668456, 0.013843456268310546, 0.01380352020263672, 0.013684736251831055, 0.013757439613342285, 0.013758463859558106, 0.013974528312683105, 0.013858816146850587, 0.01417420768737793, 0.013920255661010742, 0.013936639785766602, 0.01377280044555664, 0.013703167915344238, 0.01368883228302002, 0.013677568435668945, 0.013684736251831055, 0.013694975852966309, 0.013688863754272461, 0.013667296409606934, 0.013786111831665039, 0.013828096389770507, 0.013668352127075196, 0.013664256095886231, 0.013733887672424316, 0.013702143669128418, 0.013656064033508301, 0.013676544189453126, 0.01377894401550293, 0.013708288192749024, 0.0136878080368042, 0.013683712005615235, 0.013735936164855958, 0.01395404815673828, 0.013817855834960938, 0.013650943756103515, 0.013703167915344238, 0.013696063995361328, 0.013751232147216796, 0.028461055755615236, 0.013700096130371094, 0.013645824432373046, 0.014017536163330077, 0.013718527793884277, 0.015047679901123047, 0.014234623908996581, 0.014027775764465332, 0.01408512020111084, 0.014048255920410157, 0.014173184394836426, 
0.013982720375061035, 0.014036992073059081, 0.013888511657714844, 0.013721599578857421, 0.01374617576599121, 0.013685759544372558, 0.013708288192749024, 0.013652992248535157, 0.013644800186157227, 0.013687871932983398, 0.013667263984680177, 0.01366220760345459, 0.01367347240447998, 0.01367961597442627, 0.013639679908752441, 0.01368166446685791, 0.013686783790588379, 0.013645824432373046, 0.013686783790588379, 0.01366220760345459, 0.013650943756103515, 0.013632512092590332, 0.013693951606750488, 0.013690879821777344, 0.013845503807067871, 0.013686783790588379, 0.013660160064697266, 0.013715456008911133, 0.01387724781036377, 0.014060544013977052, 0.01405951976776123, 0.013849599838256836, 0.013645824432373046, 0.013682687759399414, 0.013699071884155273, 0.013699071884155273, 0.013707263946533203, 0.013682687759399414, 0.013713408470153808, 0.013652992248535157, 0.013708288192749024, 0.01368166446685791, 0.01368064022064209, 0.013719552040100098, 0.013873151779174805, 0.01370419216156006, 0.013788224220275879, 0.013749183654785157, 0.013675519943237305, 0.01366329574584961, 0.01380140781402588, 0.014252032279968262, 0.029146112442016602, 0.013622271537780761, 0.013830143928527832, 0.013846528053283692, 0.013924351692199707, 0.013684736251831055, 0.013667327880859375, 0.01367347240447998, 0.01363865566253662, 0.01366323184967041, 0.013697024345397948, 0.013644800186157227, 0.013669376373291015, 0.01386086368560791, 0.013948927879333496, 0.013683712005615235, 0.013719552040100098, 0.013699071884155273, 0.013691904067993164, 0.013702143669128418, 0.013697024345397948, 0.013853728294372558, 0.013726688385009766, 0.013720576286315917, 0.013678591728210449, 0.01369600009918213, 0.013735936164855958, 0.013697024345397948, 0.013715456008911133, 0.013755392074584961, 0.013735936164855958, 0.013721599578857421, 0.013707263946533203, 0.013883392333984374, 0.013845503807067871, 0.013723648071289063, 0.013712384223937989, 0.013705216407775878, 0.01368883228302002, 0.013685759544372558, 0.013721599578857421, 0.013728768348693847, 0.013697024345397948, 0.013751296043395997, 0.013793279647827148, 0.013677568435668945, 0.01369600009918213, 0.013702143669128418, 0.013920255661010742, 0.013789183616638183, 0.013728768348693847, 0.013712384223937989, 0.01368166446685791, 0.013733920097351074, 0.013666272163391113, 0.01387724781036377, 0.013720576286315917, 0.013702143669128418, 0.013740032196044923, 0.013721599578857421, 0.013722623825073242, 0.013712384223937989, 0.01376153564453125, 0.02837708854675293, 0.01365401554107666, 0.01364684772491455, 0.013660160064697266, 0.013664256095886231, 0.013692928314208984, 0.013957119941711426, 0.01405951976776123, 0.013983743667602539, 0.01367961597442627, 0.013649920463562011, 0.013858880043029785, 0.013722559928894044, 0.013641728401184081, 0.013668352127075196, 0.013664256095886231, 0.013723648071289063, 0.01365503978729248, 0.013669376373291015, 0.01367142391204834, 0.01370419216156006, 0.01368166446685791, 0.01367142391204834, 0.013675519943237305, 0.013628416061401367, 0.013627391815185547, 0.013661184310913087, 0.013818880081176758, 0.013710335731506347, 0.013647871971130371, 0.013670399665832519, 0.01368064022064209, 0.01367347240447998, 0.013683712005615235, 0.01368883228302002, 0.013701120376586913, 0.013677568435668945, 0.013657088279724122, 0.013692928314208984, 0.013644800186157227, 0.013676544189453126, 0.013677568435668945, 0.01380352020263672, 0.013749247550964355, 0.013674495697021484, 0.013695039749145509, 0.013658047676086426, 0.013690879821777344, 
0.01367244815826416, 0.013657088279724122, 0.013640704154968262, 0.013657088279724122, 0.013793279647827148, 0.013695008277893066, 0.013856736183166504, 0.013647871971130371, 0.013661215782165528, 0.013750240325927735, 0.013722623825073242, 0.013768704414367675, 0.013710335731506347, 0.013717503547668456, 0.01366220760345459, 0.028322816848754883, 0.013648896217346192, 0.013686783790588379, 0.01369600009918213, 0.013699071884155273, 0.0136878080368042, 0.013691904067993164, 0.013702143669128418, 0.013697024345397948, 0.013683712005615235, 0.013700096130371094, 0.013693951606750488, 0.013722623825073242, 0.013705216407775878, 0.013691935539245605, 0.013717472076416016, 0.013724672317504882, 0.013700096130371094, 0.013664256095886231, 0.013639679908752441, 0.013757439613342285, 0.013656064033508301, 0.013667327880859375, 0.01366431999206543, 0.013849535942077636, 0.013924351692199707, 0.013633567810058594, 0.013722592353820801, 0.013668352127075196, 0.013906944274902343, 0.013789183616638183, 0.013662240028381348, 0.013687775611877442, 0.013670399665832519, 0.01367347240447998, 0.01366528034210205, 0.013657088279724122, 0.013661184310913087, 0.013678591728210449, 0.013684736251831055, 0.013658143997192384, 0.013722592353820801, 0.01366528034210205, 0.013637632369995116, 0.01367347240447998, 0.01365503978729248, 0.01367347240447998, 0.013744128227233888, 0.01370419216156006, 0.013693951606750488, 0.013711359977722168, 0.01368064022064209, 0.01368064022064209, 0.013691904067993164, 0.01369600009918213, 0.013724672317504882, 0.013697024345397948, 0.01367142391204834, 0.013682687759399414, 0.01365503978729248, 0.013683712005615235, 0.013806591987609864, 0.013955072402954101]",tokens/s,71.4755369986145,,,,,,,, -4bit-awq-gemm-flash_attention_2,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,meta-llama/Llama-2-13b-hf,meta-llama/Llama-2-13b-hf,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.0,,0.30.1,,,,1.19.2,,,,0.11.1,,,,,,,,,,,,,,,,,,,,,,,,,,,MB,3011.678208,9259.450368,0.0,8613.003264,8211.364864,s,10,10.948309448242187,1.0948309448242186,0.0017810888898920402,1.0946644287109375,1.0975797607421875,1.097637109375,1.09768298828125,"[1.0947955322265626, 1.0975670166015625, 1.0945333251953124, 1.09283447265625, 1.093535400390625, 1.0924375, 1.0933472900390624, 1.0952611083984376, 1.0963033447265624, 1.0976944580078125]",tokens/s,233.8260543422092,kWh,1.2896189077032937e-05,7.064633461468475e-06,6.102240992899577e-05,8.098323246749718e-05,tokens/kWh,3161148.20562079,MB,3011.678208,9330.753536,0.0,8684.306432,8503.627264,s,10,640.83317578125,64.083317578125,0.01786692489370897,64.076908203125,64.113046875,64.1131875,64.1133,"[64.113015625, 64.113328125, 64.1000546875, 64.0766796875, 64.07603515625, 64.07292578125, 64.06015625, 64.0786328125, 64.0652109375, 
64.07713671875]",tokens/s,0.983095170177413,kWh,0.0007565677230060101,0.0004146680905150424,0.003575465860370403,0.0047467016738914555,tokens/kWh,13272.374024793335,,s,629,649.6630466918949,1.0328506306707386,0.1300798719514248,1.0170562744140625,1.01796640625,1.0183720947265624,2.11047625,"[1.0181212158203126, 1.0173245239257813, 1.0185758666992188, 1.0180556640625, 1.018123291015625, 1.0182379760742188, 1.0183464965820312, 1.0172989501953125, 1.0187325439453125, 1.0184806518554688, 1.0173450317382813, 1.0169671630859376, 1.0177156982421875, 1.016869873046875, 1.0170828857421874, 1.0171054077148438, 1.017406494140625, 1.0178303833007813, 1.0179625244140624, 1.0176153564453125, 1.0172446899414063, 1.0175590209960939, 1.0186322021484375, 1.0177269897460937, 1.017723876953125, 1.0172518310546874, 1.0174484252929688, 1.0174822387695313, 1.0188011474609375, 1.0174320678710937, 1.0174351196289062, 1.0169671630859376, 1.0170900268554688, 1.0175518798828125, 1.0169671630859376, 1.0169815063476562, 1.01698046875, 1.017164794921875, 1.0184673461914062, 1.0172026977539061, 1.0175672607421875, 1.017354248046875, 1.0178375854492188, 1.01718017578125, 1.0172631225585937, 1.0175252685546874, 1.0175877075195312, 1.0180198364257813, 1.01806591796875, 1.0169200439453125, 1.0169722900390625, 1.0176399536132812, 1.0175457153320313, 1.0184530029296874, 1.0189946899414062, 1.017486328125, 1.0173870239257812, 1.0177218627929687, 1.0176204833984375, 1.0178468017578124, 1.0180025024414063, 1.0178231811523437, 2.115484619140625, 1.0167900390625, 1.0173245239257813, 1.0184478759765625, 1.0185051879882812, 1.018660888671875, 1.0180894775390625, 1.01798193359375, 1.0169508056640626, 1.0166098022460937, 1.0167337036132813, 1.0166784057617186, 1.0166610107421874, 1.0168370971679688, 1.0182635498046875, 1.017359375, 1.017260009765625, 1.0175375366210937, 1.0179983520507812, 1.0172620849609375, 1.0179829711914063, 1.0172477416992187, 1.0176378784179687, 1.0178222045898437, 1.018113037109375, 1.0180044555664063, 1.0183690185546874, 1.0185379638671874, 1.017069580078125, 1.0172241821289063, 1.0169231567382813, 1.0170255126953125, 1.016953857421875, 1.0170449829101562, 1.0174218139648437, 1.0171945190429688, 1.0167879638671875, 1.0176768188476562, 1.0178262939453124, 1.017734130859375, 1.01724365234375, 1.0175989990234375, 1.0170029907226563, 1.0170224609375, 1.0177013549804688, 1.0169600219726562, 1.0178672485351563, 1.0179348754882813, 1.01819287109375, 1.0178980102539064, 1.0185748291015626, 1.0183229370117188, 1.0191605834960937, 1.0187940063476562, 1.0190294799804687, 1.018576904296875, 1.0186598510742189, 1.0183741455078126, 1.0177904663085937, 1.0180341796875, 1.0173480834960937, 1.0171412353515625, 1.017723876953125, 2.11309765625, 1.0174781494140626, 1.0174166870117187, 1.0177935180664062, 1.0169886474609375, 1.0179368896484375, 1.01819189453125, 1.0179256591796875, 1.01707568359375, 1.0180884399414063, 1.0175416259765624, 1.0177484741210938, 1.01865673828125, 1.01788671875, 1.0186629028320313, 1.0175426635742189, 1.0182052001953126, 1.0181621704101562, 1.017913330078125, 1.0183598022460938, 1.0172467041015625, 1.0170460205078125, 1.0173931274414063, 1.0176419677734374, 1.0177720336914062, 1.016953857421875, 1.01732763671875, 1.0172713012695314, 1.01722216796875, 1.0184335327148437, 1.0174474487304688, 1.0173317260742187, 1.017807861328125, 1.0172548828125, 1.0176010131835938, 1.0181068725585938, 1.0175713500976562, 1.0182727661132813, 1.0180116577148437, 1.0184151000976562, 1.0177402954101562, 1.017481201171875, 
1.0176061401367187, 1.0172026977539061, 1.016680419921875, 1.0167982177734376, 1.016784912109375, 1.0172897338867188, 1.0168883056640625, 1.01704296875, 1.0168678588867188, 1.0165729370117187, 1.0167131958007813, 1.0166456298828126, 1.0165667724609375, 1.0168145751953126, 1.016875, 1.0166814575195313, 1.0167070922851562, 1.0168309936523436, 1.0168156127929688, 1.01669580078125, 1.01747607421875, 2.11044970703125, 1.0169733276367188, 1.0170706176757813, 1.0172846069335937, 1.0169989013671874, 1.0167142333984376, 1.01673779296875, 1.0168524780273438, 1.0172088623046875, 1.017069580078125, 1.0166159057617188, 1.0165391235351562, 1.0165780639648438, 1.016901611328125, 1.0178887939453125, 1.0169794311523437, 1.0168237915039062, 1.017164794921875, 1.017049072265625, 1.0169600219726562, 1.0166497192382813, 1.0166763305664062, 1.0166876220703125, 1.017296875, 1.0177669067382813, 1.0170214233398438, 1.01680126953125, 1.0170153198242187, 1.0169190673828126, 1.0170368041992188, 1.0171064453125, 1.0172354736328124, 1.0170460205078125, 1.0172548828125, 1.017270263671875, 1.01686474609375, 1.016853515625, 1.01725390625, 1.0170050659179688, 1.0172303466796875, 1.01722314453125, 1.01692724609375, 1.0170449829101562, 1.0169825439453124, 1.0179143676757811, 1.0177525634765625, 1.0174228515625, 1.0177669067382813, 1.0174218139648437, 1.017470947265625, 1.016974365234375, 1.01671728515625, 1.0168442993164062, 1.01711669921875, 1.0169733276367188, 1.017064453125, 1.016784912109375, 1.0175057983398437, 1.0172620849609375, 1.0171259155273438, 1.0172252197265625, 1.0170562744140625, 1.0171351318359374, 2.110795654296875, 1.0166190185546875, 1.0167347412109375, 1.0169712524414063, 1.0166886596679687, 1.016806396484375, 1.0165883178710937, 1.0169190673828126, 1.0171340942382812, 1.0177515258789063, 1.0171422729492188, 1.0173849487304687, 1.0172548828125, 1.0168411865234375, 1.0169845581054688, 1.0166845703125, 1.0168084716796875, 1.017049072265625, 1.0166876220703125, 1.0172395629882813, 1.016953857421875, 1.016732666015625, 1.0170419311523438, 1.016896484375, 1.016853515625, 1.0166179809570313, 1.0168473510742186, 1.0165565185546874, 1.0171422729492188, 1.0176635131835938, 1.0167357177734375, 1.0171463623046875, 1.017027587890625, 1.0171187133789064, 1.0174617309570313, 1.0175324096679688, 1.01707568359375, 1.017122802734375, 1.0175518798828125, 1.0175703125, 1.0169467163085937, 1.0168923950195312, 1.0171740112304688, 1.0170286254882812, 1.0166773681640624, 1.0171105346679687, 1.0168504028320313, 1.0169476928710937, 1.0173501586914062, 1.0176942138671874, 1.0172774658203125, 1.0176573486328124, 1.01741259765625, 1.0166753540039062, 1.0169313354492187, 1.0168473510742186, 1.016975341796875, 1.0182573852539063, 1.0172119140625, 1.017554931640625, 1.017101318359375, 1.0170040283203126, 1.0173255615234376, 2.110369873046875, 1.0170951538085937, 1.0169763793945312, 1.0166558837890625, 1.017407470703125, 1.0166405029296874, 1.0165350341796875, 1.0173060913085938, 1.0170572509765625, 1.0168780517578124, 1.0168340454101563, 1.0168545532226563, 1.0169609985351562, 1.0170890502929688, 1.0174730224609374, 1.0169528198242188, 1.0169354248046876, 1.0171760864257813, 1.0170286254882812, 1.0169978637695312, 1.0167285766601561, 1.016795166015625, 1.0171883544921876, 1.0171248779296875, 1.0169425659179687, 1.0167398681640625, 1.016647705078125, 1.017091064453125, 1.0167838745117188, 1.0169794311523437, 1.0168053588867187, 1.0165780639648438, 1.0166886596679687, 1.0169876708984376, 1.016974365234375, 1.017037841796875, 
1.0167971801757814, 1.0169210815429688, 1.0174884033203124, 1.0167941284179687, 1.0167725830078125, 1.0167675170898438, 1.0176215209960937, 1.0169835815429686, 1.017260009765625, 1.0171054077148438, 1.017228271484375, 1.01694873046875, 1.0170050659179688, 1.017069580078125, 1.017248779296875, 1.01722216796875, 1.0173972778320313, 1.017143310546875, 1.0172692260742187, 1.0171156616210937, 1.0175375366210937, 1.0173552856445311, 1.0170654907226562, 1.0171207885742188, 1.017069580078125, 1.0168463134765624, 1.0169405517578125, 2.110486572265625, 1.0164469604492188, 1.0168002319335938, 1.016369140625, 1.0169415893554687, 1.0168094482421874, 1.016580078125, 1.0167101440429687, 1.0166886596679687, 1.0167756958007812, 1.0168053588867187, 1.0167296142578126, 1.0168402099609375, 1.01680126953125, 1.0167633666992189, 1.0166968383789063, 1.01671630859375, 1.0167347412109375, 1.0167684936523438, 1.01724365234375, 1.016858642578125, 1.01710546875, 1.0170787353515625, 1.0171729736328126, 1.0169989013671874, 1.0165247802734374, 1.0167285766601561, 1.0167684936523438, 1.0166016235351563, 1.0166517944335938, 1.0164653930664063, 1.0165411987304687, 1.0165831909179688, 1.01671630859375, 1.0165924072265624, 1.0165606689453126, 1.016543212890625, 1.0166599731445312, 1.0167716064453125, 1.0168186645507813, 1.01701123046875, 1.0169200439453125, 1.0169108276367187, 1.0166343383789063, 1.016553466796875, 1.0164080810546876, 1.0169200439453125, 1.016774658203125, 1.01694873046875, 1.0167030029296875, 1.016711181640625, 1.017343994140625, 1.0178682861328125, 1.017290771484375, 1.0172057495117188, 1.0169927978515625, 1.016964111328125, 1.0171351318359374, 1.0170265502929687, 1.0169200439453125, 1.0168862915039063, 1.0171627807617187, 1.0168002319335938, 2.112787353515625, 1.0166026000976562, 1.0169682006835938, 1.0167654418945313, 1.0172047119140626, 1.0169948120117187, 1.0164418334960938, 1.0169876708984376, 1.0169866333007813, 1.0173931274414063, 1.0172119140625, 1.017186279296875, 1.0169815063476562, 1.0177986450195313, 1.0174259033203126, 1.017359375, 1.0174873657226562, 1.0177362060546875, 1.01767578125, 1.017439208984375, 1.0174658813476563, 1.01768701171875, 1.0180730590820313, 1.0177741088867187, 1.0177136840820313, 1.0177843017578125, 1.0180269775390625, 1.0175682373046875, 1.0174586791992188, 1.0174105834960938, 1.01734912109375, 1.0172139282226562, 1.0173184204101562, 1.0173480834960937, 1.0167736206054687, 1.0164807739257813, 1.0166026000976562, 1.0172415771484375, 1.01684326171875, 1.0170951538085937, 1.0171576538085938, 1.0169251708984375, 1.0167306518554688, 1.0167562255859375, 1.0166569213867187, 1.0165821533203125, 1.0164367065429687, 1.0164756469726564, 1.016543212890625, 1.0165913696289062, 1.0170572509765625, 1.0170706176757813, 1.0169989013671874, 1.0169262084960937, 1.0167183227539063, 1.01669580078125, 1.0168842163085938, 1.0169262084960937, 1.016958984375, 1.0170848999023439, 1.0168914184570312, 1.0169169921875, 1.017260009765625, 2.113946533203125, 1.0165718994140625, 1.0166046752929687, 1.016953857421875, 1.0168678588867188, 1.0169241333007812, 1.0169292602539062, 1.0173470458984375, 1.0169763793945312, 1.0174136352539063, 1.0167705688476563, 1.017006103515625, 1.0173532104492187, 1.0174310302734375, 1.0171514892578124, 1.017064453125, 1.0168955078125, 1.0172467041015625, 1.016848388671875, 1.016911865234375, 1.016647705078125, 1.0169364624023438, 1.0165985107421875, 1.0165892944335937, 1.0166067504882812, 1.0164940795898438, 1.0166220703125, 1.0168770751953125, 1.0167285766601561, 
1.0169609985351562, 1.0163701782226562, 1.0168135375976564, 1.01677978515625, 1.01676953125, 1.0166210327148437, 1.016521728515625, 1.0165770263671876, 1.0167521362304688, 1.0170449829101562, 1.0166937866210937, 1.0167091064453124, 1.01682177734375, 1.0167613525390624, 1.0167510986328125, 1.0168627319335937, 1.016816650390625, 1.0168186645507813, 1.0168770751953125, 1.0175191040039062, 1.0170839233398437, 1.0171156616210937, 1.0169978637695312, 1.0167551879882812, 1.0168893432617188, 1.0166149291992188, 1.0167460327148437, 1.0174617309570313, 1.0178723754882812, 1.0175508422851562, 1.0171740112304688, 1.0164818115234375, 1.016890380859375, 1.0167900390625, 2.115039306640625, 1.0166619873046876, 1.0170449829101562, 1.0170029907226563, 1.0168207397460938, 1.0171340942382812, 1.0175303955078125, 1.0170664672851562, 1.0168944702148437, 1.0170839233398437, 1.0168811645507811, 1.0166190185546875, 1.0173521728515624, 1.0172764282226563, 1.0172467041015625, 1.0180219116210938, 1.0176358642578125, 1.0167941284179687, 1.0168053588867187, 1.016974365234375, 1.0168657836914063, 1.0167101440429687, 1.0172078247070313, 1.017523193359375, 1.0172088623046875, 1.0169343872070313, 1.0170183715820313, 1.0173358154296874, 1.0169886474609375, 1.0170839233398437, 1.0169886474609375, 1.0171361083984376, 1.0170203857421876, 1.0177321166992188, 1.01720166015625, 1.0167777099609374, 1.0169886474609375, 1.0173583374023438, 1.0168914184570312, 1.0169722900390625, 1.0168985595703126, 1.0167562255859375, 1.016875, 1.0164859008789062, 1.0166527709960937, 1.01667431640625, 1.0164827880859375, 1.0166220703125, 1.0165606689453126, 1.0166558837890625, 1.016774658203125, 1.0169169921875, 1.0172507934570312, 1.0167654418945313, 1.0169661254882814, 1.0170787963867187, 1.0171719970703126, 1.0167449340820311, 1.0168955078125, 1.016932373046875, 1.0169548950195313, 1.0173470458984375, 1.0174095458984376]",tokens/s,0.968194209602791,,,,,,,, -4bit-awq-gemm-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,x,x,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 304, in hf_raise_for_status - response.raise_for_status() - File ""/usr/local/lib/python3.10/dist-packages/requests/models.py"", line 1024, in raise_for_status - raise HTTPError(http_error_msg, response=self) -requests.exceptions.HTTPError: 404 Client Error: Not Found for url: https://huggingface.co/x/resolve/main/config.json - -The above exception 
was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1221, in hf_hub_download - return _hf_hub_download_to_cache_dir( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1325, in _hf_hub_download_to_cache_dir - _raise_on_head_call_error(head_call_error, force_download, local_files_only) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1823, in _raise_on_head_call_error - raise head_call_error - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1722, in _get_metadata_or_catch_error - metadata = get_hf_file_metadata(url=url, proxies=proxies, timeout=etag_timeout, headers=headers) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1645, in get_hf_file_metadata - r = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 372, in _request_wrapper - response = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 396, in _request_wrapper - hf_raise_for_status(response) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status - raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-669492dc-0b78ae6813f5de6f676b55b5;76c09d7c-92b9-4864-8f20-c75ed288e03c) - -Repository Not Found for url: https://huggingface.co/x/resolve/main/config.json. -Please make sure you specified the correct `repo_id` and `repo_type`. -If you are trying to access a private or gated repo, make sure you are authenticated. 
- -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 425, in cached_file - raise EnvironmentError( -OSError: x is not a local folder and is not a valid model identifier listed on 'https://huggingface.co/models' -If this is a private repository, make sure to pass a token having permission to this repo either by logging in with `huggingface-cli login` or by passing `token=` - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemm-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,/,/,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in 
__init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 373, in cached_file - raise EnvironmentError( -OSError: / does not appear to have a file named config.json. Checkout 'https://huggingface.co///tree/None' for available files. - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemm-flash_attention_2,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,togethercomputer/RedPajama-INCITE-Base-7B-v0.1,togethercomputer/RedPajama-INCITE-Base-7B-v0.1,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.1,,0.30.1,,,,1.19.2,,,,0.11.1,,,,,,,,,,,,,,,,,,,,,,,,,,,MB,1373.622272,6237.454336,0.0,5591.007232,5346.100224,s,10,5.623939697265624,0.5623939697265625,0.0018664729250946,0.5620524597167968,0.563123095703125,0.5654235778808594,0.5672639636230469,"[0.5677240600585938, 0.5623804931640625, 0.5615673828125, 0.5610968627929688, 0.561818603515625, 0.5608659057617188, 0.5612388305664062, 0.5622863159179687, 0.562349365234375, 0.5626118774414063]",tokens/s,455.19691493930475,kWh,6.624708389058526e-06,3.630042331800116e-06,3.1119932303329425e-05,4.1374683024188065e-05,tokens/kWh,6187358.57989147,MB,1373.622272,6237.454336,0.0,5591.007232,5555.342336,s,10,326.497560546875,32.649756054687494,0.006483931910806726,32.650626953125,32.658396484375,32.6586953125,32.658934375,"[32.65203515625, 32.6462578125, 32.658994140625, 32.65576953125, 32.658330078125, 32.651564453125, 32.649689453125, 32.63969140625, 32.6434453125, 32.641783203125]",tokens/s,1.9295703126993242,kWh,0.0003854154377200722,0.00021124086242082436,0.0017644989764128718,0.0023611552765537684,tokens/kWh,26681.853847389422,,s,629,331.03564239501964,0.5262887796423203,0.06675128508752028,0.51820849609375,0.5186688842773437,0.5188689819335938,1.07959478515625,"[0.5180374755859375, 0.5182678833007812, 0.5178931274414063, 0.5179525146484375, 0.517771240234375, 0.5180712890625, 0.5179750366210938, 0.5180805053710937, 0.5178931274414063, 0.51806103515625, 0.5181265869140625, 0.5183170776367187, 0.5182382202148438, 0.5182545776367188, 0.5179371337890625, 0.5179320068359375, 0.5178828735351563, 0.5182423095703125, 0.5177661743164063, 
0.5184501953125, 0.5183272705078125, 0.5182412719726562, 0.518329345703125, 0.5184378662109375, 0.51814501953125, 0.518319091796875, 0.5182443237304688, 0.518181884765625, 0.5181737060546875, 0.5180671997070313, 0.5180620727539063, 0.5182228393554688, 0.518096923828125, 0.5180733642578125, 0.5180712890625, 0.5181204223632813, 0.518097900390625, 0.51814501953125, 0.5181767578125, 0.5185771484375, 0.518513671875, 0.5182904052734375, 0.5183529052734375, 0.5182494506835937, 0.5183231811523438, 0.518898681640625, 0.5186907958984375, 0.5187061767578125, 0.5188659057617188, 0.5184491577148438, 0.5182515258789062, 0.5187911376953125, 0.518445068359375, 0.518139892578125, 0.5181849365234374, 0.518667236328125, 0.518803466796875, 0.5185535888671875, 0.5189119873046875, 0.518846435546875, 0.518677490234375, 0.518724609375, 1.0810408935546876, 0.518350830078125, 0.51835595703125, 0.5182074584960937, 0.518297607421875, 0.5182279663085938, 0.5184102172851562, 0.5181460571289063, 0.5180006103515625, 0.5179330444335938, 0.5180252075195313, 0.5179064331054688, 0.5179320068359375, 0.5177804565429688, 0.5182894287109375, 0.5181163330078125, 0.5180518188476563, 0.5180508422851563, 0.5179484252929687, 0.5180457153320313, 0.5182883911132813, 0.5182054443359375, 0.5181962280273438, 0.5179832153320313, 0.5181480712890625, 0.518150146484375, 0.5184532470703125, 0.5180272827148438, 0.5180825805664062, 0.518097900390625, 0.5182258911132812, 0.5182105712890624, 0.5181552734375, 0.5183569946289063, 0.518529052734375, 0.518128662109375, 0.5181829223632812, 0.5179422607421875, 0.5181511840820312, 0.518192138671875, 0.5183323974609375, 0.5181368408203125, 0.5180845947265625, 0.5180733642578125, 0.5180211181640625, 0.5182935180664062, 0.5184542846679687, 0.5183917846679688, 0.5182208251953125, 0.5182781372070312, 0.5181317138671875, 0.5180057373046875, 0.51827099609375, 0.5181931762695312, 0.5183539428710937, 0.5180200805664062, 0.5180907592773437, 0.518160400390625, 0.5181306762695312, 0.518550537109375, 0.5182576904296875, 0.5182935180664062, 0.5190768432617188, 1.0802135009765625, 0.5184102172851562, 0.5183016967773437, 0.5183057861328125, 0.518302734375, 0.5182258911132812, 0.5182843017578125, 0.5182535400390625, 0.5182894287109375, 0.5180989379882812, 0.5188106079101562, 0.5188167724609375, 0.5182730102539063, 0.5184215087890625, 0.5188740844726563, 0.518466552734375, 0.5184788208007812, 0.5185781860351563, 0.518593505859375, 0.5185474853515625, 0.5185228881835937, 0.5185556640625, 0.5184358520507812, 0.5184542846679687, 0.5182996215820312, 0.518150146484375, 0.5181685791015626, 0.5181951904296875, 0.5183805541992188, 0.5179638061523437, 0.5181430053710937, 0.5183969116210938, 0.518614013671875, 0.5185802001953125, 0.518518798828125, 0.5182832641601562, 0.5184378662109375, 0.5184542846679687, 0.5183897705078125, 0.5185802001953125, 0.5184993286132813, 0.518413330078125, 0.518582275390625, 0.5183180541992187, 0.5183262939453125, 0.5186201782226563, 0.5186109619140625, 0.5183580322265625, 0.5184757690429688, 0.5184901123046874, 0.5186160888671875, 0.5187286987304688, 0.5188710327148438, 0.5184942016601563, 0.5181839599609375, 0.5181829223632812, 0.5181522216796876, 0.518096923828125, 0.5179955444335937, 0.5181552734375, 0.51811328125, 0.5180682373046875, 0.5181757202148437, 1.07947314453125, 0.5179699096679687, 0.5179678955078125, 0.51793408203125, 0.5179218139648437, 0.5176954956054688, 0.517823486328125, 0.517739501953125, 0.51822900390625, 0.5179279174804687, 0.517939208984375, 0.5180466918945312, 0.517918701171875, 
0.5178296508789062, 0.5179985961914062, 0.5177743530273438, 0.5179566040039062, 0.5178408813476563, 0.5179791259765625, 0.5178828735351563, 0.5179432983398438, 0.5181634521484375, 0.5181337890625, 0.5179771118164063, 0.5182044067382813, 0.51795556640625, 0.5179996337890626, 0.5179801635742187, 0.5180671997070313, 0.518134765625, 0.5181143188476562, 0.5187368774414063, 0.5187942504882812, 0.5189273681640625, 0.5193850708007812, 0.5187921752929687, 0.5189058837890625, 0.5189734497070313, 0.5189846801757813, 0.5192017822265625, 0.5188218994140625, 0.5181880493164063, 0.5183334350585938, 0.518235107421875, 0.5181430053710937, 0.5183231811523438, 0.5186498413085937, 0.5190901489257812, 0.5187317504882812, 0.51865087890625, 0.5185198364257813, 0.5186201782226563, 0.5188372192382813, 0.51859765625, 0.518645751953125, 0.5186754760742187, 0.518487060546875, 0.5186324462890625, 0.5185587158203125, 0.5188239135742188, 0.5186519165039063, 0.5187880859375, 0.5186846923828125, 1.0807930908203125, 0.51770166015625, 0.51816650390625, 0.5179269409179688, 0.518044677734375, 0.5178726196289063, 0.5178777465820312, 0.5178327026367188, 0.5186160888671875, 0.5184112548828125, 0.5178265380859375, 0.5183109130859375, 0.518076416015625, 0.5183846435546875, 0.5180590209960938, 0.5186488037109375, 0.5181992797851562, 0.5183733520507813, 0.5187184448242188, 0.5183948974609375, 0.5185996704101562, 0.518761474609375, 0.518898681640625, 0.5185392456054687, 0.5188966674804687, 0.51905126953125, 0.519035888671875, 0.5190471801757812, 0.5186109619140625, 0.5184972534179687, 0.5185576782226563, 0.5184890747070312, 0.518635498046875, 0.5185792236328125, 0.5188178100585937, 0.5181685791015626, 0.51841845703125, 0.5182699584960937, 0.5182064819335938, 0.518287353515625, 0.5185095825195313, 0.5183477783203125, 0.5183303833007813, 0.51831396484375, 0.5182330932617187, 0.51831396484375, 0.5185115966796875, 0.5181265869140625, 0.5181378784179688, 0.5181726684570312, 0.5181337890625, 0.5182105712890624, 0.51820849609375, 0.518345703125, 0.51832421875, 0.5183549194335938, 0.518307861328125, 0.5184307250976562, 0.5182935180664062, 0.5186068725585937, 0.5185156860351563, 0.5184860229492188, 0.5183590698242188, 1.079120849609375, 0.5180108642578125, 0.5178880004882812, 0.5183908081054688, 0.5182914428710937, 0.5184788208007812, 0.5184235229492188, 0.518445068359375, 0.518530029296875, 0.51832421875, 0.5184675903320313, 0.51827099609375, 0.5184603881835937, 0.5187942504882812, 0.5182791748046875, 0.518677490234375, 0.51831396484375, 0.5182689208984375, 0.5182883911132813, 0.5182525634765625, 0.5183006591796875, 0.5184860229492188, 0.51837646484375, 0.5183355102539062, 0.5185433349609375, 0.5184737548828126, 0.518582275390625, 0.5182945556640625, 0.5185679321289063, 0.5183539428710937, 0.5182320556640625, 0.5178674926757812, 0.5180487670898437, 0.5178419189453125, 0.518265869140625, 0.5178624267578125, 0.5180108642578125, 0.5179617309570312, 0.5181962280273438, 0.5183150024414063, 0.5182945556640625, 0.51801806640625, 0.5179739990234375, 0.5180282592773438, 0.5179627685546875, 0.5179453735351562, 0.5180723266601562, 0.5182699584960937, 0.5180877075195313, 0.5185106201171875, 0.51859765625, 0.5188925170898437, 0.5190215454101562, 0.51888232421875, 0.5181071166992187, 0.5179473876953125, 0.5181839599609375, 0.5182371826171875, 0.5180457153320313, 0.5183866577148437, 0.5179750366210938, 0.518129638671875, 0.51801806640625, 1.07964208984375, 0.5178245239257813, 0.51821875, 0.5180845947265625, 0.5184706420898437, 0.5179371337890625, 
0.5181102294921875, 0.5177528076171874, 0.5178818359375, 0.5179115600585937, 0.5181040649414063, 0.51799755859375, 0.5179422607421875, 0.517833740234375, 0.5180364990234375, 0.5180262451171875, 0.5184931640625, 0.518465576171875, 0.518752197265625, 0.5185177612304688, 0.5182627563476563, 0.51831396484375, 0.5179750366210938, 0.5179299926757812, 0.518255615234375, 0.5179238891601563, 0.5180384521484375, 0.5178255615234375, 0.5182105712890624, 0.5178951416015625, 0.5179678955078125, 0.5183211669921876, 0.518451171875, 0.5179801635742187, 0.5185515747070313, 0.518012939453125, 0.5183160400390625, 0.5181572875976562, 0.518414306640625, 0.51803955078125, 0.5180856323242188, 0.5184501953125, 0.5183150024414063, 0.5181255493164062, 0.517992431640625, 0.5180579833984374, 0.5187839965820312, 0.5185628051757812, 0.518540283203125, 0.518202392578125, 0.518066162109375, 0.5179484252929687, 0.5179596557617188, 0.518540283203125, 0.5186969604492188, 0.518518798828125, 0.518950927734375, 0.5188731079101563, 0.5186795654296875, 0.5185914916992187, 0.5187307739257813, 0.5183908081054688, 0.51898876953125, 1.0808607177734375, 0.5182371826171875, 0.5184461059570312, 0.5177640991210938, 0.5177876586914063, 0.517739501953125, 0.5178665161132813, 0.5178562622070313, 0.5178859252929687, 0.5180989379882812, 0.5182730102539063, 0.5177979125976563, 0.5178357543945312, 0.5179658203125, 0.517981201171875, 0.517939208984375, 0.5179945068359375, 0.5179227905273438, 0.518240234375, 0.5178214111328125, 0.5179320068359375, 0.5179780883789062, 0.5183355102539062, 0.5180651245117187, 0.5181480712890625, 0.5180057373046875, 0.5182074584960937, 0.5179883422851562, 0.518451171875, 0.5178818359375, 0.5180845947265625, 0.5179965209960937, 0.51812353515625, 0.5180364990234375, 0.5182320556640625, 0.5181358032226563, 0.5180743408203125, 0.5179351196289063, 0.518034423828125, 0.51783984375, 0.5181317138671875, 0.518181884765625, 0.5181368408203125, 0.518096923828125, 0.5181122436523438, 0.5180252075195313, 0.5180845947265625, 0.5183109130859375, 0.5181265869140625, 0.5181849365234374, 0.5181675415039062, 0.51801904296875, 0.5181522216796876, 0.5180067749023437, 0.5182208251953125, 0.5181409301757812, 0.5182740478515625, 0.5183969116210938, 0.5182013549804687, 0.5183569946289063, 0.518540283203125, 0.5182197875976563, 0.5181859741210938, 1.08067431640625, 0.5179432983398438, 0.5182156982421875, 0.5180016479492188, 0.5179719848632812, 0.51789208984375, 0.5180518188476563, 0.5180037231445312, 0.5178480834960938, 0.5180877075195313, 0.5180457153320313, 0.517917724609375, 0.5178644409179688, 0.5178849487304688, 0.518024169921875, 0.518054931640625, 0.5180067749023437, 0.5178900756835938, 0.5179576416015625, 0.5179576416015625, 0.5179586791992188, 0.5180856323242188, 0.5181337890625, 0.5180282592773438, 0.5181214599609375, 0.5182494506835937, 0.518319091796875, 0.5179115600585937, 0.5183477783203125, 0.5183272705078125, 0.5182802124023438, 0.5183600463867187, 0.5181358032226563, 0.5181696166992188, 0.518255615234375, 0.5183529052734375, 0.5181572875976562, 0.5181859741210938, 0.5181696166992188, 0.51808154296875, 0.51820849609375, 0.5184255981445313, 0.5182371826171875, 0.5183518676757812, 0.5184041137695312, 0.5182156982421875, 0.518381591796875, 0.5181542358398438, 0.5183303833007813, 0.5183754272460938, 0.5185751342773437, 0.5181276245117188, 0.51801904296875, 0.517982177734375, 0.5183969116210938, 0.5180518188476563, 0.5181931762695312, 0.5182013549804687, 0.51814501953125, 0.5182044067382813, 0.5184788208007812, 0.5181685791015626, 
0.5182545776367188, 1.0810133056640625, 0.5179525146484375, 0.5181378784179688, 0.5180538940429688, 0.51814501953125, 0.517855224609375, 0.5180466918945312, 0.5178275756835937, 0.5180252075195313, 0.51799755859375, 0.5179699096679687, 0.5178880004882812, 0.5178541870117187, 0.5178347778320312, 0.51806103515625, 0.5181675415039062, 0.5181859741210938, 0.5180692749023438, 0.5179873046875, 0.5182177124023437, 0.518108154296875, 0.5182166748046875, 0.5182371826171875, 0.5179218139648437, 0.5183621215820312, 0.5186570434570312, 0.5182105712890624, 0.5181808471679688, 0.5182515258789062, 0.5181306762695312, 0.5180211181640625, 0.5178193969726562, 0.5181091918945312, 0.5179535522460937, 0.5180016479492188, 0.5181306762695312, 0.518223876953125, 0.5179525146484375, 0.5181911010742187, 0.5179227905273438, 0.5183364868164062, 0.5182883911132813, 0.5184389038085937, 0.5180641479492187, 0.5185689697265625, 0.5180364990234375, 0.5181071166992187, 0.5180067749023437, 0.5183314208984375, 0.5180825805664062, 0.5181122436523438, 0.5179945068359375, 0.5179750366210938, 0.5179535522460937, 0.5183303833007813, 0.5182197875976563, 0.5182955322265625, 0.5183119506835937, 0.51829248046875, 0.518128662109375, 0.518055908203125, 0.5183754272460938, 0.5182371826171875]",tokens/s,1.9000975104953335,,,,,,,, -4bit-awq-gemm-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,facebook/xglm-564M,facebook/xglm-564M,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - self.load_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3704, in from_pretrained - config = cls._autoset_attn_implementation( - File 
""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1481, in _autoset_attn_implementation - cls._check_and_enable_flash_attn_2( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1572, in _check_and_enable_flash_attn_2 - raise ValueError( -ValueError: XGLMForCausalLM does not support Flash Attention 2.0 yet. Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmpiuu1nskh/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemm-flash_attention_2,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,EleutherAI/gpt-neo-125m,,cuda,0,42,,,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.0,217063f5c507ed7cc255df7e1f64c4333a0b4dfe,4.40.2,,0.30.1,,,,1.19.2,,,,0.10.0,,,,,,,,,,,,,,,,,,,,,,,,,,,MB,1273.409536,921.174016,0.0,274.726912,220.646912,s,10,0.326115104675293,0.032611510467529296,0.0012484582231843272,0.03218409538269043,0.032784641265869136,0.034564048767089836,0.035987574768066403,"[0.036343456268310544, 0.03226867294311524, 0.03213779067993164, 0.032141822814941406, 0.03203395080566406, 0.03214742279052735, 0.032220767974853515, 0.03238921737670898, 0.03208832168579102, 0.032343681335449216]",tokens/s,7849.989047728858,kWh,3.834838485734976e-07,2.1012961289151069e-07,8.25654520552228e-07,1.419267982017236e-06,tokens/kWh,180374674.2994524,MB,1273.409536,921.174016,0.0,274.726912,250.722304,s,10,20.085640136718748,2.008564013671875,0.006065788108395492,2.006720520019531,2.0181213745117184,2.0185785705566404,2.018944327392578,"[2.018019775390625, 2.006443115234375, 2.0069979248046876, 2.0103148193359375, 2.0040799560546874, 2.00180322265625, 2.00428125, 2.0133179931640623, 2.0013463134765623, 2.0190357666015624]",tokens/s,31.36569189290069,kWh,2.3731767884623614e-05,1.3005590929520364e-05,4.874369069043466e-05,8.548104950457861e-05,tokens/kWh,737005.457526882,,s,629,20.34066634368894,0.03233810229521298,0.0038819833531807996,0.03179007911682129,0.0323544822692871,0.032856268310546874,0.06427897552490235,"[0.03209625625610352, 0.031882240295410154, 0.031893503189086916, 0.032045055389404296, 0.03220787048339844, 0.031922176361083986, 0.031958015441894534, 0.03196518325805664, 0.031784959793090824, 0.03223756790161133, 0.031974399566650394, 0.031955968856811526, 0.03202969741821289, 0.03302809524536133, 0.03192835235595703, 0.031831008911132816, 0.03254272079467774, 0.033226783752441404, 0.032270305633544924, 0.032140289306640625, 0.031940607070922854, 0.032451583862304685, 0.03203788757324219, 0.03188531112670898, 0.032231422424316404, 0.031936511993408204, 0.03298099136352539, 0.03213516616821289, 0.03167948722839355, 0.031526912689208986, 0.031693824768066405, 0.03172966384887695, 0.03170918464660644, 0.03180953598022461, 0.03179929542541504, 0.031628288269042966, 0.031508480072021484, 0.03155660820007324, 0.03158937644958496, 0.03198259162902832, 
0.03151974487304687, 0.03146342468261719, 0.031752191543579104, 0.03203481674194336, 0.03182796859741211, 0.03228160095214844, 0.03201331329345703, 0.03220377731323242, 0.03181056022644043, 0.031865856170654294, 0.031473663330078124, 0.031682559967041016, 0.031628288269042966, 0.03277414321899414, 0.03190790367126465, 0.0320777587890625, 0.03253350448608398, 0.032473087310791016, 0.032159744262695314, 0.03166924858093262, 0.03159654426574707, 0.031492095947265625, 0.06451507568359376, 0.032, 0.03342745590209961, 0.032282623291015625, 0.03176243209838867, 0.0318525447845459, 0.03180646324157715, 0.03143065643310547, 0.03154841613769531, 0.03168870353698731, 0.031514623641967776, 0.03150643157958984, 0.03214438247680664, 0.033160190582275394, 0.03213312149047851, 0.03156684875488281, 0.031958015441894534, 0.03179110336303711, 0.03184332847595215, 0.03142963218688965, 0.03191705513000488, 0.031768575668334964, 0.03164057540893555, 0.0317573127746582, 0.03179827117919922, 0.031731712341308595, 0.03301375961303711, 0.031920127868652344, 0.03172352027893066, 0.03289395141601562, 0.031927295684814457, 0.03162521553039551, 0.03168358421325684, 0.03179110336303711, 0.031666175842285156, 0.03177369689941406, 0.031678464889526366, 0.03179315185546875, 0.031765504837036135, 0.031699968338012696, 0.03142860794067383, 0.03162419128417969, 0.031615999221801756, 0.03140505599975586, 0.03147776031494141, 0.031458303451538085, 0.03143987274169922, 0.03174399948120117, 0.0317255687713623, 0.03142758369445801, 0.03156991958618164, 0.031783935546875, 0.03183616065979004, 0.0317890567779541, 0.03189248085021973, 0.03191910362243652, 0.031932416915893554, 0.032277503967285154, 0.032382976531982424, 0.0317706241607666, 0.03181260871887207, 0.031749120712280275, 0.03174502372741699, 0.06404198455810547, 0.03170611190795898, 0.031704063415527346, 0.031628288269042966, 0.03138764762878418, 0.031698944091796875, 0.03154022407531738, 0.03150540733337402, 0.03196416091918945, 0.03177881622314453, 0.032020481109619144, 0.03214233779907227, 0.03199692726135254, 0.031780864715576174, 0.03226009750366211, 0.032115711212158206, 0.03199180793762207, 0.03156787109375, 0.031453184127807614, 0.03144908714294434, 0.0314839038848877, 0.03146649551391602, 0.031458303451538085, 0.03172147178649903, 0.031893503189086916, 0.0328007698059082, 0.03156787109375, 0.031731712341308595, 0.031849472045898435, 0.03165388870239258, 0.03178700828552246, 0.03168259239196777, 0.03179311943054199, 0.031783935546875, 0.03212083053588867, 0.03178188705444336, 0.03179724884033203, 0.033549312591552735, 0.03284889602661133, 0.031834112167358396, 0.03212492752075195, 0.03198259162902832, 0.03174297523498535, 0.031731712341308595, 0.031753215789794925, 0.03182387161254883, 0.031473663330078124, 0.03165388870239258, 0.03157196807861328, 0.031453184127807614, 0.031734783172607424, 0.03165695953369141, 0.03180953598022461, 0.03184537506103516, 0.03207372665405273, 0.032285694122314454, 0.032556095123291016, 0.0321484146118164, 0.0324956169128418, 0.03182592010498047, 0.03148287963867188, 0.03179110336303711, 0.03181056022644043, 0.06443007659912109, 0.03183001518249512, 0.03196928024291992, 0.03182694435119629, 0.03170099258422852, 0.03181056022644043, 0.0315043830871582, 0.03177881622314453, 0.03177574348449707, 0.03191910362243652, 0.0321710090637207, 0.03204403305053711, 0.03181875228881836, 0.031703039169311525, 0.03174604797363281, 0.03175526428222656, 0.031764480590820314, 0.031731712341308595, 0.03183923149108887, 0.03340390396118164, 
0.032922622680664065, 0.032161792755126956, 0.03223756790161133, 0.031734783172607424, 0.03291340637207031, 0.032074752807617186, 0.03202150344848633, 0.031716352462768556, 0.03155046463012695, 0.031692800521850584, 0.03226828765869141, 0.03171737670898438, 0.03140505599975586, 0.03134976005554199, 0.03197952079772949, 0.03200307083129883, 0.031696895599365234, 0.03169484710693359, 0.03176755142211914, 0.0321710090637207, 0.03153919982910156, 0.03143987274169922, 0.03146444892883301, 0.03160371208190918, 0.03245568084716797, 0.0314337272644043, 0.03163545608520508, 0.03231846237182617, 0.03222323226928711, 0.031942655563354495, 0.03174399948120117, 0.03162112045288086, 0.03139788818359375, 0.032302078247070314, 0.03218841552734375, 0.03275161743164062, 0.0322529296875, 0.03198361587524414, 0.03178803253173828, 0.031665151596069335, 0.03129241561889649, 0.03186073684692383, 0.031955968856811526, 0.06424063873291015, 0.03157811164855957, 0.03165695953369141, 0.03165081596374512, 0.03146854400634766, 0.031693824768066405, 0.03204095840454101, 0.03246182250976563, 0.03183103942871094, 0.03218636703491211, 0.03204095840454101, 0.031784959793090824, 0.03218431854248047, 0.03177779197692871, 0.031698944091796875, 0.03156684875488281, 0.031731712341308595, 0.03239424133300781, 0.03142758369445801, 0.03172966384887695, 0.03203379058837891, 0.03207167816162109, 0.03187302398681641, 0.033006591796875, 0.03175526428222656, 0.0316180477142334, 0.031959039688110355, 0.031514623641967776, 0.031884288787841795, 0.03161497688293457, 0.031440895080566404, 0.031442943572998046, 0.03174092864990234, 0.03172966384887695, 0.03197747230529785, 0.03176755142211914, 0.03205222320556641, 0.03165695953369141, 0.03194777679443359, 0.03171327972412109, 0.031562751770019534, 0.03168972778320313, 0.031731712341308595, 0.03172966384887695, 0.03186483192443847, 0.032143360137939454, 0.03146240043640137, 0.03147776031494141, 0.03138355255126953, 0.0313753604888916, 0.03180646324157715, 0.0317255687713623, 0.03186380767822266, 0.03160678482055664, 0.032494590759277346, 0.03222528076171875, 0.032045055389404296, 0.03183206367492676, 0.03183206367492676, 0.03180851173400879, 0.03134464073181152, 0.031470592498779294, 0.031731712341308595, 0.06429388427734375, 0.03167436790466309, 0.031923200607299806, 0.03159756851196289, 0.03151667213439941, 0.031647743225097655, 0.031802400588989255, 0.03235427093505859, 0.03198259162902832, 0.03172147178649903, 0.031888383865356446, 0.03179417610168457, 0.031507455825805664, 0.03172761535644531, 0.031579135894775394, 0.031692800521850584, 0.0315156478881836, 0.031440895080566404, 0.03173990440368652, 0.03155356788635254, 0.031624160766601565, 0.03153510475158691, 0.033051647186279294, 0.03177369689941406, 0.031783935546875, 0.03135897636413574, 0.03179007911682129, 0.031714303970336914, 0.031848447799682614, 0.031476736068725586, 0.03139993667602539, 0.03133440017700195, 0.03155763244628906, 0.03146956825256347, 0.03141024017333984, 0.031344575881958006, 0.03177574348449707, 0.03194675254821777, 0.03219046401977539, 0.03170918464660644, 0.031678464889526366, 0.03172249603271484, 0.03172147178649903, 0.031513599395751955, 0.03161497688293457, 0.03177779197692871, 0.03177574348449707, 0.03137228775024414, 0.03147369575500488, 0.031712223052978515, 0.03170918464660644, 0.03205222320556641, 0.03178803253173828, 0.03221299362182617, 0.03220889663696289, 0.03131391906738281, 0.03146956825256347, 0.03166720008850098, 0.031661056518554685, 0.03207376098632812, 0.03320111846923828, 
0.032717823028564456, 0.032519168853759765, 0.06434508514404297, 0.031736831665039066, 0.031752191543579104, 0.031851520538330076, 0.03142963218688965, 0.03157196807861328, 0.032121856689453124, 0.03311513519287109, 0.03222534561157227, 0.03178079986572266, 0.03179827117919922, 0.03183206367492676, 0.03181465530395508, 0.03183206367492676, 0.03177267265319824, 0.03184025573730469, 0.031542272567749025, 0.03150540733337402, 0.03178598403930664, 0.03185663986206055, 0.03176038360595703, 0.03312639999389649, 0.03152179145812988, 0.032102401733398435, 0.03188019180297851, 0.03193036842346191, 0.031324159622192385, 0.031851520538330076, 0.03158016014099121, 0.031389696121215824, 0.03189555168151856, 0.03170099258422852, 0.03173785591125488, 0.031285247802734374, 0.03177068710327149, 0.031356864929199216, 0.03158527946472168, 0.031526912689208986, 0.03160985565185547, 0.03185868835449219, 0.031848447799682614, 0.031437824249267575, 0.0317388801574707, 0.03174297523498535, 0.0319866886138916, 0.03175014305114746, 0.031922176361083986, 0.031817728042602536, 0.03180544090270996, 0.03139072036743164, 0.03181260871887207, 0.03197952079772949, 0.03224166488647461, 0.03209011077880859, 0.03164057540893555, 0.03165184020996094, 0.031438848495483396, 0.03207372665405273, 0.03194675254821777, 0.03155148887634277, 0.03187302398681641, 0.03263488006591797, 0.03184025573730469, 0.06429801940917969, 0.03187299156188965, 0.03203276824951172, 0.032094207763671875, 0.03197849655151367, 0.031958015441894534, 0.031926271438598636, 0.031834112167358396, 0.03189452743530274, 0.03218841552734375, 0.032353279113769534, 0.0318791675567627, 0.03179315185546875, 0.031887359619140625, 0.03185868835449219, 0.03156480026245117, 0.03182796859741211, 0.0322426872253418, 0.032043006896972655, 0.03181670379638672, 0.03299020767211914, 0.032031742095947266, 0.03186892890930176, 0.032304126739501955, 0.03237171173095703, 0.0319866886138916, 0.031454208374023435, 0.031525888442993165, 0.03151155281066895, 0.031492095947265625, 0.03167334365844727, 0.031730688095092774, 0.03897753524780274, 0.032933887481689454, 0.031889408111572266, 0.03211980819702148, 0.031719423294067385, 0.031352832794189454, 0.03140505599975586, 0.031687711715698244, 0.031402975082397463, 0.03169177627563476, 0.031749120712280275, 0.03167948722839355, 0.03165695953369141, 0.0316753921508789, 0.03172659111022949, 0.03139072036743164, 0.03140812873840332, 0.03175526428222656, 0.03156684875488281, 0.03263590240478516, 0.03202252960205078, 0.03183923149108887, 0.0319682559967041, 0.031649791717529296, 0.031784959793090824, 0.03172352027893066, 0.03167129516601563, 0.0314337272644043, 0.03163443183898926, 0.03186483192443847, 0.03146137619018555, 0.06431948852539063, 0.03137843132019043, 0.03158016014099121, 0.031425535202026365, 0.031645696640014646, 0.031730688095092774, 0.03186486434936524, 0.031797216415405276, 0.0318156795501709, 0.03175628852844238, 0.031699968338012696, 0.031410175323486327, 0.03138355255126953, 0.03278540802001953, 0.03300454330444336, 0.032178176879882815, 0.0318156795501709, 0.031855615615844726, 0.031870975494384765, 0.03283967971801758, 0.0314204158782959, 0.0314081916809082, 0.031652799606323244, 0.03211775970458984, 0.031666175842285156, 0.03154022407531738, 0.031373311996459964, 0.03173785591125488, 0.031546367645263675, 0.0316753921508789, 0.0315043830871582, 0.031529983520507815, 0.031764480590820314, 0.03139481544494629, 0.03133440017700195, 0.03138559913635254, 0.03137843132019043, 0.031735807418823245, 0.031543296813964845, 
0.03143065643310547, 0.03159859275817871, 0.031936511993408204, 0.031735807418823245, 0.03193548774719238, 0.031458303451538085, 0.031456256866455076, 0.031716352462768556, 0.0317388801574707, 0.032037952423095706, 0.031414207458496095, 0.0326901741027832, 0.03216793441772461, 0.031585344314575194, 0.03156883239746094, 0.03191398429870605, 0.031631359100341795, 0.031903743743896484, 0.03156076812744141, 0.03184736061096191, 0.03181363105773926, 0.03224371337890625, 0.03225702285766602, 0.031955968856811526, 0.06444134521484375, 0.03154431915283203, 0.031665151596069335, 0.03288780975341797, 0.031886335372924804, 0.03204403305053711, 0.03172659111022949, 0.031509504318237305, 0.03173785591125488, 0.032, 0.032320510864257815, 0.03182387161254883, 0.03230003356933594, 0.03187404823303223, 0.032198688507080075, 0.03247203063964844, 0.03235532760620117, 0.032702465057373044, 0.03311718368530273, 0.03155148887634277, 0.03170201683044434, 0.03225094223022461, 0.03220576095581055, 0.03237580871582031, 0.032230400085449216, 0.03218431854248047, 0.03240447998046875, 0.032449535369873043, 0.03225600051879883, 0.03243622589111328, 0.0319498233795166, 0.03165081596374512, 0.031528959274291994, 0.03161702346801758, 0.0321003532409668, 0.03190784072875977, 0.03203788757324219, 0.032328704833984374, 0.032467967987060545, 0.03184435272216797, 0.031665151596069335, 0.03177369689941406, 0.03174604797363281, 0.03233894348144531, 0.031704063415527346, 0.03172454452514648, 0.032039936065673826, 0.03196211242675781, 0.031926271438598636, 0.0328611831665039, 0.03221200180053711, 0.03217712020874024, 0.03181056022644043, 0.03184332847595215, 0.032064510345458985, 0.03207372665405273, 0.03184339141845703, 0.03191596794128418, 0.031817760467529294, 0.031796192169189455, 0.03153408050537109, 0.032161792755126956, 0.03232460784912109]",tokens/s,30.92327406447812,,,,,,main,False,False -4bit-awq-gemm-flash_attention_2,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,EleutherAI/gpt-j-6b,,cuda,0,42,,,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.0,217063f5c507ed7cc255df7e1f64c4333a0b4dfe,4.40.2,,0.30.1,,,,1.19.2,,,,0.10.0,,,,,,,,,,,,,,,,,,,,,,,,,,,MB,2054.77888,5566.365696,0.0,4919.918592,4635.537408,s,10,5.159750671386719,0.5159750671386718,0.0024202673095528563,0.5153813171386719,0.5170659973144531,0.5199656463623047,0.5222853656005859,"[0.5228652954101562, 0.5143955078125, 0.5157650146484375, 0.5144519653320313, 0.5155265502929688, 0.5141107177734375, 0.516309814453125, 0.5146680908203125, 0.516421630859375, 0.515236083984375]",tokens/s,496.14800463061573,kWh,6.087385805116759e-06,3.3354087369571064e-06,2.7887036198498415e-05,3.7309830740572285e-05,tokens/kWh,6861462.379179726,MB,2054.77888,5566.365696,0.0,4919.918592,4794.439168,s,10,301.012869140625,30.101286914062502,0.007474756939868433,30.101162109375,30.1092380859375,30.11128505859375,30.11292263671875,"[30.104474609375, 30.0874609375, 30.10366796875, 30.09865625, 30.095248046875, 30.1085703125, 30.108783203125, 30.11333203125, 30.097923828125, 
30.094751953125]",tokens/s,2.0929337732257594,kWh,0.00035535723721815483,0.00019476662748246779,0.0016039330748120972,0.00215405693951272,tokens/kWh,29247.13773548231,,s,629,305.1713383178704,0.4851690593288888,0.06123176770881389,0.4777687072753906,0.47843901367187497,0.47872921142578123,0.9921051538085938,"[0.47712460327148437, 0.4769034118652344, 0.47702426147460936, 0.47709390258789064, 0.47730584716796876, 0.4771205139160156, 0.4773304443359375, 0.47765298461914063, 0.4770846862792969, 0.4785274963378906, 0.47798681640625, 0.47815167236328127, 0.4778588256835937, 0.4773570556640625, 0.477907958984375, 0.47804620361328126, 0.47764480590820313, 0.4777400207519531, 0.4774901733398437, 0.4775546875, 0.4790906982421875, 0.47841998291015625, 0.4775454711914062, 0.4774205322265625, 0.4777062377929687, 0.4773007507324219, 0.47782296752929687, 0.477517822265625, 0.47927194213867186, 0.47922592163085936, 0.4796824951171875, 0.4796200866699219, 0.478603271484375, 0.47771649169921876, 0.4775034790039063, 0.47786904907226563, 0.4779376525878906, 0.4778526611328125, 0.47831039428710936, 0.47768167114257815, 0.4777820129394531, 0.47838104248046875, 0.4779438171386719, 0.4779376525878906, 0.47762432861328125, 0.47779022216796874, 0.47766015625, 0.4779346008300781, 0.47730584716796876, 0.47776666259765627, 0.4777215881347656, 0.4779438171386719, 0.4776980590820312, 0.4786534423828125, 0.47755673217773437, 0.4775034790039063, 0.47755059814453127, 0.47741543579101564, 0.4771686401367187, 0.47783526611328125, 0.4779069519042969, 0.4776560668945313, 0.9921392822265624, 0.4773294067382812, 0.4772085876464844, 0.477370361328125, 0.4770396728515625, 0.4769504699707031, 0.47741748046875, 0.47746356201171875, 0.4771512451171875, 0.47819979858398437, 0.4786329650878906, 0.47734783935546876, 0.477412353515625, 0.4770017395019531, 0.47757003784179686, 0.47735910034179685, 0.47719219970703125, 0.4772116394042969, 0.4775659484863281, 0.47811892700195313, 0.4780257263183594, 0.4779304809570312, 0.4777492370605469, 0.4775045166015625, 0.4776427612304687, 0.4780738525390625, 0.4773498840332031, 0.47781170654296873, 0.4772178039550781, 0.47686654663085937, 0.4779991149902344, 0.47811175537109374, 0.47742669677734373, 0.4774471740722656, 0.47721368408203124, 0.4768757629394531, 0.47745944213867186, 0.4774481811523438, 0.4774799499511719, 0.4782315979003906, 0.4776785583496094, 0.4772024230957031, 0.477707275390625, 0.47777587890625, 0.47741030883789065, 0.47758950805664063, 0.47756494140625, 0.47724337768554687, 0.4777215881347656, 0.47767962646484374, 0.47741543579101564, 0.47842715454101564, 0.4785602111816406, 0.4774819946289062, 0.4774615173339844, 0.4775465087890625, 0.4775475158691406, 0.4778106994628906, 0.4775577697753906, 0.4773294067382812, 0.47762738037109376, 0.47781375122070313, 0.4781895751953125, 0.9939957885742188, 0.47789157104492186, 0.4779029235839844, 0.4775321044921875, 0.4773294067382812, 0.47682968139648435, 0.4777994384765625, 0.478455810546875, 0.4786780090332031, 0.47789056396484375, 0.47756390380859376, 0.4770979919433594, 0.47737344360351563, 0.4772915344238281, 0.4772536315917969, 0.4778649597167969, 0.47748095703125, 0.4771614685058594, 0.47814862060546875, 0.47750143432617187, 0.4773918762207031, 0.47743896484375, 0.4776662902832031, 0.47875582885742185, 0.4773284606933594, 0.4779539794921875, 0.4778567810058594, 0.47754238891601564, 0.47780154418945314, 0.47773382568359374, 0.4781598815917969, 0.4780472412109375, 0.4777594909667969, 0.477765625, 0.47762841796875, 0.47739596557617187, 
0.47771444702148436, 0.47763250732421875, 0.4777687072753906, 0.4783943786621094, 0.47779736328125, 0.4777215881347656, 0.47773284912109376, 0.47762841796875, 0.4779315185546875, 0.4779735107421875, 0.4777461853027344, 0.4787445983886719, 0.47787722778320313, 0.4785080261230469, 0.4784373779296875, 0.47873638916015626, 0.47828274536132814, 0.4777001037597656, 0.47793560791015627, 0.4780277709960937, 0.47800115966796874, 0.47787213134765627, 0.47758950805664063, 0.47742156982421874, 0.47888998413085937, 0.478159912109375, 0.47799700927734373, 0.9920173950195312, 0.47721368408203124, 0.4778260498046875, 0.477939697265625, 0.47749530029296877, 0.4772659912109375, 0.4777287292480469, 0.4779949645996094, 0.47825204467773436, 0.4779581298828125, 0.4773775329589844, 0.4774225769042969, 0.47868109130859376, 0.4778076171875, 0.47798785400390625, 0.478139404296875, 0.47792538452148436, 0.4779366455078125, 0.4774615173339844, 0.47746356201171875, 0.477728759765625, 0.47751473999023436, 0.47717376708984377, 0.47695462036132813, 0.4774942626953125, 0.47761920166015626, 0.4774625244140625, 0.4780001220703125, 0.4778526611328125, 0.47788134765625, 0.4775301513671875, 0.47762127685546873, 0.4773498229980469, 0.4779581298828125, 0.4775413818359375, 0.47721881103515623, 0.47748504638671874, 0.4778824462890625, 0.47803488159179686, 0.47796633911132813, 0.47795098876953124, 0.47843328857421874, 0.4778895263671875, 0.47771749877929687, 0.47763766479492187, 0.47745123291015623, 0.47731610107421873, 0.47720550537109374, 0.47810763549804686, 0.47794686889648436, 0.478023681640625, 0.47817214965820315, 0.4775679931640625, 0.4772167663574219, 0.4781783142089844, 0.4780738525390625, 0.4782264404296875, 0.4780769348144531, 0.4779223022460938, 0.4776662902832031, 0.47806668090820315, 0.4780738525390625, 0.47803903198242187, 0.9936957397460937, 0.47736831665039064, 0.4772413330078125, 0.47760589599609377, 0.47733966064453126, 0.4780892028808594, 0.47794790649414065, 0.4774604797363281, 0.47752294921875, 0.4775792541503906, 0.47779022216796874, 0.4776407165527344, 0.4775096435546875, 0.4774523010253906, 0.4773304443359375, 0.4777021484375, 0.477939697265625, 0.4781055908203125, 0.4771205749511719, 0.477898681640625, 0.47731201171875, 0.47782196044921876, 0.4773775329589844, 0.4771747741699219, 0.4777697143554688, 0.47787213134765627, 0.4778188781738281, 0.4779683837890625, 0.47738983154296877, 0.4772915344238281, 0.4775802917480469, 0.47765914916992186, 0.47731201171875, 0.47755877685546877, 0.4773355407714844, 0.4775382995605469, 0.477918212890625, 0.47747378540039065, 0.4770508728027344, 0.4778793029785156, 0.477528076171875, 0.47719937133789064, 0.4775516052246094, 0.4772341613769531, 0.4773990478515625, 0.47757415771484374, 0.47758746337890623, 0.47739495849609376, 0.47834423828125, 0.4778536682128906, 0.4773058776855469, 0.47772262573242186, 0.4777635498046875, 0.4777185363769531, 0.4785080261230469, 0.4780687255859375, 0.4775168151855469, 0.4807618408203125, 0.47829605102539063, 0.47767245483398435, 0.4783206481933594, 0.4779980773925781, 0.4780625915527344, 0.9916989135742188, 0.47712973022460936, 0.4770652160644531, 0.4772781982421875, 0.47782296752929687, 0.47800833129882814, 0.47793356323242187, 0.47763150024414064, 0.47709390258789064, 0.47777484130859377, 0.4777667236328125, 0.47767550659179686, 0.47757720947265625, 0.4771798400878906, 0.47727410888671873, 0.47779531860351565, 0.4776089477539063, 0.4775792541503906, 0.47766937255859376, 0.47783627319335936, 0.47713690185546875, 0.47779226684570314, 
0.4778567810058594, 0.4774615173339844, 0.4775833740234375, 0.47805950927734375, 0.47846502685546877, 0.478561279296875, 0.4782633056640625, 0.4781055908203125, 0.47865753173828124, 0.47780044555664064, 0.47805950927734375, 0.4787906494140625, 0.47858688354492185, 0.47910400390625, 0.47804107666015627, 0.4773498840332031, 0.47763455200195315, 0.4782264404296875, 0.47779736328125, 0.47735809326171874, 0.4779683837890625, 0.4779571228027344, 0.47788134765625, 0.47783627319335936, 0.4788439025878906, 0.47873126220703127, 0.47824075317382814, 0.47784039306640624, 0.47777279663085936, 0.47869952392578125, 0.4775126953125, 0.47748504638671874, 0.4778311767578125, 0.47821005249023435, 0.478708740234375, 0.4786903076171875, 0.4786278381347656, 0.4780533752441406, 0.47773080444335936, 0.47787213134765627, 0.47809127807617186, 0.9952440185546875, 0.4780052490234375, 0.47796734619140624, 0.47861553955078123, 0.47840972900390627, 0.478055419921875, 0.4771358642578125, 0.47734375, 0.4771768188476562, 0.47729766845703125, 0.477949951171875, 0.4772239379882813, 0.4777277526855469, 0.47779531860351565, 0.47714407348632815, 0.47731814575195314, 0.47762841796875, 0.47755877685546877, 0.477528076171875, 0.4772034606933594, 0.47703143310546875, 0.47779531860351565, 0.4779202575683594, 0.4790067138671875, 0.478593017578125, 0.4792412109375, 0.47822540283203124, 0.4779069519042969, 0.4779427795410156, 0.47770932006835937, 0.47803802490234376, 0.4777840576171875, 0.47797247314453123, 0.477939697265625, 0.47781991577148436, 0.4778240051269531, 0.47803289794921877, 0.47818240356445313, 0.47798175048828123, 0.4779959716796875, 0.4778680419921875, 0.4780902404785156, 0.47819674682617186, 0.47809945678710936, 0.4779765625, 0.479388671875, 0.4785356750488281, 0.4774143981933594, 0.4776642456054688, 0.47775238037109374, 0.4779253234863281, 0.4778547058105469, 0.4781363220214844, 0.47750860595703126, 0.4780830688476562, 0.47821728515625, 0.4779703674316406, 0.47782196044921876, 0.4780851135253906, 0.4781322326660156, 0.47786392211914064, 0.4776509399414062, 0.4776099853515625, 0.9929461669921875, 0.47778509521484375, 0.48122674560546874, 0.47778509521484375, 0.4777943115234375, 0.47731814575195314, 0.4780748901367188, 0.47792538452148436, 0.47784756469726564, 0.4781158447265625, 0.4780155029296875, 0.47748403930664063, 0.4777891845703125, 0.4779100036621094, 0.4776908874511719, 0.47778713989257815, 0.47860736083984373, 0.47720037841796875, 0.4777891845703125, 0.4777216491699219, 0.4775577087402344, 0.47795404052734375, 0.47750143432617187, 0.47806668090820315, 0.4786780090332031, 0.477744140625, 0.47708673095703125, 0.47796429443359373, 0.4774686584472656, 0.4776775817871094, 0.4779796447753906, 0.47754855346679687, 0.4775096435546875, 0.4782417907714844, 0.47757107543945315, 0.4774205322265625, 0.4781158447265625, 0.4779376525878906, 0.47806362915039063, 0.47810763549804686, 0.4782090148925781, 0.478413818359375, 0.47833804321289064, 0.4782417907714844, 0.4790927429199219, 0.4781537170410156, 0.47856845092773437, 0.4775301208496094, 0.4781506652832031, 0.4774615173339844, 0.4780472412109375, 0.4780318603515625, 0.47794891357421876, 0.47838311767578123, 0.47825408935546876, 0.4780349426269531, 0.47770111083984373, 0.4779857788085938, 0.47789266967773436, 0.47788128662109375, 0.4787712097167969, 0.47795306396484377, 0.47804415893554686, 0.9943838500976563, 0.4787189636230469, 0.47804107666015627, 0.47781375122070313, 0.477370361328125, 0.4773304443359375, 0.47816705322265624, 0.47811993408203124, 0.4777738342285156, 
0.47763250732421875, 0.4774573974609375, 0.47708673095703125, 0.47772467041015626, 0.47725567626953125, 0.47783627319335936, 0.47736831665039064, 0.4771112976074219, 0.478445556640625, 0.47864935302734374, 0.4780984191894531, 0.47761102294921876, 0.4776365966796875, 0.4789073791503906, 0.4771778564453125, 0.47777484130859377, 0.47784756469726564, 0.4784169006347656, 0.4775045166015625, 0.47752191162109375, 0.47713177490234376, 0.47731610107421873, 0.47756390380859376, 0.4773212280273437, 0.47744512939453126, 0.4777543640136719, 0.477222900390625, 0.477707275390625, 0.47724850463867186, 0.47723724365234377, 0.4778823547363281, 0.477412353515625, 0.4772392883300781, 0.4777994384765625, 0.47849676513671874, 0.4779949951171875, 0.47791104125976563, 0.47725466918945314, 0.47723629760742187, 0.47771026611328127, 0.478139404296875, 0.4780195922851562, 0.478129150390625, 0.47857867431640627, 0.47747174072265625, 0.47793869018554686, 0.47770932006835937, 0.4779571228027344, 0.47765509033203124, 0.4775464172363281, 0.47778713989257815, 0.477918212890625, 0.4777820129394531, 0.47804107666015627, 0.9942221069335937, 0.47767550659179686, 0.4771686401367187, 0.477431884765625, 0.4771603698730469, 0.47666278076171875, 0.4774676513671875, 0.47758746337890623, 0.47736114501953125, 0.4776488952636719, 0.47701708984375, 0.47684402465820314, 0.47745944213867186, 0.477633544921875, 0.4778076171875, 0.4772536315917969, 0.47761715698242185, 0.47721881103515623, 0.4775362548828125, 0.47752294921875, 0.4779653015136719, 0.47805645751953124, 0.47783526611328125, 0.47724032592773435, 0.4773447570800781, 0.4780175476074219, 0.4793231506347656, 0.4781588439941406, 0.47761715698242185, 0.47719937133789064, 0.4781537170410156, 0.47752294921875, 0.4775311279296875, 0.47761920166015626, 0.4774993896484375, 0.4768399353027344, 0.4775946960449219, 0.47729248046875, 0.47762841796875, 0.4775372924804687, 0.4772720642089844, 0.4776089477539063, 0.4787261352539062, 0.47772262573242186, 0.47747378540039065, 0.47768780517578124, 0.477528076171875, 0.477149169921875, 0.4776355895996094, 0.47729766845703125, 0.4775628662109375, 0.47870361328125, 0.4778188781738281, 0.4776509399414062, 0.4779898986816406, 0.4782561340332031, 0.47833087158203125, 0.47798681640625, 0.4781035461425781, 0.477955078125, 0.47816192626953125, 0.4783472595214844, 0.47877734375]",tokens/s,2.061137207272146,,,,,,main,False,False -4bit-awq-gemm-flash_attention_2,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,stabilityai/stablelm-2-12b,stabilityai/stablelm-2-12b,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.1,,0.30.1,,,,1.19.2,,,,0.11.1,,,,,,,,,,,,,,,,,,,,,,,,,,,MB,2287.439872,9584.508928,0.0,8938.061824,8629.72416,s,10,10.131507629394532,1.013150762939453,0.002692660193456455,1.0122420349121093,1.01416416015625,1.017623095703125,1.020390244140625,"[1.02108203125, 1.01157763671875, 1.012674072265625, 1.0126704711914063, 1.0133955078125, 1.012330322265625, 1.0118361206054687, 1.0117332763671876, 1.012054443359375, 
1.0121537475585938]",tokens/s,252.67710331408875,kWh,1.195108746488889e-05,6.548659351992683e-06,5.668768423900405e-05,7.518743105588562e-05,tokens/kWh,3404824.4022291345,MB,2287.439872,9584.508928,0.0,8938.061824,8715.664896,s,10,591.45866796875,59.145866796875,0.006617972035565828,59.14582421874999,59.155115234374996,59.1558447265625,59.1564283203125,"[59.14538671875, 59.13608984375, 59.150515625, 59.1451953125, 59.154953125, 59.15657421875, 59.14678515625, 59.1360078125, 59.1408984375, 59.14626171875]",tokens/s,1.0651631874186793,kWh,0.0006982415291666984,0.00038269747479935173,0.0032679308921203906,0.004348869896086441,tokens/kWh,14486.52213226564,,s,629,599.6304891357424,0.9533076138883024,0.12026100633018022,0.9387591552734375,0.9396539428710937,0.9399756713867188,1.9507973779296877,"[0.9389322509765625, 0.9398538208007813, 0.938841064453125, 0.93900390625, 0.9390182495117188, 0.9385973510742187, 0.9396909790039063, 0.938871826171875, 0.9403566284179687, 0.9389732055664063, 0.939009033203125, 0.9392967529296875, 0.9398015747070313, 0.938829833984375, 0.9383045043945313, 0.9377362060546875, 0.9381539916992188, 0.9390131225585937, 0.9386055908203125, 0.9399725952148438, 0.93948828125, 0.939114501953125, 0.939462646484375, 0.93991015625, 0.9391953735351563, 0.9389260864257812, 0.9383475341796875, 0.9386793212890625, 0.9386414184570312, 0.9392066650390625, 0.9393878784179688, 0.9381703491210938, 0.9381140747070312, 0.9383218994140625, 0.9380587768554688, 0.9380526123046875, 0.9383915405273437, 0.9382041625976563, 0.9384386596679688, 0.9379778442382812, 0.9379686279296875, 0.9389004516601562, 0.938387451171875, 0.93830859375, 0.9408757934570312, 0.9392742309570312, 0.9384775390625, 0.9388594970703125, 0.9387110595703125, 0.938039306640625, 0.9378734130859375, 0.93863525390625, 0.9386055908203125, 0.9390182495117188, 0.938181640625, 0.939652099609375, 0.9389639892578125, 0.9383065795898438, 0.939093017578125, 0.9385164794921875, 0.9385799560546875, 0.9382952880859375, 1.9507384033203126, 0.9382799072265625, 0.9386485595703125, 0.938119140625, 0.9379522705078125, 0.9383259887695312, 0.9382891235351563, 0.938335205078125, 0.9381273803710938, 0.9386373291015625, 0.938450927734375, 0.9386209106445312, 0.9381652221679687, 0.9398865966796875, 0.93909912109375, 0.9384284057617187, 0.9378529052734375, 0.9383649291992188, 0.938302490234375, 0.9382522583007813, 0.9403709716796875, 0.9398660888671875, 0.93905615234375, 0.9389526977539062, 0.9395927124023438, 0.9383956298828126, 0.9380956420898438, 0.9384847412109375, 0.9385236206054688, 0.9388482666015625, 0.9386148071289062, 0.93890966796875, 0.938439697265625, 0.93823486328125, 0.9391492919921876, 0.9383259887695312, 0.9384806518554687, 0.9384683227539062, 0.9383198852539063, 0.9388380126953125, 0.9383782348632812, 0.939188232421875, 0.9388707885742188, 0.9384468383789063, 0.9386332397460937, 0.9392158813476562, 0.9388359375, 0.9386854248046875, 0.9379727172851563, 0.9386455078125, 0.9383505859375, 0.938419189453125, 0.93814990234375, 0.9399869384765625, 0.9389414672851563, 0.9384857788085937, 0.93823486328125, 0.9385400390625, 0.9385010986328125, 0.9388738403320313, 0.9391431884765625, 0.9389598999023437, 0.9393756103515625, 1.951994873046875, 0.938608642578125, 0.9388524169921875, 0.9380965576171875, 0.9379512329101563, 0.9383270263671875, 0.9390551147460937, 0.9391697998046875, 0.938060791015625, 0.9392967529296875, 0.9386834106445312, 0.9387222900390625, 0.93907763671875, 0.940732421875, 0.9389188842773437, 0.9391503295898438, 
0.9383372802734375, 0.938250244140625, 0.9387908935546875, 0.9389424438476562, 0.939419677734375, 0.9392752685546875, 0.9382041625976563, 0.9396817626953125, 0.9394974975585938, 0.93903564453125, 0.938745849609375, 0.9387335815429687, 0.9390694580078125, 0.9386281127929688, 0.9390643310546875, 0.9388779296875, 0.9388635864257813, 0.9396141967773437, 0.9400760498046875, 0.9385093383789063, 0.9377218627929688, 0.9381908569335937, 0.9382952880859375, 0.9383936157226562, 0.9382778930664063, 0.9385912475585938, 0.9392332763671875, 0.9384796142578125, 0.9385226440429687, 0.9392466430664063, 0.9381528930664063, 0.9381519165039063, 0.938461181640625, 0.9395015869140625, 0.93941455078125, 0.9390643310546875, 0.9385420532226563, 0.94017333984375, 0.939114501953125, 0.9396920166015625, 0.9393316040039063, 0.938903564453125, 0.9387530517578125, 0.9390448608398437, 0.9396613159179688, 0.9389629516601562, 0.938998779296875, 1.9516702880859376, 0.9386065673828125, 0.9402235107421875, 0.9383372802734375, 0.9382932739257812, 0.938365966796875, 0.9387254028320312, 0.938693603515625, 0.9385010986328125, 0.938767333984375, 0.9391104125976563, 0.9382758178710937, 0.93905712890625, 0.9390704345703125, 0.93931005859375, 0.9386598510742188, 0.9379256591796875, 0.938017822265625, 0.9386240234375, 0.9385728149414062, 0.9389793090820312, 0.9392128295898438, 0.938334228515625, 0.9388523559570312, 0.938787841796875, 0.9384253540039063, 0.9385072631835938, 0.93846630859375, 0.9390551147460937, 0.939335693359375, 0.9389865112304687, 0.9403515014648437, 0.938693603515625, 0.9387028198242188, 0.9390755615234375, 0.9390796508789062, 0.9388257446289062, 0.9389475708007813, 0.9399132080078125, 0.938587158203125, 0.9390377197265625, 0.9394114379882812, 0.940221435546875, 0.9385441284179687, 0.9391052856445312, 0.938335205078125, 0.9382625122070313, 0.9383117065429688, 0.9383178100585937, 0.9387202758789063, 0.9384796142578125, 0.9386547241210937, 0.9381621704101563, 0.93876123046875, 0.9382215576171875, 0.9389844360351562, 0.9389209594726563, 0.9384591064453125, 0.9386475219726562, 0.9395773315429687, 0.9389486083984375, 0.9387765502929688, 0.93916162109375, 1.9526563720703125, 0.9392178955078125, 0.9389434814453125, 0.93859326171875, 0.939198486328125, 0.93855126953125, 0.9389004516601562, 0.9386444702148438, 0.9396828002929688, 0.9400084228515625, 0.9395486450195313, 0.93916162109375, 0.939404296875, 0.9389486083984375, 0.939167724609375, 0.9397196655273438, 0.93857177734375, 0.938660888671875, 0.9386322021484375, 0.9401231079101563, 0.939725830078125, 0.9393346557617187, 0.9393930053710937, 0.9400657958984375, 0.9386649780273437, 0.9385441284179687, 0.9381509399414063, 0.9390632934570312, 0.9386444702148438, 0.9382697143554688, 0.939452392578125, 0.9390592041015625, 0.9396920166015625, 0.93861376953125, 0.9394022216796875, 0.939062255859375, 0.9391001586914063, 0.938335205078125, 0.9390151977539063, 0.9384151000976563, 0.9390755615234375, 0.93837109375, 0.9391544189453125, 0.9386065673828125, 0.9394503784179687, 0.9384161376953125, 0.938771484375, 0.9382778930664063, 0.9383301391601563, 0.9384806518554687, 0.9385072631835938, 0.939251708984375, 0.9386219482421875, 0.939736083984375, 0.9381171264648438, 0.939852783203125, 0.9383362426757812, 0.9384755249023438, 0.9382440795898438, 0.9390888671875, 0.9384498901367188, 0.9385430908203125, 0.93985791015625, 1.9517337646484374, 0.9388739013671875, 0.9396459350585937, 0.9385574951171874, 0.9385686645507813, 0.9384365844726562, 0.9387765502929688, 0.93935205078125, 
0.9397186279296875, 0.93812939453125, 0.939156494140625, 0.938771484375, 0.9394319458007813, 0.9387591552734375, 0.9385062255859375, 0.9385093383789063, 0.9380311279296875, 0.9388994750976563, 0.9391216430664062, 0.9395814208984375, 0.939610107421875, 0.9395476684570313, 0.9393233642578125, 0.9399511108398437, 0.9398251342773437, 0.9391124267578125, 0.9394176025390625, 0.9400964965820312, 0.9399777221679687, 0.9401446533203125, 0.939125732421875, 0.9388021850585937, 0.9388124389648438, 0.9400350952148437, 0.9387550659179688, 0.9386065673828125, 0.9382359008789063, 0.9382369384765625, 0.93964697265625, 0.9381478271484375, 0.939219970703125, 0.9382389526367187, 0.9393500366210937, 0.9386393432617187, 0.9393213500976563, 0.9382932739257812, 0.9387704467773438, 0.9389281005859375, 0.9396756591796875, 0.9391441650390625, 0.9385687255859375, 0.9398343505859375, 0.93823486328125, 0.9392557983398437, 0.9380843505859375, 0.939009033203125, 0.9379246215820313, 0.938049560546875, 0.93809765625, 0.9396961059570312, 0.938603515625, 0.9394022216796875, 0.9386465454101562, 1.9507159423828124, 0.9391738891601562, 0.9382543334960938, 0.9382963256835938, 0.9384693603515625, 0.9383526611328125, 0.9399357299804687, 0.93943603515625, 0.9397831420898437, 0.93865673828125, 0.9389649658203125, 0.938598388671875, 0.9392752685546875, 0.938119140625, 0.9378037719726563, 0.9382062377929687, 0.938081298828125, 0.9376358642578125, 0.9382072143554687, 0.9395159301757813, 0.9385308227539062, 0.93964697265625, 0.9388390502929688, 0.9387509765625, 0.9381334838867188, 0.938144775390625, 0.9383526000976562, 0.9388922729492187, 0.9388226318359375, 0.9387222900390625, 0.9390540771484375, 0.9390018310546875, 0.9392066650390625, 0.9401517944335938, 0.9398220825195313, 0.938925048828125, 0.938987548828125, 0.9386342163085938, 0.9397770385742188, 0.9390172119140625, 0.9395302124023438, 0.9389547729492187, 0.9400145874023438, 0.9384099731445312, 0.939598876953125, 0.9384038696289062, 0.9381325073242187, 0.9380997314453124, 0.9389240112304688, 0.938376220703125, 0.9384140625, 0.9392455444335938, 0.9389741821289063, 0.9392086791992188, 0.9387509765625, 0.9395404663085938, 0.93880322265625, 0.938650634765625, 0.9383117065429688, 0.9391912841796874, 0.938625, 0.93869775390625, 0.9389117431640625, 1.9512227783203124, 0.9401149291992188, 0.938693603515625, 0.9384498901367188, 0.9391452026367187, 0.9389598999023437, 0.93937255859375, 0.9388656616210938, 0.939441162109375, 0.938603515625, 0.9388677368164062, 0.9383546752929688, 0.9392056274414062, 0.938287109375, 0.9377484741210937, 0.9378836669921875, 0.93793896484375, 0.9375641479492187, 0.9385707397460937, 0.9388339233398437, 0.938630126953125, 0.9394104614257812, 0.9392772827148438, 0.9391390991210937, 0.938872802734375, 0.9386659545898437, 0.9385103149414062, 0.9388339233398437, 0.9386782836914063, 0.9387857666015625, 0.9381437377929688, 0.9386362915039063, 0.9390151977539063, 0.939298828125, 0.9390264282226563, 0.9383864135742187, 0.9382738037109375, 0.9385748291015625, 0.938534912109375, 0.938250244140625, 0.9396162719726563, 0.9382584228515625, 0.9384591064453125, 0.9386362915039063, 0.9391001586914063, 0.9379573974609375, 0.9382379760742188, 0.938408935546875, 0.9383331909179687, 0.9385953369140625, 0.9381273803710938, 0.9385042114257812, 0.9381089477539063, 0.9387110595703125, 0.9393162231445312, 0.9386168823242188, 0.9382542724609375, 0.9378928833007812, 0.9382205200195313, 0.9389230346679688, 0.9386598510742188, 0.939430908203125, 0.9383444213867187, 1.9508203125, 
0.9386854248046875, 0.9382471923828125, 0.938323974609375, 0.9382011108398437, 0.9390366821289062, 0.93852978515625, 0.9385359497070312, 0.9398814697265625, 0.9387048950195312, 0.9394933471679687, 0.939357177734375, 0.9405009765625, 0.9390079956054688, 0.9383884887695313, 0.9386322021484375, 0.9385226440429687, 0.9374658813476563, 0.9394872436523437, 0.9389967651367187, 0.9386393432617187, 0.939273193359375, 0.939357177734375, 0.9387940063476562, 0.9389414672851563, 0.9389557495117188, 0.9387868041992188, 0.9400811767578126, 0.9384970092773437, 0.9387786254882813, 0.938735595703125, 0.938808349609375, 0.939025390625, 0.9387807006835938, 0.9382564086914063, 0.9380014038085938, 0.9381089477539063, 0.9382532958984375, 0.938630126953125, 0.938618896484375, 0.938883056640625, 0.9382164306640625, 0.938872802734375, 0.9385338745117188, 0.9390315551757813, 0.9384253540039063, 0.938555419921875, 0.9387694091796875, 0.9386690673828125, 0.9383936157226562, 0.9389619140625, 0.9384304809570313, 0.9386782836914063, 0.9390888671875, 0.939125732421875, 0.9383280639648437, 0.938625, 0.9386373291015625, 0.9384110107421875, 0.9388840942382812, 0.9385308227539062, 0.9386843872070313, 0.9381119995117188, 1.9515452880859374, 0.9394534301757812, 0.93916259765625, 0.9390397338867188, 0.9386148071289062, 0.9386332397460937, 0.9383301391601563, 0.9385236206054688, 0.9390694580078125, 0.9383864135742187, 0.9387816772460937, 0.938566650390625, 0.9386209106445312, 0.938498046875, 0.938482666015625, 0.9383372802734375, 0.938598388671875, 0.9374115600585937, 0.9391605834960938, 0.9391114501953125, 0.9388984375, 0.9407098999023438, 0.9396029663085937, 0.9391421508789063, 0.938498046875, 0.9380679931640625, 0.9384048461914063, 0.93968896484375, 0.9385738525390624, 0.939087890625, 0.9386076049804688, 0.93890869140625, 0.938851318359375, 0.9392803955078125, 0.93926806640625, 0.9393264770507812, 0.9389168701171875, 0.9393695068359375, 0.938694580078125, 0.9390172119140625, 0.93890966796875, 0.9383895263671875, 0.9392957153320313, 0.9389629516601562, 0.938323974609375, 0.9382389526367187, 0.9378775024414062, 0.9381263427734375, 0.9386209106445312, 0.9382277221679688, 0.9391841430664063, 0.938829833984375, 0.938335205078125, 0.9392312622070312, 0.9389691162109375, 0.9388851318359375, 0.9389680786132812, 0.9386978149414062, 0.9398670654296875, 0.9389967651367187, 0.93882470703125, 0.938935302734375, 0.9384898681640625]",tokens/s,1.0489793487762582,,,,,,,, -4bit-awq-gemm-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,google/gemma-2b,google/gemma-2b,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, 
in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 304, in hf_raise_for_status - response.raise_for_status() - File ""/usr/local/lib/python3.10/dist-packages/requests/models.py"", line 1024, in raise_for_status - raise HTTPError(http_error_msg, response=self) -requests.exceptions.HTTPError: 403 Client Error: Forbidden for url: https://huggingface.co/google/gemma-2b/resolve/main/config.json - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1722, in _get_metadata_or_catch_error - metadata = get_hf_file_metadata(url=url, proxies=proxies, timeout=etag_timeout, headers=headers) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1645, in get_hf_file_metadata - r = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 372, in _request_wrapper - response = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 396, in _request_wrapper - hf_raise_for_status(response) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 367, in hf_raise_for_status - raise HfHubHTTPError(message, response=response) from e -huggingface_hub.utils._errors.HfHubHTTPError: (Request ID: Root=1-6694812a-10ce2a3d2f4cf6aa71b5736a;9d3ee60d-36f8-4d2c-967d-24d6f4ce493a) - -403 Forbidden: Please enable access to public gated repositories in your fine-grained token settings to view this repository.. -Cannot access content at: https://huggingface.co/google/gemma-2b/resolve/main/config.json. -If you are trying to create or update content,make sure you have a token with the `write` role. - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1221, in hf_hub_download - return _hf_hub_download_to_cache_dir( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1325, in _hf_hub_download_to_cache_dir - _raise_on_head_call_error(head_call_error, force_download, local_files_only) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1826, in _raise_on_head_call_error - raise LocalEntryNotFoundError( -huggingface_hub.utils._errors.LocalEntryNotFoundError: An error happened while trying to locate the file on the Hub and we cannot find the requested files in the local cache. Please check your connection and try again or make sure your Internet connection is on. 
- -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 445, in cached_file - raise EnvironmentError( -OSError: We couldn't connect to 'https://huggingface.co' to load this file, couldn't find it in the cached files and it looks like google/gemma-2b is not the path to a directory containing a file named config.json. -Checkout your internet connection or see how to run the library in offline mode at 'https://huggingface.co/docs/transformers/installation#offline-mode'. 
- -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemm-flash_attention_2,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,EleutherAI/polyglot-ko-12.8b,EleutherAI/polyglot-ko-12.8b,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.40.2,,0.30.1,,,,1.19.2,,,,0.11.0,,,,,,,,,,,,,,,,,,,,,,,,,,,MB,1723.056128,9941.024768,0.0,9294.577664,8910.495232,s,10,10.523069458007814,1.0523069458007812,0.0008028232056597039,1.0523904418945311,1.0530852783203124,1.05345859375,1.05375724609375,"[1.0527669677734375, 1.0538319091796875, 1.05145703125, 1.0517464599609374, 1.052252197265625, 1.0511856689453125, 1.0525286865234376, 1.0530023193359375, 1.0528663330078125, 1.051431884765625]",tokens/s,243.27502638043498,kWh,1.2425229681862726e-05,6.805541535795782e-06,6.044174279779546e-05,7.967251401545396e-05,tokens/kWh,3213153.283331113,MB,1723.056128,9941.024768,0.0,9294.577664,9220.876288,s,10,626.3534921875,62.63534921875,0.004239119732727424,62.635099609375004,62.63956796875,62.641526171875,62.643092734375,"[62.6319921875, 62.6329375, 62.6391328125, 62.643484375, 62.63139453125, 62.6282421875, 62.63577734375, 62.634421875, 62.63843359375, 62.63767578125]",tokens/s,1.0058218048721415,kWh,0.0007394197557369868,0.0004052695339850652,0.003591932262432407,0.004736621552154459,tokens/kWh,13300.619292952455,,s,629,634.8295839233394,1.009268018956025,0.1250286498200473,0.9942149047851563,0.9945757568359375,0.99473427734375,2.0461909814453128,"[0.9937213745117187, 0.9941636962890625, 0.9939281616210938, 0.9936691284179687, 0.9940469970703125, 0.9936640014648438, 0.99386572265625, 0.9940029296875, 0.9938134765625, 0.9938718872070312, 0.9940316162109375, 0.9940879516601563, 0.9947422485351562, 0.9942005615234375, 0.994029541015625, 0.993860595703125, 0.9938984985351562, 0.994188232421875, 0.9941381225585938, 0.9940950927734375, 0.9939527587890625, 0.9939947509765625, 0.99481396484375, 0.994408447265625, 0.994361328125, 0.9940614013671875, 0.99426806640625, 0.9940326538085937, 0.9939323120117187, 0.993944580078125, 0.9940070190429687, 0.994107421875, 0.994229248046875, 0.9940459594726563, 0.9943674926757813, 0.9942251586914063, 0.99422412109375, 0.9942691650390625, 0.9943838500976563, 0.994155517578125, 0.994197509765625, 0.9944873046875, 0.9943777465820313, 0.994418701171875, 0.994387939453125, 0.9942783813476562, 0.9941801147460938, 0.9943582763671875, 0.9945180053710938, 0.9940408325195312, 0.9938134765625, 0.9940326538085937, 0.9944760131835938, 0.9945057373046875, 0.9942804565429687, 0.99428662109375, 0.9941749877929688, 0.994018310546875, 0.9942005615234375, 0.994018310546875, 0.9942138671875, 0.99439306640625, 2.048400390625, 0.9937254638671875, 0.9939384155273437, 0.993649658203125, 0.993681396484375, 0.993491943359375, 0.9935380249023438, 0.993723388671875, 0.9939885864257813, 0.993955810546875, 0.9941115112304687, 0.9941227416992188, 0.9936732177734375, 0.9938882446289062, 0.993818603515625, 0.9939773559570313, 0.9937152099609375, 0.9938954467773438, 0.994029541015625, 0.9940746459960937, 
0.9940223999023438, 0.9943040161132812, 0.9939876098632813, 0.9940018920898438, 0.993870849609375, 0.9941780395507812, 0.9940029296875, 0.9939568481445312, 0.9940223999023438, 0.9943408813476563, 0.9943121948242187, 0.9944913940429687, 0.994107421875, 0.9944105224609375, 0.9942425537109375, 0.9945681762695312, 0.9944022827148438, 0.9939404907226562, 0.994323486328125, 0.9944729614257812, 0.9945559692382813, 0.9941841430664062, 0.9939691772460938, 0.9944022827148438, 0.9942015991210937, 0.9941749877929688, 0.996316162109375, 0.994234375, 0.9943941040039063, 0.9945528564453125, 0.9943756713867188, 0.9942384643554687, 0.99406640625, 0.9946685180664062, 0.9946071166992188, 0.9947156372070313, 0.9943460083007812, 0.9942159423828125, 0.9944033203125, 0.994250732421875, 0.994207763671875, 0.9942732543945313, 0.9941258544921875, 2.0457523193359375, 0.9937326049804688, 0.993923095703125, 0.9941954345703125, 0.9940490112304687, 0.9940633544921875, 0.9942056884765625, 0.9942149047851563, 0.9938370361328125, 0.9937274780273437, 0.9940162353515625, 0.9940469970703125, 0.99371826171875, 0.9940367431640625, 0.9938534545898438, 0.9942271728515625, 0.9941575927734375, 0.9941626586914063, 0.9939773559570313, 0.9939947509765625, 0.993728515625, 0.993818603515625, 0.99376025390625, 0.9947381591796876, 0.9940807495117188, 0.9942988891601563, 0.9943746337890625, 0.9940838623046875, 0.9939465942382812, 0.9941636962890625, 0.9940090942382812, 0.99420263671875, 0.9942149047851563, 0.9942138671875, 0.9943306274414062, 0.9944053955078125, 0.9946675415039062, 0.9946388549804688, 0.9943070678710938, 0.9942528076171875, 0.9945692138671876, 0.9944391479492187, 0.9944893188476562, 0.9945906982421875, 0.9944791259765625, 0.99445556640625, 0.9944688720703125, 0.9956966552734375, 0.9946471557617188, 0.9943448486328125, 0.99428662109375, 0.9943275756835938, 0.9942988891601563, 0.9942845458984375, 0.99435107421875, 0.9946736450195313, 0.9949030151367187, 0.99470849609375, 0.9946101684570312, 0.99445556640625, 0.9944708862304688, 0.9945303344726563, 0.9944258422851563, 2.046361572265625, 0.9943121948242187, 0.993902587890625, 0.9938494262695312, 0.9939803466796875, 0.993817626953125, 0.99403369140625, 0.9938892822265625, 0.993890380859375, 0.9941790161132813, 0.9943521118164063, 0.994566162109375, 0.9950842895507812, 0.9945149536132812, 0.9943418579101563, 0.994255859375, 0.9942322998046875, 0.9944514770507813, 0.9944678344726563, 0.9941493530273438, 0.99386572265625, 0.994466796875, 0.994135009765625, 0.9944791259765625, 0.993997802734375, 0.9939465942382812, 0.993850341796875, 0.994050048828125, 0.994281494140625, 0.9941104736328125, 0.9941370849609374, 0.99445556640625, 0.9942169799804688, 0.9945046997070313, 0.9944360961914063, 0.994355224609375, 0.9945057373046875, 0.9943777465820313, 0.9944063720703125, 0.9943849487304688, 0.9944248046875, 0.9944248046875, 0.9944043579101562, 0.9945538330078125, 0.9957181396484375, 0.9942650756835938, 0.9944627075195313, 0.9945119018554688, 0.9939578857421875, 0.9942630615234375, 0.9940695190429687, 0.994323486328125, 0.9945487060546875, 0.9944248046875, 0.9942774047851562, 0.9945712890625, 0.9945681762695312, 0.994572265625, 0.9946071166992188, 0.9948231811523438, 0.994545654296875, 0.9945897216796875, 0.994703369140625, 2.0469268798828124, 0.9937100830078125, 0.99370703125, 0.9939507446289062, 0.9940214233398438, 0.993982421875, 0.9939998779296875, 0.994187255859375, 0.9942363891601562, 0.994165771484375, 0.9937623291015625, 0.9936098022460937, 0.9937017822265625, 0.99392919921875, 
0.9935421142578125, 0.9941094360351562, 0.9936434936523437, 0.9947597045898438, 0.9939476318359375, 0.9940572509765625, 0.993876953125, 0.9940459594726563, 0.9939384155273437, 0.9936783447265625, 0.9934899291992187, 0.9937745971679688, 0.9935821533203125, 0.9939537353515625, 0.993924072265625, 0.9939793701171875, 0.9942067260742188, 0.9939005737304687, 0.9939332885742187, 0.9940510864257812, 0.9938903198242187, 0.9940438842773438, 0.9939251098632812, 0.9943121948242187, 0.9945211181640625, 0.9941473388671875, 0.9944022827148438, 0.9940101318359374, 0.9942968139648437, 0.9944330444335937, 0.994639892578125, 0.9945938110351562, 0.9943070678710938, 0.99437158203125, 0.9943593139648438, 0.9942528076171875, 0.9945589599609375, 0.994482177734375, 0.9939876098632813, 0.9945620727539063, 0.9944330444335937, 0.9943951416015625, 0.9943367919921875, 0.9962434692382812, 0.9943162841796875, 0.994186279296875, 0.9945906372070312, 0.9944248657226562, 0.9944125366210937, 2.04558642578125, 0.993912841796875, 0.9936803588867188, 0.9937315673828125, 0.9937705688476562, 0.9936362915039062, 0.9941083984375, 0.9939169311523437, 0.993712158203125, 0.9938135375976562, 0.9937837524414063, 0.9940541381835938, 0.9938093872070313, 0.9940275268554688, 0.9939824829101562, 0.9938954467773438, 0.9938401489257812, 0.9937479858398437, 0.9936363525390625, 0.9938779907226563, 0.9936895751953125, 0.9938565063476562, 0.9942282104492187, 0.9945733032226562, 0.9943101196289063, 0.994271240234375, 0.9942517700195312, 0.9937531127929687, 0.9937540893554687, 0.9940654296875, 0.9936906127929688, 0.9937561645507812, 0.9941299438476563, 0.994255859375, 0.9943173217773438, 0.99435107421875, 0.9943480224609375, 0.9938595581054688, 0.9955552978515625, 0.9940582275390625, 0.994145263671875, 0.9937991943359376, 0.9941483764648438, 0.994471923828125, 0.9942374267578125, 0.9941565551757813, 0.994735107421875, 0.99420263671875, 0.9940562133789063, 0.994302978515625, 0.9941094360351562, 0.9940899658203125, 0.9943009033203125, 0.9945149536132812, 0.9943971557617187, 0.9942517700195312, 0.9943009033203125, 0.9943582763671875, 0.9941473388671875, 0.9944699096679688, 0.9942067260742188, 0.9939999389648437, 0.994462646484375, 2.0474276123046873, 0.9942875366210937, 0.9945169677734375, 0.9941104736328125, 0.99388623046875, 0.99411865234375, 0.9940193481445313, 0.9938565063476562, 0.9939681396484376, 0.9939568481445312, 0.9942251586914063, 0.9939885864257813, 0.9937736206054687, 0.9940674438476562, 0.9934888916015625, 0.9936240844726563, 0.9936025390625, 0.9939957885742188, 0.9941309204101563, 0.9939783935546875, 0.9941585693359375, 0.9935872192382813, 0.9937633056640625, 0.99496142578125, 0.9941268310546875, 0.9944862670898438, 0.993723388671875, 0.9938739013671875, 0.9938882446289062, 0.9944063720703125, 0.9942282104492187, 0.9941043090820313, 0.9945272216796875, 0.9945446166992188, 0.9939815063476563, 0.9940366821289063, 0.9942149047851563, 0.9942435913085937, 0.9943531494140625, 0.9939906616210937, 0.994450439453125, 0.9943357543945313, 0.9943040161132812, 0.9945149536132812, 0.9941370849609374, 0.9942384643554687, 0.99449853515625, 0.9946624145507813, 0.9942548217773437, 0.9945487060546875, 0.9945855712890626, 0.9943418579101563, 0.9944760131835938, 0.9945589599609375, 0.9944545288085938, 0.9943593139648438, 0.9944330444335937, 0.9942958374023437, 0.9943746337890625, 0.9943889770507812, 0.9947330322265625, 0.9947310180664063, 0.9944688720703125, 2.047277099609375, 0.9937192993164062, 0.9936015625, 0.993828857421875, 0.9938534545898438, 
0.9940787353515625, 0.9938462524414062, 0.9938677978515625, 0.9937264404296875, 0.9940316162109375, 0.9937715454101562, 0.9938134765625, 0.9937797241210937, 0.9938841552734375, 0.9937469482421875, 0.9938042602539062, 0.9948897094726562, 0.99428759765625, 0.99430810546875, 0.9943807983398437, 0.9940869140625, 0.994150390625, 0.9942937622070313, 0.994207763671875, 0.9942947998046875, 0.9942835083007813, 0.9943101196289063, 0.994366455078125, 0.9940070190429687, 0.9944422607421874, 0.9940172729492187, 0.9939844970703124, 0.9939251098632812, 0.9939476318359375, 0.9940101318359374, 0.9939158935546875, 0.9941954345703125, 0.9944647827148437, 0.994376708984375, 0.9946132202148438, 0.9943787231445312, 0.9942466430664062, 0.994313232421875, 0.9944729614257812, 0.9945261840820312, 0.9942916870117188, 0.9944391479492187, 0.9942671508789063, 0.994545654296875, 0.994798583984375, 0.9944873046875, 0.9943193359375, 0.994164794921875, 0.9943551635742187, 0.9943255004882813, 0.99406640625, 0.9942916870117188, 0.994587646484375, 0.9942916870117188, 0.9949767456054688, 0.9941903076171875, 0.9943889770507812, 0.9943162841796875, 2.047072265625, 0.9936138305664063, 0.9937510375976563, 0.9939783935546875, 0.9938709106445313, 0.9944656982421874, 0.993997802734375, 0.99403369140625, 0.993713134765625, 0.9936732177734375, 0.9935554809570313, 0.9939323120117187, 0.9941073608398437, 0.994260986328125, 0.9937418212890625, 0.9940961303710938, 0.9939844970703124, 0.9946102294921875, 0.993966064453125, 0.9940264892578125, 0.9938206787109375, 0.9939199829101563, 0.9943121948242187, 0.9942937622070313, 0.9941575927734375, 0.9946552124023438, 0.9939906616210937, 0.9938114624023437, 0.9936793823242187, 0.9941647338867188, 0.9941442260742187, 0.994471923828125, 0.9945323486328125, 0.99468798828125, 0.9943142700195312, 0.994466796875, 0.9942467041015625, 0.9940725708007813, 0.9942282104492187, 0.994255859375, 0.9944166259765626, 0.9944391479492187, 0.9945436401367187, 0.9948764038085938, 0.99462451171875, 0.9944658203125, 0.994460693359375, 0.994155517578125, 0.9942097778320312, 0.9942763671875, 0.9942702026367187, 0.994260986328125, 0.9944309692382812, 0.9945098266601563, 0.9942968139648437, 0.9946491088867188, 0.9945579223632812, 0.99460302734375, 0.994281494140625, 0.9945108642578125, 0.99460302734375, 0.9943797607421875, 0.9946961669921875, 2.04813623046875, 0.99420263671875, 0.99418115234375, 0.9938411254882813, 0.9938524169921875, 0.9939671020507812, 0.9941248168945312, 0.994044921875, 0.9940910034179687, 0.9938984985351562, 0.9937213745117187, 0.9939066772460937, 0.9943142700195312, 0.9939824829101562, 0.9937520751953125, 0.9941227416992188, 0.9939323120117187, 0.9941923828125, 0.9938759765625, 0.9943316650390625, 0.994208740234375, 0.99416064453125, 0.9944412231445312, 0.99399169921875, 0.9939773559570313, 0.9942149658203125, 0.9955389404296875, 0.9943828735351562, 0.9939456176757813, 0.9944596557617188, 0.9941442260742187, 0.9940377807617188, 0.9945589599609375, 0.994135009765625, 0.9938985595703125, 0.9942609252929687, 0.994212890625, 0.9942732543945313, 0.9943726196289062, 0.994460693359375, 0.9944248657226562, 0.9942844848632812, 0.9949808349609375, 0.9943121948242187, 0.9940910034179687, 0.994150390625, 0.9942640380859376, 0.9941688232421875, 0.9940387573242188, 0.9946736450195313, 0.994753662109375, 0.99428857421875, 0.994555908203125, 0.9943367919921875, 0.9942702026367187, 0.994302978515625, 0.994234375, 0.9944371337890625, 0.9942466430664062, 0.9943602905273438, 0.9947944946289062, 0.99437158203125, 
0.9947586669921875]",tokens/s,0.9908170884423626,,,,,,,, -4bit-awq-gemm-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,Qwen/Qwen-72B,Qwen/Qwen-72B,cuda,0,42,,,True,,,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run - report = scenario.run(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run - self.run_model_loading_tracking(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking - backend.load() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load - self.load_transformers_model() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model - self.load_transformers_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights - self.load_transformers_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained - self.pretrained_model = self.automodel_loader.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 551, in from_pretrained - model_class = get_class_from_dynamic_module( - File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 502, in get_class_from_dynamic_module - final_module = get_cached_module_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 327, in get_cached_module_file - modules_needed = check_imports(resolved_module_file) - File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 182, in check_imports - raise ImportError( -ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. 
Run `pip install transformers_stream_generator` - -",qwen,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemm-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,i,i,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 304, in hf_raise_for_status - response.raise_for_status() - File ""/usr/local/lib/python3.10/dist-packages/requests/models.py"", line 1024, in raise_for_status - raise HTTPError(http_error_msg, response=self) -requests.exceptions.HTTPError: 404 Client Error: Not Found for url: https://huggingface.co/i/resolve/main/config.json - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1221, in hf_hub_download - return _hf_hub_download_to_cache_dir( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1325, in _hf_hub_download_to_cache_dir - _raise_on_head_call_error(head_call_error, force_download, local_files_only) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1823, in _raise_on_head_call_error - raise head_call_error - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1722, in _get_metadata_or_catch_error - metadata = get_hf_file_metadata(url=url, proxies=proxies, timeout=etag_timeout, headers=headers) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1645, in get_hf_file_metadata - r = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 372, in _request_wrapper - response = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 396, in _request_wrapper - hf_raise_for_status(response) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in 
hf_raise_for_status - raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-66949031-3a1f54a034a3a88c48a90592;8738dafb-8c32-45c9-a050-4e3569efb582) - -Repository Not Found for url: https://huggingface.co/i/resolve/main/config.json. -Please make sure you specified the correct `repo_id` and `repo_type`. -If you are trying to access a private or gated repo, make sure you are authenticated. - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 425, in cached_file - raise EnvironmentError( -OSError: i is not a local folder and is not a valid model identifier listed on 'https://huggingface.co/models' -If this is a private repository, make sure to pass a token having permission to this repo either by logging in with `huggingface-cli login` or by passing `token=` - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemm-flash_attention_2,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,stabilityai/stablelm-3b-4e1t,stabilityai/stablelm-3b-4e1t,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.1,,0.30.1,,,,1.19.2,,,,0.11.1,,,,,,,,,,,,,,,,,,,,,,,,,,,MB,2112.14336,2911.371264,0.0,2264.92416,2140.72832,s,10,2.4878771514892577,0.24878771514892578,0.001481785323819343,0.24867384338378906,0.25056690216064453,0.25108961105346683,0.2515077781677246,"[0.2516123199462891, 0.25045074462890626, 0.24760211181640626, 0.24728652954101563, 0.24780889892578126, 0.24664930725097656, 0.2480397186279297, 0.24930796813964845, 0.24954534912109375, 
0.24957420349121093]",tokens/s,1028.9897145715452,kWh,2.9131745419851165e-06,1.5962883600656e-06,1.3288858869561026e-05,1.7798321771611744e-05,tokens/kWh,14383378.572710099,MB,2112.14336,2911.371264,0.0,2264.92416,2246.907904,s,10,146.0569638671875,14.605696386718751,0.006079082022590375,14.60555517578125,14.610718359375001,14.61447392578125,14.617478378906249,"[14.5972685546875, 14.605400390625, 14.6057099609375, 14.6049326171875, 14.5980126953125, 14.599310546875, 14.6182294921875, 14.609376953125, 14.6088388671875, 14.6098837890625]",tokens/s,4.313385567653395,kWh,0.00017247067014820533,9.452784657802511e-05,0.0007735646039460372,0.0010405631206722677,tokens/kWh,60544.14071421071,,s,629,148.0586790771484,0.23538740711788306,0.029533502655254593,0.23172300720214845,0.23236076049804688,0.2326812683105469,0.4793023583984375,"[0.23325080871582032, 0.23149772644042968, 0.23179673767089845, 0.2320568389892578, 0.2313912353515625, 0.23193907165527344, 0.23138099670410156, 0.23167181396484374, 0.2314608612060547, 0.2322554931640625, 0.23149363708496093, 0.23134413146972657, 0.23120999145507812, 0.23181106567382812, 0.23132569885253906, 0.23138099670410156, 0.2312243194580078, 0.23141888427734375, 0.23158682250976562, 0.23283609008789063, 0.2315028533935547, 0.23156224060058594, 0.2313502655029297, 0.23152435302734375, 0.23198617553710937, 0.23152435302734375, 0.23150592041015625, 0.23154483032226564, 0.23148646545410156, 0.2315581512451172, 0.23163084411621093, 0.23152537536621093, 0.23172300720214845, 0.23159397888183594, 0.23167079162597656, 0.23191448974609374, 0.2319974365234375, 0.2318376922607422, 0.23134413146972657, 0.23214079284667968, 0.23153254699707032, 0.231510009765625, 0.23141171264648439, 0.23148544311523436, 0.23137586975097657, 0.23179263305664063, 0.23232511901855468, 0.23154483032226564, 0.23182234191894532, 0.23154483032226564, 0.23155506896972655, 0.23143218994140624, 0.23180184936523437, 0.23193702697753907, 0.2313113555908203, 0.23155711364746093, 0.23150489807128907, 0.23152333068847655, 0.231404541015625, 0.23146803283691406, 0.2314721221923828, 0.23182131958007812, 0.48205722045898436, 0.23162675476074218, 0.23151513671875, 0.23147418212890625, 0.23139430236816405, 0.23154176330566406, 0.2315345916748047, 0.231478271484375, 0.2315161590576172, 0.2314055633544922, 0.23177113342285155, 0.23148748779296874, 0.23155506896972655, 0.23154995727539063, 0.23152435302734375, 0.23150796508789062, 0.23173222351074219, 0.23166157531738282, 0.23167897033691406, 0.23310336303710938, 0.23195033264160156, 0.23181925964355468, 0.23237017822265624, 0.2315888671875, 0.23168307495117188, 0.23146905517578126, 0.23271218872070312, 0.23225343322753905, 0.2316738586425781, 0.23232205200195313, 0.23157145690917968, 0.23149465942382813, 0.23150489807128907, 0.23166873168945312, 0.23170457458496094, 0.23163084411621093, 0.23201075744628907, 0.23196978759765624, 0.23187762451171876, 0.23154893493652343, 0.23200358581542968, 0.23128781127929687, 0.2326876220703125, 0.23151513671875, 0.2320189514160156, 0.23194111633300782, 0.2343126983642578, 0.23182745361328125, 0.23166975402832032, 0.2321029052734375, 0.2315478973388672, 0.23180390930175782, 0.23187455749511718, 0.23265895080566407, 0.23204351806640625, 0.23161549377441407, 0.23184384155273438, 0.23168307495117188, 0.23198822021484375, 0.23221965026855468, 0.23208038330078126, 0.2315028533935547, 0.2317189178466797, 0.4791541748046875, 0.23152537536621093, 0.23141786193847655, 0.23147109985351563, 0.2316881866455078, 0.23219097900390626, 
0.23250738525390624, 0.23175474548339844, 0.23157862854003905, 0.23146905517578126, 0.23150796508789062, 0.23139942932128907, 0.23163084411621093, 0.23131954956054687, 0.23146290588378907, 0.2314721221923828, 0.23149977111816405, 0.23143116760253907, 0.23153561401367187, 0.23136460876464843, 0.23145574951171874, 0.23262821960449218, 0.23162060546875, 0.23222067260742188, 0.2317332458496094, 0.2321459197998047, 0.23204658508300782, 0.23222169494628905, 0.23193394470214843, 0.23173222351074219, 0.23207321166992187, 0.2314967041015625, 0.23171379089355468, 0.23195033264160156, 0.2315335693359375, 0.23165440368652343, 0.23151513671875, 0.2319278106689453, 0.23251968383789062, 0.2320762939453125, 0.2323722229003906, 0.23198104858398438, 0.23364813232421874, 0.231699462890625, 0.23159397888183594, 0.2315478973388672, 0.23241932678222657, 0.2327490539550781, 0.2325544891357422, 0.23194828796386718, 0.2318673858642578, 0.2316810302734375, 0.23178445434570313, 0.23171685791015625, 0.2330030059814453, 0.23198924255371095, 0.23177932739257812, 0.2315345916748047, 0.2316759033203125, 0.23150898742675782, 0.23172915649414064, 0.23147622680664062, 0.23150898742675782, 0.4793599853515625, 0.23149055480957031, 0.23156224060058594, 0.23211724853515625, 0.23157350158691406, 0.2316636199951172, 0.2315335693359375, 0.2317004852294922, 0.23198104858398438, 0.2317076416015625, 0.2317066192626953, 0.2316810302734375, 0.23214694213867187, 0.2314281005859375, 0.23171379089355468, 0.23145368957519533, 0.23175474548339844, 0.2320343017578125, 0.23201075744628907, 0.2313799743652344, 0.23178445434570313, 0.2324234313964844, 0.2315724792480469, 0.23148133850097657, 0.23172813415527344, 0.23166053771972656, 0.23147314453125, 0.2317332458496094, 0.2317066192626953, 0.23179878234863283, 0.23192472839355469, 0.231546875, 0.23191448974609374, 0.2325841979980469, 0.232310791015625, 0.23168716430664063, 0.23217356872558595, 0.23178752136230468, 0.23181004333496094, 0.23151922607421874, 0.23181517028808593, 0.23169024658203125, 0.23222067260742188, 0.23224114990234376, 0.233280517578125, 0.23156326293945312, 0.23260774230957032, 0.23164927673339844, 0.23165132141113282, 0.23178752136230468, 0.2314833984375, 0.23191654968261718, 0.23174041748046875, 0.23182847595214845, 0.23188172912597657, 0.23205477905273436, 0.23170252990722656, 0.23150592041015625, 0.23171072387695313, 0.23180697631835936, 0.23188787841796876, 0.23198924255371095, 0.2316083221435547, 0.48040447998046876, 0.2314649658203125, 0.23149465942382813, 0.23155506896972655, 0.23165338134765626, 0.23153050231933595, 0.23216741943359376, 0.23149977111816405, 0.2316738586425781, 0.2315141143798828, 0.23169229125976562, 0.23125503540039063, 0.23212237548828124, 0.231546875, 0.23149363708496093, 0.2313123779296875, 0.2316011505126953, 0.23193292236328125, 0.23160934448242188, 0.23140045166015624, 0.23142399597167967, 0.2315028533935547, 0.23264256286621093, 0.23136256408691405, 0.23141273498535156, 0.23175270080566407, 0.2318970947265625, 0.23143218994140624, 0.231625732421875, 0.23146803283691406, 0.23153561401367187, 0.23179263305664063, 0.23162265014648437, 0.23143833923339843, 0.23150489807128907, 0.2316769256591797, 0.2315028533935547, 0.23140351867675782, 0.23153254699707032, 0.23176396179199218, 0.23235379028320313, 0.23157145690917968, 0.23215000915527345, 0.23228416442871094, 0.23199436950683594, 0.2316083221435547, 0.2318725128173828, 0.2318663635253906, 0.2320168914794922, 0.2316748809814453, 0.23151718139648436, 0.2315161590576172, 0.23198104858398438, 
0.23176908874511717, 0.2314844207763672, 0.23157452392578126, 0.2316400604248047, 0.2338518981933594, 0.23160319519042968, 0.2314608612060547, 0.23169842529296875, 0.23176499938964842, 0.23139634704589843, 0.47871487426757814, 0.23285452270507812, 0.2316759033203125, 0.23147109985351563, 0.2317619171142578, 0.23147418212890625, 0.23200460815429688, 0.23145368957519533, 0.23143423461914062, 0.23151513671875, 0.23195852661132813, 0.23152946472167968, 0.23166259765625, 0.2315407409667969, 0.231804931640625, 0.23130111694335936, 0.23147418212890625, 0.23145368957519533, 0.2317332458496094, 0.23229338073730468, 0.23143423461914062, 0.23145779418945311, 0.2318243865966797, 0.23271629333496094, 0.23164723205566407, 0.23130624389648438, 0.23173426818847656, 0.23160525512695312, 0.23191448974609374, 0.23143936157226563, 0.23173735046386718, 0.2313297882080078, 0.23191654968261718, 0.23161753845214844, 0.23174861145019532, 0.23163494873046875, 0.23162367248535157, 0.23179673767089845, 0.23365119934082032, 0.2315284423828125, 0.23203532409667968, 0.2314844207763672, 0.23151820373535156, 0.23153254699707032, 0.23219815063476562, 0.23247462463378907, 0.23148236083984375, 0.2316247100830078, 0.23154893493652343, 0.23167999267578124, 0.23152333068847655, 0.2313492431640625, 0.23187149047851563, 0.2317332458496094, 0.23147314453125, 0.23146803283691406, 0.231546875, 0.23138099670410156, 0.23195443725585937, 0.23131033325195313, 0.2316083221435547, 0.23169331359863282, 0.23267942810058595, 0.4799190979003906, 0.23168614196777343, 0.2323384246826172, 0.23134413146972657, 0.2315478973388672, 0.23153152465820312, 0.23206399536132813, 0.23240089416503906, 0.2316400604248047, 0.232195068359375, 0.23177113342285155, 0.23198208618164062, 0.231762939453125, 0.23141580200195314, 0.23243571472167968, 0.23172607421875, 0.23175167846679687, 0.23154380798339844, 0.23196159362792967, 0.2322186279296875, 0.23200767517089843, 0.23159706115722656, 0.23196774291992187, 0.23436288452148438, 0.23299481201171876, 0.23171994018554687, 0.23219917297363282, 0.23193702697753907, 0.23206809997558595, 0.23197900390625, 0.232015869140625, 0.23205580139160156, 0.2321817626953125, 0.23225856018066407, 0.23298252868652344, 0.23273779296875, 0.23192576599121092, 0.23256166076660156, 0.23193702697753907, 0.23206912231445312, 0.23249714660644533, 0.23228518676757812, 0.2316943359375, 0.23190733337402344, 0.2321776580810547, 0.23329791259765625, 0.2317434844970703, 0.23172300720214845, 0.23201791381835937, 0.23177830505371094, 0.23229849243164064, 0.23173631286621094, 0.23190631103515624, 0.23168716430664063, 0.23162879943847656, 0.2316021728515625, 0.23181619262695313, 0.23235891723632812, 0.23165338134765626, 0.23182540893554687, 0.23188890075683594, 0.23179878234863283, 0.23223193359375, 0.4815288391113281, 0.23210905456542968, 0.23236915588378906, 0.23213363647460938, 0.23194931030273438, 0.2322708435058594, 0.2319656982421875, 0.23167999267578124, 0.23199026489257812, 0.23194419860839843, 0.23190220642089843, 0.23164620971679686, 0.23152333068847655, 0.23174656677246094, 0.23165132141113282, 0.23153868103027345, 0.23241421508789062, 0.23152333068847655, 0.23160012817382813, 0.2317813720703125, 0.23216844177246093, 0.23163699340820312, 0.2317332458496094, 0.23213157653808594, 0.23221554565429686, 0.23165235900878905, 0.23165029907226561, 0.2321817626953125, 0.23166566467285157, 0.23215718078613282, 0.23198719787597658, 0.23195852661132813, 0.2326824951171875, 0.23238552856445313, 0.23246131896972655, 0.2325893096923828, 
0.23215411376953124, 0.23172505187988282, 0.23226573181152343, 0.2317496337890625, 0.23191552734375, 0.23176908874511717, 0.23195545959472658, 0.2318008270263672, 0.23182643127441407, 0.23190016174316405, 0.232416259765625, 0.2320189514160156, 0.23202508544921874, 0.23167999267578124, 0.23148748779296874, 0.23161036682128905, 0.2314598388671875, 0.23159500122070312, 0.23170457458496094, 0.23154278564453126, 0.23169024658203125, 0.23182643127441407, 0.23170355224609376, 0.2315704345703125, 0.23159603881835938, 0.23155404663085938, 0.23165338134765626, 0.4812718200683594, 0.23177317810058592, 0.23226675415039064, 0.23194111633300782, 0.2323927001953125, 0.23203225708007813, 0.2321274871826172, 0.23159910583496093, 0.2321274871826172, 0.23236813354492186, 0.23202099609375, 0.23199026489257812, 0.2322821044921875, 0.23166873168945312, 0.2315704345703125, 0.2316953582763672, 0.2317066192626953, 0.23233638000488283, 0.23172402954101562, 0.23153765869140625, 0.2317015075683594, 0.23190835571289062, 0.2319349822998047, 0.23167079162597656, 0.23204454040527345, 0.23182949829101562, 0.2317066192626953, 0.23161241149902342, 0.23168716430664063, 0.23161651611328124, 0.2314915771484375, 0.23156736755371093, 0.23175578308105468, 0.2314915771484375, 0.2315898895263672, 0.2314639434814453, 0.23174041748046875, 0.23146188354492186, 0.23163699340820312, 0.231689208984375, 0.23188275146484374, 0.23198002624511718, 0.23282687377929687, 0.23209779357910157, 0.23285350036621094, 0.23174656677246094, 0.23175578308105468, 0.23238450622558593, 0.2317015075683594, 0.23150489807128907, 0.23172300720214845, 0.231625732421875, 0.23256166076660156, 0.23199641418457032, 0.232195068359375, 0.23204556274414062, 0.23192166137695314, 0.2320394287109375, 0.23225138854980468, 0.23171379089355468, 0.2317813720703125, 0.23161958312988282, 0.232158203125, 0.4819783630371094, 0.2318551025390625, 0.23196159362792967, 0.23218585205078124, 0.2324418487548828, 0.23145881652832032, 0.2316441650390625, 0.2316216278076172, 0.2316216278076172, 0.23146598815917968, 0.23160525512695312, 0.2316451873779297, 0.23194111633300782, 0.23152639770507813, 0.23173939514160155, 0.23201075744628907, 0.23212953186035157, 0.23183258056640624, 0.23173837280273438, 0.2314915771484375, 0.2315274200439453, 0.23239474487304687, 0.23221554565429686, 0.23157145690917968, 0.23228518676757812, 0.23169842529296875, 0.23386726379394532, 0.23193087768554688, 0.23240908813476563, 0.23179161071777343, 0.2318612518310547, 0.23229029846191407, 0.2322391052246094, 0.2321817626953125, 0.2317076416015625, 0.23165338134765626, 0.231762939453125, 0.23216537475585938, 0.23200869750976563, 0.23169024658203125, 0.2321448974609375, 0.23209881591796874, 0.23225343322753905, 0.2321080322265625, 0.2319605712890625, 0.23185305786132812, 0.23235276794433593, 0.23230157470703125, 0.23171379089355468, 0.23146905517578126, 0.2321694793701172, 0.2319288330078125, 0.231699462890625, 0.23157760620117188, 0.23183871459960936, 0.23164723205566407, 0.23158476257324218, 0.2316769256591797, 0.2315581512451172, 0.23161241149902342, 0.23160627746582033, 0.23157452392578126, 0.23163392639160157]",tokens/s,4.248315626753965,,,,,,,, 
-4bit-awq-gemm-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,tiiuae/falcon-rw-1b,tiiuae/falcon-rw-1b,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - self.load_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 559, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3704, in from_pretrained - config = cls._autoset_attn_implementation( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1481, in _autoset_attn_implementation - cls._check_and_enable_flash_attn_2( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1572, in _check_and_enable_flash_attn_2 - raise ValueError( -ValueError: FalconForCausalLM does not support Flash Attention 2.0 yet. 
Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmpb_ykafi2/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemm-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,Qwen/Qwen-14B,Qwen/Qwen-14B,cuda,0,42,,,True,,,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run - report = scenario.run(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run - self.run_model_loading_tracking(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking - backend.load() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load - self.load_transformers_model() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model - self.load_transformers_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights - self.load_transformers_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained - self.pretrained_model = self.automodel_loader.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 551, in from_pretrained - model_class = get_class_from_dynamic_module( - File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 502, in get_class_from_dynamic_module - final_module = get_cached_module_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 327, in get_cached_module_file - modules_needed = check_imports(resolved_module_file) - File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 182, in check_imports - raise ImportError( -ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. 
Run `pip install transformers_stream_generator` - -",qwen,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemm-flash_attention_2,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,stabilityai/stablelm-2-1_6b,stabilityai/stablelm-2-1_6b,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.1,,0.30.1,,,,1.19.2,,,,0.11.1,,,,,,,,,,,,,,,,,,,,,,,,,,,MB,1452.249088,2449.997824,0.0,1803.55072,1664.521216,s,10,1.3683243865966794,0.13683243865966793,0.001337233378547869,0.13643169403076172,0.13770939025878906,0.13902387695312501,0.14007546630859374,"[0.14033836364746094, 0.13655836486816406, 0.13525657653808593, 0.13630502319335938, 0.13605914306640626, 0.13580105590820313, 0.13619818115234375, 0.13698959350585938, 0.1374172821044922, 0.1374008026123047]",tokens/s,1870.9013922987058,kWh,1.6058788441859926e-06,8.799470783644112e-07,6.656266586270105e-06,9.14209250882051e-06,tokens/kWh,28002341.887593575,MB,1453.084672,2449.997824,0.0,1803.55072,1763.593728,s,10,81.66558447265625,8.166558447265626,0.005684297838363884,8.165376708984375,8.17551572265625,8.17556669921875,8.17560748046875,"[8.17105712890625, 8.1615234375, 8.15890625, 8.16109033203125, 8.165703125, 8.16225, 8.16505029296875, 8.17550439453125, 8.1688818359375, 8.17561767578125]",tokens/s,7.714387940380691,kWh,9.658040946623585e-05,5.293287202049287e-05,0.0003923776359739301,0.0005418909174606587,tokens/kWh,116259.56067915421,,s,629,82.76125488281265,0.13157592191226153,0.016262281457391132,0.129438720703125,0.13028741455078124,0.1309923309326172,0.26588740966796875,"[0.1330462646484375, 0.13207244873046875, 0.13133619689941406, 0.12958412170410155, 0.1294899139404297, 0.1299087371826172, 0.12952166748046876, 0.12904652404785155, 0.12932403564453124, 0.12959437561035156, 0.12922367858886719, 0.13178367614746095, 0.12983705139160157, 0.12965171813964843, 0.12943974304199218, 0.12977151489257813, 0.12933529663085938, 0.12956877136230469, 0.1295636444091797, 0.12963430786132812, 0.12953907775878906, 0.12950630187988282, 0.12945408630371094, 0.12915711975097657, 0.12958924865722657, 0.12955238342285155, 0.12918885803222657, 0.12946330261230468, 0.1300633544921875, 0.12945613098144532, 0.12942131042480468, 0.130693115234375, 0.1296414794921875, 0.12933836364746093, 0.129259521484375, 0.1293465576171875, 0.12949095153808593, 0.12932199096679686, 0.12919705200195314, 0.129396728515625, 0.12944178771972656, 0.12925645446777342, 0.12930149841308594, 0.12936805725097655, 0.12971110534667968, 0.12947967529296875, 0.1293475799560547, 0.12937420654296874, 0.12945510864257812, 0.12943974304199218, 0.12923904418945312, 0.12946636962890626, 0.12930047607421874, 0.12932710266113281, 0.12942848205566407, 0.1293578186035156, 0.12930149841308594, 0.12927488708496093, 0.12947865295410158, 0.12933529663085938, 0.12928306579589843, 0.12944178771972656, 0.2679490661621094, 0.12944281005859376, 0.12929330444335937, 0.1295994873046875, 0.1293639678955078, 0.12922061157226564, 0.1296312255859375, 0.1300070343017578, 0.129291259765625, 
0.1292584991455078, 0.12935987854003905, 0.12954214477539064, 0.12965989685058593, 0.12922880554199218, 0.12928102111816406, 0.13002546691894531, 0.13103411865234374, 0.12998860168457033, 0.12921856689453126, 0.12967832946777344, 0.12930764770507813, 0.1296117706298828, 0.12917861938476563, 0.12936805725097655, 0.12924006652832032, 0.13045452880859376, 0.12976127624511719, 0.12940185546875, 0.129154052734375, 0.129870849609375, 0.12965274047851563, 0.12968447875976563, 0.12930560302734376, 0.129406982421875, 0.12957696533203125, 0.12942848205566407, 0.12912844848632812, 0.12938957214355468, 0.12942131042480468, 0.1295319061279297, 0.12902093505859374, 0.12939776611328124, 0.12929023742675783, 0.1295431671142578, 0.12914994812011718, 0.131240966796875, 0.1315758056640625, 0.12955442810058593, 0.12934962463378907, 0.12950119018554687, 0.1294192657470703, 0.12930560302734376, 0.12917555236816405, 0.12942745971679687, 0.12936909484863282, 0.1293506622314453, 0.1292216339111328, 0.12930560302734376, 0.12932710266113281, 0.12941722106933592, 0.12931890869140625, 0.1293260803222656, 0.12920217895507813, 0.26541976928710936, 0.12952268981933593, 0.12942335510253905, 0.129438720703125, 0.12940083312988282, 0.12945613098144532, 0.12953599548339845, 0.12932095336914062, 0.1292216339111328, 0.12939776611328124, 0.12944178771972656, 0.12960153198242189, 0.129470458984375, 0.13039923095703124, 0.1294571533203125, 0.12955955505371095, 0.12928614807128908, 0.1294202880859375, 0.129259521484375, 0.12932095336914062, 0.12926258850097655, 0.12934144592285157, 0.12937420654296874, 0.12949708557128906, 0.12930149841308594, 0.12997734069824218, 0.12926771545410157, 0.1293711395263672, 0.12937318420410157, 0.13014016723632812, 0.1300695037841797, 0.12950630187988282, 0.12973773193359375, 0.12933529663085938, 0.12930458068847656, 0.1293711395263672, 0.12979302978515625, 0.12978994750976564, 0.12939366149902343, 0.13079551696777345, 0.12929330444335937, 0.12930458068847656, 0.12950425720214845, 0.1293291473388672, 0.12928306579589843, 0.12939468383789063, 0.12921139526367187, 0.13056614685058593, 0.1297909698486328, 0.12958822631835937, 0.12945613098144532, 0.12936703491210938, 0.1293086700439453, 0.12931173706054688, 0.12922265625, 0.129512451171875, 0.12923802185058594, 0.12934144592285157, 0.13001522827148437, 0.1295636444091797, 0.1292533721923828, 0.12930458068847656, 0.12924517822265624, 0.2659522705078125, 0.1293527069091797, 0.129328125, 0.12934860229492187, 0.1293148193359375, 0.12935679626464844, 0.12930560302734376, 0.12933734130859376, 0.12967628479003906, 0.12982579040527345, 0.12926463317871092, 0.12944793701171875, 0.12915916442871095, 0.12936090087890625, 0.1293506622314453, 0.1294264373779297, 0.12922061157226564, 0.12930047607421874, 0.12926258850097655, 0.1294448699951172, 0.12934349060058595, 0.129328125, 0.12933836364746093, 0.12944383239746093, 0.12915711975097657, 0.12933938598632813, 0.1303726043701172, 0.1295083465576172, 0.1293885498046875, 0.12941107177734376, 0.12947763061523437, 0.12969778442382812, 0.12942848205566407, 0.12960972595214842, 0.13009408569335937, 0.1296506805419922, 0.12932095336914062, 0.12933631896972655, 0.1293148193359375, 0.12935577392578124, 0.12930149841308594, 0.129944580078125, 0.1299752960205078, 0.13082009887695312, 0.13123277282714843, 0.129976318359375, 0.12939776611328124, 0.12972647094726564, 0.1297725372314453, 0.12937522888183595, 0.12929843139648436, 0.12931686401367187, 0.12993843078613282, 0.12966297912597657, 0.129470458984375, 0.12965274047851563, 
0.12924826049804689, 0.1299988555908203, 0.12943359375, 0.12968345642089843, 0.12955853271484374, 0.12938957214355468, 0.12927590942382813, 0.2658734130859375, 0.1293506622314453, 0.12944691467285158, 0.1293885498046875, 0.12947354125976562, 0.12939981079101562, 0.12928819274902345, 0.12991282653808595, 0.12973670959472655, 0.12963430786132812, 0.12942233276367188, 0.129364990234375, 0.12963226318359375, 0.12936090087890625, 0.12928819274902345, 0.12908441162109374, 0.12943565368652343, 0.1296312255859375, 0.1292093505859375, 0.12931993103027345, 0.12937216186523437, 0.1293834228515625, 0.12934553527832032, 0.12939263916015625, 0.12918988037109375, 0.12930252075195312, 0.1292359619140625, 0.12959027099609374, 0.12928306579589843, 0.12924313354492187, 0.1291550750732422, 0.129617919921875, 0.12926258850097655, 0.1292359619140625, 0.1293322296142578, 0.12940800476074218, 0.12928819274902345, 0.12947967529296875, 0.12928512573242187, 0.13084877014160157, 0.12966297912597657, 0.12936294555664063, 0.12929536437988282, 0.12989439392089844, 0.1293639678955078, 0.1298913269042969, 0.12968038940429688, 0.12977459716796874, 0.1292738494873047, 0.1299230651855469, 0.12984115600585938, 0.1298524169921875, 0.12960870361328125, 0.12983091735839844, 0.12971725463867187, 0.12981964111328126, 0.13102899169921875, 0.13176730346679688, 0.13048524475097656, 0.13046885681152343, 0.12975410461425782, 0.12993125915527343, 0.130081787109375, 0.2661335144042969, 0.1312348175048828, 0.13016166687011718, 0.12974490356445312, 0.12950425720214845, 0.12989849853515625, 0.13022105407714843, 0.13010943603515626, 0.12975205993652345, 0.13035621643066406, 0.12987493896484376, 0.1298892822265625, 0.13020057678222657, 0.13007872009277344, 0.12971827697753907, 0.12933427429199218, 0.1293506622314453, 0.12941619873046875, 0.12931277465820312, 0.12941311645507814, 0.12987596130371093, 0.12993434143066407, 0.12928921508789062, 0.12916940307617186, 0.12927897644042968, 0.129470458984375, 0.12943359375, 0.12916326904296874, 0.12934962463378907, 0.12948069763183595, 0.12918476867675782, 0.12958412170410155, 0.12934451293945312, 0.12950936889648437, 0.12927180480957032, 0.12952268981933593, 0.129328125, 0.1293578186035156, 0.13088050842285157, 0.12939878845214844, 0.12924006652832032, 0.12944178771972656, 0.12921856689453126, 0.12951962280273438, 0.12917759704589843, 0.12927488708496093, 0.12930047607421874, 0.1292359619140625, 0.12929638671875, 0.12937318420410157, 0.1292584991455078, 0.1293639678955078, 0.12952986145019532, 0.12940902709960939, 0.12924826049804689, 0.12945613098144532, 0.12931071472167968, 0.12931993103027345, 0.12928614807128908, 0.12941619873046875, 0.12934860229492187, 0.12924620056152344, 0.12929536437988282, 0.2658928527832031, 0.12976742553710938, 0.12933836364746093, 0.1294448699951172, 0.12936192321777343, 0.1305374755859375, 0.1294581756591797, 0.12936909484863282, 0.13020774841308594, 0.1295667266845703, 0.12935577392578124, 0.1293516845703125, 0.12933836364746093, 0.12947456359863282, 0.13004389953613282, 0.13297048950195312, 0.12997836303710938, 0.12952268981933593, 0.1292922821044922, 0.12959642028808593, 0.12940185546875, 0.1293527069091797, 0.1293834228515625, 0.13002546691894531, 0.12959129333496094, 0.1294202880859375, 0.1293854675292969, 0.12959744262695314, 0.12931686401367187, 0.1293711395263672, 0.1293516845703125, 0.1293291473388672, 0.12929638671875, 0.1294264373779297, 0.13180621337890625, 0.12990361022949218, 0.1295431671142578, 0.12950323486328125, 0.1293824005126953, 0.12971417236328125, 
0.1290260467529297, 0.129438720703125, 0.1293291473388672, 0.12946022033691407, 0.12919398498535156, 0.12934860229492187, 0.12931993103027345, 0.1294571533203125, 0.12999270629882811, 0.12952677917480468, 0.12935679626464844, 0.1295800323486328, 0.12942950439453124, 0.13058047485351562, 0.12943463134765626, 0.1296117706298828, 0.12902093505859374, 0.1293578186035156, 0.12931788635253907, 0.12939366149902343, 0.12926054382324217, 0.129364990234375, 0.1295247344970703, 0.26704998779296873, 0.13152255249023437, 0.12972236633300782, 0.1295697937011719, 0.12936703491210938, 0.12935987854003905, 0.13026611328125, 0.130302978515625, 0.12928819274902345, 0.129544189453125, 0.12924006652832032, 0.1293322296142578, 0.12927999877929688, 0.12953599548339845, 0.12984831237792968, 0.12931993103027345, 0.12948480224609374, 0.1293824005126953, 0.13099110412597656, 0.1302159423828125, 0.12943052673339844, 0.12947148132324218, 0.12942233276367188, 0.12940083312988282, 0.12913253784179687, 0.12946022033691407, 0.1293096923828125, 0.12943463134765626, 0.1309276123046875, 0.13064703369140626, 0.12926873779296874, 0.129396728515625, 0.13017190551757812, 0.12957901000976563, 0.12975001525878907, 0.13083033752441406, 0.13063987731933593, 0.12994560241699218, 0.12968345642089843, 0.12934349060058595, 0.129364990234375, 0.12946022033691407, 0.12915711975097657, 0.1293260803222656, 0.13071565246582031, 0.1304012756347656, 0.12986265563964844, 0.13138021850585938, 0.13076173400878907, 0.12949197387695313, 0.12930560302734376, 0.1294520263671875, 0.12977049255371093, 0.12957183837890626, 0.12964454650878907, 0.13065728759765624, 0.12959437561035156, 0.12931788635253907, 0.12932199096679686, 0.1294929962158203, 0.12976025390625, 0.1295626220703125, 0.12928306579589843, 0.2670551147460937, 0.13002035522460936, 0.12955033874511718, 0.12985139465332032, 0.12954725646972656, 0.12931277465820312, 0.12925645446777342, 0.13027226257324218, 0.1294694366455078, 0.1295308837890625, 0.1293824005126953, 0.12973260498046876, 0.1293701171875, 0.13028352355957032, 0.13105357360839845, 0.13017805480957031, 0.1294264373779297, 0.1304217529296875, 0.12934144592285157, 0.12950425720214845, 0.12933836364746093, 0.12946124267578124, 0.1294581756591797, 0.12975514221191406, 0.12941107177734376, 0.1294151611328125, 0.12950527954101562, 0.12952064514160155, 0.12930560302734376, 0.12943052673339844, 0.12936703491210938, 0.1306746826171875, 0.12937625122070312, 0.1297418212890625, 0.12938957214355468, 0.12986880493164063, 0.1292728271484375, 0.12953497314453125, 0.13047296142578124, 0.1296373748779297, 0.1293711395263672, 0.13099314880371093, 0.1300480041503906, 0.12995071411132814, 0.12928102111816406, 0.12935475158691406, 0.12918988037109375, 0.12929843139648436, 0.12932301330566406, 0.1293711395263672, 0.12942745971679687, 0.1291304931640625, 0.12926976013183594, 0.12948684692382811, 0.12928102111816406, 0.12931993103027345, 0.1292728271484375, 0.12934860229492187, 0.1300746307373047, 0.1295749053955078, 0.13162393188476562, 0.12988621520996094, 0.1299630126953125, 0.2670069885253906, 0.1296711730957031, 0.12933734130859376, 0.129396728515625, 0.1302415313720703, 0.1293096923828125, 0.12932301330566406, 0.12936909484863282, 0.1292728271484375, 0.12964659118652344, 0.1292410888671875, 0.12936090087890625, 0.129227783203125, 0.12946124267578124, 0.12994969177246093, 0.12929536437988282, 0.12978585815429688, 0.12964659118652344, 0.1294264373779297, 0.1293639678955078, 0.12969676208496095, 0.12943052673339844, 0.1292349395751953, 
0.1293148193359375, 0.1295564727783203, 0.12957594299316405, 0.13105255126953125, 0.131240966796875, 0.13059481811523438, 0.1293588409423828, 0.1293588409423828, 0.12939059448242188, 0.129328125, 0.12935577392578124, 0.12926463317871092, 0.1297838134765625, 0.13002546691894531, 0.12994969177246093, 0.1295677490234375, 0.12959642028808593, 0.12966297912597657, 0.12990054321289063, 0.1307125701904297, 0.12997222900390626, 0.13009202575683593, 0.12955955505371095, 0.12987187194824218, 0.13022822570800782, 0.1295114288330078, 0.12963328552246095, 0.12929638671875, 0.12961587524414062, 0.12946841430664063, 0.1294254150390625, 0.131989501953125, 0.131093505859375, 0.12946226501464844, 0.129470458984375, 0.13014732360839842, 0.13120204162597657, 0.13035110473632813, 0.12946124267578124, 0.13090509033203124]",tokens/s,7.600174754365983,,,,,,,, -4bit-awq-gemm-flash_attention_2,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,EleutherAI/pythia-6.7b,EleutherAI/pythia-6.7b,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.40.2,,0.30.1,,,,1.19.2,,,,0.11.1,,,,,,,,,,,,,,,,,,,,,,,,,,,MB,1308.962816,6256.328704,0.0,5609.8816,5292.617216,s,10,5.571213256835938,0.5571213256835937,0.0007666840176307737,0.5568714599609375,0.557456005859375,0.5584093994140625,0.5591721142578125,"[0.55936279296875, 0.556702392578125, 0.5567649536132813, 0.5570609741210938, 0.5566168212890625, 0.5568453979492187, 0.557244140625, 0.5567742919921875, 0.5569439697265625, 0.5568975219726563]",tokens/s,459.5049376110048,kWh,6.58186607890659e-06,3.606554268359711e-06,3.10157809854445e-05,4.1204201332710805e-05,tokens/kWh,6212958.672172324,MB,1308.962816,6256.328704,0.0,5609.8816,5503.948288,s,10,327.20778125,32.720778124999995,0.005308491364446453,32.7202353515625,32.7266998046875,32.72907548828125,32.73097603515625,"[32.72102734375, 32.713080078125, 32.72320703125, 32.723677734375, 32.719443359375, 32.726171875, 32.7142578125, 32.71880859375, 32.71665625, 32.731451171875]",tokens/s,1.9253820847208232,kWh,0.00038621306252148413,0.00021167820178359155,0.0018056511605691545,0.00240354242487423,tokens/kWh,26211.31183207494,,s,629,331.6996517333982,0.527344438367883,0.06619643329461054,0.519362548828125,0.5196838745117187,0.5197864990234375,1.0765131103515626,"[0.5193318481445313, 0.5191546630859375, 0.5195264282226563, 0.5193011474609375, 0.519161865234375, 0.5189949340820312, 0.519024658203125, 0.5189488525390625, 0.51919873046875, 0.518950927734375, 0.5190645751953125, 0.5191629028320313, 0.5196636352539062, 0.5194833984375, 0.5193001098632812, 0.5194168090820312, 0.5192283935546875, 0.5194127807617187, 0.5191557006835937, 0.5193123779296875, 0.5200199584960937, 0.5194977416992187, 0.5196605224609375, 0.519141357421875, 0.5195980834960937, 0.5191690063476563, 0.5194024658203125, 0.51964208984375, 0.5193021240234375, 0.5191884765625, 0.5192212524414063, 0.5194844360351563, 0.5194926147460938, 0.5195140991210937, 0.519436279296875, 0.5195079956054688, 0.5191536865234375, 0.5194281005859375, 0.5195120849609375, 0.5192755126953125, 0.5196728515625, 0.5195980834960937, 
0.5193359375, 0.5193533325195312, 0.5194619140625, 0.5194895629882812, 0.5192806396484375, 0.5191629028320313, 0.51945166015625, 0.519245849609375, 0.5195632934570312, 0.519736328125, 0.5192724609375, 0.5192140502929687, 0.5193277587890625, 0.5196113891601563, 0.5192888793945313, 0.519089111328125, 0.5195858154296875, 0.519647216796875, 0.5195612182617187, 0.5193564453125, 1.0763663330078126, 0.518898681640625, 0.5189488525390625, 0.5190789184570312, 0.5190236206054688, 0.5189979858398438, 0.5195140991210937, 0.5191475219726562, 0.5191536865234375, 0.5191444702148438, 0.5191588134765625, 0.519103515625, 0.5190625, 0.5190420532226563, 0.5190901489257812, 0.5192744750976562, 0.5193707275390625, 0.5191854248046875, 0.5194649658203125, 0.5193666381835937, 0.5193236694335938, 0.5194281005859375, 0.5191649169921875, 0.5191342163085938, 0.519046142578125, 0.5191383056640625, 0.5194874877929687, 0.5194271240234375, 0.5192376098632813, 0.5192274169921876, 0.5190830078125, 0.519161865234375, 0.51907275390625, 0.51944140625, 0.5194660034179688, 0.5193697509765625, 0.5191290893554688, 0.5192161254882812, 0.51917822265625, 0.51928271484375, 0.5191895141601562, 0.5192591552734375, 0.5195140991210937, 0.51934619140625, 0.5191177978515625, 0.51966259765625, 0.5194188842773437, 0.5195499267578125, 0.5193359375, 0.5193554077148438, 0.5193215942382813, 0.5193707275390625, 0.5193646240234375, 0.5194526977539062, 0.5195642700195312, 0.5192232666015625, 0.5191710815429688, 0.519193603515625, 0.5191710815429688, 0.519245849609375, 0.5192489013671875, 0.519278564453125, 0.5190645751953125, 1.076937744140625, 0.5192662963867187, 0.5193164672851562, 0.5193584594726562, 0.5193011474609375, 0.5191680297851563, 0.5191188354492188, 0.5191270141601563, 0.5195693969726562, 0.5193901977539063, 0.5192611694335938, 0.519352294921875, 0.519352294921875, 0.5192232666015625, 0.5193594970703125, 0.51930419921875, 0.519245849609375, 0.5191546630859375, 0.5192427368164062, 0.5194291381835937, 0.5196943359375, 0.519520263671875, 0.5194066162109375, 0.5193318481445313, 0.5193011474609375, 0.5194178466796875, 0.5193533325195312, 0.5193809814453125, 0.5195612182617187, 0.5193554077148438, 0.5194926147460938, 0.5192898559570313, 0.5194373168945312, 0.5194137573242188, 0.519341064453125, 0.5194496459960938, 0.5192283935546875, 0.51919873046875, 0.5195048828125, 0.5196339111328125, 0.5195560913085937, 0.519572509765625, 0.51938916015625, 0.5192929077148437, 0.5191874389648438, 0.5194547119140625, 0.5193114013671875, 0.5195682983398437, 0.5193922729492187, 0.5194547119140625, 0.5196246948242188, 0.5193451538085937, 0.5195867919921875, 0.5193400268554688, 0.5194721069335938, 0.5192376098632813, 0.51959912109375, 0.5194547119140625, 0.5198776245117187, 0.5195980834960937, 0.519794677734375, 0.5196093139648438, 0.5194988403320312, 1.07694482421875, 0.51944140625, 0.5193348999023437, 0.519572509765625, 0.5193656616210938, 0.5192765502929687, 0.5190225830078125, 0.5192960205078125, 0.5193001098632812, 0.519414794921875, 0.5196113891601563, 0.5192478637695312, 0.5194208984375, 0.5195591430664063, 0.5194547119140625, 0.5194055786132813, 0.5193932495117187, 0.5193236694335938, 0.519299072265625, 0.519203857421875, 0.519172119140625, 0.519413818359375, 0.5194751586914063, 0.5194956665039062, 0.5193031616210938, 0.5192714233398438, 0.5192642822265625, 0.5195704345703125, 0.5194055786132813, 0.5192109985351563, 0.5192069702148437, 0.5193225708007813, 0.5193994140625, 0.5194393310546875, 0.5193717651367188, 0.51938916015625, 0.5194823608398438, 
0.5195161743164063, 0.5194229736328125, 0.5193123779296875, 0.5194803466796875, 0.5196267700195313, 0.5195346069335938, 0.5195284423828125, 0.5194066162109375, 0.5194495849609375, 0.5196309204101562, 0.5191699829101563, 0.5192222900390625, 0.5191680297851563, 0.5192140502929687, 0.5192509155273437, 0.5197455444335938, 0.51945166015625, 0.5199011840820312, 0.5196165161132813, 0.5196021728515625, 0.51944140625, 0.519468017578125, 0.5196871948242188, 0.5198428344726562, 0.5196503295898437, 0.5193164672851562, 1.076336669921875, 0.5190471801757812, 0.51890380859375, 0.5190809326171875, 0.5190072021484375, 0.5190051879882812, 0.519067626953125, 0.5190942993164063, 0.5192550659179688, 0.5191608276367188, 0.51911474609375, 0.5190000610351563, 0.5190625, 0.5189099731445312, 0.5189284057617187, 0.5190532836914062, 0.5191976928710937, 0.5195181884765625, 0.5195192260742187, 0.5192243041992187, 0.5194373779296875, 0.5194208374023438, 0.5193380126953125, 0.51938818359375, 0.5193707275390625, 0.5193901977539063, 0.5194732055664063, 0.5196451416015625, 0.5194977416992187, 0.5196513061523438, 0.5195796508789062, 0.5192212524414063, 0.5195172119140625, 0.5192601318359376, 0.5197967529296875, 0.5195601806640625, 0.5197598876953125, 0.5193154296875, 0.5192755126953125, 0.5190604858398438, 0.519140380859375, 0.519161865234375, 0.5192714233398438, 0.5192130737304688, 0.519172119140625, 0.5193922729492187, 0.5197404174804687, 0.5195489501953126, 0.5196461791992187, 0.5194475708007813, 0.5194373168945312, 0.5194741821289063, 0.5194240112304688, 0.5196431274414063, 0.51991552734375, 0.5195888671875, 0.5194813232421875, 0.5196830444335937, 0.5193574829101563, 0.5195447387695312, 0.519552001953125, 0.5194495849609375, 0.5193912353515625, 1.0770308837890625, 0.519299072265625, 0.519161865234375, 0.519352294921875, 0.519202880859375, 0.519232421875, 0.5192161254882812, 0.5191495971679687, 0.5193451538085937, 0.5194649658203125, 0.5192969970703125, 0.51949365234375, 0.5189734497070313, 0.5189652709960938, 0.5190738525390625, 0.5190205078125, 0.5188731079101563, 0.5189837036132813, 0.5191946411132813, 0.5191895141601562, 0.5194915771484375, 0.5194823608398438, 0.5194649658203125, 0.5193861083984375, 0.519541748046875, 0.5195222778320312, 0.5193441162109375, 0.5196687622070313, 0.5194107055664062, 0.5194291381835937, 0.5196728515625, 0.519736328125, 0.5194956665039062, 0.5196932983398438, 0.51970458984375, 0.5194557495117188, 0.5194752197265625, 0.5195489501953126, 0.5196452026367188, 0.51976904296875, 0.5196585693359375, 0.5195037841796875, 0.5197609252929688, 0.5197005004882812, 0.5194229736328125, 0.51953564453125, 0.5195899047851562, 0.5200025634765625, 0.5196328735351563, 0.5195530395507812, 0.5195929565429688, 0.5197322387695312, 0.519456787109375, 0.5192703857421875, 0.5194905395507813, 0.5194588012695313, 0.5194240112304688, 0.5194588012695313, 0.519572509765625, 0.5200353393554688, 0.5195612182617187, 0.5195438232421875, 0.5196062622070312, 1.0774559326171875, 0.5191044921875, 0.5191157836914062, 0.5191874389648438, 0.5194168090820312, 0.519161865234375, 0.5189949340820312, 0.5192335205078125, 0.5191475219726562, 0.5193502807617187, 0.5192642822265625, 0.5191946411132813, 0.5190123291015625, 0.5190532836914062, 0.5194086303710937, 0.5190615234375, 0.5189734497070313, 0.5190645751953125, 0.5192376098632813, 0.5190942993164063, 0.5191700439453125, 0.5191280517578125, 0.5192765502929687, 0.5192151489257812, 0.5193707275390625, 0.5190543212890625, 0.5191290893554688, 0.5193359375, 0.5194874877929687, 
0.5193267211914062, 0.5189846801757813, 0.51919873046875, 0.5191116943359375, 0.5194178466796875, 0.51928369140625, 0.5192315063476562, 0.51917724609375, 0.5193430786132812, 0.5191895141601562, 0.5191710815429688, 0.5193011474609375, 0.5194956665039062, 0.5192806396484375, 0.519357421875, 0.5193850708007812, 0.5195530395507812, 0.5191905517578125, 0.519109619140625, 0.5193072509765625, 0.5192550659179688, 0.5195325317382813, 0.5192079467773437, 0.5195632934570312, 0.5195438232421875, 0.5192283935546875, 0.5193164672851562, 0.5193871459960937, 0.519130126953125, 0.5190543212890625, 0.519066650390625, 0.519762939453125, 0.5196871948242188, 0.51964208984375, 1.0765701904296876, 0.5193727416992188, 0.5190225830078125, 0.51900927734375, 0.51913525390625, 0.5190553588867187, 0.5191137084960937, 0.519404541015625, 0.5193369750976562, 0.5193421020507812, 0.5189796142578125, 0.5190471801757812, 0.5191260375976563, 0.5190000610351563, 0.5188761596679687, 0.5193082885742187, 0.5192929077148437, 0.5189151000976563, 0.5193871459960937, 0.5191874389648438, 0.5193277587890625, 0.5193389892578125, 0.5194967041015625, 0.5193380126953125, 0.518867919921875, 0.5191762084960938, 0.5194178466796875, 0.5198345947265625, 0.5194864501953125, 0.519278564453125, 0.5193430786132812, 0.5191434326171875, 0.51901953125, 0.519066650390625, 0.5193963623046876, 0.5190963134765625, 0.51917822265625, 0.5191843872070312, 0.5193840942382812, 0.5189468383789062, 0.5197189331054688, 0.5195069580078125, 0.5195929565429688, 0.5196318969726562, 0.5192345581054687, 0.5199708251953125, 0.5198817138671875, 0.5194895629882812, 0.5193318481445313, 0.51960009765625, 0.519520263671875, 0.5197168579101562, 0.5195089721679688, 0.5197127685546875, 0.5197619018554688, 0.5193789672851562, 0.5196011352539063, 0.5199605712890625, 0.5192171630859375, 0.519773193359375, 0.5193154296875, 0.5193421020507812, 0.5191127319335938, 1.076674560546875, 0.5193175048828125, 0.5198510131835937, 0.5194219360351563, 0.5192171630859375, 0.5192335205078125, 0.5190491943359375, 0.5191076049804687, 0.519288818359375, 0.519351318359375, 0.51945166015625, 0.5192109985351563, 0.5194281005859375, 0.5190523071289063, 0.518887451171875, 0.5189160766601563, 0.5193380126953125, 0.5191219482421875, 0.5194137573242188, 0.5189949340820312, 0.5194024658203125, 0.5194833984375, 0.5191393432617187, 0.5193871459960937, 0.5191823120117187, 0.519035888671875, 0.5191137084960937, 0.5192263793945312, 0.5193850708007812, 0.5195980834960937, 0.519099365234375, 0.5191802978515625, 0.519103515625, 0.5190819702148437, 0.5191076049804687, 0.5194823608398438, 0.519362548828125, 0.5193380126953125, 0.5192847290039062, 0.5194066162109375, 0.5196646118164062, 0.519319580078125, 0.5197086791992187, 0.5193389892578125, 0.5191321411132812, 0.5191423950195313, 0.5191177978515625, 0.51974658203125, 0.5195028686523437, 0.5193082885742187, 0.5194967041015625, 0.5192734985351563, 0.5193185424804687, 0.5191393432617187, 0.5193789672851562, 0.5193001098632812, 0.519245849609375, 0.5193215942382813, 0.5193871459960937, 0.5200670776367188, 0.5194752197265625, 0.5193564453125, 0.519161865234375, 1.0765865478515626, 0.5188423461914062, 0.5193768920898437, 0.5195335693359375, 0.5195069580078125, 0.5196103515625, 0.5192714233398438, 0.5193687133789062, 0.5194761962890625, 0.5191976928710937, 0.5193594970703125, 0.5194977416992187, 0.5192212524414063, 0.5192960205078125, 0.5195693969726562, 0.51974755859375, 0.519372802734375, 0.5196605224609375, 0.5194926147460938, 0.5194332275390625, 0.5192581176757812, 
0.5195612182617187, 0.519888916015625, 0.52008251953125, 0.519522216796875, 0.519762939453125, 0.5197742309570312, 0.5199441528320312, 0.5196011352539063, 0.5197128295898438, 0.519615478515625, 0.5195438232421875, 0.5197282104492188, 0.52025439453125, 0.52010595703125, 0.5197557983398438, 0.5197957153320313, 0.5193380126953125, 0.5193861083984375, 0.5192591552734375, 0.5193103637695312, 0.5194598388671875, 0.519741455078125, 0.5195346069335938, 0.519635986328125, 0.5194772338867187, 0.5194158325195313, 0.5195806884765625, 0.5193267211914062, 0.5193666381835937, 0.5194485473632813, 0.5195233154296875, 0.5197117309570313, 0.519857177734375, 0.519552001953125, 0.519488525390625, 0.5193113403320313, 0.519372802734375, 0.5194302368164062, 0.5193245849609375, 0.5197352905273438, 0.51986328125, 0.5195817260742187]",tokens/s,1.8962938209701676,,,,,,,, -4bit-awq-gemm-flash_attention_2,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,01-ai/Yi-34B,,cuda,0,42,,,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.0,217063f5c507ed7cc255df7e1f64c4333a0b4dfe,4.40.2,,0.30.1,,,,1.19.2,,,,0.10.0,,,,,,,,,,,,,,,,,,,,,,,,,,,MB,1743.122432,22129.672192,0.0,21483.225088,20799.036928,s,10,27.909502197265624,2.7909502197265623,0.0030225550178342967,2.7914050292968753,2.79439267578125,2.794928149414062,2.7953565283203123,"[2.794273681640625, 2.795463623046875, 2.7873525390625, 2.789448486328125, 2.787631103515625, 2.78628466796875, 2.790373291015625, 2.792436767578125, 2.79304931640625, 2.793188720703125]",tokens/s,91.72503263962948,kWh,3.290233454770512e-05,1.803171549396211e-05,0.0001583880155991868,0.00020932206564085407,tokens/kWh,1222995.765956342,MB,1743.720448,22129.672192,0.0,21483.225088,20902.142976,s,10,1661.799328125,166.1799328125,0.024199823300055656,166.1772734375,166.2117125,166.21745,166.22204,"[166.17496875, 166.167359375, 166.14703125, 166.2104375, 166.2231875, 166.181296875, 166.179578125, 166.204078125, 166.159546875, 166.15184375]",tokens/s,0.37910714569299164,kWh,0.0019616428730719624,0.001075153456676562,0.009382404450362024,0.012419200780110549,tokens/kWh,5072.790199261051,,s,629,1684.2762666015624,2.677704716377683,0.33156351074934576,2.63752294921875,2.6395720703125,2.6405790039062498,5.428127246093751,"[2.63648046875, 2.637477783203125, 2.636610595703125, 2.63773388671875, 2.63674365234375, 2.6374482421875, 2.63684716796875, 2.637740966796875, 2.636241943359375, 2.6376162109375, 2.636966796875, 2.6365615234375, 2.639180908203125, 2.637003662109375, 2.637414306640625, 2.638074951171875, 2.638488525390625, 2.637603759765625, 2.6381865234375, 2.6367314453125, 2.63737646484375, 2.636360595703125, 2.63697314453125, 2.6366044921875, 2.6370693359375, 2.636568603515625, 2.636980224609375, 2.636353515625, 2.63706103515625, 2.636314697265625, 2.637646728515625, 2.63864208984375, 2.637822998046875, 2.63769091796875, 2.636580810546875, 2.636631103515625, 2.6366474609375, 2.636380126953125, 2.635778076171875, 2.639795166015625, 2.637285400390625, 2.636735595703125, 2.63680419921875, 2.636505126953125, 2.636515380859375, 2.637959228515625, 2.6366884765625, 
2.638927001953125, 2.6396015625, 2.6393056640625, 2.640069580078125, 2.638898193359375, 2.6396181640625, 2.639129638671875, 2.6404248046875, 2.63950439453125, 2.6398291015625, 2.639203369140625, 2.639447021484375, 2.638824462890625, 2.638972900390625, 2.637701171875, 5.43399951171875, 2.63682861328125, 2.637526123046875, 2.637077392578125, 2.636991455078125, 2.636851318359375, 2.63690966796875, 2.636982177734375, 2.638088134765625, 2.63716455078125, 2.63853662109375, 2.638309326171875, 2.63889111328125, 2.64060107421875, 2.637055908203125, 2.63606884765625, 2.63802880859375, 2.637928466796875, 2.6390927734375, 2.6384384765625, 2.637506591796875, 2.63756298828125, 2.63665869140625, 2.636583984375, 2.63687060546875, 2.6370908203125, 2.637446044921875, 2.63802685546875, 2.6375556640625, 2.637516845703125, 2.63621728515625, 2.637552734375, 2.637285400390625, 2.637663330078125, 2.6371767578125, 2.637763671875, 2.63699560546875, 2.63773583984375, 2.637076416015625, 2.637444091796875, 2.636642333984375, 2.636317626953125, 2.63705078125, 2.636695556640625, 2.637413330078125, 2.63615478515625, 2.6434365234375, 2.637322265625, 2.638057373046875, 2.63786181640625, 2.638676025390625, 2.637177734375, 2.63716455078125, 2.637470703125, 2.63781689453125, 2.636496826171875, 2.63722802734375, 2.637602783203125, 2.638507080078125, 2.637663330078125, 2.636621826171875, 2.63727001953125, 2.6375107421875, 5.42925830078125, 2.638592041015625, 2.6379560546875, 2.637390869140625, 2.636768310546875, 2.63889501953125, 2.638235595703125, 2.638288818359375, 2.637874267578125, 2.638148681640625, 2.637728759765625, 2.637327392578125, 2.63690869140625, 2.637347900390625, 2.6369453125, 2.637433837890625, 2.63707958984375, 2.636822509765625, 2.637656982421875, 2.63617529296875, 2.636454833984375, 2.6358466796875, 2.636423095703125, 2.63856640625, 2.63604833984375, 2.63657470703125, 2.637000732421875, 2.637095947265625, 2.636664794921875, 2.6381435546875, 2.640372802734375, 2.638454833984375, 2.636282958984375, 2.636675048828125, 2.635629638671875, 2.636507080078125, 2.63680615234375, 2.63701611328125, 2.636884033203125, 2.63752294921875, 2.6366669921875, 2.63699462890625, 2.63780859375, 2.6374072265625, 2.636739501953125, 2.6374296875, 2.637625244140625, 2.639678466796875, 2.63699658203125, 2.63729052734375, 2.637656982421875, 2.636547119140625, 2.6364794921875, 2.636135498046875, 2.636613525390625, 2.636599365234375, 2.63747265625, 2.6357861328125, 2.63600439453125, 2.6359716796875, 2.636907470703125, 2.636198974609375, 2.639195068359375, 5.42974560546875, 2.637918212890625, 2.638065673828125, 2.6382080078125, 2.639459228515625, 2.638834716796875, 2.640337890625, 2.639097900390625, 2.638284912109375, 2.639035400390625, 2.636672119140625, 2.63703955078125, 2.63772265625, 2.639283203125, 2.6380830078125, 2.639520751953125, 2.63948291015625, 2.637526123046875, 2.638147705078125, 2.637602783203125, 2.63681640625, 2.63762841796875, 2.638529541015625, 2.63696484375, 2.63714599609375, 2.637178955078125, 2.6376796875, 2.63855810546875, 2.641314697265625, 2.637876220703125, 2.63853369140625, 2.63912548828125, 2.637345703125, 2.636560302734375, 2.636971923828125, 2.636669921875, 2.636619873046875, 2.6392392578125, 2.63902099609375, 2.638465087890625, 2.63775341796875, 2.638636962890625, 2.637673583984375, 2.6372373046875, 2.64167431640625, 2.6387353515625, 2.637962158203125, 2.639416259765625, 2.637681640625, 2.639564697265625, 2.6377236328125, 2.638668701171875, 2.638095458984375, 2.63796728515625, 2.637655029296875, 
2.637210693359375, 2.63780859375, 2.638158935546875, 2.637846435546875, 2.63988232421875, 2.641334228515625, 2.636745849609375, 2.63619482421875, 5.42609521484375, 2.63847412109375, 2.64098095703125, 2.6405458984375, 2.64087451171875, 2.64148388671875, 2.64074755859375, 2.64026318359375, 2.640604248046875, 2.640962646484375, 2.638622802734375, 2.63895458984375, 2.636958740234375, 2.636030029296875, 2.636072021484375, 2.637117431640625, 2.636577880859375, 2.642241455078125, 2.64078857421875, 2.641033203125, 2.641383544921875, 2.64137841796875, 2.639701904296875, 2.640848876953125, 2.640034912109375, 2.6397451171875, 2.640384033203125, 2.637106201171875, 2.636432373046875, 2.636267578125, 2.637424560546875, 2.636958740234375, 2.63680712890625, 2.637274169921875, 2.638011474609375, 2.63804931640625, 2.63707861328125, 2.636590087890625, 2.63739794921875, 2.63689013671875, 2.63980029296875, 2.639002685546875, 2.638836669921875, 2.63720849609375, 2.637477783203125, 2.637814697265625, 2.637075439453125, 2.637104248046875, 2.638521240234375, 2.63874755859375, 2.63686962890625, 2.63682763671875, 2.637802490234375, 2.63747998046875, 2.6362255859375, 2.63657568359375, 2.636940185546875, 2.642130859375, 2.6372998046875, 2.63684912109375, 2.63803271484375, 2.63796533203125, 2.637073486328125, 5.4244033203125, 2.636745849609375, 2.6364814453125, 2.6371123046875, 2.637408203125, 2.636239990234375, 2.63697607421875, 2.638215087890625, 2.639066162109375, 2.63918994140625, 2.639048583984375, 2.63975634765625, 2.639510498046875, 2.63817626953125, 2.637822021484375, 2.6385029296875, 2.637655029296875, 2.637551513671875, 2.636876708984375, 2.63750048828125, 2.63714599609375, 2.63798779296875, 2.63727099609375, 2.637442138671875, 2.636712890625, 2.637854736328125, 2.638200927734375, 2.6394736328125, 2.63754541015625, 2.636971923828125, 2.637844482421875, 2.63727197265625, 2.6361865234375, 2.636801025390625, 2.637619140625, 2.637947998046875, 2.63809228515625, 2.637560791015625, 2.6417490234375, 2.63729052734375, 2.63712158203125, 2.63689111328125, 2.63716259765625, 2.635864990234375, 2.6373251953125, 2.63663623046875, 2.63742578125, 2.637765625, 2.63775341796875, 2.638487548828125, 2.638257080078125, 2.63924951171875, 2.639295654296875, 2.639500244140625, 2.63644970703125, 2.637189208984375, 2.637624267578125, 2.639413330078125, 2.639233154296875, 2.63777587890625, 2.637401123046875, 2.637765625, 2.63680712890625, 5.42891748046875, 2.637751220703125, 2.6369423828125, 2.637287353515625, 2.6384208984375, 2.6376162109375, 2.637705078125, 2.6376396484375, 2.636984375, 2.636735595703125, 2.637454345703125, 2.63906005859375, 2.641005615234375, 2.639869873046875, 2.638488525390625, 2.638159912109375, 2.638043212890625, 2.6367958984375, 2.6363330078125, 2.637013916015625, 2.63684716796875, 2.63693115234375, 2.636872802734375, 2.64043212890625, 2.636421142578125, 2.63861962890625, 2.63849169921875, 2.638710693359375, 2.637824951171875, 2.6379111328125, 2.636610595703125, 2.6375966796875, 2.637263916015625, 2.63708984375, 2.637641845703125, 2.637759521484375, 2.6372197265625, 2.63781591796875, 2.63773583984375, 2.638950439453125, 2.637347900390625, 2.63666796875, 2.638234619140625, 2.638180419921875, 2.637719482421875, 2.63872216796875, 2.639690673828125, 2.63703662109375, 2.636780517578125, 2.636748779296875, 2.637224853515625, 2.636233642578125, 2.636378173828125, 2.636863525390625, 2.63923193359375, 2.638593994140625, 2.638180419921875, 2.63895556640625, 2.638899169921875, 2.637263916015625, 2.63600537109375, 
2.637727783203125, 2.63825, 5.43155078125, 2.637918212890625, 2.63883056640625, 2.63889208984375, 2.63790185546875, 2.637695068359375, 2.636559326171875, 2.636966796875, 2.63714208984375, 2.639033447265625, 2.6404404296875, 2.640530517578125, 2.64198046875, 2.640649169921875, 2.637720458984375, 2.637758544921875, 2.639655029296875, 2.63736328125, 2.63708251953125, 2.636771240234375, 2.636508056640625, 2.63598583984375, 2.6370068359375, 2.636777587890625, 2.636590087890625, 2.63606884765625, 2.63714111328125, 2.63913671875, 2.637537353515625, 2.63705712890625, 2.636992431640625, 2.638615478515625, 2.636080078125, 2.637097900390625, 2.636854248046875, 2.640509033203125, 2.63925146484375, 2.6389052734375, 2.6383544921875, 2.63872705078125, 2.638085205078125, 2.6382275390625, 2.637551513671875, 2.637571044921875, 2.637200439453125, 2.63806982421875, 2.637642822265625, 2.637718505859375, 2.6373642578125, 2.6366044921875, 2.638297119140625, 2.63918798828125, 2.63943896484375, 2.6392646484375, 2.639478759765625, 2.637856689453125, 2.638085205078125, 2.638904296875, 2.6383564453125, 2.639803466796875, 2.640150634765625, 2.637856689453125, 2.640256103515625, 5.4306796875, 2.637127685546875, 2.6382705078125, 2.640280517578125, 2.639151123046875, 2.639075439453125, 2.63817529296875, 2.638035888671875, 2.638784423828125, 2.639287353515625, 2.637106201171875, 2.637173828125, 2.6368388671875, 2.6376591796875, 2.635720703125, 2.63617333984375, 2.636442626953125, 2.63904150390625, 2.6368818359375, 2.635509765625, 2.636674072265625, 2.6366484375, 2.637701171875, 2.638306396484375, 2.64076806640625, 2.63686865234375, 2.6378896484375, 2.639107177734375, 2.6401259765625, 2.6394111328125, 2.63695166015625, 2.63650927734375, 2.636439453125, 2.63674267578125, 2.637043701171875, 2.637275146484375, 2.637382568359375, 2.63693115234375, 2.636669921875, 2.637486083984375, 2.636729248046875, 2.636916748046875, 2.6372802734375, 2.638066650390625, 2.637075439453125, 2.637048828125, 2.636240966796875, 2.637787109375, 2.636506103515625, 2.636660888671875, 2.63712548828125, 2.637421630859375, 2.637157470703125, 2.636517333984375, 2.637552734375, 2.636777587890625, 2.63666064453125, 2.6360966796875, 2.637487060546875, 2.636655517578125, 2.6372158203125, 2.637021240234375, 2.63826025390625, 5.43210595703125, 2.637992919921875, 2.6383369140625, 2.636921875, 2.637557861328125, 2.637305908203125, 2.63912646484375, 2.63752099609375, 2.6370693359375, 2.638551025390625, 2.6377646484375, 2.638067626953125, 2.637068359375, 2.63798876953125, 2.6374892578125, 2.63644775390625, 2.636535888671875, 2.637727783203125, 2.63798876953125, 2.637382568359375, 2.637177734375, 2.637806640625, 2.63628173828125, 2.6376171875, 2.63735302734375, 2.63743896484375, 2.6397236328125, 2.637740966796875, 2.637970458984375, 2.6363720703125, 2.636199951171875, 2.6363935546875, 2.638331787109375, 2.636755859375, 2.636908447265625, 2.636675048828125, 2.63809033203125, 2.636525634765625, 2.63657373046875, 2.63596435546875, 2.63809130859375, 2.6373671875, 2.636396484375, 2.637010009765625, 2.637177734375, 2.636538818359375, 2.637276123046875, 2.638035888671875, 2.636796875, 2.636739501953125, 2.636539794921875, 2.63828076171875, 2.63714404296875, 2.637462646484375, 2.637360107421875, 2.638277587890625, 2.636430419921875, 2.6370283203125, 2.6365234375, 2.638316650390625, 2.636391357421875, 2.636579833984375, 2.636466064453125]",tokens/s,0.3734541728532224,,,,,,main,False,False 
-4bit-awq-gemm-flash_attention_2,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,EleutherAI/gpt-neo-1.3B,,cuda,0,42,,,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.0,217063f5c507ed7cc255df7e1f64c4333a0b4dfe,4.40.2,,0.30.1,,,,1.19.2,,,,0.10.0,,,,,,,,,,,,,,,,,,,,,,,,,,,MB,1554.096128,1957.167104,0.0,1310.72,1163.82464,s,10,1.286997444152832,0.1286997444152832,0.0010709154419692667,0.12870168304443358,0.13024390106201172,0.13026507034301757,0.13028200576782226,"[0.13028623962402344, 0.13023919677734375, 0.1282391662597656, 0.12805363464355468, 0.1269244155883789, 0.12729046630859375, 0.12865869140625, 0.1287446746826172, 0.1295413055419922, 0.1290196533203125]",tokens/s,1989.1259393177147,kWh,1.4998026454163673e-06,8.218217431655106e-07,6.2844740852399835e-06,8.60609847382186e-06,tokens/kWh,29746347.985524923,MB,1554.096128,1959.264256,0.0,1312.817152,1232.774656,s,10,75.16664355468751,7.516664355468751,0.019324999879587898,7.517852294921875,7.5454459960937506,7.548604345703125,7.551131025390625,"[7.5517626953125, 7.5180322265625, 7.52255224609375, 7.52390966796875, 7.544744140625, 7.51767236328125, 7.49461669921875, 7.49106689453125, 7.50185205078125, 7.5004345703125]",tokens/s,8.3813772999142,kWh,8.89009176429398e-05,4.872418928564946e-05,0.00036540068437377195,0.0005030257913023612,tokens/kWh,125242.08716393956,,s,629,76.20247959136968,0.12114861620249542,0.01533479707935131,0.1192816619873047,0.12045271148681641,0.12072119903564453,0.24720068908691412,"[0.12236902618408203, 0.12192870330810547, 0.12110336303710938, 0.12021862030029297, 0.12056371307373047, 0.11989708709716797, 0.11929497528076172, 0.11829862213134766, 0.11859661102294922, 0.11861504364013672, 0.11821469116210938, 0.11968099212646484, 0.11959193420410157, 0.12045926666259765, 0.12021043395996094, 0.1199452133178711, 0.11970047760009765, 0.12003225708007813, 0.120163330078125, 0.11992678070068359, 0.11977011108398437, 0.11847372436523437, 0.11856588745117187, 0.11883724975585938, 0.11842662048339844, 0.11851676940917968, 0.11948745727539062, 0.11911475372314453, 0.119552001953125, 0.1215068130493164, 0.120257568359375, 0.1204623031616211, 0.12012236785888672, 0.12004659271240234, 0.12008038330078125, 0.12001689910888672, 0.11995954895019531, 0.12004761505126953, 0.12005683135986328, 0.12010189056396485, 0.12003635406494141, 0.11968307495117188, 0.12004863739013671, 0.12036300659179687, 0.11988992309570312, 0.1200558090209961, 0.1202534408569336, 0.12022169494628906, 0.12032921600341796, 0.12026265716552734, 0.12077056121826171, 0.12018585968017578, 0.11901235198974609, 0.1193861083984375, 0.11910348510742187, 0.11925606536865234, 0.11975270080566407, 0.12020941162109375, 0.11999846649169922, 0.12033843231201172, 0.11914854431152344, 0.12003942108154297, 0.2506158142089844, 0.12027391815185547, 0.11959091186523438, 0.11991551971435546, 0.1192816619873047, 0.11801292419433594, 0.11855052947998047, 0.11803955078125, 0.11798016357421875, 0.11803241729736329, 0.11873174285888671, 0.12019200134277344, 0.11996057891845703, 0.11952947235107422, 0.11857612609863281, 0.11973939514160156, 
0.11993702697753907, 0.11889049530029297, 0.11839385223388672, 0.11943730926513672, 0.11985203552246093, 0.11852082824707032, 0.11822898864746094, 0.11862739562988281, 0.11842758178710938, 0.11852185821533204, 0.1185054702758789, 0.11835596466064453, 0.11835699462890625, 0.11841331481933594, 0.11834674835205078, 0.11860070037841797, 0.12062003326416015, 0.11992473602294922, 0.12043059539794922, 0.12157746887207031, 0.12022579193115235, 0.11964927673339844, 0.12050534057617188, 0.12049407958984375, 0.12015615844726563, 0.11910553741455078, 0.12046438598632812, 0.1204510726928711, 0.12023603057861328, 0.11975885009765624, 0.12176998138427735, 0.12023705291748046, 0.12054937744140624, 0.11995750427246094, 0.12132864379882813, 0.11925917053222657, 0.1186283187866211, 0.11835699462890625, 0.11851776123046875, 0.11839794921875, 0.11879730987548828, 0.11951107025146485, 0.11866006469726563, 0.11900109100341796, 0.11836723327636718, 0.11853414154052734, 0.11843583679199218, 0.24882893371582032, 0.11982233428955077, 0.11939225769042969, 0.12049919891357422, 0.12002508544921875, 0.1198919677734375, 0.12023910522460937, 0.11994217681884765, 0.11870614624023437, 0.11836006164550782, 0.11821260833740234, 0.11851570892333985, 0.11971686553955078, 0.12000460815429688, 0.11984076690673828, 0.11931136322021485, 0.11905126190185547, 0.11986329650878906, 0.11975373077392579, 0.11980595397949219, 0.1196267547607422, 0.11956735992431641, 0.12024422454833984, 0.12106342315673828, 0.11977728271484375, 0.1197127685546875, 0.11865599822998046, 0.11831404876708984, 0.11899692535400391, 0.11852082824707032, 0.11829145812988281, 0.11852595520019531, 0.11846348571777343, 0.11836518096923829, 0.11844812774658203, 0.11833757019042969, 0.11830780792236328, 0.11964620971679688, 0.11897650909423828, 0.11840921783447265, 0.11968511962890625, 0.11841024017333984, 0.11969535827636718, 0.11970559692382812, 0.12029644775390624, 0.12001078033447266, 0.11961955261230468, 0.12051967620849609, 0.11861196899414063, 0.11842150115966797, 0.11837133026123046, 0.12028313446044922, 0.11990322875976563, 0.1188751983642578, 0.12033939361572266, 0.11918643188476563, 0.1187583999633789, 0.12003942108154297, 0.11934515380859376, 0.12007730865478515, 0.12025138854980469, 0.12019916534423829, 0.12015615844726563, 0.24848793029785157, 0.11997798156738282, 0.11991763305664062, 0.12037625885009766, 0.12008345794677734, 0.11944652557373046, 0.12011827087402344, 0.12008448028564453, 0.11987149047851563, 0.12019404602050782, 0.12016851043701172, 0.11977823638916016, 0.11987865447998047, 0.11973426818847656, 0.12008038330078125, 0.11942092895507812, 0.11997491455078126, 0.11880242919921875, 0.11979673767089843, 0.1197127685546875, 0.11939532470703125, 0.11976703643798828, 0.11997494506835937, 0.12019094085693359, 0.12064665222167968, 0.12001996612548828, 0.12060364532470703, 0.12040294647216797, 0.11997388458251954, 0.11948646545410156, 0.11998822021484375, 0.11993907165527344, 0.11955097961425781, 0.11953561401367188, 0.11927756500244141, 0.11897036743164062, 0.11999334716796875, 0.11905126190185547, 0.11846553802490234, 0.11841843414306641, 0.11854847717285157, 0.11835596466064453, 0.11830995178222656, 0.11828428649902344, 0.1203987808227539, 0.11818905639648437, 0.1193707504272461, 0.11814604949951171, 0.11847782135009766, 0.1183078384399414, 0.11799350738525391, 0.11804771423339844, 0.11817372894287109, 0.11848700714111328, 0.11852082824707032, 0.11849932861328125, 0.12020735931396484, 0.120089599609375, 0.12004761505126953, 0.12012134552001953, 
0.11869286346435547, 0.11855974578857421, 0.11863142395019531, 0.24572006225585938, 0.1183477783203125, 0.11846041870117187, 0.11811737823486328, 0.1181470718383789, 0.11830067443847657, 0.11928985595703125, 0.11911065673828125, 0.1185771484375, 0.11835187530517578, 0.12018994903564453, 0.12008550262451172, 0.11990534210205078, 0.1200219497680664, 0.12004659271240234, 0.11994931030273437, 0.12035686492919923, 0.12041522979736329, 0.12053298950195312, 0.12031999969482422, 0.12058419036865234, 0.12013158416748047, 0.12075929260253906, 0.11833036804199219, 0.1206794204711914, 0.12085862731933594, 0.12161433410644532, 0.11919667053222656, 0.1187041244506836, 0.11844300842285156, 0.11833650970458984, 0.11833856201171875, 0.1186355209350586, 0.11820236968994141, 0.12034559631347656, 0.12019712066650391, 0.12016947174072265, 0.11850035095214843, 0.12005375671386719, 0.12051763153076171, 0.11887615966796874, 0.11807539367675782, 0.11823411560058594, 0.11905741119384766, 0.12032307434082032, 0.1189775390625, 0.12029132843017579, 0.12084019470214843, 0.12052992248535156, 0.1207357406616211, 0.12054220581054688, 0.12078284454345703, 0.12055244445800781, 0.12059852600097656, 0.12064358520507812, 0.12082486724853515, 0.1207326431274414, 0.12082278442382813, 0.12053196716308594, 0.12073065948486328, 0.12090262603759766, 0.12064870452880859, 0.12058112335205078, 0.24601292419433593, 0.11817881774902343, 0.11808255767822265, 0.11800780487060547, 0.118255615234375, 0.11830067443847657, 0.11829145812988281, 0.11941478729248046, 0.12005375671386719, 0.1198202896118164, 0.12005785369873047, 0.11806412506103516, 0.11867750549316407, 0.11829145812988281, 0.11833548736572265, 0.11830989074707031, 0.11806822204589844, 0.11833241271972657, 0.11837747192382812, 0.11845120239257813, 0.11834572601318359, 0.11841228485107422, 0.11827609252929687, 0.11805286407470703, 0.11896729278564454, 0.11836313629150391, 0.11981619262695313, 0.12392044830322266, 0.1205656967163086, 0.12015821075439453, 0.11990016174316406, 0.12003533172607422, 0.11934003448486329, 0.1199974365234375, 0.1193154525756836, 0.11959500885009766, 0.11863040161132812, 0.1188106231689453, 0.12067327880859376, 0.11914444732666016, 0.11830477142333984, 0.11853721618652344, 0.1185269775390625, 0.11941990661621094, 0.11858329772949219, 0.1186693115234375, 0.12036608123779297, 0.12004557037353515, 0.1200558090209961, 0.12050841522216797, 0.12022681427001954, 0.12039065551757812, 0.12011212921142578, 0.12010495758056641, 0.12036608123779297, 0.1201817626953125, 0.12043673706054688, 0.12033843231201172, 0.12007526397705078, 0.12000972747802735, 0.12006505584716796, 0.11996975708007812, 0.11998822021484375, 0.2494791717529297, 0.12030156707763671, 0.11901952362060547, 0.11824127960205078, 0.1192816619873047, 0.11887308502197266, 0.11826175689697266, 0.11869388580322265, 0.11826687622070313, 0.11836211395263672, 0.11810918426513672, 0.11887104034423829, 0.11972198486328126, 0.11960012817382812, 0.1192468490600586, 0.118002685546875, 0.11875942230224609, 0.11941478729248046, 0.11890585327148437, 0.11843382263183594, 0.1191526107788086, 0.11959500885009766, 0.11970668792724609, 0.11957036590576171, 0.11933695983886719, 0.11909327697753906, 0.11842966461181641, 0.11905228424072266, 0.11911888122558593, 0.11894780731201172, 0.11848089599609375, 0.1181890869140625, 0.11840406036376953, 0.1190297622680664, 0.11982848358154297, 0.11972096252441407, 0.11864371490478516, 0.11924582672119141, 0.11929804992675781, 0.11825459289550781, 0.11951718139648437, 
0.11916287994384765, 0.11947417449951171, 0.11800064086914062, 0.11845017242431641, 0.11818089294433594, 0.11930210876464843, 0.1199810562133789, 0.11853517150878906, 0.11870310211181641, 0.11945164489746093, 0.11844096374511719, 0.1184194564819336, 0.118508544921875, 0.11926220703125, 0.12017356872558593, 0.11892838287353516, 0.1193021469116211, 0.11871849822998047, 0.11844911956787109, 0.11832115173339844, 0.11867545318603516, 0.11844403076171875, 0.24766259765625, 0.11916802978515625, 0.12123133087158203, 0.12028313446044922, 0.11920588684082031, 0.11884953308105468, 0.11862528228759765, 0.11994931030273437, 0.11886182403564453, 0.11885363006591797, 0.12013670349121094, 0.11913011169433593, 0.11992473602294922, 0.11888435363769531, 0.11946598052978516, 0.11882707214355469, 0.1207070083618164, 0.11865702056884765, 0.11839590454101563, 0.11839078521728516, 0.11947724914550781, 0.11919769287109375, 0.11886080169677735, 0.11848601531982422, 0.11876966094970703, 0.11810099029541016, 0.11880960083007812, 0.11817369842529298, 0.11927142333984375, 0.11965235137939453, 0.118761474609375, 0.11863654327392578, 0.11901439666748047, 0.11827814483642578, 0.11898368072509766, 0.11854233551025391, 0.11844915008544922, 0.11848397064208985, 0.11839078521728516, 0.11844608306884766, 0.11859455871582031, 0.11865190124511718, 0.11820441436767579, 0.11907686614990234, 0.11970867156982422, 0.11921715545654298, 0.11824230194091796, 0.11921817779541016, 0.11916390228271484, 0.11929804992675781, 0.11828326416015625, 0.11838361358642578, 0.11848499298095704, 0.11860991668701172, 0.11808153533935548, 0.11826588439941406, 0.11823715209960937, 0.11888639831542969, 0.11884031677246094, 0.11812454223632812, 0.11833446502685546, 0.11847577667236328, 0.11946905517578126, 0.24866712951660155, 0.11849318695068359, 0.11980188751220704, 0.11915773010253906, 0.11846041870117187, 0.1203589096069336, 0.1205381088256836, 0.11978342437744141, 0.11976601409912109, 0.11947417449951171, 0.11962163543701172, 0.11930316925048828, 0.11854541015625, 0.11897344207763672, 0.11885158538818359, 0.11875020599365234, 0.1181839370727539, 0.11931443023681641, 0.11823616027832032, 0.11900313568115234, 0.11816345977783203, 0.11854847717285157, 0.11989708709716797, 0.11902668762207032, 0.11941683197021484, 0.11941580963134765, 0.11852185821533204, 0.11982540893554687, 0.11974553680419922, 0.11874918365478515, 0.11918950653076171, 0.11970355224609375, 0.11946189117431641, 0.11857100677490234, 0.11914035034179687, 0.11940966033935548, 0.1185689926147461, 0.11831394958496094, 0.11820441436767579, 0.11945574188232422, 0.11997388458251954, 0.1203240966796875, 0.11903180694580077, 0.11818495941162109, 0.11810201263427735, 0.11788288116455078, 0.11807539367675782, 0.11814604949951171, 0.11880038452148438, 0.11919872283935547, 0.11881983947753906, 0.11975987243652343, 0.11975373077392579, 0.11953568267822266, 0.12003424072265625, 0.11991961669921875, 0.12044185638427735, 0.11880550384521485, 0.11889356994628907, 0.11842253112792969, 0.11843379211425781, 0.11815936279296875, 0.11811634826660156, 0.24846543884277345, 0.11927139282226562, 0.11798732757568359, 0.11792998504638671, 0.11840512084960937, 0.11863859558105469, 0.11815321350097656, 0.11805184173583984, 0.11871437072753906, 0.11836109161376954, 0.1180794906616211, 0.11815424346923828, 0.1181317138671875, 0.11853004455566406, 0.11842150115966797, 0.11837542724609375, 0.11854847717285157, 0.11832217407226563, 0.11862220764160156, 0.11864985656738282, 0.11943526458740235, 0.11942912292480469, 
0.11993395233154297, 0.12021145629882812, 0.12013568115234376, 0.12069990539550782, 0.12175667572021484, 0.12003942108154297, 0.11987763214111329, 0.11907788848876953, 0.1194076156616211, 0.11910553741455078, 0.11873177337646484, 0.11934413146972657, 0.11961958312988281, 0.11883519744873047, 0.11877273559570313, 0.11876761627197266, 0.12034662628173828, 0.12008140563964843, 0.11964415740966797, 0.12007014465332032, 0.12065280151367187, 0.11880754852294922, 0.11892736053466797, 0.11855974578857421, 0.11855052947998047, 0.11903590393066406, 0.11845938873291016, 0.11849523162841796, 0.1184716796875, 0.11835289764404297, 0.11853619384765625, 0.11834368133544922, 0.11937382507324219, 0.11912806701660156, 0.11848806762695313, 0.11961753845214844, 0.1190113296508789, 0.11965132904052735, 0.11860281372070312, 0.11934611511230468, 0.11991145324707031]",tokens/s,8.254324575433342,,,,,,main,False,False -4bit-awq-gemm-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,tiiuae/falcon-7b,tiiuae/falcon-7b,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - self.load_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 559, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3704, in from_pretrained - config = cls._autoset_attn_implementation( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1481, in _autoset_attn_implementation - cls._check_and_enable_flash_attn_2( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1572, in _check_and_enable_flash_attn_2 - raise ValueError( -ValueError: FalconForCausalLM does not support Flash Attention 2.0 yet. 
Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmpxmn1wkr8/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemm-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,huggyllama/llama-65b,huggyllama/llama-65b,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - self.load_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3904, in from_pretrained - dispatch_model(model, **device_map_kwargs) - File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 489, in dispatch_model - model.to(device) - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 2796, in to - return super().to(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1173, in to - return self._apply(convert) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 779, in _apply - module._apply(fn) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 779, in _apply - module._apply(fn) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 779, in _apply - module._apply(fn) - [Previous line repeated 2 more times] - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 853, in _apply - self._buffers[key] = fn(buf) - File 
""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1159, in convert - return t.to( -torch.cuda.OutOfMemoryError: CUDA out of memory. Tried to allocate 86.00 MiB. GPU - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemm-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,1,1,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 304, in hf_raise_for_status - response.raise_for_status() - File ""/usr/local/lib/python3.10/dist-packages/requests/models.py"", line 1024, in raise_for_status - raise HTTPError(http_error_msg, response=self) -requests.exceptions.HTTPError: 404 Client Error: Not Found for url: https://huggingface.co/1/resolve/main/config.json - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1221, in hf_hub_download - return _hf_hub_download_to_cache_dir( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1325, in _hf_hub_download_to_cache_dir - _raise_on_head_call_error(head_call_error, force_download, local_files_only) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1823, in _raise_on_head_call_error - raise head_call_error - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1722, in _get_metadata_or_catch_error - metadata = get_hf_file_metadata(url=url, proxies=proxies, timeout=etag_timeout, headers=headers) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1645, in get_hf_file_metadata - r = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 372, in _request_wrapper - response = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 396, in 
_request_wrapper - hf_raise_for_status(response) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status - raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-669495d1-2b15a10d7e57708f3509f03a;6417b781-f373-4b15-803a-c6ba2cba3507) - -Repository Not Found for url: https://huggingface.co/1/resolve/main/config.json. -Please make sure you specified the correct `repo_id` and `repo_type`. -If you are trying to access a private or gated repo, make sure you are authenticated. - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 425, in cached_file - raise EnvironmentError( -OSError: 1 is not a local folder and is not a valid model identifier listed on 'https://huggingface.co/models' -If this is a private repository, make sure to pass a token having permission to this repo either by logging in with `huggingface-cli login` or by passing `token=` - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemm-flash_attention_2,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,internlm/internlm-20b,internlm/internlm-20b,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.0,,0.30.1,,,,1.19.2,,,,0.11.1,,,,,,,,,,,,,,,,,,,,,,,,,,,MB,4499.935232,14621.868032,0.0,13975.420928,13365.937152,s,10,16.410234252929687,1.6410234252929687,0.0022480805464608366,1.6403901367187501,1.6418083374023436,1.6447224060058594,1.6470536608886719,"[1.647636474609375, 1.63996875, 1.6411607666015624, 1.640802978515625, 
1.64035791015625, 1.6397760009765625, 1.6397884521484376, 1.6398060302734374, 1.6405145263671874, 1.64042236328125]",tokens/s,156.00021063337155,kWh,1.9377978377872043e-05,1.0617021217130968e-05,9.108710064739678e-05,0.00012108210024239978,tokens/kWh,2114267.9181109504,MB,4499.935232,14621.868032,0.0,13975.420928,13814.413824,s,10,975.3211015625,97.53211015625,0.011202982505412533,97.52955078125,97.54984375,97.55100390625,97.55193203125,"[97.540578125, 97.52715625, 97.522640625, 97.5219296875, 97.5304609375, 97.51653125, 97.5521640625, 97.5495859375, 97.5314140625, 97.528640625]",tokens/s,0.6459411151780855,kWh,0.0011513337257504463,0.0006310327948099438,0.005346427777138807,0.007128794297699197,tokens/kWh,8837.399056434146,,s,629,988.5350541992196,1.5715978604121124,0.19496209506894618,1.548072998046875,1.5490094970703125,1.549420166015625,3.189316650390625,"[1.547826171875, 1.5488624267578126, 1.5484969482421875, 1.5483443603515625, 1.5481630859375, 1.5493089599609375, 1.548452880859375, 1.5482459716796875, 1.5481619873046875, 1.5497840576171875, 1.54772998046875, 1.5493929443359375, 1.547630615234375, 1.5480760498046875, 1.547937744140625, 1.5482552490234376, 1.5470531005859376, 1.5489443359375, 1.5480074462890625, 1.548041259765625, 1.5472659912109374, 1.549042724609375, 1.5482132568359375, 1.5479869384765625, 1.54851123046875, 1.5473919677734376, 1.5474892578125, 1.547378662109375, 1.548716064453125, 1.548441650390625, 1.5491409912109375, 1.5472435302734375, 1.5476378173828125, 1.5480125732421874, 1.5484580078125, 1.5489659423828126, 1.5496949462890626, 1.5483074951171876, 1.5473623046875, 1.547552734375, 1.547135009765625, 1.548559326171875, 1.5482398681640626, 1.5498638916015626, 1.54827880859375, 1.5483668212890624, 1.5489197998046875, 1.548549072265625, 1.54787939453125, 1.547683837890625, 1.549091796875, 1.5491134033203124, 1.548537841796875, 1.548310546875, 1.547894775390625, 1.54776171875, 1.5491932373046875, 1.5472711181640626, 1.547672607421875, 1.5473602294921875, 1.5482440185546875, 1.548180419921875, 3.190286376953125, 1.5480146484375, 1.5486883544921874, 1.5482091064453125, 1.54775244140625, 1.5473919677734376, 1.547894775390625, 1.5473858642578124, 1.548252197265625, 1.5479337158203126, 1.54754052734375, 1.5482122802734375, 1.54810986328125, 1.5476787109375, 1.5477442626953124, 1.54800634765625, 1.5478446044921874, 1.5483709716796874, 1.5485460205078125, 1.5486474609375, 1.5467080078125, 1.546982421875, 1.5480340576171876, 1.5470633544921875, 1.5475240478515624, 1.5482020263671874, 1.5471483154296874, 1.5476029052734375, 1.548863525390625, 1.5478087158203124, 1.5482347412109374, 1.54766845703125, 1.54899658203125, 1.5479388427734375, 1.547747314453125, 1.547916259765625, 1.5490928955078125, 1.548327880859375, 1.548775390625, 1.5474073486328126, 1.547236328125, 1.5476397705078124, 1.5474749755859376, 1.5477001953125, 1.5486075439453124, 1.54838427734375, 1.5483780517578125, 1.5483616943359375, 1.5482193603515626, 1.548074951171875, 1.547906005859375, 1.549001708984375, 1.5483565673828126, 1.5475179443359375, 1.5475548095703124, 1.5477432861328124, 1.54893310546875, 1.54842724609375, 1.5487344970703125, 1.5480279541015625, 1.5475323486328125, 1.5480032958984375, 1.5488101806640624, 3.191152587890625, 1.5476182861328125, 1.548537841796875, 1.5480074462890625, 1.54785791015625, 1.548368896484375, 1.5487181396484375, 1.548142578125, 1.5480279541015625, 1.547535400390625, 1.5482972412109375, 1.54859521484375, 1.5482921142578125, 1.5488890380859375, 1.5477381591796875, 
1.5481619873046875, 1.54773193359375, 1.54867919921875, 1.547737060546875, 1.5486033935546875, 1.5482552490234376, 1.5477698974609375, 1.5484517822265624, 1.5468912353515625, 1.5473387451171876, 1.5476397705078124, 1.5484375, 1.547588623046875, 1.5468543701171875, 1.54747802734375, 1.547462646484375, 1.5476142578125, 1.5486832275390625, 1.5478487548828126, 1.54682373046875, 1.547431884765625, 1.54697216796875, 1.5476439208984376, 1.547420654296875, 1.547546630859375, 1.5470274658203125, 1.5474892578125, 1.547757568359375, 1.547947021484375, 1.5482930908203125, 1.549538330078125, 1.547505615234375, 1.548253173828125, 1.549297607421875, 1.547484130859375, 1.548220458984375, 1.548078125, 1.547826171875, 1.54688916015625, 1.5470919189453125, 1.5475538330078125, 1.54765625, 1.5476009521484375, 1.5491522216796876, 1.5480196533203125, 1.5481773681640625, 1.5486719970703124, 1.54946044921875, 3.19001904296875, 1.5467274169921874, 1.5485091552734376, 1.54712060546875, 1.5472425537109376, 1.548200927734375, 1.5474647216796875, 1.5484794921875, 1.5470428466796875, 1.5474134521484375, 1.5471728515625, 1.54741455078125, 1.547334716796875, 1.5476378173828125, 1.5483443603515625, 1.5476234130859374, 1.5473817138671875, 1.548241943359375, 1.548495849609375, 1.5476910400390624, 1.5492137451171875, 1.5486300048828125, 1.547925537109375, 1.5481497802734374, 1.5477288818359376, 1.547779052734375, 1.5494564208984376, 1.5475230712890624, 1.547504638671875, 1.5472803955078125, 1.5474217529296874, 1.54842724609375, 1.547715576171875, 1.54804736328125, 1.54720458984375, 1.5474442138671876, 1.5468114013671874, 1.548011474609375, 1.5478446044921874, 1.5482470703125, 1.548304443359375, 1.5482255859375, 1.549054931640625, 1.548583984375, 1.5483463134765625, 1.5490682373046876, 1.5485091552734376, 1.5480699462890626, 1.5479337158203126, 1.5475302734375, 1.5479091796875, 1.548273681640625, 1.5492208251953126, 1.547388916015625, 1.54747802734375, 1.5476285400390626, 1.54806787109375, 1.548291015625, 1.5489659423828126, 1.5477606201171874, 1.5477279052734374, 1.5477841796875, 1.5487047119140624, 3.189357666015625, 1.54832177734375, 1.54777294921875, 1.5475640869140626, 1.548190673828125, 1.54800439453125, 1.54889111328125, 1.548938232421875, 1.547832275390625, 1.5472568359375, 1.5471380615234376, 1.5473438720703125, 1.5473499755859375, 1.548291015625, 1.5484302978515625, 1.547525146484375, 1.54766748046875, 1.54760400390625, 1.54752001953125, 1.547726806640625, 1.54796435546875, 1.5488388671875, 1.5483873291015624, 1.54832177734375, 1.54785791015625, 1.548337158203125, 1.548981201171875, 1.5488040771484375, 1.5480023193359376, 1.547864013671875, 1.5482235107421876, 1.54768994140625, 1.5485205078125, 1.5483873291015624, 1.5479071044921875, 1.54815380859375, 1.5473756103515626, 1.5484302978515625, 1.5488572998046874, 1.54754248046875, 1.548105712890625, 1.5480648193359374, 1.548222412109375, 1.5480238037109375, 1.547357177734375, 1.5477821044921876, 1.5485142822265625, 1.54811181640625, 1.5482706298828126, 1.547442138671875, 1.547783203125, 1.548359619140625, 1.54857373046875, 1.5488572998046874, 1.5484447021484375, 1.5478978271484376, 1.5483546142578124, 1.5478446044921874, 1.54789990234375, 1.548568603515625, 1.5481129150390625, 1.5489515380859376, 1.5481129150390625, 3.18936376953125, 1.5473162841796875, 1.548347412109375, 1.5468809814453126, 1.5473695068359374, 1.5474237060546876, 1.5484447021484375, 1.548760009765625, 1.5489996337890626, 1.547483154296875, 1.547210693359375, 1.5470223388671875, 1.5469271240234375, 
1.5475926513671876, 1.54830029296875, 1.5476080322265624, 1.547652099609375, 1.5482685546875, 1.547872314453125, 1.5475855712890625, 1.5479234619140625, 1.5485009765625, 1.54821630859375, 1.54749853515625, 1.54756298828125, 1.5474810791015625, 1.5483934326171875, 1.5483914794921876, 1.5475958251953126, 1.5481610107421875, 1.5478609619140624, 1.5482193603515626, 1.5480648193359374, 1.5481988525390624, 1.5481087646484375, 1.5474114990234376, 1.54781494140625, 1.5477237548828124, 1.5477821044921876, 1.5474083251953126, 1.5495556640625, 1.5478026123046875, 1.54785693359375, 1.547255859375, 1.54735302734375, 1.5477073974609374, 1.547969482421875, 1.5477288818359376, 1.5476070556640624, 1.5480606689453125, 1.548168212890625, 1.5479029541015625, 1.5480186767578126, 1.5485255126953126, 1.5468739013671875, 1.546857421875, 1.54753125, 1.5483514404296874, 1.54785888671875, 1.548099609375, 1.54840478515625, 1.54806884765625, 1.5480648193359374, 3.188536376953125, 1.547326416015625, 1.5486146240234375, 1.5495721435546874, 1.5473060302734376, 1.5487816162109376, 1.549453369140625, 1.5489608154296874, 1.5496785888671876, 1.5480616455078124, 1.54798486328125, 1.5475947265625, 1.549253662109375, 1.5481968994140625, 1.548053466796875, 1.549401123046875, 1.5481220703125, 1.547989990234375, 1.548347412109375, 1.5480863037109376, 1.548441650390625, 1.5491942138671875, 1.5479951171875, 1.5477596435546874, 1.5479132080078124, 1.5479019775390626, 1.5476695556640625, 1.5486505126953125, 1.548642333984375, 1.5483924560546876, 1.548464111328125, 1.5485399169921874, 1.5489935302734374, 1.5492823486328124, 1.549126708984375, 1.5488082275390624, 1.5485020751953125, 1.5481436767578125, 1.5480247802734375, 1.5473438720703125, 1.5484078369140626, 1.5497154541015625, 1.548389404296875, 1.548622802734375, 1.5483453369140625, 1.5479080810546875, 1.548316650390625, 1.548980224609375, 1.5480084228515625, 1.5479080810546875, 1.5483177490234374, 1.54800634765625, 1.5485972900390625, 1.5479234619140625, 1.5486607666015626, 1.548347412109375, 1.5482081298828125, 1.547925537109375, 1.548674072265625, 1.548291015625, 1.55034423828125, 1.54886865234375, 1.5481036376953126, 3.189357666015625, 1.5474586181640626, 1.5491717529296876, 1.5481129150390625, 1.548072998046875, 1.5476490478515625, 1.5485419921875, 1.5480074462890625, 1.54784765625, 1.5483238525390626, 1.547341796875, 1.547925537109375, 1.547810791015625, 1.5478056640625, 1.5477412109375, 1.54859521484375, 1.5481068115234375, 1.548200927734375, 1.547937744140625, 1.547672607421875, 1.547968505859375, 1.5481712646484376, 1.5496058349609374, 1.54743603515625, 1.5484375, 1.548396484375, 1.5478035888671875, 1.5489986572265626, 1.54951171875, 1.547946044921875, 1.5478077392578125, 1.5477493896484376, 1.5488133544921876, 1.5480872802734376, 1.5481773681640625, 1.54931103515625, 1.5479542236328125, 1.5490611572265625, 1.547883544921875, 1.547809814453125, 1.5479930419921875, 1.549116455078125, 1.5483084716796875, 1.54863720703125, 1.54834130859375, 1.548610595703125, 1.5488941650390624, 1.5497103271484376, 1.5490672607421876, 1.5479500732421876, 1.5488173828125, 1.5487139892578126, 1.5484384765625, 1.5487242431640624, 1.5489197998046875, 1.5499970703125, 1.5479736328125, 1.549170654296875, 1.548205078125, 1.5482030029296876, 1.54952294921875, 1.548652587890625, 1.5488470458984376, 3.190570068359375, 1.5475732421875, 1.54899560546875, 1.5490723876953125, 1.548262451171875, 1.5477442626953124, 1.5488890380859375, 1.5484302978515625, 1.5486300048828125, 1.548895263671875, 
1.5487139892578126, 1.54817333984375, 1.548970947265625, 1.5481988525390624, 1.5474852294921875, 1.548536865234375, 1.5487886962890625, 1.548294189453125, 1.54817333984375, 1.547431884765625, 1.54737353515625, 1.5477801513671876, 1.5483740234375, 1.547725830078125, 1.547284423828125, 1.5479593505859375, 1.5474329833984375, 1.5482255859375, 1.54836376953125, 1.54859521484375, 1.5475343017578125, 1.5487181396484375, 1.548205078125, 1.5478056640625, 1.5484302978515625, 1.54993359375, 1.5491451416015625, 1.548178466796875, 1.5477718505859375, 1.5477421875, 1.5473060302734376, 1.548078125, 1.548304443359375, 1.5474052734375, 1.5479808349609374, 1.547030517578125, 1.547672607421875, 1.5480975341796874, 1.5481231689453125, 1.547429931640625, 1.5478343505859375, 1.5476173095703125, 1.5473643798828125, 1.547039794921875, 1.5472691650390624, 1.5484384765625, 1.5475506591796875, 1.548432373046875, 1.54777294921875, 1.5470867919921876, 1.54768896484375, 1.5496007080078125, 1.548421142578125, 3.189211181640625, 1.5468963623046874, 1.547869140625, 1.54876416015625, 1.5484989013671875, 1.5492301025390625, 1.5475916748046874, 1.546978271484375, 1.5469158935546874, 1.54728759765625, 1.5486658935546875, 1.5481077880859375, 1.5487529296875, 1.54709912109375, 1.547663330078125, 1.5471912841796875, 1.54923828125, 1.54927001953125, 1.54796435546875, 1.5475865478515625, 1.5476448974609376, 1.547410400390625, 1.548801025390625, 1.5483770751953125, 1.547400146484375, 1.5478026123046875, 1.54826953125, 1.547557861328125, 1.547953125, 1.549897705078125, 1.5482357177734376, 1.5481138916015624, 1.547904052734375, 1.5478404541015625, 1.5473480224609375, 1.54821630859375, 1.5495045166015624, 1.547509765625, 1.5476746826171874, 1.546893310546875, 1.54714013671875, 1.5480177001953126, 1.548304443359375, 1.547969482421875, 1.547404296875, 1.5485450439453126, 1.547778076171875, 1.5482039794921876, 1.549432861328125, 1.54890234375, 1.5484302978515625, 1.5484652099609375, 1.547947998046875, 1.5474166259765625, 1.547483154296875, 1.5496939697265626, 1.5482757568359375, 1.547537353515625, 1.5478927001953124, 1.54752001953125, 1.547925537109375, 1.5486146240234375, 1.5490406494140625]",tokens/s,0.6362950887053097,,,,,,,, -4bit-awq-gemm-flash_attention_2,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,internlm/internlm2-20b,internlm/internlm2-20b,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.0,,0.30.1,,,,1.19.2,,,,0.11.1,,,,,,,,,,,,,,,,,,,,,,,,,,,MB,4008.419328,15760.621568,0.0,15114.174464,14046.123008,s,10,15.848047607421876,1.5848047607421876,0.001058458314234293,1.584490295410156,1.5859812255859376,1.5864652832031252,1.5868525292968751,"[1.5851588134765624, 1.583447265625, 1.5845269775390625, 1.583828125, 1.58445361328125, 1.583693603515625, 1.5843594970703125, 1.5857567138671875, 1.5869493408203126, 
1.5858736572265626]",tokens/s,161.53409324698862,kWh,1.869280167751842e-05,1.0243654597979911e-05,8.793465368099884e-05,0.00011687110995649717,tokens/kWh,2190447.23794692,MB,4008.419328,15760.621568,0.0,15114.174464,14170.904576,s,10,927.2718203125,92.72718203125001,0.005797509915113206,92.7283046875,92.73256640625,92.73465820312501,92.736331640625,"[92.7307421875, 92.72909375, 92.73675, 92.7321015625, 92.7264921875, 92.7241015625, 92.7158671875, 92.7194609375, 92.7296953125, 92.727515625]",tokens/s,0.6794124292353493,kWh,0.0010946948910421795,0.0005999896856933628,0.005209015139431006,0.006903699716166547,tokens/kWh,9125.541751543959,,s,629,940.0627094726563,1.494535309177514,0.18825786848907494,1.47182177734375,1.4724240966796875,1.4726346923828124,3.055808037109375,"[1.4722620849609376, 1.4717921142578125, 1.47181982421875, 1.4719560546875, 1.4718125, 1.4717183837890624, 1.472183349609375, 1.47190576171875, 1.4715218505859375, 1.47171630859375, 1.47156787109375, 1.4714951171875, 1.471922119140625, 1.4721187744140625, 1.4715177001953126, 1.47153515625, 1.4718924560546875, 1.4718126220703125, 1.4714593505859375, 1.4722181396484375, 1.4722314453125, 1.4714234619140625, 1.471951904296875, 1.4719990234375, 1.471762451171875, 1.4720880126953124, 1.472605224609375, 1.471382568359375, 1.4716119384765625, 1.471868896484375, 1.4716876220703126, 1.4716446533203125, 1.472257080078125, 1.4723829345703126, 1.472047119140625, 1.4720819091796875, 1.47234716796875, 1.4716507568359376, 1.47162109375, 1.471951904296875, 1.472247802734375, 1.472111572265625, 1.471752197265625, 1.4720460205078125, 1.4718433837890625, 1.471847412109375, 1.473238037109375, 1.4716826171875, 1.4716129150390624, 1.471711181640625, 1.4720286865234375, 1.4715771484375, 1.47129443359375, 1.4723441162109374, 1.47237890625, 1.4720123291015625, 1.4720389404296874, 1.4725570068359375, 1.4715821533203124, 1.47163232421875, 1.472152587890625, 1.4719139404296875, 3.055659912109375, 1.4719190673828124, 1.4714122314453124, 1.4715750732421875, 1.4726441650390625, 1.4716497802734374, 1.4719395751953126, 1.4714327392578126, 1.4716334228515624, 1.4717982177734374, 1.4725570068359375, 1.472247802734375, 1.4715555419921875, 1.4713795166015624, 1.47167431640625, 1.4718515625, 1.471489990234375, 1.4721033935546874, 1.4721280517578126, 1.471662109375, 1.47139990234375, 1.4714696044921876, 1.471595458984375, 1.4714849853515626, 1.472489501953125, 1.4717716064453126, 1.471542236328125, 1.4713282470703124, 1.4715074462890625, 1.4714327392578126, 1.4716241455078125, 1.4727454833984375, 1.4718914794921876, 1.472322509765625, 1.4719334716796875, 1.47182177734375, 1.4713436279296874, 1.4722703857421875, 1.47228466796875, 1.471656982421875, 1.471455322265625, 1.4724454345703124, 1.4727833251953124, 1.472058349609375, 1.472762939453125, 1.4725816650390624, 1.47173583984375, 1.4722109375, 1.4722037353515625, 1.4719927978515626, 1.4716068115234375, 1.4723389892578125, 1.4720194091796874, 1.4716221923828126, 1.4717747802734376, 1.47186279296875, 1.4714490966796876, 1.471636474609375, 1.4723154296875, 1.471784912109375, 1.47171533203125, 1.4717327880859374, 1.472248779296875, 3.057259521484375, 1.47224267578125, 1.4718597412109375, 1.472489501953125, 1.4721064453125, 1.4717808837890625, 1.471899658203125, 1.4724228515625, 1.472541748046875, 1.47203173828125, 1.472416748046875, 1.47173583984375, 1.471767578125, 1.4718350830078124, 1.472779296875, 1.4719927978515626, 1.47137841796875, 1.4718525390625, 1.4718648681640625, 1.4717603759765625, 1.4714869384765625, 
1.47251611328125, 1.4720552978515624, 1.47161083984375, 1.4713856201171875, 1.4714736328125, 1.4710916748046876, 1.471134765625, 1.4725887451171875, 1.4723441162109374, 1.4716630859375, 1.471782958984375, 1.4716395263671875, 1.4718515625, 1.4722734375, 1.472278564453125, 1.4714920654296875, 1.47167431640625, 1.47209423828125, 1.4719703369140624, 1.47245263671875, 1.4724505615234376, 1.47195703125, 1.47188525390625, 1.472077880859375, 1.4723031005859375, 1.4720552978515624, 1.4719139404296875, 1.4722252197265624, 1.4726502685546874, 1.4722242431640624, 1.471540283203125, 1.472048095703125, 1.4718955078125, 1.4713743896484375, 1.47283154296875, 1.4720828857421875, 1.472143310546875, 1.472288818359375, 1.4720460205078125, 1.4718065185546876, 1.471910888671875, 1.472711669921875, 3.055857666015625, 1.471562744140625, 1.4717071533203125, 1.471541259765625, 1.4722652587890626, 1.47290625, 1.4720972900390625, 1.4717244873046875, 1.4717379150390626, 1.47239111328125, 1.4715084228515625, 1.4723399658203125, 1.47213720703125, 1.47205224609375, 1.472205810546875, 1.4718515625, 1.471974365234375, 1.4717972412109375, 1.472838623046875, 1.472647216796875, 1.47232666015625, 1.47219970703125, 1.4716876220703126, 1.4716630859375, 1.4720552978515624, 1.4729554443359374, 1.471932373046875, 1.471942626953125, 1.471614990234375, 1.4719990234375, 1.4717265625, 1.4722620849609376, 1.47253857421875, 1.47146337890625, 1.4715965576171874, 1.4714061279296875, 1.471909912109375, 1.4716488037109374, 1.4721239013671874, 1.472101318359375, 1.4714869384765625, 1.471604736328125, 1.4719661865234375, 1.4716927490234375, 1.4718228759765626, 1.471889404296875, 1.4719180908203124, 1.4718177490234374, 1.4717174072265624, 1.471542236328125, 1.471983642578125, 1.4715238037109375, 1.4717327880859374, 1.472067626953125, 1.471873046875, 1.4716876220703126, 1.4719764404296876, 1.4723194580078125, 1.4719017333984374, 1.4723870849609375, 1.4717716064453126, 1.4715146484375, 1.4714757080078125, 3.056364501953125, 1.4723338623046875, 1.472585693359375, 1.4714593505859375, 1.471646728515625, 1.4715494384765626, 1.4717880859375, 1.472016357421875, 1.4726204833984375, 1.4721822509765625, 1.4717747802734376, 1.4717244873046875, 1.4717716064453126, 1.47205322265625, 1.4717276611328125, 1.472067626953125, 1.471805419921875, 1.4715555419921875, 1.471551513671875, 1.4714962158203124, 1.47133544921875, 1.4717747802734376, 1.471994873046875, 1.47158837890625, 1.471520751953125, 1.4713466796875, 1.4714747314453125, 1.4717276611328125, 1.4718135986328125, 1.4724290771484374, 1.4716488037109374, 1.4714132080078124, 1.4716231689453125, 1.4723389892578125, 1.4713077392578124, 1.47160888671875, 1.471899658203125, 1.4723778076171874, 1.471910888671875, 1.4719447021484375, 1.47195703125, 1.47152685546875, 1.472251953125, 1.471962158203125, 1.4716077880859375, 1.471457275390625, 1.4723092041015624, 1.4719969482421875, 1.4716549072265626, 1.4718648681640625, 1.47160986328125, 1.4718760986328125, 1.471826904296875, 1.4713251953125, 1.47182177734375, 1.4716077880859375, 1.472786376953125, 1.472522216796875, 1.4720809326171875, 1.4717982177734374, 1.4721248779296876, 1.471731689453125, 1.4721033935546874, 3.055680419921875, 1.4718280029296875, 1.4716129150390624, 1.4713046875, 1.4718914794921876, 1.4720511474609375, 1.4723931884765624, 1.471605712890625, 1.4720911865234374, 1.47203173828125, 1.4718822021484375, 1.4717869873046876, 1.4723829345703126, 1.4720809326171875, 1.471552490234375, 1.4721136474609375, 1.471494140625, 1.4717767333984375, 1.471177734375, 
1.472689208984375, 1.4716497802734374, 1.4718802490234375, 1.4714306640625, 1.472184326171875, 1.4717860107421874, 1.47194677734375, 1.47276904296875, 1.4713814697265626, 1.4714398193359375, 1.471753173828125, 1.472689208984375, 1.472320556640625, 1.472069580078125, 1.47273828125, 1.4716497802734374, 1.4718289794921875, 1.4717060546875, 1.471951904296875, 1.4716866455078126, 1.47211669921875, 1.471942626953125, 1.4714388427734375, 1.471298583984375, 1.4713538818359375, 1.4712965087890626, 1.4712689208984375, 1.4717244873046875, 1.472357421875, 1.471731689453125, 1.4715218505859375, 1.471215576171875, 1.4711285400390626, 1.4713046875, 1.47230615234375, 1.4722723388671874, 1.4720819091796875, 1.471382568359375, 1.4713026123046875, 1.472006103515625, 1.47163232421875, 1.472415771484375, 1.471131591796875, 1.4713272705078124, 3.056773193359375, 1.4716077880859375, 1.4716282958984375, 1.471858642578125, 1.4712208251953125, 1.4718197021484376, 1.472443359375, 1.4715648193359374, 1.471594482421875, 1.472443359375, 1.4719036865234374, 1.47152587890625, 1.4711285400390626, 1.4714869384765625, 1.4713077392578124, 1.4713907470703125, 1.47195703125, 1.47201123046875, 1.4714542236328125, 1.4714757080078125, 1.471456298828125, 1.471595458984375, 1.47175732421875, 1.4725191650390625, 1.4716539306640626, 1.4716416015625, 1.47211474609375, 1.4713333740234376, 1.47162109375, 1.4716436767578125, 1.4722816162109376, 1.47161083984375, 1.471515625, 1.4715064697265625, 1.4715709228515625, 1.47129345703125, 1.4712586669921874, 1.4722181396484375, 1.471456298828125, 1.4713170166015626, 1.4715576171875, 1.47175732421875, 1.47146337890625, 1.472247802734375, 1.471603759765625, 1.4716273193359375, 1.4715709228515625, 1.472047119140625, 1.4721197509765624, 1.472300048828125, 1.4724617919921874, 1.472036865234375, 1.4715872802734375, 1.47150537109375, 1.47152685546875, 1.4712227783203125, 1.4709483642578125, 1.4714481201171874, 1.4717808837890625, 1.4714285888671874, 1.4712176513671875, 1.4715013427734376, 1.471331298828125, 3.058272216796875, 1.4714920654296875, 1.4712022705078125, 1.47119921875, 1.4717470703125, 1.471373291015625, 1.4719886474609376, 1.4716968994140625, 1.4716273193359375, 1.4718914794921876, 1.471751220703125, 1.4717174072265624, 1.471711181640625, 1.472184326171875, 1.472363525390625, 1.471656982421875, 1.4721873779296875, 1.4718863525390624, 1.4719764404296876, 1.4720511474609375, 1.4719754638671876, 1.4718924560546875, 1.4713037109375, 1.4712841796875, 1.47162109375, 1.4717725830078126, 1.472, 1.4716497802734374, 1.4716558837890625, 1.4713426513671874, 1.471215576171875, 1.4712484130859376, 1.47142041015625, 1.471537109375, 1.4719190673828124, 1.4712698974609375, 1.471478759765625, 1.47183203125, 1.4715013427734376, 1.4721925048828124, 1.47177978515625, 1.472447509765625, 1.4714500732421876, 1.4717501220703124, 1.47160888671875, 1.4718299560546875, 1.471974365234375, 1.4720081787109376, 1.472126953125, 1.4715023193359376, 1.4715084228515625, 1.47169482421875, 1.471330322265625, 1.471171630859375, 1.4719794921875, 1.4723297119140626, 1.471705078125, 1.4717818603515624, 1.4718740234375, 1.4721126708984376, 1.471266845703125, 1.472437255859375, 1.4715064697265625, 3.05955126953125, 1.4719764404296876, 1.47195703125, 1.47245361328125, 1.472227294921875, 1.4713221435546875, 1.4720296630859375, 1.4715545654296875, 1.4716138916015624, 1.4715606689453125, 1.47230517578125, 1.4725037841796875, 1.471837158203125, 1.47186376953125, 1.47192724609375, 1.4720911865234374, 1.47192626953125, 1.4723450927734374, 
1.4718555908203126, 1.47135791015625, 1.4716026611328126, 1.471425537109375, 1.4714224853515625, 1.4716395263671875, 1.4727669677734374, 1.4724013671875, 1.4724403076171875, 1.47211669921875, 1.4722672119140625, 1.472236572265625, 1.47209423828125, 1.4720142822265625, 1.471340576171875, 1.4719610595703125, 1.471952880859375, 1.4716343994140626, 1.4717020263671876, 1.472510009765625, 1.4722958984375, 1.47205322265625, 1.471494140625, 1.471520751953125, 1.4711224365234374, 1.4714521484375, 1.471753173828125, 1.4722447509765626, 1.4713282470703124, 1.47150341796875, 1.472290771484375, 1.471889404296875, 1.4715074462890625, 1.4724771728515624, 1.4729246826171876, 1.4722078857421874, 1.4716180419921876, 1.471309814453125, 1.47177978515625, 1.47196923828125, 1.4722120361328126, 1.471774658203125, 1.4713466796875, 1.471520751953125, 1.4712811279296876, 3.058113525390625, 1.4730526123046874, 1.4717235107421875, 1.4721668701171875, 1.471916015625, 1.472654296875, 1.4716385498046876, 1.4719088134765625, 1.4720716552734374, 1.4713763427734374, 1.471236083984375, 1.471573974609375, 1.4717041015625, 1.471826904296875, 1.4721710205078125, 1.472248779296875, 1.471709228515625, 1.47167333984375, 1.4722354736328125, 1.47183203125, 1.4724495849609376, 1.47257861328125, 1.4722344970703125, 1.471520751953125, 1.4716630859375, 1.47169384765625, 1.472069580078125, 1.4725938720703124, 1.472268310546875, 1.471952880859375, 1.4720880126953124, 1.4720225830078124, 1.471952880859375, 1.4723450927734374, 1.472373779296875, 1.472194580078125, 1.4716927490234375, 1.471921142578125, 1.471922119140625, 1.472109619140625, 1.471762451171875, 1.4723779296875, 1.4719671630859374, 1.4716590576171875, 1.47093603515625, 1.4710968017578125, 1.4713907470703125, 1.47113671875, 1.471277099609375, 1.4721495361328125, 1.4713795166015624, 1.4710025634765624, 1.471087646484375, 1.47147265625, 1.4714593505859375, 1.4727730712890625, 1.472385986328125, 1.4715115966796875, 1.47182177734375, 1.471372314453125, 1.471321044921875, 1.4717603759765625, 1.47186181640625]",tokens/s,0.6691042987470994,,,,,,,, -4bit-awq-gemm-flash_attention_2,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,facebook/opt-30b,facebook/opt-30b,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.0,,0.30.1,,,,1.19.2,,,,0.11.1,,,,,,,,,,,,,,,,,,,,,,,,,,,MB,5945.012224,19933.954048,0.0,19287.506944,18376.2688,s,10,24.42857763671875,2.442857763671875,0.001982296286527604,2.4421226806640624,2.4459630615234373,2.446148400878906,2.4462966723632813,"[2.44311767578125, 2.441929443359375, 2.441054443359375, 2.44091455078125, 2.441591064453125, 2.440709716796875, 2.44231591796875, 2.446333740234375, 2.445921875, 2.444689208984375]",tokens/s,104.79529500530754,kWh,2.881985050108698e-05,1.5794228899012522e-05,0.00013673797050139935,0.00018135204990149886,tokens/kWh,1411619.0036950014,MB,5946.937344,19933.954048,0.0,19287.506944,18871.985152,s,10,1453.1662812499999,145.316628125,0.01677016451823821,145.3091796875,145.3392140625,145.34826328125,145.35550265625,"[145.31590625, 145.308921875, 145.3094375, 145.306953125, 
145.3573125, 145.337203125, 145.304828125, 145.303359375, 145.319953125, 145.30240625]",tokens/s,0.4335360709430169,kWh,0.0017155244359870752,0.0009402592492990879,0.00808164824309121,0.010737431928377374,tokens/kWh,5867.324740238933,,s,629,1472.8442895507828,2.3415648482524345,0.29007180854476533,2.306472900390625,2.307840771484375,2.308159619140625,4.7478279296874994,"[2.30830078125, 2.30712939453125, 2.305491943359375, 2.305958984375, 2.306070556640625, 2.306164794921875, 2.306714599609375, 2.306167724609375, 2.30651708984375, 2.306343994140625, 2.306281494140625, 2.3058994140625, 2.306171875, 2.305965087890625, 2.3060673828125, 2.305934326171875, 2.306259033203125, 2.30677197265625, 2.30823828125, 2.3065693359375, 2.3062998046875, 2.3065107421875, 2.305712158203125, 2.3060634765625, 2.30664404296875, 2.30611865234375, 2.305919921875, 2.306153564453125, 2.30573876953125, 2.307145751953125, 2.307577880859375, 2.30742529296875, 2.3060234375, 2.306620361328125, 2.30689892578125, 2.306671630859375, 2.30721337890625, 2.306621337890625, 2.30584423828125, 2.306515869140625, 2.306025390625, 2.307158935546875, 2.307966064453125, 2.3080498046875, 2.30725927734375, 2.30751123046875, 2.30584326171875, 2.306826171875, 2.30643505859375, 2.306669677734375, 2.3066572265625, 2.306987060546875, 2.3064228515625, 2.30631640625, 2.3060048828125, 2.3066552734375, 2.306758544921875, 2.306385986328125, 2.306724853515625, 2.30693994140625, 2.3067626953125, 2.306996337890625, 4.747927734375, 2.3060458984375, 2.306315185546875, 2.305916015625, 2.30605615234375, 2.30666455078125, 2.306251708984375, 2.306472900390625, 2.306552734375, 2.30586376953125, 2.3063408203125, 2.306269287109375, 2.306483154296875, 2.3060849609375, 2.306608154296875, 2.306598876953125, 2.306947021484375, 2.306627685546875, 2.30656298828125, 2.306156494140625, 2.305994873046875, 2.305953857421875, 2.3070966796875, 2.307031005859375, 2.306716552734375, 2.30727587890625, 2.305818603515625, 2.30561181640625, 2.306303955078125, 2.30622607421875, 2.30601513671875, 2.306629638671875, 2.306018310546875, 2.307220458984375, 2.307166259765625, 2.30818310546875, 2.30658251953125, 2.306062255859375, 2.3065908203125, 2.3057265625, 2.306356201171875, 2.30654150390625, 2.306720703125, 2.306249755859375, 2.307949462890625, 2.307349609375, 2.30730859375, 2.306469970703125, 2.306974609375, 2.30639501953125, 2.30609716796875, 2.30603369140625, 2.306250732421875, 2.306716552734375, 2.306428955078125, 2.3060244140625, 2.306080810546875, 2.306798583984375, 2.306280517578125, 2.306587646484375, 2.306314208984375, 2.3067607421875, 2.307210205078125, 4.74750048828125, 2.30618017578125, 2.306073486328125, 2.306304931640625, 2.3058740234375, 2.306080810546875, 2.3063828125, 2.30624560546875, 2.305426513671875, 2.306010009765625, 2.3058330078125, 2.306188232421875, 2.305746826171875, 2.30639501953125, 2.305995849609375, 2.305857421875, 2.305976318359375, 2.306387939453125, 2.307175537109375, 2.3076884765625, 2.30548486328125, 2.305490966796875, 2.3058544921875, 2.30586474609375, 2.306364501953125, 2.30583203125, 2.306301025390625, 2.306060302734375, 2.306914306640625, 2.308274169921875, 2.307685302734375, 2.3084912109375, 2.306903076171875, 2.30611669921875, 2.306157470703125, 2.30603466796875, 2.306026611328125, 2.30809912109375, 2.30824658203125, 2.30763623046875, 2.30674853515625, 2.306740234375, 2.30609521484375, 2.305721435546875, 2.30651806640625, 2.307560546875, 2.30752880859375, 2.306269287109375, 2.30721630859375, 2.306631591796875, 2.30655908203125, 
2.3057724609375, 2.30613818359375, 2.306503662109375, 2.306617431640625, 2.307072021484375, 2.30647802734375, 2.305734619140625, 2.3066552734375, 2.306449462890625, 2.30620361328125, 2.30637158203125, 2.306595947265625, 4.74802880859375, 2.306186279296875, 2.305531982421875, 2.305669189453125, 2.306135986328125, 2.3058759765625, 2.306019287109375, 2.306431884765625, 2.306522216796875, 2.30567529296875, 2.3055830078125, 2.306220947265625, 2.30667578125, 2.306291748046875, 2.306946044921875, 2.30719287109375, 2.3062763671875, 2.30605517578125, 2.3061484375, 2.306335693359375, 2.30618115234375, 2.30607666015625, 2.30662646484375, 2.30637158203125, 2.30767724609375, 2.308170654296875, 2.30727978515625, 2.306090087890625, 2.3074775390625, 2.30679248046875, 2.308010009765625, 2.306175048828125, 2.3065908203125, 2.306902099609375, 2.307230712890625, 2.30744677734375, 2.307564453125, 2.306438232421875, 2.306532470703125, 2.306220947265625, 2.306926513671875, 2.30791162109375, 2.30597021484375, 2.30626416015625, 2.306064453125, 2.305848388671875, 2.30631005859375, 2.305681396484375, 2.306680908203125, 2.3066787109375, 2.306912353515625, 2.30727587890625, 2.30600390625, 2.306438232421875, 2.306324462890625, 2.305838134765625, 2.305875, 2.305974365234375, 2.30620166015625, 2.306227294921875, 2.3058759765625, 2.306018310546875, 2.306186279296875, 4.7475712890625, 2.3056015625, 2.305901611328125, 2.30658251953125, 2.307598388671875, 2.307072998046875, 2.30600390625, 2.308192138671875, 2.307829833984375, 2.30786865234375, 2.307883056640625, 2.306185302734375, 2.305741943359375, 2.30852392578125, 2.307919921875, 2.30782763671875, 2.30862841796875, 2.308886474609375, 2.308350830078125, 2.308662353515625, 2.308884521484375, 2.30721337890625, 2.307564453125, 2.307115966796875, 2.307040283203125, 2.30601025390625, 2.307072998046875, 2.305795166015625, 2.3058740234375, 2.306260009765625, 2.3076474609375, 2.3075, 2.305462158203125, 2.305919921875, 2.306641845703125, 2.308442138671875, 2.30645654296875, 2.306227294921875, 2.306621337890625, 2.306598876953125, 2.3067822265625, 2.306314208984375, 2.307274658203125, 2.306165771484375, 2.30636962890625, 2.306336669921875, 2.3068681640625, 2.307556396484375, 2.307939208984375, 2.306641845703125, 2.308420654296875, 2.30788720703125, 2.30803857421875, 2.307458984375, 2.308116455078125, 2.3085107421875, 2.30814306640625, 2.309021728515625, 2.30691845703125, 2.3075400390625, 2.30881787109375, 2.30786962890625, 2.308601806640625, 4.747927734375, 2.30775, 2.307747802734375, 2.306754638671875, 2.30704638671875, 2.30609521484375, 2.305871826171875, 2.305828857421875, 2.306450439453125, 2.3074384765625, 2.30639404296875, 2.306155517578125, 2.30721240234375, 2.3061943359375, 2.306260986328125, 2.3063408203125, 2.305946533203125, 2.30626708984375, 2.3059384765625, 2.306785400390625, 2.30590576171875, 2.307837890625, 2.308127685546875, 2.307727294921875, 2.30624755859375, 2.30803759765625, 2.308209716796875, 2.306083740234375, 2.308106201171875, 2.30796484375, 2.307409912109375, 2.307322998046875, 2.3070556640625, 2.30601416015625, 2.30601220703125, 2.30685400390625, 2.3067841796875, 2.3062333984375, 2.30788818359375, 2.3078798828125, 2.307981201171875, 2.308041748046875, 2.307943359375, 2.3078779296875, 2.306785400390625, 2.306217041015625, 2.30694189453125, 2.3059833984375, 2.30669921875, 2.30706787109375, 2.307828857421875, 2.3062119140625, 2.30769970703125, 2.30689990234375, 2.306872314453125, 2.30632763671875, 2.307541015625, 2.307304443359375, 2.306280517578125, 
2.305987548828125, 2.3064677734375, 2.30799365234375, 2.30702490234375, 4.74871826171875, 2.306419677734375, 2.30594775390625, 2.305490966796875, 2.3053935546875, 2.306144287109375, 2.305890380859375, 2.30618310546875, 2.30565380859375, 2.305838134765625, 2.3064453125, 2.306163818359375, 2.305967041015625, 2.30632958984375, 2.30599365234375, 2.30660205078125, 2.306093017578125, 2.30550927734375, 2.3062958984375, 2.30859375, 2.307629150390625, 2.305786865234375, 2.30675244140625, 2.30698291015625, 2.307533935546875, 2.306155517578125, 2.30588916015625, 2.30687744140625, 2.30660205078125, 2.30624658203125, 2.30635205078125, 2.30655078125, 2.30631005859375, 2.307313720703125, 2.306417724609375, 2.306740234375, 2.306871337890625, 2.30619140625, 2.3060859375, 2.305734619140625, 2.3065068359375, 2.30648828125, 2.306944091796875, 2.30637060546875, 2.3063173828125, 2.305910888671875, 2.306567138671875, 2.305996826171875, 2.306691162109375, 2.306502685546875, 2.306821044921875, 2.306130859375, 2.30672900390625, 2.307013671875, 2.306767822265625, 2.306404296875, 2.30620458984375, 2.306543701171875, 2.30641552734375, 2.30656298828125, 2.306662353515625, 2.306578369140625, 2.30736083984375, 4.75277099609375, 2.306112548828125, 2.305987548828125, 2.306069580078125, 2.305553466796875, 2.305196044921875, 2.306450439453125, 2.306493408203125, 2.306361328125, 2.305670166015625, 2.305954833984375, 2.30636328125, 2.305942626953125, 2.305912841796875, 2.3060244140625, 2.306492431640625, 2.306491455078125, 2.3065087890625, 2.306021484375, 2.30662255859375, 2.306044921875, 2.305946533203125, 2.306109375, 2.3060673828125, 2.30651708984375, 2.306368408203125, 2.306166748046875, 2.305617919921875, 2.3070966796875, 2.30624462890625, 2.30612060546875, 2.30616259765625, 2.306585693359375, 2.3061943359375, 2.305982421875, 2.30618212890625, 2.30691015625, 2.307801025390625, 2.30774169921875, 2.305775634765625, 2.30598974609375, 2.307757080078125, 2.307713134765625, 2.3067412109375, 2.306273193359375, 2.306452392578125, 2.30667578125, 2.306123779296875, 2.307871826171875, 2.3065498046875, 2.306324462890625, 2.3063818359375, 2.306296875, 2.306356201171875, 2.306536376953125, 2.30616162109375, 2.306343994140625, 2.3068017578125, 2.307167236328125, 2.3066982421875, 2.3068271484375, 2.30626611328125, 2.306765869140625, 4.75146533203125, 2.305406005859375, 2.306103271484375, 2.306335693359375, 2.306105224609375, 2.305650634765625, 2.305935302734375, 2.3065302734375, 2.306123779296875, 2.306839599609375, 2.306525146484375, 2.3065908203125, 2.3057275390625, 2.3065927734375, 2.306326416015625, 2.305699951171875, 2.306766845703125, 2.306298828125, 2.306177978515625, 2.306017333984375, 2.3064453125, 2.307205078125, 2.3069912109375, 2.30652099609375, 2.307852294921875, 2.30791357421875, 2.307249267578125, 2.30723388671875, 2.308533203125, 2.307859375, 2.307365966796875, 2.30736474609375, 2.306230224609375, 2.306595947265625, 2.306797607421875, 2.306817138671875, 2.307945556640625, 2.306934814453125, 2.306532470703125, 2.306154541015625, 2.306238525390625, 2.306093994140625, 2.306994140625, 2.30653125, 2.30648828125, 2.30630810546875, 2.306410400390625, 2.30582177734375, 2.30702392578125, 2.306610107421875, 2.30685888671875, 2.30723388671875, 2.307060791015625, 2.30736083984375, 2.307283935546875, 2.306974609375, 2.307143798828125, 2.30702685546875, 2.307147705078125, 2.306912353515625, 2.306466796875, 2.30637060546875, 2.305808349609375, 4.75029296875, 2.3062138671875, 2.305847412109375, 2.305751953125, 2.306163818359375, 
2.305503173828125, 2.306188232421875, 2.305998779296875, 2.306451416015625, 2.305594482421875, 2.30626513671875, 2.30557080078125, 2.306021484375, 2.305986572265625, 2.30660498046875, 2.306206787109375, 2.305995849609375, 2.306838623046875, 2.306428955078125, 2.306533447265625, 2.306038818359375, 2.306441162109375, 2.306575439453125, 2.30774267578125, 2.30721435546875, 2.307527587890625, 2.30657421875, 2.3058974609375, 2.30637255859375, 2.305725341796875, 2.30624560546875, 2.306049072265625, 2.3060224609375, 2.306079833984375, 2.306041748046875, 2.305879150390625, 2.30664404296875, 2.306240478515625, 2.30664697265625, 2.306007080078125, 2.306346923828125, 2.3059384765625, 2.305849365234375, 2.306935791015625, 2.306491455078125, 2.306400146484375, 2.3062744140625, 2.306458740234375, 2.306747314453125, 2.307177490234375, 2.306862060546875, 2.306808837890625, 2.306905029296875, 2.30681298828125, 2.306693115234375, 2.30618017578125, 2.30659375, 2.306906005859375, 2.30719482421875, 2.306628662109375, 2.306923583984375, 2.306654296875, 2.3069306640625]",tokens/s,0.4270648326252095,,,,,,,, -4bit-awq-gemm-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,Qwen/Qwen1.5-MoE-A2.7B,Qwen/Qwen1.5-MoE-A2.7B,cuda,0,42,,,True,,,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run - report = scenario.run(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run - self.run_model_loading_tracking(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking - backend.load() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load - self.load_transformers_model() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model - self.load_transformers_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights - self.load_transformers_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained - self.pretrained_model = self.automodel_loader.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in 
from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3846, in from_pretrained - hf_quantizer.preprocess_model( - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/base.py"", line 182, in preprocess_model - return self._process_model_before_weight_loading(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/quantizer_awq.py"", line 85, in _process_model_before_weight_loading - model, has_been_replaced = replace_with_awq_linear( - File ""/usr/local/lib/python3.10/dist-packages/transformers/integrations/awq.py"", line 179, in replace_with_awq_linear - _, has_been_replaced = replace_with_awq_linear( - File ""/usr/local/lib/python3.10/dist-packages/transformers/integrations/awq.py"", line 179, in replace_with_awq_linear - _, has_been_replaced = replace_with_awq_linear( - File ""/usr/local/lib/python3.10/dist-packages/transformers/integrations/awq.py"", line 179, in replace_with_awq_linear - _, has_been_replaced = replace_with_awq_linear( - [Previous line repeated 1 more time] - File ""/usr/local/lib/python3.10/dist-packages/transformers/integrations/awq.py"", line 166, in replace_with_awq_linear - model._modules[name] = target_cls( - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/gemm.py"", line 103, in __init__ - assert out_features % (32 // self.w_bit) == 0 -AssertionError - -",qwen2_moe,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemm-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,mistralai/Mixtral-8x7B-v0.1,mistralai/Mixtral-8x7B-v0.1,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - self.load_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3904, in from_pretrained - dispatch_model(model, **device_map_kwargs) - File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 489, in dispatch_model - model.to(device) - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 2796, in to - return super().to(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1173, in to - return self._apply(convert) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 779, in _apply - module._apply(fn) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 779, in _apply - module._apply(fn) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 779, in _apply - module._apply(fn) - [Previous line repeated 2 more times] - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 853, in _apply - self._buffers[key] = fn(buf) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1159, in convert - return t.to( -torch.cuda.OutOfMemoryError: CUDA out of memory. Tried to allocate 20.00 MiB. GPU - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemm-flash_attention_2,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,EleutherAI/pythia-410m,EleutherAI/pythia-410m,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.40.2,,0.30.1,,,,1.19.2,,,,0.11.1,,,,,,,,,,,,,,,,,,,,,,,,,,,MB,1378.24256,1340.604416,0.0,694.157312,598.498816,s,10,0.5756522903442383,0.05756522903442383,0.0016369727383896268,0.0568012638092041,0.05918429183959961,0.06064101028442383,0.061806385040283206,"[0.06209772872924805, 0.05672822570800781, 0.056688095092773436, 0.05679859161376953, 0.05886057662963867, 0.057347423553466795, 0.05689571380615235, 0.05671388626098633, 0.05680393600463867, 0.05671811294555664]",tokens/s,4447.129009890898,kWh,6.782567046937489e-07,3.716537143845926e-07,1.9805285685486103e-06,3.0304389876269517e-06,tokens/kWh,84476209.89738722,MB,1378.24256,1340.604416,0.0,694.157312,659.031552,s,10,35.66919848632813,3.566919848632813,0.03950000205433062,3.552089111328125,3.624692236328125,3.64104033203125,3.65411880859375,"[3.57450146484375, 3.532454345703125, 3.56766015625, 3.657388427734375, 3.621059326171875, 3.5402509765625, 3.56031982421875, 3.532387939453125, 3.539317626953125, 3.5438583984375]",tokens/s,17.662297633109883,kWh,4.217196008799568e-05,2.3111615147015983e-05,0.00011743306696224983,0.00018271664219726155,tokens/kWh,344796.1786205822,,s,629,36.1255710258484,0.05743334026366993,0.0068485805293125165,0.05611315155029297,0.058175487518310545,0.05834158096313477,0.11305373931884766,"[0.05799008178710938, 0.05708492660522461, 0.05782732772827148, 0.05744332885742188, 
0.057527294158935545, 0.05809561538696289, 0.0565401611328125, 0.05911040115356445, 0.05911040115356445, 0.058807296752929686, 0.058791934967041014, 0.05813555145263672, 0.05813248062133789, 0.05811199951171875, 0.058054656982421876, 0.05821952056884765, 0.057041919708251954, 0.056956928253173826, 0.057338878631591796, 0.05677363204956055, 0.05590323257446289, 0.05592268753051758, 0.05590425491333008, 0.05611008071899414, 0.05601484680175781, 0.05587148666381836, 0.055932926177978515, 0.056509441375732425, 0.05619200134277344, 0.056089599609375, 0.05595647811889649, 0.055981056213378906, 0.056079360961914064, 0.05589503860473633, 0.0559554557800293, 0.056065025329589846, 0.0564029426574707, 0.05732863998413086, 0.057398273468017576, 0.05806694412231445, 0.05649203109741211, 0.05601792144775391, 0.05590425491333008, 0.05598822402954102, 0.05598310470581055, 0.05660671997070312, 0.057234432220458986, 0.05611315155029297, 0.05587251281738281, 0.055923713684082034, 0.05598822402954102, 0.056018943786621096, 0.05615820693969727, 0.05591244888305664, 0.05669478225708008, 0.05584281539916992, 0.05593395233154297, 0.05587148666381836, 0.056016895294189455, 0.056000511169433595, 0.05590016174316406, 0.05584896087646484, 0.11391897583007812, 0.056886272430419924, 0.05697945785522461, 0.05592268753051758, 0.0564951057434082, 0.05716787338256836, 0.056594432830810545, 0.05600153732299805, 0.055695358276367186, 0.05583564758300781, 0.05596057510375976, 0.055913471221923826, 0.05584896087646484, 0.05577318572998047, 0.05587148666381836, 0.056035327911376956, 0.055943168640136716, 0.05580799865722656, 0.05776486587524414, 0.057622528076171874, 0.05631180953979492, 0.055818241119384764, 0.055777278900146485, 0.05587251281738281, 0.0560118408203125, 0.05598918533325195, 0.05593395233154297, 0.05613772964477539, 0.056908798217773435, 0.05586841583251953, 0.055809024810791016, 0.05591961669921875, 0.05592063903808594, 0.05590425491333008, 0.056174591064453126, 0.05596364974975586, 0.05545369720458984, 0.05593804931640625, 0.055806976318359375, 0.05584896087646484, 0.05586431884765625, 0.055890945434570315, 0.055839744567871094, 0.05590630340576172, 0.055741439819335936, 0.05607321548461914, 0.055858177185058595, 0.05598515319824219, 0.05599641418457031, 0.056035327911376956, 0.056120319366455076, 0.05593907165527344, 0.05596364974975586, 0.055932926177978515, 0.055947265625, 0.05584076690673828, 0.05575884628295898, 0.055823360443115234, 0.05576704025268555, 0.05586636734008789, 0.055923713684082034, 0.05593088150024414, 0.0557916145324707, 0.11292671966552735, 0.05593600082397461, 0.056169471740722655, 0.055927806854248044, 0.05587558364868164, 0.05590835189819336, 0.055769088745117185, 0.056676353454589844, 0.05695078277587891, 0.05639987182617188, 0.056005630493164066, 0.05686995315551758, 0.05640902328491211, 0.05631488037109375, 0.05584588623046875, 0.057027584075927736, 0.05787955093383789, 0.05715967941284179, 0.05641113662719727, 0.05674803161621094, 0.057038848876953124, 0.05642956924438477, 0.05662105560302735, 0.05600460815429688, 0.05647359848022461, 0.056395774841308595, 0.05592473602294922, 0.05653299331665039, 0.05728460693359375, 0.056796192169189456, 0.056970207214355466, 0.05783552169799805, 0.056842239379882815, 0.05652377700805664, 0.05740031814575195, 0.056120319366455076, 0.05686272048950195, 0.05625446319580078, 0.05662105560302735, 0.056174591064453126, 0.05706854248046875, 0.056346622467041016, 0.05597798538208008, 0.05667020797729492, 0.05655551910400391, 0.05687705612182617, 
0.057256961822509764, 0.05748121643066406, 0.057133056640625, 0.05615923309326172, 0.055962623596191405, 0.05652070236206055, 0.056753150939941405, 0.056771583557128906, 0.056970241546630856, 0.05616844940185547, 0.057388031005859375, 0.05657702255249023, 0.057078784942626956, 0.05769420623779297, 0.05860966491699219, 0.056180736541748044, 0.05587148666381836, 0.11307008361816406, 0.05602406311035156, 0.05604044723510742, 0.056018943786621096, 0.058791934967041014, 0.0583741455078125, 0.05816524887084961, 0.05799321746826172, 0.058156032562255856, 0.058518527984619144, 0.058423297882080075, 0.05813350296020508, 0.058103809356689455, 0.05830451202392578, 0.058180606842041016, 0.058041343688964846, 0.05809254455566406, 0.05821952056884765, 0.05822873687744141, 0.05809151840209961, 0.058120193481445315, 0.05805158233642578, 0.05811609649658203, 0.05830348968505859, 0.058006526947021485, 0.05869670486450195, 0.058369022369384765, 0.058347518920898435, 0.058028030395507815, 0.058055679321289064, 0.058210304260253906, 0.0581058578491211, 0.058054656982421876, 0.058288158416748045, 0.058100704193115235, 0.05831987380981445, 0.058275840759277345, 0.05816524887084961, 0.05805977630615235, 0.05837823867797851, 0.05834444808959961, 0.05804032135009766, 0.05805158233642578, 0.05809971237182617, 0.058052608489990234, 0.05806489562988281, 0.05809254455566406, 0.05809664154052734, 0.05816729736328125, 0.0582042236328125, 0.05809862518310547, 0.05814169692993164, 0.05818675231933594, 0.057608192443847656, 0.05821440124511719, 0.058124320983886715, 0.058272735595703125, 0.058308609008789064, 0.058169345855712894, 0.058071041107177736, 0.058175487518310545, 0.05823897552490234, 0.0583372802734375, 0.11740262603759766, 0.058298366546630856, 0.0585544319152832, 0.05803107070922851, 0.05821948623657226, 0.05826047897338867, 0.05810793685913086, 0.058076126098632816, 0.0581662712097168, 0.05814988708496094, 0.05825843048095703, 0.05809356689453125, 0.05850931167602539, 0.058336254119873046, 0.058426368713378904, 0.05825331115722656, 0.058418174743652344, 0.0581396484375, 0.05809254455566406, 0.05811097717285156, 0.05806796646118164, 0.05830144119262695, 0.05805977630615235, 0.05824512100219727, 0.058036224365234375, 0.058142719268798826, 0.05810892868041992, 0.05846227264404297, 0.058175487518310545, 0.058110912322998046, 0.05813248062133789, 0.0584089584350586, 0.05830451202392578, 0.0581396484375, 0.058211326599121094, 0.05804851150512695, 0.0581662712097168, 0.05817350387573242, 0.05801055908203125, 0.0581662712097168, 0.05825331115722656, 0.05611929702758789, 0.05594521713256836, 0.055949310302734374, 0.05611008071899414, 0.056019966125488284, 0.055982078552246094, 0.055962623596191405, 0.05608448028564453, 0.05570457458496094, 0.056204288482666016, 0.05575372695922851, 0.055787521362304686, 0.05749350357055664, 0.05674803161621094, 0.055940097808837894, 0.05600460815429688, 0.056019966125488284, 0.05588787078857422, 0.05589913558959961, 0.05583052825927735, 0.055907329559326174, 0.05695283126831055, 0.11374079895019532, 0.055927806854248044, 0.05590323257446289, 0.05608652877807617, 0.05595340728759766, 0.056174591064453126, 0.05607321548461914, 0.05605686569213867, 0.056255455017089846, 0.05570969772338867, 0.05589606475830078, 0.05596063995361328, 0.056186817169189454, 0.05588991928100586, 0.055947265625, 0.055981056213378906, 0.056204288482666016, 0.05590016174316406, 0.055982078552246094, 0.05592575836181641, 0.05591862487792969, 0.055920608520507814, 0.05575475311279297, 0.055944190979003904, 
0.05595852661132812, 0.055911422729492184, 0.05676134490966797, 0.05655039978027344, 0.055940097808837894, 0.055934974670410156, 0.05602918243408203, 0.056057857513427733, 0.055940097808837894, 0.05590425491333008, 0.056027137756347656, 0.05611724853515625, 0.055994369506835937, 0.05601484680175781, 0.05608448028564453, 0.055962623596191405, 0.05571177673339844, 0.055808990478515626, 0.05793484878540039, 0.057431041717529295, 0.056551422119140625, 0.05656576156616211, 0.05593600082397461, 0.05665689468383789, 0.05718732833862305, 0.055906368255615235, 0.058727359771728514, 0.05800755310058594, 0.05647257614135742, 0.056513534545898435, 0.05671116638183594, 0.05588172912597656, 0.056030208587646485, 0.05598003387451172, 0.05590528106689453, 0.05593600082397461, 0.05535129547119141, 0.05544755172729492, 0.05648691177368164, 0.11342233276367188, 0.05600358581542969, 0.05600153732299805, 0.05581107330322266, 0.055964672088623046, 0.05609062576293945, 0.05611724853515625, 0.05599846267700195, 0.056147968292236325, 0.05704908752441406, 0.05751091384887695, 0.05676031875610352, 0.05597798538208008, 0.05692620849609375, 0.05621247863769531, 0.05710335922241211, 0.05665280151367187, 0.055989246368408206, 0.0569989128112793, 0.056965118408203126, 0.056476673126220706, 0.05757952117919922, 0.05608243179321289, 0.057240577697753904, 0.05665280151367187, 0.056497150421142575, 0.057215999603271485, 0.0573573112487793, 0.05771366500854492, 0.056035327911376956, 0.05668044662475586, 0.05652479934692383, 0.056048641204833986, 0.056766464233398435, 0.05603737640380859, 0.05681568145751953, 0.05561439895629883, 0.055951358795166016, 0.05671014404296875, 0.05602099227905273, 0.05684531021118164, 0.05712998580932617, 0.05622579193115235, 0.05718425750732422, 0.05587046432495117, 0.05815193557739258, 0.05669580841064453, 0.056288257598876956, 0.05639168167114258, 0.05692416000366211, 0.05890764617919922, 0.05709619140625, 0.055943168640136716, 0.05594521713256836, 0.05592473602294922, 0.055702529907226565, 0.05582233428955078, 0.05554380798339844, 0.05590425491333008, 0.0567193603515625, 0.05655756759643555, 0.05586636734008789, 0.05595238494873047, 0.11324825286865234, 0.056097793579101565, 0.05586841583251953, 0.0565401611328125, 0.05603737640380859, 0.05586841583251953, 0.05586841583251953, 0.055787582397460934, 0.05582944107055664, 0.05609062576293945, 0.05594524765014648, 0.05589398574829101, 0.05601484680175781, 0.05601587295532227, 0.05612851333618164, 0.056167423248291014, 0.05636508941650391, 0.05600048065185547, 0.05591449737548828, 0.05615820693969727, 0.05590425491333008, 0.056697856903076174, 0.056491008758544924, 0.05597491073608398, 0.0558837776184082, 0.055894016265869144, 0.05606195068359375, 0.056013824462890625, 0.055877632141113284, 0.0558131217956543, 0.0560445442199707, 0.05591756820678711, 0.05575478363037109, 0.05573014450073242, 0.055923713684082034, 0.05588889694213867, 0.05579673767089844, 0.05603123092651367, 0.05595340728759766, 0.05692416000366211, 0.05709721755981445, 0.0572149772644043, 0.05634969711303711, 0.0558919677734375, 0.05590528106689453, 0.0558766098022461, 0.055841793060302736, 0.05595033645629883, 0.05608448028564453, 0.05609267044067383, 0.05588479995727539, 0.05583769607543945, 0.05596160125732422, 0.0559554557800293, 0.05604556655883789, 0.05690572738647461, 0.05594214248657227, 0.055856128692626954, 0.05609267044067383, 0.05597183990478516, 0.0560076789855957, 0.05595340728759766, 0.055993408203125, 0.11313145446777344, 0.05605068969726563, 0.05714432144165039, 
0.05607219314575195, 0.055894016265869144, 0.05605990219116211, 0.055856128692626954, 0.055787521362304686, 0.0557916145324707, 0.05595647811889649, 0.05597183990478516, 0.055567359924316405, 0.05594521713256836, 0.05598720169067383, 0.05587251281738281, 0.055973888397216794, 0.05577318572998047, 0.055757823944091796, 0.055972862243652347, 0.05598310470581055, 0.05603839874267578, 0.056036350250244144, 0.05586438369750977, 0.055879615783691404, 0.05582950210571289, 0.056062976837158204, 0.055747615814208985, 0.05714838409423828, 0.05821952056884765, 0.0560711669921875, 0.05586739349365234, 0.05611520004272461, 0.05608857727050781, 0.05595340728759766, 0.0560076789855957, 0.055930912017822264, 0.05865881729125977, 0.05698665618896484, 0.0559431037902832, 0.05590835189819336, 0.05589811325073242, 0.05590323257446289, 0.05572403335571289, 0.05595852661132812, 0.055923713684082034, 0.05552844619750977, 0.05647052764892578, 0.05722623825073242, 0.05685452651977539, 0.05584384155273438, 0.05696409606933594, 0.05613260650634765, 0.05573222351074219, 0.05603123092651367, 0.05594214248657227, 0.05622579193115235, 0.05601587295532227, 0.055932926177978515, 0.05593907165527344, 0.05829119873046875, 0.05608038330078125, 0.056581119537353515, 0.056043521881103515, 0.11301171112060547, 0.05596160125732422, 0.05600460815429688, 0.05594828796386719, 0.0573306884765625, 0.05730099105834961, 0.056136703491210936, 0.05597183990478516, 0.055757823944091796, 0.05651865768432617, 0.05613772964477539, 0.05588684844970703, 0.05589811325073242, 0.055934974670410156, 0.055951358795166016, 0.05582438278198242, 0.055932926177978515, 0.05587251281738281, 0.05707059097290039, 0.057232383728027345, 0.057132030487060545, 0.056750080108642575, 0.05586227035522461, 0.055964672088623046, 0.05600972747802734, 0.05593088150024414, 0.05628518295288086, 0.05651148986816406, 0.056172542572021485, 0.05634764862060547, 0.055940097808837894, 0.0572149772644043, 0.05617049789428711, 0.05602201461791992, 0.05591654586791992, 0.05619609451293945, 0.0580239372253418, 0.056460289001464846, 0.055949310302734374, 0.056018943786621096, 0.056033279418945314, 0.05574860763549805, 0.0558960952758789, 0.056247264862060546, 0.05595443344116211, 0.05688217544555664, 0.05635276794433594, 0.055982078552246094, 0.055987232208251955, 0.0560035514831543, 0.05590937423706055, 0.05664051055908203, 0.05720883178710937, 0.056853504180908204, 0.056360958099365234, 0.0558919677734375, 0.05595852661132812, 0.05596160125732422, 0.05597491073608398, 0.05603228759765625, 0.0562564811706543, 0.055975936889648435, 0.055949310302734374]",tokens/s,17.41148948344487,,,,,,,, -4bit-awq-gemm-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,databricks/dbrx-base,databricks/dbrx-base,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File 
""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 304, in hf_raise_for_status - response.raise_for_status() - File ""/usr/local/lib/python3.10/dist-packages/requests/models.py"", line 1024, in raise_for_status - raise HTTPError(http_error_msg, response=self) -requests.exceptions.HTTPError: 403 Client Error: Forbidden for url: https://huggingface.co/databricks/dbrx-base/resolve/main/config.json - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1722, in _get_metadata_or_catch_error - metadata = get_hf_file_metadata(url=url, proxies=proxies, timeout=etag_timeout, headers=headers) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1645, in get_hf_file_metadata - r = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 372, in _request_wrapper - response = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 396, in _request_wrapper - hf_raise_for_status(response) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 367, in hf_raise_for_status - raise HfHubHTTPError(message, response=response) from e -huggingface_hub.utils._errors.HfHubHTTPError: (Request ID: Root=1-66947b53-2b5e40457723b735115df74c;0af0b51f-0135-4589-9ea0-66c7e96b251c) - -403 Forbidden: Please enable access to public gated repositories in your fine-grained token settings to view this repository.. -Cannot access content at: https://huggingface.co/databricks/dbrx-base/resolve/main/config.json. -If you are trying to create or update content,make sure you have a token with the `write` role. - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1221, in hf_hub_download - return _hf_hub_download_to_cache_dir( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1325, in _hf_hub_download_to_cache_dir - _raise_on_head_call_error(head_call_error, force_download, local_files_only) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1826, in _raise_on_head_call_error - raise LocalEntryNotFoundError( -huggingface_hub.utils._errors.LocalEntryNotFoundError: An error happened while trying to locate the file on the Hub and we cannot find the requested files in the local cache. Please check your connection and try again or make sure your Internet connection is on. 
- -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 445, in cached_file - raise EnvironmentError( -OSError: We couldn't connect to 'https://huggingface.co' to load this file, couldn't find it in the cached files and it looks like databricks/dbrx-base is not the path to a directory containing a file named config.json. -Checkout your internet connection or see how to run the library in offline mode at 'https://huggingface.co/docs/transformers/installation#offline-mode'. 
- -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemm-flash_attention_2,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,Qwen/Qwen1.5-7B,Qwen/Qwen1.5-7B,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.40.2,,0.30.1,,,,1.19.2,,,,0.11.1,,,,,,,,,,,,,,,,,,,,,,,,,,,MB,2832.146432,8389.132288,0.0,7742.685184,7007.0144,s,10,5.830655212402344,0.5830655212402344,0.0011864011955084602,0.5831084289550781,0.5846742309570313,0.5847881042480468,0.5848792028808594,"[0.5832706909179688, 0.5849019775390625, 0.5821006469726563, 0.5817232055664062, 0.582320068359375, 0.5811737670898437, 0.5829461669921875, 0.5834404907226562, 0.5841292724609375, 0.58464892578125]",tokens/s,439.05871754423805,kWh,6.867315049892591e-06,3.7630052303029514e-06,3.309072091699999e-05,4.372104119719553e-05,tokens/kWh,5855304.288051151,MB,2832.146432,8389.132288,0.0,7742.685184,7283.984384,s,10,342.646671875,34.264667187499995,0.008129823876006116,34.26347265625,34.275117968749996,34.278303125,34.28085125,"[34.26221484375, 34.28148828125, 34.27441015625, 34.256734375, 34.26859765625, 34.26473046875, 34.2578125, 34.267296875, 34.25966015625, 34.2537265625]",tokens/s,1.8386286857904421,kWh,0.0004044825890237167,0.00022169153025011835,0.0019209350367467965,0.002547109156020632,tokens/kWh,24733.92231781122,,s,629,347.34827374267576,0.5522230107196754,0.06935218607240677,0.5438034057617187,0.5445187377929688,0.5449166748046874,1.1266134716796876,"[0.54485400390625, 0.5438034057617187, 0.543246337890625, 0.5442498779296875, 0.5438187255859375, 0.544405517578125, 0.5433927612304688, 0.5439907836914063, 0.5433046875, 0.5437378540039063, 0.5438269653320312, 0.5440072021484375, 0.543795166015625, 0.544226318359375, 0.5433211059570312, 0.5441300659179688, 0.54451708984375, 0.5443307495117188, 0.5436416015625, 0.5442396240234375, 0.5436170043945312, 0.545089599609375, 0.543943603515625, 0.5440245971679688, 0.5435012817382813, 0.54389453125, 0.543447021484375, 0.544269287109375, 0.5434480590820312, 0.5442416381835937, 0.5436282958984375, 0.5436856079101563, 0.5433425903320312, 0.5441137084960938, 0.5437623901367188, 0.5438228759765625, 0.54314599609375, 0.543266845703125, 0.5434480590820312, 0.54394677734375, 0.543478759765625, 0.544342041015625, 0.5433477172851563, 0.5437071533203125, 0.5434163208007813, 0.5436990356445313, 0.5434121704101562, 0.5436589965820312, 0.5431746826171875, 0.5436907348632812, 0.5431572265625, 0.5437921142578125, 0.5443922119140625, 0.5438034057617187, 0.5433538818359375, 0.5437992553710937, 0.54312548828125, 0.5436221313476562, 0.5442550048828125, 0.5447546997070313, 0.5435003051757813, 0.544110595703125, 1.129606201171875, 0.5438555908203125, 0.5446000366210938, 0.5434962158203125, 0.5441279907226563, 0.5435760498046875, 0.5443614501953125, 0.5440512084960938, 0.5445314331054687, 0.544, 0.544395263671875, 0.5442437133789062, 0.5449338989257813, 0.5445150756835937, 0.5455985107421875, 0.5442651977539062, 0.5440481567382812, 0.5437183837890625, 0.5441484985351562, 0.5433558959960938, 
0.544912353515625, 0.5442252807617187, 0.544775146484375, 0.5442785034179688, 0.5442467651367188, 0.5443768310546875, 0.544701416015625, 0.5438463745117188, 0.5441720581054688, 0.543763427734375, 0.5439447021484375, 0.5434183959960938, 0.5442539672851563, 0.543873046875, 0.5442447509765626, 0.5435484008789062, 0.544189453125, 0.5435852661132813, 0.5441228637695312, 0.5439774780273438, 0.5444413452148438, 0.5440932006835938, 0.5443020629882812, 0.5437276000976563, 0.5443696899414062, 0.5436436767578126, 0.5473484497070312, 0.5439129638671875, 0.5444976806640625, 0.54388427734375, 0.5441033935546875, 0.5436795043945313, 0.5439283447265625, 0.5437163696289062, 0.5439866943359375, 0.5433651123046875, 0.5438566284179688, 0.5434593505859375, 0.5443276977539062, 0.5436395263671875, 0.54405224609375, 0.5434828491210938, 0.5446829833984375, 1.1269549560546874, 0.5436589965820312, 0.5440983276367187, 0.5438801879882813, 0.5444874267578125, 0.544152587890625, 0.5444679565429688, 0.5439887084960937, 0.5441515502929688, 0.5438320922851563, 0.54458984375, 0.5438555908203125, 0.544068603515625, 0.5435648193359375, 0.5439713134765625, 0.5431132202148438, 0.5440512084960938, 0.54389453125, 0.5443778686523437, 0.543446044921875, 0.5438597412109375, 0.5438648071289063, 0.5439907836914063, 0.5458063354492187, 0.5444915161132813, 0.543572998046875, 0.5443184814453125, 0.5437357788085937, 0.543878173828125, 0.5435125732421875, 0.5439928588867188, 0.5434962158203125, 0.544068603515625, 0.5433712768554687, 0.5437890625, 0.5446963500976563, 0.5459650268554688, 0.5440255737304688, 0.5438883666992187, 0.5443931884765625, 0.5446287231445313, 0.5444362182617187, 0.5449840698242188, 0.5441444091796875, 0.5444034423828125, 0.543510498046875, 0.5440491333007812, 0.5454448852539062, 0.5443717041015625, 0.5432412109375, 0.5437112426757813, 0.5432013549804687, 0.5434971313476562, 0.5437726440429688, 0.5439866943359375, 0.5433477172851563, 0.5437194213867188, 0.5431173095703125, 0.5434132690429687, 0.5431746826171875, 0.544648193359375, 0.5442304077148438, 0.5442119750976563, 1.1263170166015626, 0.5434583129882813, 0.5442969360351563, 0.543373291015625, 0.5437562866210938, 0.543373291015625, 0.5437644653320313, 0.5435750122070313, 0.5440972900390625, 0.5432647705078125, 0.5439580078125, 0.5433252563476563, 0.5438104858398437, 0.5432924194335937, 0.5439324340820313, 0.5438750610351563, 0.5440696411132813, 0.543283203125, 0.543783935546875, 0.54326171875, 0.5440430297851563, 0.543267822265625, 0.5438822631835938, 0.54329443359375, 0.5437296752929688, 0.5433179931640625, 0.5443358764648437, 0.5437020263671875, 0.5440706787109375, 0.5436016845703125, 0.5435985717773437, 0.54325244140625, 0.5440829467773437, 0.5445253295898438, 0.5435402221679687, 0.54327294921875, 0.5437296752929688, 0.5436436767578126, 0.54380029296875, 0.5434234619140625, 0.5437747192382812, 0.5434429321289063, 0.5436068115234375, 0.543056884765625, 0.5473935546875, 0.54410546875, 0.5440604248046875, 0.5434931030273438, 0.5439365234375, 0.5433773803710937, 0.5438289794921874, 0.5436354370117188, 0.5443809204101563, 0.5432913818359375, 0.5437869873046876, 0.5433190307617187, 0.5438975830078125, 0.54358837890625, 0.543973388671875, 0.5433487548828125, 0.5437777709960937, 0.5435279541015625, 0.5438392333984375, 1.126728759765625, 0.5433446655273437, 0.544162841796875, 0.5432760620117187, 0.5441720581054688, 0.5443881225585937, 0.54490625, 0.54394677734375, 0.5442590942382812, 0.5439252319335938, 0.544701416015625, 0.5439826049804688, 0.5446226196289062, 
0.5433385009765626, 0.5438064575195313, 0.5434009399414063, 0.5440983276367187, 0.5450567626953124, 0.5439652099609374, 0.5432319946289063, 0.5438095092773437, 0.5432586059570312, 0.5437614135742187, 0.543435791015625, 0.544484375, 0.5434317016601562, 0.5437009887695312, 0.5431490478515625, 0.5438167114257813, 0.543298583984375, 0.5435801391601562, 0.5435494995117187, 0.5441719970703125, 0.5433681640625, 0.5436928100585937, 0.5437245483398437, 0.5445037841796875, 0.5434193725585937, 0.5443307495117188, 0.543531005859375, 0.5440133056640625, 0.5436139526367187, 0.5439487915039063, 0.54411572265625, 0.5449932861328125, 0.5437962036132813, 0.5439794921875, 0.543466552734375, 0.5439456787109375, 0.543562744140625, 0.5440255737304688, 0.5437542114257813, 0.5439508666992188, 0.5435565795898437, 0.5450424194335938, 0.5437081298828125, 0.5441198120117188, 0.5460613403320312, 0.5440328369140625, 0.5436876220703125, 0.5439970092773437, 0.5443685913085937, 0.5439518432617187, 1.1256268310546875, 0.5433876342773437, 0.5439027099609375, 0.5437491455078125, 0.5440143432617187, 0.5435873413085938, 0.5440634765625, 0.5435217895507812, 0.544837646484375, 0.5441290283203125, 0.5442662353515625, 0.5436077880859375, 0.5440040893554687, 0.5432811279296875, 0.5437716674804688, 0.5439754028320313, 0.543921142578125, 0.5436795043945313, 0.5448601684570312, 0.543963134765625, 0.5444270629882813, 0.5435064086914062, 0.5439723510742187, 0.5439713134765625, 0.5442775268554687, 0.5437449951171875, 0.544595947265625, 0.5438966064453125, 0.5437860107421875, 0.5432279052734375, 0.5435350952148438, 0.5433651123046875, 0.5438668823242188, 0.5433426513671875, 0.5449727783203125, 0.5435340576171875, 0.5439334106445313, 0.54326171875, 0.5467739868164062, 0.5434767456054688, 0.5440921630859376, 0.5435054931640625, 0.544133056640625, 0.5435484008789062, 0.5440020751953125, 0.5437808837890625, 0.5440993041992187, 0.5436477661132812, 0.5438197631835937, 0.5434757690429688, 0.54377978515625, 0.5431531372070313, 0.5442908325195313, 0.5436088256835937, 0.5439539184570312, 0.54348291015625, 0.54430517578125, 0.5432504272460937, 0.5439170532226563, 0.5432053833007813, 0.5438597412109375, 0.5436006469726562, 0.543825927734375, 1.1284039306640625, 0.5435811767578125, 0.54392626953125, 0.5432975463867188, 0.5440030517578125, 0.5434276123046875, 0.5444700317382812, 0.5436016845703125, 0.5438228759765625, 0.5433507690429688, 0.5437460327148438, 0.5433599853515625, 0.5435975952148437, 0.5432709350585937, 0.54407373046875, 0.5433487548828125, 0.5436477661132812, 0.5439201049804687, 0.544205810546875, 0.5434214477539062, 0.543847412109375, 0.5432391967773438, 0.5437501220703125, 0.54501171875, 0.543856689453125, 0.5445160522460938, 0.544321533203125, 0.5439661865234375, 0.5444495849609375, 0.5435463256835937, 0.543562744140625, 0.5435699462890625, 0.5438617553710937, 0.5433108520507812, 0.5436497802734375, 0.5432432861328125, 0.5446655883789062, 0.5437214965820313, 0.5437655029296875, 0.543151123046875, 0.5436876831054688, 0.54325146484375, 0.5437511596679687, 0.5447864379882813, 0.5438218994140624, 0.5431879272460938, 0.543825927734375, 0.543182861328125, 0.5435903930664062, 0.54308349609375, 0.5436334228515625, 0.5431613159179688, 0.5436016845703125, 0.5431839599609375, 0.5440931396484375, 0.544869384765625, 0.543978515625, 0.5436641235351563, 0.5438402709960938, 0.54329443359375, 0.5435975952148437, 0.5432872924804687, 0.5449031982421875, 1.12785302734375, 0.5432197265625, 0.5443031005859374, 0.543805419921875, 0.5440696411132813, 
0.543562744140625, 0.5437880249023438, 0.5434265747070313, 0.54437890625, 0.5443410034179688, 0.5438085327148438, 0.5433548583984374, 0.5437552490234375, 0.5441167602539062, 0.5438873901367187, 0.5431286010742188, 0.5460684814453125, 0.5433446655273437, 0.54401123046875, 0.5432525024414062, 0.5443113403320312, 0.5433211059570312, 0.5440449829101562, 0.54329443359375, 0.5437286376953125, 0.5433364868164062, 0.5437962036132813, 0.5443225708007813, 0.544879638671875, 0.543446044921875, 0.543447021484375, 0.5436170043945312, 0.5437798461914063, 0.543077392578125, 0.5447864379882813, 0.5435914306640625, 0.5437706298828126, 0.5436293334960938, 0.5449195556640625, 0.5433282470703125, 0.544373779296875, 0.543984619140625, 0.54413720703125, 0.5432801513671875, 0.5436323852539062, 0.5439119262695312, 0.5450363159179688, 0.543909912109375, 0.5446492309570312, 0.5441402587890625, 0.5445682983398438, 0.5445365600585937, 0.5437798461914063, 0.5442201538085938, 0.5439365234375, 0.5434225463867187, 0.5440900268554687, 0.5439365844726562, 0.5439845581054688, 0.5436405639648437, 0.5438587036132813, 0.5438146362304688, 0.5440071411132813, 1.1298836669921875, 0.5435586547851563, 0.5440675659179688, 0.5432913818359375, 0.5438289794921874, 0.5433005981445312, 0.5437767944335937, 0.544300048828125, 0.54409521484375, 0.543405029296875, 0.54519091796875, 0.5432658081054688, 0.5438023681640625, 0.5436221313476562, 0.5439180908203125, 0.5432565307617188, 0.5435023193359375, 0.5447024536132813, 0.54401953125, 0.5435442504882813, 0.5441484985351562, 0.5433333740234375, 0.5439180908203125, 0.5433262329101562, 0.5437921142578125, 0.5436846313476562, 0.5451581420898437, 0.54331494140625, 0.5439129638671875, 0.5431747436523438, 0.5436364135742188, 0.543467529296875, 0.5444925537109375, 0.5434685668945313, 0.5439539184570312, 0.543331298828125, 0.5441863403320313, 0.5441474609375, 0.5438177490234375, 0.5437880249023438, 0.5436282958984375, 0.5433865966796875, 0.5437327270507812, 0.5433026733398437, 0.5443440551757812, 0.5432484130859375, 0.5437440185546875, 0.5434849243164063, 0.5438463745117188, 0.5431654663085937, 0.5436897583007813, 0.5432340698242187, 0.5437265625, 0.5432975463867188, 0.543541259765625, 0.5436201171875, 0.5445703735351562, 0.543752197265625, 0.5440389404296875, 0.5439641723632812, 0.5437122802734375, 0.543457275390625, 0.5449390258789063, 1.1290306396484375, 0.5434531860351562, 0.5440993041992187, 0.5435391845703125, 0.5443635864257812, 0.54331591796875, 0.5436958618164063, 0.54315625, 0.54371533203125, 0.5437439575195312, 0.544100341796875, 0.5435381469726562, 0.5437388916015625, 0.5432217407226563, 0.5436754150390625, 0.54318896484375, 0.5440829467773437, 0.544564208984375, 0.543515625, 0.5431910400390625, 0.5438494873046875, 0.54326171875, 0.5435248413085938, 0.5433456420898437, 0.5437112426757813, 0.5435238647460937, 0.5456578369140626, 0.5435504760742188, 0.5442672729492187, 0.543446044921875, 0.54443212890625, 0.5432156372070313, 0.5440870971679688, 0.5433077392578125, 0.5435750122070313, 0.5440040893554687, 0.5438587036132813, 0.5430794067382813, 0.5436282958984375, 0.5436846313476562, 0.5436190795898438, 0.5430947875976563, 0.5436282958984375, 0.5432340698242187, 0.5435166625976563, 0.5432954711914062, 0.5439907836914063, 0.5439354858398437, 0.5442140502929688, 0.543595458984375, 0.544216064453125, 0.5438013305664062, 0.5442723999023438, 0.5444034423828125, 0.5439794921875, 0.54333642578125, 0.5436118774414063, 0.5431787719726563, 0.5439519653320313, 0.5432032470703125, 0.5435668334960938, 
0.5430866088867188, 0.5438269653320312]",tokens/s,1.810862605483909,,,,,,,, -4bit-awq-gemm-flash_attention_2,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,Qwen/Qwen2-beta-14B,Qwen/Qwen2-beta-14B,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.0,,0.30.1,,,,1.19.2,,,,0.11.1,,,,,,,,,,,,,,,,,,,,,,,,,,,MB,3956.158464,12732.33408,0.0,12085.886976,11337.370624,s,10,10.987366088867189,1.098736608886719,0.001768964710489349,1.0985178833007812,1.1007381103515623,1.101008154296875,1.101224189453125,"[1.1001180419921874, 1.1012781982421875, 1.0972359619140626, 1.0976529541015625, 1.097395263671875, 1.09569873046875, 1.0975172119140626, 1.0993828125, 1.1004088134765626, 1.1006781005859374]",tokens/s,232.99487605076595,kWh,1.295269936323166e-05,7.0975890679255816e-06,6.178085498018682e-05,8.183114341134406e-05,tokens/kWh,3128393.2904756567,MB,3956.158464,12732.33408,0.0,12085.886976,11686.804992,s,10,644.0755078125,64.40755078125,0.02043927244380867,64.406017578125,64.42852109374999,64.437791796875,64.445208359375,"[64.41949609375, 64.4215078125, 64.3984453125, 64.3965, 64.3855703125, 64.37932421875, 64.38755078125, 64.41358984375, 64.4470625, 64.4264609375]",tokens/s,0.9781461837288221,kWh,0.0007606077267395126,0.000416880381825722,0.003642305219397435,0.00481979332796267,tokens/kWh,13071.09988191758,,s,629,652.9415617065439,1.038062896194822,0.13045675837303516,1.0222479248046874,1.02322421875,1.0235658081054688,2.119142734375,"[1.0216539916992187, 1.0224589233398438, 1.0223789672851562, 1.0224598999023438, 1.0228684692382812, 1.0230978393554688, 1.0232852783203126, 1.0228684692382812, 1.0223707885742188, 1.0226544799804687, 1.0221055908203125, 1.022455810546875, 1.0223565063476563, 1.0220953369140624, 1.02287255859375, 1.0226544799804687, 1.0230180053710938, 1.0228029174804687, 1.022360595703125, 1.0219008178710938, 1.02221826171875, 1.0227445678710938, 1.0227865600585937, 1.0227056884765624, 1.0226657104492187, 1.0222479248046874, 1.0227005615234375, 1.0229114990234376, 1.0229288940429688, 1.0229381103515625, 1.0223861694335938, 1.0226493530273437, 1.0219724731445312, 1.02327294921875, 1.0225264892578125, 1.021897705078125, 1.0218536987304687, 1.0230333251953125, 1.0221629638671874, 1.022266357421875, 1.0232105102539062, 1.0222919921875, 1.0218916015625, 1.0232176513671876, 1.0224496459960937, 1.0222622680664062, 1.0223595581054687, 1.0233876342773438, 1.0225960693359375, 1.0226697998046874, 1.0229954833984376, 1.0221025390625, 1.0217092895507813, 1.0235074462890625, 1.0223370361328126, 1.0225868530273436, 1.0227589111328126, 1.0223380737304688, 1.0219468994140626, 1.02196533203125, 1.0226288452148438, 1.0231378173828125, 2.124905517578125, 1.0222356567382813, 1.021960205078125, 1.022509033203125, 1.0222704467773438, 1.0222008056640626, 1.0229309692382813, 1.0228899536132812, 1.0222418212890625, 1.022761962890625, 1.0235012817382811, 1.0224527587890626, 1.0228264770507813, 1.0230067138671874, 1.022603271484375, 1.022150634765625, 1.0228582153320311, 1.0227486572265625, 1.0225458984375, 1.023267822265625, 1.0228500366210938, 
1.0230599975585937, 1.02280908203125, 1.0225940551757813, 1.0225387573242188, 1.0220328979492188, 1.0222684326171876, 1.0227660522460937, 1.0227394409179686, 1.0236610717773438, 1.0233159790039061, 1.02287158203125, 1.0244444580078125, 1.0236958618164063, 1.022750732421875, 1.0225233764648438, 1.0231818237304688, 1.0220595092773437, 1.0220052490234375, 1.0232534790039063, 1.0219489135742188, 1.02171337890625, 1.0222592163085937, 1.0224414672851563, 1.023140869140625, 1.0223493041992187, 1.0228746337890624, 1.0218157958984375, 1.02228173828125, 1.02193359375, 1.0220175170898438, 1.0220697631835938, 1.02232470703125, 1.0216427612304688, 1.0218946533203126, 1.0215690307617187, 1.022439453125, 1.02179736328125, 1.0224251098632813, 1.0220114135742187, 1.0223197021484376, 1.0227127685546875, 1.0225541381835936, 2.119232421875, 1.0225018920898437, 1.022055419921875, 1.0218680419921875, 1.0223175659179689, 1.0221229858398437, 1.0219366455078125, 1.0225018920898437, 1.0220114135742187, 1.0218792724609376, 1.0220257568359374, 1.0222643432617187, 1.0220001220703125, 1.0221516723632813, 1.0221260986328125, 1.0216611938476563, 1.0218567504882812, 1.0222837524414063, 1.022792724609375, 1.0223380737304688, 1.0225889282226563, 1.0225274658203125, 1.0219089965820312, 1.0221567993164062, 1.0223544311523438, 1.0222837524414063, 1.0220943603515624, 1.022055419921875, 1.0220431518554687, 1.0217420654296876, 1.022118896484375, 1.0222120971679687, 1.02186083984375, 1.0227660522460937, 1.0224302368164062, 1.0219202270507812, 1.0222950439453125, 1.022434326171875, 1.0217748413085936, 1.0216837158203125, 1.021929443359375, 1.0220933227539062, 1.0220226440429687, 1.0222172241210938, 1.021971435546875, 1.0217420654296876, 1.0220123901367189, 1.0219243774414062, 1.0223441772460937, 1.022044189453125, 1.022455810546875, 1.0223093872070312, 1.0240828857421875, 1.0233630981445312, 1.0228223876953124, 1.0220809936523438, 1.022814208984375, 1.0219632568359376, 1.0219100341796874, 1.021834228515625, 1.0227333374023437, 1.0218475341796875, 1.0219889526367187, 2.119702392578125, 1.0218157958984375, 1.0222847900390626, 1.0225991821289062, 1.0219386596679687, 1.0222909545898438, 1.0225694580078124, 1.02200732421875, 1.0223073120117188, 1.0221957397460937, 1.0222427978515625, 1.0221383666992188, 1.0221312255859376, 1.022075927734375, 1.0216263427734376, 1.0212208862304688, 1.02229296875, 1.0224076538085938, 1.023636474609375, 1.0234900512695313, 1.023056884765625, 1.0234654541015624, 1.0236641235351562, 1.02160791015625, 1.0215352172851562, 1.0216673583984375, 1.021822998046875, 1.0217000732421875, 1.022213134765625, 1.02185986328125, 1.0224988403320312, 1.022687255859375, 1.0223042602539063, 1.0217113647460938, 1.0225919799804688, 1.022482421875, 1.022055419921875, 1.0219806518554688, 1.023088623046875, 1.0223472900390624, 1.021929443359375, 1.0223493041992187, 1.0222120971679687, 1.0214901733398438, 1.0219417724609374, 1.0221721801757813, 1.0221773071289062, 1.0221752319335937, 1.0220165405273438, 1.0218884887695312, 1.0218833618164063, 1.0219745483398437, 1.0215157470703125, 1.0211686401367188, 1.0218997802734375, 1.0223994750976562, 1.0220114135742187, 1.022298095703125, 1.02169189453125, 1.0218690795898437, 1.021822998046875, 1.0221168823242188, 1.0219120483398438, 2.118912109375, 1.0223964233398437, 1.0216151123046875, 1.022171142578125, 1.0217677001953125, 1.0219642944335938, 1.0219970703125, 1.0222827758789061, 1.0216611938476563, 1.021549560546875, 1.0215291137695313, 1.0223114013671875, 1.022255126953125, 
1.0225100708007813, 1.0217799682617188, 1.02200830078125, 1.0224179077148436, 1.0221260986328125, 1.0218782958984376, 1.0218884887695312, 1.0225499877929687, 1.022076904296875, 1.0219366455078125, 1.0222387084960938, 1.0217011108398437, 1.0217328491210937, 1.02257666015625, 1.0215403442382813, 1.0214144287109375, 1.0214072265625, 1.0221701049804688, 1.0217277221679688, 1.0222387084960938, 1.0218997802734375, 1.0216539916992187, 1.0216837158203125, 1.0219857788085938, 1.0216980590820313, 1.0217205810546874, 1.0214840087890624, 1.02214453125, 1.0218076171875, 1.0222633056640624, 1.0221834106445313, 1.0223790283203125, 1.0221107177734374, 1.022223388671875, 1.0214297485351562, 1.0217717895507812, 1.0216028442382812, 1.0226585693359376, 1.0215946044921875, 1.0224056396484376, 1.0217778930664063, 1.0220472412109376, 1.0222387084960938, 1.0221731567382812, 1.0224568481445313, 1.0223892211914063, 1.021928466796875, 1.0224486694335937, 1.0220421142578124, 1.022455810546875, 2.117295166015625, 1.0219315185546876, 1.0214788818359375, 1.0215782470703125, 1.021528076171875, 1.0218690795898437, 1.0219069213867187, 1.0218506469726563, 1.0224097290039063, 1.0219745483398437, 1.0218424072265626, 1.021686767578125, 1.0215823364257812, 1.0215782470703125, 1.021676513671875, 1.0219089965820312, 1.02179736328125, 1.0221465454101561, 1.0219561157226562, 1.021676513671875, 1.0220635986328126, 1.0227415161132813, 1.0216591186523438, 1.0214799194335937, 1.021432861328125, 1.0216959838867188, 1.021422607421875, 1.0225029296875, 1.0220267333984374, 1.021981689453125, 1.022392333984375, 1.0219192504882812, 1.021675537109375, 1.0215321655273437, 1.0215844116210937, 1.0223329467773437, 1.0216427612304688, 1.0223380737304688, 1.0223411254882813, 1.0224465942382812, 1.0220513305664063, 1.021770751953125, 1.0217267456054688, 1.0211819458007811, 1.0219478759765626, 1.0219786376953126, 1.0217195434570312, 1.02196533203125, 1.0216980590820313, 1.0216949462890625, 1.0220328979492188, 1.02213427734375, 1.0221680908203126, 1.02171337890625, 1.0219458618164063, 1.021823974609375, 1.022129150390625, 1.0220892333984375, 1.0217769165039063, 1.022181396484375, 1.0221486206054688, 1.0219581298828124, 1.0223165283203124, 2.1193359375, 1.021507568359375, 1.0222807006835937, 1.0218096923828126, 1.0217789306640626, 1.0224578857421875, 1.0224793701171875, 1.0222202758789063, 1.0224015502929686, 1.0220513305664063, 1.0227291870117188, 1.0218936157226564, 1.0227210083007812, 1.0215659790039062, 1.021507568359375, 1.0218588256835937, 1.0215946044921875, 1.0212608032226562, 1.0221383666992188, 1.021644775390625, 1.0220400390625, 1.0219151611328126, 1.0219458618164063, 1.0218168334960938, 1.0220155029296876, 1.0220830688476563, 1.0217257080078126, 1.0215249633789063, 1.0215782470703125, 1.0214522705078124, 1.021971435546875, 1.0216908569335938, 1.0222796630859374, 1.0217891845703124, 1.0220093383789062, 1.0221598510742187, 1.0217533569335937, 1.021591552734375, 1.021834228515625, 1.0217297973632813, 1.0220697631835938, 1.0221373291015625, 1.021681640625, 1.021823974609375, 1.022286865234375, 1.0229125366210938, 1.0224752807617188, 1.0218772583007814, 1.0224363403320313, 1.02299853515625, 1.021749267578125, 1.022983154296875, 1.0218731689453124, 1.0215403442382813, 1.022688232421875, 1.0222745361328125, 1.0221066284179687, 1.021812744140625, 1.0228449096679688, 1.0219735107421875, 1.0219089965820312, 1.0222427978515625, 1.0222807006835937, 2.121678955078125, 1.0220349731445313, 1.0220318603515626, 1.022308349609375, 1.0220912475585937, 
1.0223062744140625, 1.0220667114257813, 1.0221168823242188, 1.0219243774414062, 1.0236375122070311, 1.0226729125976564, 1.022350341796875, 1.0221209716796875, 1.0218065795898437, 1.0220042114257812, 1.02285107421875, 1.0233190307617188, 1.0220626220703124, 1.0221178588867188, 1.0219304809570313, 1.0225458984375, 1.0225377197265626, 1.0221055908203125, 1.0222172241210938, 1.0222172241210938, 1.0232381591796875, 1.0236713256835936, 1.02211376953125, 1.0223145141601562, 1.0224046020507813, 1.0224127807617187, 1.0221404418945312, 1.0220615844726562, 1.0223544311523438, 1.0224671020507812, 1.0220564575195312, 1.02242919921875, 1.0219437866210936, 1.0222633056640624, 1.0225182495117187, 1.0220728149414062, 1.0216908569335938, 1.0225111083984375, 1.0221690673828125, 1.0233764038085937, 1.0224005126953124, 1.0228797607421876, 1.022582763671875, 1.0221362915039063, 1.0220574951171875, 1.0226176147460937, 1.023177734375, 1.022688232421875, 1.0220369873046875, 1.0229730224609375, 1.0224833984375, 1.0232719116210938, 1.0233231201171875, 1.0232688598632813, 1.0227711791992187, 1.02242919921875, 1.0218782958984376, 1.0227630004882813, 2.123450439453125, 1.0225910034179688, 1.022224365234375, 1.0221588745117187, 1.0227947387695313, 1.0223759155273437, 1.0220062866210937, 1.021928466796875, 1.0225244140625, 1.0222254028320312, 1.0220328979492188, 1.0240491943359376, 1.0243778076171874, 1.0222274780273437, 1.0225735473632813, 1.02270361328125, 1.0226463012695313, 1.0230241088867187, 1.023151123046875, 1.0230661010742188, 1.022940185546875, 1.0235166625976562, 1.0226575317382813, 1.0224097290039063, 1.02308349609375, 1.0247874755859374, 1.0235719604492188, 1.0233917236328125, 1.023362060546875, 1.0236497802734374, 1.0237828979492187, 1.0238555908203124, 1.024322509765625, 1.0232586059570312, 1.0231644287109376, 1.023604736328125, 1.0232422485351562, 1.0228469848632813, 1.0233712768554688, 1.0233978881835937, 1.0237593383789063, 1.02187109375, 1.0220492553710938, 1.0218803100585938, 1.023657958984375, 1.0220001220703125, 1.0220830688476563, 1.0218035278320312, 1.02250390625, 1.02404296875, 1.0239928588867186, 1.0232227783203125, 1.0231644287109376, 1.0232197265625, 1.0240625, 1.0235238647460938, 1.022983154296875, 1.0219345703125, 1.0223042602539063, 1.022192626953125, 1.0235565795898438, 1.0230742797851562, 1.0222684326171876, 2.122977294921875, 1.021991943359375, 1.0222418212890625, 1.022287841796875, 1.0231869506835938, 1.0230241088867187, 1.022814208984375, 1.022866455078125, 1.0224425048828125, 1.0229033203125, 1.0226390991210939, 1.0222633056640624, 1.0229155883789063, 1.0221915893554687, 1.0231552124023438, 1.0233661499023436, 1.0227711791992187, 1.0222151489257814, 1.0229217529296875, 1.0231818237304688, 1.0223114013671875, 1.0220543823242187, 1.02232373046875, 1.0222356567382813, 1.0230077514648437, 1.0234613647460937, 1.0228264770507813, 1.0229319458007813, 1.024480224609375, 1.0225131225585937, 1.0220369873046875, 1.0220902099609375, 1.0221383666992188, 1.0218895263671874, 1.0225633544921875, 1.0228480224609375, 1.0221834106445313, 1.0225643310546875, 1.0235330810546874, 1.02322998046875, 1.0229186401367187, 1.0226974487304688, 1.0228060302734374, 1.022993408203125, 1.023494140625, 1.0230015869140625, 1.0222899169921875, 1.0222520141601563, 1.0227855224609375, 1.0226903076171876, 1.022087158203125, 1.0217963256835938, 1.0218424072265626, 1.0221260986328125, 1.0221178588867188, 1.0227445678710938, 1.023115234375, 1.0224527587890626, 1.0226483154296875, 1.022645263671875, 1.022792724609375, 
1.0225899658203126, 1.0227425537109376]",tokens/s,0.9633327649660273,,,,,,,, -4bit-awq-gemm-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,a,a,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 304, in hf_raise_for_status - response.raise_for_status() - File ""/usr/local/lib/python3.10/dist-packages/requests/models.py"", line 1024, in raise_for_status - raise HTTPError(http_error_msg, response=self) -requests.exceptions.HTTPError: 404 Client Error: Not Found for url: https://huggingface.co/a/resolve/main/config.json - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1221, in hf_hub_download - return _hf_hub_download_to_cache_dir( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1325, in _hf_hub_download_to_cache_dir - _raise_on_head_call_error(head_call_error, force_download, local_files_only) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1823, in _raise_on_head_call_error - raise head_call_error - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1722, in _get_metadata_or_catch_error - metadata = get_hf_file_metadata(url=url, proxies=proxies, timeout=etag_timeout, headers=headers) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1645, in get_hf_file_metadata - r = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 372, in _request_wrapper - response = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 396, in _request_wrapper - hf_raise_for_status(response) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status - raise RepositoryNotFoundError(message, response) 
from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-66949180-00c8d36e48127f563ccb1729;c119ed00-3b61-4ac1-8cd3-234cb2161b07) - -Repository Not Found for url: https://huggingface.co/a/resolve/main/config.json. -Please make sure you specified the correct `repo_id` and `repo_type`. -If you are trying to access a private or gated repo, make sure you are authenticated. - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 425, in cached_file - raise EnvironmentError( -OSError: a is not a local folder and is not a valid model identifier listed on 'https://huggingface.co/models' -If this is a private repository, make sure to pass a token having permission to this repo either by logging in with `huggingface-cli login` or by passing `token=` - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemm-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,-,-,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File 
""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 106, in _inner_fn - validate_repo_id(arg_value) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 160, in validate_repo_id - raise HFValidationError( -huggingface_hub.errors.HFValidationError: Repo id must use alphanumeric chars or '-', '_', '.', '--' and '..' are forbidden, '-' and '.' cannot start or end the name, max length is 96: '-'. - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 466, in cached_file - raise EnvironmentError( -OSError: Incorrect path_or_model_id: '-'. Please provide either the path to a local folder or the repo_id of a model on the Hub. 
- -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemm-flash_attention_2,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,facebook/opt-350m,facebook/opt-350m,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.0,,0.30.1,,,,1.19.2,,,,0.11.1,,,,,,,,,,,,,,,,,,,,,,,,,,,MB,1312.935936,1023.934464,0.0,377.48736,290.348032,s,10,0.6976366119384766,0.06976366119384766,0.0018590846735664541,0.0696058578491211,0.07176354598999024,0.07230553245544434,0.07273912162780762,"[0.07284751892089844, 0.06800761413574219, 0.06769139099121094, 0.06779033660888673, 0.07104889678955079, 0.07164310455322266, 0.07125389099121093, 0.06824649810791016, 0.07096521759033203, 0.06814214324951172]",tokens/s,3669.5321836488733,kWh,8.134236630148721e-07,4.4571909736688417e-07,2.1182893658638067e-06,3.377432126245563e-06,tokens/kWh,75797230.09402885,MB,1312.935936,1023.934464,0.0,377.48736,337.281536,s,10,43.73100244140625,4.373100244140625,0.07142248616314475,4.400765380859375,4.4402288085937505,4.445402490234375,4.449541435546875,"[4.35173583984375, 4.25024462890625, 4.26398681640625, 4.30877587890625, 4.41468994140625, 4.4390791015625, 4.43881396484375, 4.42625927734375, 4.3868408203125, 4.450576171875]",tokens/s,14.40625562709468,kWh,5.082961926562783e-05,2.7857636841584544e-05,0.0001254377571233551,0.00020412501323056748,tokens/kWh,308634.39518232364,,s,629,44.28504171752928,0.07040547172898139,0.008395184351578173,0.07026687622070313,0.07091015625,0.07132590332031251,0.13567237548828126,"[0.0723609619140625, 0.07140863800048829, 0.07176195526123047, 0.07163388824462891, 0.07094169616699218, 0.07094783782958984, 0.07148441314697265, 0.07121202850341797, 0.07107174682617187, 0.07111577606201172, 0.07114342498779297, 0.07076761627197266, 0.07108198547363281, 0.07081574249267578, 0.07077072143554687, 0.07092835235595703, 0.07097344207763671, 0.0714598388671875, 0.07095500946044922, 0.07107481384277343, 0.07127347564697266, 0.07087923431396484, 0.07049420928955077, 0.07076659393310547, 0.07170150756835937, 0.071552001953125, 0.0690360336303711, 0.06716006469726563, 0.06747545623779297, 0.0673433609008789, 0.06707405090332032, 0.06747135925292969, 0.06733312225341796, 0.06719999694824219, 0.06728806304931641, 0.0673095703125, 0.06710784149169922, 0.06706175994873047, 0.0676126708984375, 0.06733926391601562, 0.06688256072998047, 0.06733414459228515, 0.06721126556396484, 0.0674703369140625, 0.06733926391601562, 0.06750822448730469, 0.06778880310058594, 0.06814105224609375, 0.0675225601196289, 0.06693376159667969, 0.06731980895996094, 0.06751334381103516, 0.07044198608398437, 0.0684615707397461, 0.0671098861694336, 0.06701465606689454, 0.06718463897705078, 0.06769561767578125, 0.06716422271728516, 0.06652819061279297, 0.06804582214355469, 0.06770995330810547, 0.13573631286621093, 0.0672204818725586, 0.06833356475830078, 0.06807555389404298, 0.06771196746826172, 0.06805197143554688, 0.06715392303466797, 0.06708428955078125, 0.06746112060546874, 0.06681702423095703, 0.06697062683105469, 
0.06744166564941406, 0.06732288360595703, 0.06716006469726563, 0.06725325012207031, 0.06715602874755859, 0.06704019165039063, 0.06741094207763672, 0.06709555053710937, 0.06650572967529297, 0.06745292663574219, 0.06664704132080078, 0.06701261138916016, 0.06722150421142578, 0.06713855743408204, 0.0671098861694336, 0.06744268798828125, 0.06728704071044922, 0.0696258544921875, 0.06788813018798828, 0.06791372680664062, 0.06849024200439453, 0.06736895751953124, 0.06989209747314454, 0.06821990203857421, 0.06771097564697266, 0.06738432312011719, 0.06751948547363282, 0.06726348876953125, 0.06710784149169922, 0.06749183654785157, 0.06747545623779297, 0.06864179229736328, 0.0675563507080078, 0.06683135986328125, 0.06710681915283204, 0.06729011535644532, 0.06674534606933594, 0.06728089904785156, 0.06722354888916016, 0.06724607849121093, 0.06693990325927734, 0.0674703369140625, 0.06723481750488282, 0.06712223815917968, 0.06780818939208984, 0.06794751739501953, 0.06755840301513671, 0.0675758056640625, 0.06719692993164063, 0.0672573471069336, 0.06723481750488282, 0.06736589050292968, 0.13550796508789062, 0.06692147064208985, 0.06734950256347656, 0.07442227172851562, 0.0679557113647461, 0.06727680206298828, 0.06684569549560547, 0.06775091552734375, 0.06815436553955079, 0.06903807830810547, 0.06718975830078125, 0.06693376159667969, 0.06734031677246094, 0.06811030578613281, 0.06972621154785157, 0.06792601776123047, 0.06736486053466798, 0.06915481567382813, 0.06836736297607422, 0.06893875122070313, 0.07033753967285156, 0.06752665710449218, 0.0672194595336914, 0.06725325012207031, 0.06720822143554687, 0.06680572509765625, 0.06687026977539062, 0.0672143325805664, 0.06730445098876953, 0.06715904235839844, 0.06759014129638671, 0.06702899169921875, 0.06747443389892578, 0.06825984191894531, 0.0673280029296875, 0.06705152130126953, 0.06734130859375, 0.06769664001464844, 0.06806425476074218, 0.06706380462646484, 0.06730137634277343, 0.0671825942993164, 0.0676157455444336, 0.06711398315429687, 0.06727577972412109, 0.06933606719970703, 0.06894182586669922, 0.06846259307861328, 0.06729523468017579, 0.06725836944580078, 0.06733312225341796, 0.0672511978149414, 0.0672890853881836, 0.06693376159667969, 0.0676341781616211, 0.06666957092285156, 0.06659993743896485, 0.0665722885131836, 0.0667514877319336, 0.0672368621826172, 0.06741712188720703, 0.06712726593017578, 0.06705049896240234, 0.1353912353515625, 0.0671488037109375, 0.06749900817871093, 0.0674703369140625, 0.06737715148925781, 0.0673064956665039, 0.06735257720947266, 0.06739974212646484, 0.0676075210571289, 0.07006716918945312, 0.06824960327148437, 0.06842777252197266, 0.06737100982666015, 0.06794239807128906, 0.06855276489257812, 0.0674927978515625, 0.06724813079833984, 0.06728294372558594, 0.06814617919921875, 0.06813286590576172, 0.06723583984375, 0.06725222778320313, 0.06753689575195312, 0.06841139221191406, 0.06750617980957031, 0.06819737243652343, 0.06738438415527344, 0.06882195281982421, 0.06757997131347657, 0.06839907073974609, 0.06708633422851562, 0.06791574096679688, 0.06755020904541016, 0.06742527770996094, 0.06748569488525391, 0.066587646484375, 0.06756761932373047, 0.06675154876708984, 0.06733510589599609, 0.06650163269042969, 0.06747647857666016, 0.06739250946044922, 0.06733824157714843, 0.07072870635986328, 0.07021260833740234, 0.07095807647705078, 0.07044915008544922, 0.07039590454101563, 0.07007437133789063, 0.06753177642822265, 0.06737203216552734, 0.07054438018798828, 0.06973747253417968, 0.07040415954589843, 0.07051052856445313, 0.0710830078125, 
0.0708136978149414, 0.06919782257080079, 0.06926131439208984, 0.06997299194335938, 0.07050035095214843, 0.0701286392211914, 0.07048499298095703, 0.14131712341308594, 0.06722457885742188, 0.06749183654785157, 0.06760550689697266, 0.07033446502685547, 0.07015731048583984, 0.07011225891113282, 0.06978765106201172, 0.0705269775390625, 0.06983372497558593, 0.06974156951904296, 0.07031603240966797, 0.07043583679199218, 0.0705802230834961, 0.07040614318847656, 0.07013069152832031, 0.0699504623413086, 0.07051058959960937, 0.07024332427978516, 0.06977126312255859, 0.06987673950195313, 0.07046553802490234, 0.07214806365966797, 0.07040809631347657, 0.06932179260253907, 0.07022994995117188, 0.07043071746826172, 0.07018402862548828, 0.07030262756347656, 0.07083622741699219, 0.07088333129882812, 0.06744166564941406, 0.06735155487060547, 0.06759731292724609, 0.06742733001708984, 0.0693780517578125, 0.07071334075927735, 0.07068876647949218, 0.07128985595703125, 0.07055465698242187, 0.07048291015625, 0.07026483154296875, 0.07063346862792969, 0.07024947357177734, 0.0704563217163086, 0.07028530883789062, 0.0700212173461914, 0.07084226989746094, 0.0705269775390625, 0.07041228485107422, 0.07060889434814453, 0.07039590454101563, 0.07050342559814453, 0.0704686050415039, 0.07056486511230468, 0.07071641540527343, 0.07057920074462891, 0.07043276977539062, 0.07038159942626954, 0.07048802947998047, 0.06970470428466796, 0.07084134674072265, 0.07183257293701172, 0.14264422607421876, 0.07059661102294922, 0.0704337921142578, 0.07088127899169921, 0.07030681610107421, 0.07067340850830078, 0.07019110107421875, 0.06985215759277344, 0.07026483154296875, 0.07035391998291016, 0.0703815689086914, 0.07054847717285156, 0.07035903930664063, 0.07019519805908203, 0.0714567642211914, 0.0703662109375, 0.07029964447021485, 0.07025766754150391, 0.07012351989746093, 0.07053107452392578, 0.07036518096923829, 0.07038873291015625, 0.07040204620361327, 0.07045836639404297, 0.07014093017578125, 0.07181926727294922, 0.07061196899414063, 0.07031705474853515, 0.07053209686279296, 0.07042457580566407, 0.07070719909667969, 0.07003033447265625, 0.0699525146484375, 0.07056486511230468, 0.07099187469482422, 0.07034060668945312, 0.07056281280517578, 0.07041228485107422, 0.070940673828125, 0.07032319641113281, 0.0704901123046875, 0.07044915008544922, 0.07032627105712891, 0.07004057312011719, 0.07051776123046875, 0.07030169677734376, 0.07029043579101563, 0.07063859558105469, 0.07095500946044922, 0.07047993469238281, 0.0705125732421875, 0.07045734405517579, 0.07045222473144531, 0.07037542724609375, 0.07044096374511719, 0.0705638427734375, 0.07061196899414063, 0.07037747192382812, 0.07041024017333984, 0.06980403137207031, 0.07040614318847656, 0.07026585388183594, 0.07064780426025391, 0.14216192626953125, 0.07040102386474609, 0.07040819549560547, 0.07026483154296875, 0.07061504364013672, 0.07052496337890625, 0.07054332733154296, 0.07030989074707031, 0.07011020660400391, 0.07049727630615234, 0.07108812713623047, 0.07056179046630859, 0.07050444793701172, 0.07025971221923828, 0.07032217407226563, 0.07038566589355469, 0.07023513793945313, 0.07034368133544922, 0.07049625396728515, 0.07012556457519531, 0.07023721313476562, 0.06932579040527344, 0.07020543670654297, 0.07085977935791016, 0.07015219116210937, 0.07016242980957031, 0.07007129669189453, 0.07072870635986328, 0.07037542724609375, 0.07045836639404297, 0.07033446502685547, 0.07036313629150391, 0.07039180755615235, 0.07009587097167969, 0.07063346862792969, 0.0701839370727539, 0.07108812713623047, 
0.07090892791748046, 0.07049727630615234, 0.07038259124755859, 0.07044403076171875, 0.0706519012451172, 0.070761474609375, 0.0694466552734375, 0.0697528305053711, 0.07026380920410157, 0.07064268493652344, 0.0705433578491211, 0.0703477783203125, 0.07133491516113281, 0.0704368667602539, 0.07131238555908204, 0.07170355224609375, 0.0705771484375, 0.07050752258300781, 0.07046451568603515, 0.07079840087890625, 0.07022892761230469, 0.07035084533691406, 0.07033344268798829, 0.0702402572631836, 0.07078604888916015, 0.07056896209716797, 0.13596263122558594, 0.06696959686279297, 0.06721331024169921, 0.06699417877197265, 0.06940467071533203, 0.07078604888916015, 0.07069593811035156, 0.07035187530517578, 0.07053107452392578, 0.07046246337890626, 0.0706344985961914, 0.07024642944335938, 0.07162467193603515, 0.07080652618408204, 0.07045529937744141, 0.07055359649658204, 0.0706519012451172, 0.07040102386474609, 0.06990335845947265, 0.07026080322265625, 0.07056377410888671, 0.07085670471191406, 0.06976921844482421, 0.07021260833740234, 0.06979583740234375, 0.07011634826660157, 0.0700426254272461, 0.07040716552734375, 0.07058329772949219, 0.0699658203125, 0.07026892852783204, 0.0703272933959961, 0.07030989074707031, 0.07069593811035156, 0.07077683258056641, 0.07038259124755859, 0.07024230194091798, 0.07033548736572266, 0.07012454223632812, 0.07299378967285156, 0.07067033386230469, 0.07059865570068359, 0.07034368133544922, 0.07035699462890625, 0.07052902221679687, 0.070687744140625, 0.06977433776855468, 0.07029862213134766, 0.0704901123046875, 0.07047270202636718, 0.07048191833496094, 0.07052799987792968, 0.0706170883178711, 0.07022182464599609, 0.0703272933959961, 0.070255615234375, 0.07052082824707032, 0.07059967803955078, 0.07068160247802735, 0.0705054702758789, 0.07004364776611328, 0.07048089599609375, 0.07136255645751953, 0.14220700073242187, 0.07030985260009766, 0.07026892852783204, 0.0702033920288086, 0.07120588684082031, 0.07035289764404297, 0.07038361358642578, 0.07045222473144531, 0.070181884765625, 0.07042969512939454, 0.07047577667236328, 0.07076150512695313, 0.07077168273925781, 0.07013990020751953, 0.07032422637939453, 0.07050752258300781, 0.07027200317382812, 0.07128268432617188, 0.070614013671875, 0.07047475433349609, 0.07060889434814453, 0.07019827270507813, 0.07051264190673828, 0.07065702056884765, 0.07134515380859376, 0.07106253051757813, 0.07054950714111329, 0.07042253112792969, 0.07038873291015625, 0.0702740478515625, 0.07026687622070313, 0.06861004638671875, 0.0671272964477539, 0.06711507415771484, 0.0671866226196289, 0.06729523468017579, 0.06710169219970703, 0.06714166259765625, 0.06752764892578125, 0.06734130859375, 0.06720921325683593, 0.06747135925292969, 0.06729933166503907, 0.0674150390625, 0.06712422180175781, 0.06744882965087891, 0.06737612915039062, 0.06713958740234376, 0.06726656341552735, 0.0685823974609375, 0.07029043579101563, 0.07070515441894532, 0.0720742416381836, 0.07064166259765625, 0.07065087890625, 0.07042559814453125, 0.07116806030273437, 0.07063648223876953, 0.07048703765869141, 0.07075430297851562, 0.07056896209716797, 0.07044608306884766, 0.07033036804199219, 0.13839053344726562, 0.07068978881835937, 0.07101030731201172, 0.07077279663085938, 0.07043782043457031, 0.070614013671875, 0.07037542724609375, 0.07067443084716797, 0.070181884765625, 0.07043788909912109, 0.07035391998291016, 0.0702003173828125, 0.07099903869628907, 0.07056690979003906, 0.07074406433105469, 0.07023308563232422, 0.07023616027832032, 0.0702208023071289, 0.070150146484375, 0.0707799072265625, 
0.07061199951171875, 0.0703927993774414, 0.07016448211669922, 0.07032115173339844, 0.07010406494140625, 0.07089151763916016, 0.07061913299560547, 0.07181926727294922, 0.07085977935791016, 0.07089663696289063, 0.07025459289550781, 0.07071952056884766, 0.07063139343261719, 0.07043276977539062, 0.07035391998291016, 0.07057920074462891, 0.07042867279052735, 0.07073075103759766, 0.07066726684570312, 0.07058124542236328, 0.07049215698242188, 0.07038668823242188, 0.07035903930664063, 0.07047987365722656, 0.07088025665283203, 0.07053414154052734, 0.07050342559814453, 0.07065395355224609, 0.07077375793457032, 0.07083213043212891, 0.070761474609375, 0.07080242919921875, 0.07068364715576173, 0.0717496337890625, 0.07135539245605468, 0.07119155120849609, 0.07112703704833985, 0.07091506958007812, 0.07129190063476562, 0.07068876647949218, 0.07067545318603516, 0.07053619384765625, 0.07090688323974609]",tokens/s,14.20344151445213,,,,,,,, -4bit-awq-gemm-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,m,m,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 304, in hf_raise_for_status - response.raise_for_status() - File ""/usr/local/lib/python3.10/dist-packages/requests/models.py"", line 1024, in raise_for_status - raise HTTPError(http_error_msg, response=self) -requests.exceptions.HTTPError: 404 Client Error: Not Found for url: https://huggingface.co/m/resolve/main/config.json - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1221, in hf_hub_download - return _hf_hub_download_to_cache_dir( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1325, in _hf_hub_download_to_cache_dir - _raise_on_head_call_error(head_call_error, force_download, local_files_only) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1823, in _raise_on_head_call_error - raise head_call_error - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1722, in 
_get_metadata_or_catch_error - metadata = get_hf_file_metadata(url=url, proxies=proxies, timeout=etag_timeout, headers=headers) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1645, in get_hf_file_metadata - r = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 372, in _request_wrapper - response = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 396, in _request_wrapper - hf_raise_for_status(response) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status - raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-66948c60-36fa4198483b6e6d79ee734e;1e77eff2-4763-42e9-b267-c09febf359ff) - -Repository Not Found for url: https://huggingface.co/m/resolve/main/config.json. -Please make sure you specified the correct `repo_id` and `repo_type`. -If you are trying to access a private or gated repo, make sure you are authenticated. - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 425, in cached_file - raise EnvironmentError( -OSError: m is not a local folder and is not a valid model identifier listed on 'https://huggingface.co/models' -If this is a private repository, make sure to pass a token having permission to this repo either by logging in with `huggingface-cli login` or by passing `token=` - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 
-4bit-awq-gemm-flash_attention_2,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,stabilityai/stablelm-base-alpha-7b,stabilityai/stablelm-base-alpha-7b,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.1,,0.30.1,,,,1.19.2,,,,0.11.1,,,,,,,,,,,,,,,,,,,,,,,,,,,MB,1477.30432,8196.194304,0.0,7549.7472,6941.631488,s,10,6.0462579345703125,0.6046257934570314,0.0007435687799022576,0.604391815185547,0.6052782531738281,0.6058451202392579,0.6062986138916016,"[0.6064119873046875, 0.6051522827148438, 0.6040460205078125, 0.6039584350585937, 0.6041729736328125, 0.603818603515625, 0.6042236328125, 0.6045599975585938, 0.6048197021484375, 0.6050942993164062]",tokens/s,423.4023800676526,kWh,7.135871880584294e-06,3.909028176627437e-06,3.5656695191997026e-05,4.670159524920876e-05,tokens/kWh,5481611.466030966,MB,1477.30432,8196.194304,0.0,7549.7472,7094.078464,s,10,357.59960156249997,35.759960156249996,0.0025836315483325416,35.759828125,35.7629109375,35.7636703125,35.7642778125,"[35.7644296875, 35.76244140625, 35.755375, 35.758703125, 35.75703515625, 35.75961328125, 35.7587890625, 35.76004296875, 35.7604296875, 35.7627421875]",tokens/s,1.7617469293793129,kWh,0.0004221709076563517,0.00023138749488215953,0.002133793484811199,0.0027873518873497106,tokens/kWh,22602.097814030254,,s,629,362.47207647705085,0.5762672122051682,0.07178708636540315,0.567605224609375,0.5678377075195312,0.5679312866210938,1.1716501220703124,"[0.5675867919921875, 0.5674536743164063, 0.5675294799804688, 0.5675069580078125, 0.5674711303710938, 0.56766259765625, 0.5675397338867187, 0.567414794921875, 0.5675397338867187, 0.567857177734375, 0.5676984252929688, 0.5676615600585937, 0.5676932983398437, 0.567741455078125, 0.567667724609375, 0.5677650146484375, 0.5679800415039062, 0.5678551025390625, 0.5676390380859375, 0.5676298217773438, 0.567710693359375, 0.567710693359375, 0.567568359375, 0.567900146484375, 0.5677824096679688, 0.567510009765625, 0.56774755859375, 0.5680036010742188, 0.56772607421875, 0.5676021728515624, 0.5675284423828125, 0.5676349487304687, 0.5676973876953125, 0.567857177734375, 0.5677864990234375, 0.5676759033203125, 0.5677332763671875, 0.567736328125, 0.5676236572265625, 0.5676482543945313, 0.5678510131835938, 0.5677496337890625, 0.5674977416992187, 0.5674495849609374, 0.5680199584960938, 0.5677691040039062, 0.5677250366210937, 0.5677158203125, 0.5677281494140625, 0.5677322387695313, 0.5676615600585937, 0.5676973876953125, 0.5678653564453126, 0.5677117309570312, 0.5676216430664063, 0.56771484375, 0.5676461791992188, 0.5675673828125, 0.5677025146484375, 0.5677178955078125, 0.5677588500976563, 0.5677485961914063, 1.1728394775390625, 0.5675120849609375, 0.5672499389648438, 0.5673809814453125, 0.567425048828125, 0.5678643188476562, 0.5675172119140625, 0.5675909423828125, 0.5675530395507813, 0.5675233154296875, 0.5674926147460938, 0.567546875, 0.5676093139648437, 0.567673828125, 0.5677547607421874, 0.56760009765625, 0.56751513671875, 0.5678336181640625, 0.5675867919921875, 0.567462890625, 0.5675970458984375, 0.5676533813476563, 0.56762060546875, 
0.5676195678710938, 0.5676820678710938, 0.5676759033203125, 0.5675878295898438, 0.5676216430664063, 0.5675888671875, 0.5675222778320312, 0.567667724609375, 0.5676400756835938, 0.5675427856445312, 0.567562255859375, 0.5677056274414063, 0.567699462890625, 0.5677117309570312, 0.5678643188476562, 0.56776806640625, 0.5677557983398438, 0.5678028564453125, 0.5681766357421875, 0.5678325805664063, 0.5677014770507812, 0.5676093139648437, 0.5677567749023438, 0.5677496337890625, 0.5676830444335937, 0.5678008422851563, 0.5677178955078125, 0.5676165161132812, 0.5676011352539062, 0.5678192749023437, 0.5676881713867188, 0.5675899047851563, 0.5676656494140625, 0.5677383422851563, 0.5676973876953125, 0.5679749145507812, 0.5676431274414062, 0.567762939453125, 0.5677230224609375, 0.567647216796875, 1.1716873779296875, 0.5677588500976563, 0.5675479125976562, 0.567457763671875, 0.5674711303710938, 0.5678981323242187, 0.5679073486328124, 0.5674229736328125, 0.5675980834960938, 0.5674864501953125, 0.56749462890625, 0.5673656616210937, 0.5676482543945313, 0.5675530395507813, 0.5675089721679687, 0.5674639282226562, 0.5674158325195312, 0.5674301147460937, 0.5674035034179687, 0.5674526977539063, 0.567446533203125, 0.5673840942382813, 0.567498779296875, 0.56760009765625, 0.5675858154296874, 0.5674158325195312, 0.56747314453125, 0.567436279296875, 0.5673809814453125, 0.567546875, 0.5677117309570312, 0.5675601806640626, 0.5675089721679687, 0.5675140991210937, 0.5675765991210937, 0.5675919189453125, 0.5674424438476563, 0.5677158203125, 0.5675878295898438, 0.56744140625, 0.5677056274414063, 0.5677404174804688, 0.5675233154296875, 0.5674649658203125, 0.567341064453125, 0.5675530395507813, 0.5674721069335937, 0.5675714721679688, 0.5675264282226562, 0.5675673828125, 0.5675612182617188, 0.5674956665039063, 0.5675284423828125, 0.5675427856445312, 0.5675919189453125, 0.567605224609375, 0.5675079956054687, 0.5675581665039062, 0.5677537231445312, 0.5677322387695313, 0.5675714721679688, 0.5675407104492187, 0.5676267700195312, 1.1715543212890625, 0.56745166015625, 0.5672324829101563, 0.5672796020507812, 0.5673430786132813, 0.5674086303710938, 0.5673318481445313, 0.56740966796875, 0.5675089721679687, 0.5676113891601563, 0.5675950317382813, 0.5674301147460937, 0.5677127685546876, 0.5675438232421876, 0.5674536743164063, 0.5673994140625, 0.5674403686523437, 0.5674127197265625, 0.5678090209960938, 0.5673983764648437, 0.5675172119140625, 0.5676820678710938, 0.5676349487304687, 0.5673932495117188, 0.567530517578125, 0.5676093139648437, 0.5676400756835938, 0.5676165161132812, 0.56760009765625, 0.5676707763671875, 0.5676339111328125, 0.567583740234375, 0.567468017578125, 0.5675867919921875, 0.5676011352539062, 0.5675888671875, 0.5676216430664063, 0.5679401245117187, 0.5675479125976562, 0.5676113891601563, 0.56771484375, 0.5675612182617188, 0.567562255859375, 0.5677076416015625, 0.5675817260742188, 0.5677230224609375, 0.5677875366210937, 0.56810595703125, 0.5677905883789063, 0.5677076416015625, 0.56764111328125, 0.5676318969726563, 0.5676728515625, 0.5677659912109375, 0.567630859375, 0.5676646118164063, 0.5677199096679687, 0.567635986328125, 0.5677271118164062, 0.567751708984375, 0.5676656494140625, 0.5677987670898438, 0.5677404174804688, 1.171937255859375, 0.5675632934570313, 0.567510009765625, 0.5676011352539062, 0.5675110473632813, 0.5675509643554687, 0.5675704345703125, 0.5676400756835938, 0.5673451538085937, 0.5674536743164063, 0.5673789672851562, 0.5673011474609375, 0.5673953247070312, 0.567414794921875, 0.5674506225585938, 
0.5674485473632812, 0.5674711303710938, 0.5674024658203125, 0.5676707763671875, 0.5677967529296875, 0.5677352905273437, 0.5674874877929688, 0.5676021728515624, 0.5673779296875, 0.5679329223632813, 0.5678540649414062, 0.5675560913085937, 0.5674352416992188, 0.5675601806640626, 0.5673789672851562, 0.5673871459960937, 0.5673840942382813, 0.5674967041015625, 0.5679431762695313, 0.5675222778320312, 0.5674014892578125, 0.5675642700195312, 0.5678223266601562, 0.5675274047851563, 0.5674485473632812, 0.5675612182617188, 0.5675130615234375, 0.5677005004882812, 0.5681069946289062, 0.5675448608398438, 0.56759912109375, 0.5675233154296875, 0.5674772338867188, 0.5674833984375, 0.5675765991210937, 0.5675601806640626, 0.5674649658203125, 0.567468017578125, 0.5676585083007812, 0.5677854614257812, 0.5675899047851563, 0.567546875, 0.5675213012695313, 0.5679288330078125, 0.567762939453125, 0.5678182373046875, 0.5676216430664063, 0.5676011352539062, 1.17129931640625, 0.5673502807617188, 0.567215087890625, 0.5675376586914063, 0.56736767578125, 0.5673421020507813, 0.5673707275390625, 0.567426025390625, 0.5676564331054688, 0.5675714721679688, 0.5674864501953125, 0.5673482055664063, 0.5674526977539063, 0.5674854125976563, 0.5675048828125, 0.5674536743164063, 0.5673359375, 0.56749462890625, 0.5675018310546875, 0.5674526977539063, 0.5674557495117187, 0.5676062622070313, 0.5678377075195312, 0.5674813232421875, 0.5677189331054687, 0.5678090209960938, 0.5677772827148437, 0.5675213012695313, 0.567751708984375, 0.5677783203125, 0.5676656494140625, 0.5676830444335937, 0.5675755615234375, 0.5678561401367187, 0.567636962890625, 0.5677291259765626, 0.5675059204101562, 0.567667724609375, 0.56776806640625, 0.5675161743164062, 0.5675540771484375, 0.5675079956054687, 0.5677742309570313, 0.5676452026367188, 0.56749462890625, 0.5674864501953125, 0.567593994140625, 0.56757861328125, 0.5675172119140625, 0.5676615600585937, 0.56762060546875, 0.5679359741210938, 0.5679605712890625, 0.5678868408203125, 0.5678858032226562, 0.567710693359375, 0.5680302124023437, 0.5678018798828125, 0.5676083374023437, 0.5676615600585937, 0.5680117797851563, 0.5678970947265625, 0.56764111328125, 1.1718861083984375, 0.5675161743164062, 0.5677250366210937, 0.5673861694335938, 0.567342041015625, 0.5673369750976562, 0.5673584594726563, 0.5674086303710938, 0.5673850708007813, 0.5677219848632813, 0.56757861328125, 0.56755712890625, 0.5675601806640626, 0.5676646118164063, 0.5676544189453125, 0.5674854125976563, 0.5673922729492188, 0.567499755859375, 0.5677189331054687, 0.5675161743164062, 0.5676431274414062, 0.56751513671875, 0.567520263671875, 0.5674598388671875, 0.5675693969726563, 0.5675233154296875, 0.5675233154296875, 0.5677301635742188, 0.5675919189453125, 0.5674035034179687, 0.5674905395507812, 0.5677189331054687, 0.5676380004882813, 0.5675653076171875, 0.567541748046875, 0.5674424438476563, 0.5674383544921875, 0.5678059692382813, 0.567689208984375, 0.56762060546875, 0.5675601806640626, 0.5675079956054687, 0.5674854125976563, 0.5674926147460938, 0.56753564453125, 0.5678244018554688, 0.56768408203125, 0.5676267700195312, 0.5678377075195312, 0.5678120727539062, 0.567804931640625, 0.5675601806640626, 0.5677752075195313, 0.5677711181640624, 0.5676748657226562, 0.5678848266601563, 0.5678233642578125, 0.5676984252929688, 0.5676134643554688, 0.56768408203125, 0.5676605224609375, 0.56764111328125, 0.5679544067382812, 1.172231201171875, 0.5673134155273437, 0.5675560913085937, 0.5677117309570312, 0.567499755859375, 0.5674158325195312, 0.5673758544921875, 
0.56732568359375, 0.5674649658203125, 0.5678387451171875, 0.5675479125976562, 0.5674649658203125, 0.5675950317382813, 0.5675284423828125, 0.5674014892578125, 0.5673430786132813, 0.5677230224609375, 0.5675120849609375, 0.5674485473632812, 0.5674014892578125, 0.5676533813476563, 0.5676400756835938, 0.567568359375, 0.5674219360351562, 0.5676533813476563, 0.567604248046875, 0.5677455444335937, 0.5675950317382813, 0.5676881713867188, 0.567636962890625, 0.5676769409179687, 0.5676810302734375, 0.56772607421875, 0.567942138671875, 0.5678171997070313, 0.5677557983398438, 0.5677598876953125, 0.5678653564453126, 0.568068115234375, 0.5676103515625, 0.5676461791992188, 0.5675264282226562, 0.5675867919921875, 0.56753564453125, 0.5677793579101562, 0.567762939453125, 0.567562255859375, 0.5676615600585937, 0.5673656616210937, 0.5675407104492187, 0.5676349487304687, 0.567783447265625, 0.5676871948242187, 0.567531494140625, 0.5677056274414063, 0.5678079833984375, 0.5679267578125, 0.5676380004882813, 0.5675755615234375, 0.567530517578125, 0.5676113891601563, 0.5678325805664063, 0.5676062622070313, 1.172675537109375, 0.567414794921875, 0.56749462890625, 0.5675540771484375, 0.5675540771484375, 0.5675079956054687, 0.5674649658203125, 0.5674332275390624, 0.5676083374023437, 0.5674967041015625, 0.5675346069335937, 0.567309326171875, 0.5674373168945313, 0.5676553955078125, 0.5676759033203125, 0.5677711181640624, 0.5675499267578125, 0.5675806884765625, 0.5676011352539062, 0.5674813232421875, 0.5675172119140625, 0.5674291381835938, 0.567647216796875, 0.5675264282226562, 0.5675950317382813, 0.5675899047851563, 0.5676093139648437, 0.5676820678710938, 0.5677025146484375, 0.567605224609375, 0.5676226806640625, 0.5677219848632813, 0.5676124267578125, 0.5677138061523438, 0.5676380004882813, 0.5676656494140625, 0.5676482543945313, 0.56774658203125, 0.5677158203125, 0.5675950317382813, 0.5677445068359375, 0.5677701416015625, 0.5675632934570313, 0.567531494140625, 0.56770458984375, 0.5677557983398438, 0.5675817260742188, 0.5675407104492187, 0.5675028686523438, 0.567583740234375, 0.5677485961914063, 0.5675714721679688, 0.56776806640625, 0.5676380004882813, 0.5676871948242187, 0.5676564331054688, 0.5675089721679687, 0.5678407592773438, 0.567920654296875, 0.56778955078125, 0.5677639770507813, 0.5679882202148437, 0.5677393798828125, 1.17273193359375, 0.56749365234375, 0.5674434814453125, 0.5675346069335937, 0.5677537231445312, 0.5674434814453125, 0.5675059204101562, 0.5675284423828125, 0.567583740234375, 0.5678008422851563, 0.567736328125, 0.5674874877929688, 0.5675048828125, 0.567436279296875, 0.5674956665039063, 0.5676564331054688, 0.567647216796875, 0.567720947265625, 0.56751513671875, 0.5676380004882813, 0.5676185302734374, 0.5676134643554688, 0.5674403686523437, 0.5674761962890625, 0.5675120849609375, 0.5675899047851563, 0.5674024658203125, 0.5674649658203125, 0.5675560913085937, 0.5677567749023438, 0.5676615600585937, 0.5677230224609375, 0.56781005859375, 0.567562255859375, 0.5678551025390625, 0.5675980834960938, 0.5675233154296875, 0.5675581665039062, 0.5675059204101562, 0.5674721069335937, 0.5674967041015625, 0.567530517578125, 0.5675007934570313, 0.5675028686523438, 0.5679646606445312, 0.567546875, 0.5677957153320312, 0.567920654296875, 0.567794677734375, 0.5679216918945312, 0.5680087280273437, 0.5677936401367187, 0.56793701171875, 0.5678694458007812, 0.5679564819335937, 0.56789404296875, 0.5677813720703125, 0.5679912719726562, 0.5678223266601562, 0.5678090209960938, 0.5678837890625, 0.5678981323242187, 
0.5679011840820313]",tokens/s,1.7353060851290814,,,,,,,, -4bit-awq-gemm-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,M,M,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 304, in hf_raise_for_status - response.raise_for_status() - File ""/usr/local/lib/python3.10/dist-packages/requests/models.py"", line 1024, in raise_for_status - raise HTTPError(http_error_msg, response=self) -requests.exceptions.HTTPError: 404 Client Error: Not Found for url: https://huggingface.co/M/resolve/main/config.json - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1221, in hf_hub_download - return _hf_hub_download_to_cache_dir( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1325, in _hf_hub_download_to_cache_dir - _raise_on_head_call_error(head_call_error, force_download, local_files_only) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1823, in _raise_on_head_call_error - raise head_call_error - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1722, in _get_metadata_or_catch_error - metadata = get_hf_file_metadata(url=url, proxies=proxies, timeout=etag_timeout, headers=headers) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1645, in get_hf_file_metadata - r = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 372, in _request_wrapper - response = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 396, in _request_wrapper - hf_raise_for_status(response) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status - raise RepositoryNotFoundError(message, response) from e 
-huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-66948fd8-3123b8f004e8a25d67ac588c;b342c455-777e-4586-bdba-562372d10dcb) - -Repository Not Found for url: https://huggingface.co/M/resolve/main/config.json. -Please make sure you specified the correct `repo_id` and `repo_type`. -If you are trying to access a private or gated repo, make sure you are authenticated. - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 425, in cached_file - raise EnvironmentError( -OSError: M is not a local folder and is not a valid model identifier listed on 'https://huggingface.co/models' -If this is a private repository, make sure to pass a token having permission to this repo either by logging in with `huggingface-cli login` or by passing `token=` - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemm-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,8,8,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File 
""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 304, in hf_raise_for_status - response.raise_for_status() - File ""/usr/local/lib/python3.10/dist-packages/requests/models.py"", line 1024, in raise_for_status - raise HTTPError(http_error_msg, response=self) -requests.exceptions.HTTPError: 404 Client Error: Not Found for url: https://huggingface.co/8/resolve/main/config.json - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1221, in hf_hub_download - return _hf_hub_download_to_cache_dir( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1325, in _hf_hub_download_to_cache_dir - _raise_on_head_call_error(head_call_error, force_download, local_files_only) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1823, in _raise_on_head_call_error - raise head_call_error - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1722, in _get_metadata_or_catch_error - metadata = get_hf_file_metadata(url=url, proxies=proxies, timeout=etag_timeout, headers=headers) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1645, in get_hf_file_metadata - r = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 372, in _request_wrapper - response = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 396, in _request_wrapper - hf_raise_for_status(response) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status - raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-66949288-5e1f1e9f425492757171b6c3;e597356b-a8dd-47f1-97ea-9831b9b5026f) - -Repository Not Found for url: https://huggingface.co/8/resolve/main/config.json. -Please make sure you specified the correct `repo_id` and `repo_type`. -If you are trying to access a private or gated repo, make sure you are authenticated. 
- -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 425, in cached_file - raise EnvironmentError( -OSError: 8 is not a local folder and is not a valid model identifier listed on 'https://huggingface.co/models' -If this is a private repository, make sure to pass a token having permission to this repo either by logging in with `huggingface-cli login` or by passing `token=` - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemm-flash_attention_2,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,EleutherAI/gpt-neox-20b,EleutherAI/gpt-neox-20b,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.40.2,,0.30.1,,,,1.19.2,,,,0.11.0,,,,,,,,,,,,,,,,,,,,,,,,,,,MB,2108.223488,15043.395584,0.0,14396.94848,13898.251776,s,10,16.774060791015625,1.6774060791015626,0.0018297475804955295,1.6770918579101561,1.6789085327148436,1.6804502990722656,1.6816837121582031,"[1.6819920654296876, 1.67540869140625, 1.6774444580078125, 1.676206298828125, 1.6767392578125, 1.6753848876953126, 1.6765943603515625, 1.6776787109375, 1.67856591796875, 1.678046142578125]",tokens/s,152.61659248136053,kWh,1.9787035716904537e-05,1.0841899168262898e-05,9.527879844519888e-05,0.0001259077333303663,tokens/kWh,2033234.919163287,MB,2108.223488,15043.395584,0.0,14396.94848,14315.959808,s,10,982.2773984375,98.22773984375,0.006322715714689567,98.22801562500001,98.23432265625,98.236829296875,98.238834609375,"[98.233765625, 98.224484375, 98.2393359375, 98.2251640625, 98.22903125, 98.2138984375, 98.2302578125, 98.2295859375, 98.224875, 
98.227]",tokens/s,0.6413666862356148,kWh,0.001159520428114467,0.0006355184711477705,0.005722492522434801,0.0075175314216970384,tokens/kWh,8380.410598374077,,s,629,995.810533813476,1.5831646006573552,0.19919555203142542,1.559141357421875,1.55962734375,1.5598268310546874,3.23526474609375,"[1.5585750732421875, 1.55860986328125, 1.5594239501953124, 1.5595242919921875, 1.558529052734375, 1.5586417236328125, 1.559109619140625, 1.5591219482421874, 1.5598243408203125, 1.5595867919921875, 1.55911572265625, 1.5588905029296876, 1.5587757568359375, 1.5587236328125, 1.559189453125, 1.558750244140625, 1.5590482177734375, 1.5591536865234374, 1.5591956787109376, 1.55924169921875, 1.558993896484375, 1.559488525390625, 1.559013427734375, 1.5594393310546875, 1.5592908935546874, 1.5589068603515626, 1.55905126953125, 1.559194580078125, 1.5587266845703125, 1.5591065673828124, 1.5589693603515624, 1.558802490234375, 1.558849609375, 1.559245849609375, 1.5592437744140626, 1.559234619140625, 1.55915771484375, 1.5592222900390624, 1.5598233642578125, 1.55966162109375, 1.559911376953125, 1.5597353515625, 1.559793701171875, 1.5596707763671875, 1.55985302734375, 1.55943115234375, 1.5597127685546874, 1.5593175048828125, 1.55945263671875, 1.559732177734375, 1.559920654296875, 1.5596063232421875, 1.5595447998046874, 1.5596646728515624, 1.559162841796875, 1.5595458984375, 1.55953662109375, 1.5590595703125, 1.559057373046875, 1.5596236572265625, 1.5592509765625, 1.559478271484375, 3.234529296875, 1.55871337890625, 1.5585946044921875, 1.5587962646484375, 1.5588270263671875, 1.558576171875, 1.558561767578125, 1.55837744140625, 1.5585587158203125, 1.5589652099609375, 1.5587061767578125, 1.55881787109375, 1.558748046875, 1.5588065185546875, 1.5585740966796875, 1.5593819580078125, 1.5591004638671875, 1.558873046875, 1.5587451171875, 1.5594066162109375, 1.559108642578125, 1.55873583984375, 1.5592396240234374, 1.559013427734375, 1.558961181640625, 1.5588013916015624, 1.5588310546875, 1.5591044921875, 1.5594649658203126, 1.558951904296875, 1.5591546630859374, 1.55907177734375, 1.5593482666015626, 1.5590062255859376, 1.559110595703125, 1.5595079345703124, 1.55951513671875, 1.5589560546875, 1.5587255859375, 1.5589560546875, 1.559404541015625, 1.5599503173828124, 1.5588074951171875, 1.5591341552734375, 1.5587113037109375, 1.55890380859375, 1.561585693359375, 1.5592509765625, 1.5593123779296876, 1.5594271240234374, 1.55922021484375, 1.5592386474609374, 1.559277587890625, 1.559350341796875, 1.5596614990234374, 1.5596533203125, 1.559194580078125, 1.5592427978515624, 1.5593604736328126, 1.5594066162109375, 1.559331787109375, 1.5592806396484375, 1.559973876953125, 3.23797607421875, 1.5602718505859374, 1.559963623046875, 1.5592427978515624, 1.5589161376953125, 1.5587706298828126, 1.5589017333984374, 1.559024658203125, 1.559089111328125, 1.5593585205078124, 1.5589334716796874, 1.5592069091796874, 1.5589990234375, 1.5592745361328124, 1.5593963623046876, 1.559119873046875, 1.5596800537109374, 1.5589478759765625, 1.5588074951171875, 1.5590042724609374, 1.558916015625, 1.55900927734375, 1.5591424560546876, 1.559677978515625, 1.5591373291015624, 1.559119873046875, 1.5591474609375, 1.5588157958984374, 1.5592601318359376, 1.5591690673828125, 1.559141357421875, 1.5591485595703125, 1.55908203125, 1.559183349609375, 1.55915576171875, 1.559400390625, 1.5592960205078126, 1.5591761474609376, 1.5593154296875, 1.5590482177734375, 1.558973388671875, 1.559626708984375, 1.559330810546875, 1.5593973388671876, 1.5593338623046875, 1.559593994140625, 
1.5598807373046875, 1.5612364501953124, 1.55947412109375, 1.5594168701171875, 1.5596483154296874, 1.5595643310546874, 1.559582763671875, 1.55965234375, 1.5598623046875, 1.559635986328125, 1.5595242919921875, 1.5597813720703124, 1.5598643798828125, 1.5591075439453126, 1.5595611572265624, 1.55919873046875, 1.5593585205078124, 3.23542333984375, 1.55846044921875, 1.558307861328125, 1.5589805908203125, 1.558640625, 1.5585218505859375, 1.55879833984375, 1.558877197265625, 1.5586907958984375, 1.5587757568359375, 1.5589539794921874, 1.559329833984375, 1.559342041015625, 1.5588546142578126, 1.5590174560546874, 1.559341064453125, 1.558877197265625, 1.5588382568359376, 1.55911474609375, 1.5592764892578126, 1.5589847412109374, 1.5587337646484376, 1.55911376953125, 1.5587901611328125, 1.5588280029296875, 1.5588690185546874, 1.5593216552734375, 1.559299072265625, 1.559120849609375, 1.558898681640625, 1.5588414306640626, 1.55898876953125, 1.5588966064453125, 1.5587901611328125, 1.558739990234375, 1.5589232177734376, 1.5593912353515624, 1.55947314453125, 1.559403564453125, 1.559525390625, 1.559373779296875, 1.5593902587890625, 1.5591658935546875, 1.559456787109375, 1.5621683349609374, 1.558978515625, 1.5591126708984375, 1.55909326171875, 1.5591383056640624, 1.5590767822265625, 1.55901025390625, 1.5589765625, 1.559456787109375, 1.5590185546875, 1.55949462890625, 1.55913623046875, 1.5593287353515626, 1.559487548828125, 1.559192626953125, 1.55945263671875, 1.5594342041015625, 1.5596298828125, 1.5594813232421876, 3.236599853515625, 1.5593328857421875, 1.5589642333984375, 1.55854541015625, 1.558728759765625, 1.558466552734375, 1.55877783203125, 1.5593133544921876, 1.5591546630859374, 1.5588116455078125, 1.5587174072265626, 1.5586641845703124, 1.558667236328125, 1.558561767578125, 1.5583734130859375, 1.558703125, 1.5588648681640624, 1.55964111328125, 1.5586856689453126, 1.558662109375, 1.5585587158203125, 1.558540283203125, 1.5592540283203125, 1.558740966796875, 1.559257080078125, 1.5590430908203126, 1.5589775390625, 1.558866943359375, 1.5591044921875, 1.5596021728515626, 1.559546875, 1.55960009765625, 1.55951513671875, 1.5589847412109374, 1.5592540283203125, 1.559413818359375, 1.5594434814453124, 1.5591956787109376, 1.559342041015625, 1.5589847412109374, 1.5589805908203125, 1.5593165283203125, 1.559119873046875, 1.5594117431640624, 1.5589334716796874, 1.5593184814453125, 1.559287841796875, 1.5593902587890625, 1.5601192626953124, 1.5590482177734375, 1.559271484375, 1.559646240234375, 1.5597353515625, 1.5595601806640624, 1.5594761962890624, 1.559477294921875, 1.5593011474609375, 1.5621007080078124, 1.559258056640625, 1.5594239501953124, 1.559357421875, 1.5594639892578126, 1.559361572265625, 3.23485693359375, 1.5584061279296875, 1.5584429931640624, 1.558455322265625, 1.558666259765625, 1.5583436279296874, 1.5583365478515625, 1.55860888671875, 1.55905224609375, 1.558590576171875, 1.558578125, 1.5587183837890626, 1.55867236328125, 1.5587255859375, 1.5585269775390624, 1.5587215576171876, 1.558656982421875, 1.5586744384765625, 1.5588331298828124, 1.559310302734375, 1.558556640625, 1.5583118896484376, 1.5588935546875, 1.5596380615234375, 1.559288818359375, 1.558983642578125, 1.559357421875, 1.559225341796875, 1.558750244140625, 1.558867919921875, 1.5587225341796875, 1.5589283447265625, 1.5586417236328125, 1.5587911376953125, 1.5586907958984375, 1.559267333984375, 1.5594957275390624, 1.558765625, 1.5612979736328125, 1.5589273681640625, 1.5587225341796875, 1.558982666015625, 1.5588209228515626, 1.5590604248046875, 
1.558908935546875, 1.5590697021484374, 1.5586539306640625, 1.5587706298828126, 1.5595069580078125, 1.559635986328125, 1.5590185546875, 1.55902978515625, 1.5588546142578126, 1.5590277099609375, 1.5595592041015625, 1.5593226318359374, 1.5589283447265625, 1.5589744873046876, 1.5589171142578124, 1.5588587646484375, 1.5594556884765625, 1.5590963134765625, 1.5595478515625, 3.235958740234375, 1.5587061767578125, 1.5582955322265626, 1.558918212890625, 1.559405517578125, 1.5590072021484376, 1.559066650390625, 1.559258056640625, 1.558982666015625, 1.5591373291015624, 1.5590123291015625, 1.559109619140625, 1.5587010498046876, 1.5590697021484374, 1.558677490234375, 1.558846435546875, 1.559234619140625, 1.55937890625, 1.558992919921875, 1.5595223388671875, 1.559162841796875, 1.5587706298828126, 1.559435302734375, 1.560753173828125, 1.55900830078125, 1.5587052001953126, 1.5587420654296875, 1.55875634765625, 1.5585545654296875, 1.55881884765625, 1.5590809326171875, 1.559193603515625, 1.559277587890625, 1.55932568359375, 1.55913623046875, 1.5592764892578126, 1.559235595703125, 1.5595654296875, 1.559406494140625, 1.5591424560546876, 1.558719482421875, 1.55981103515625, 1.5590491943359375, 1.5598408203125, 1.55958984375, 1.55905029296875, 1.5590296630859375, 1.5592437744140626, 1.559709716796875, 1.559160888671875, 1.5592642822265625, 1.55951513671875, 1.559525390625, 1.55964111328125, 1.559710693359375, 1.5596390380859375, 1.5594691162109375, 1.55936767578125, 1.5593133544921876, 1.5591116943359375, 1.5594761962890624, 1.5594700927734375, 1.5595284423828124, 3.23728076171875, 1.558865966796875, 1.5586630859375, 1.5593482666015626, 1.5598438720703125, 1.559294921875, 1.559099365234375, 1.5591485595703125, 1.558935546875, 1.55944970703125, 1.558636474609375, 1.5588331298828124, 1.5585986328125, 1.558814697265625, 1.55905224609375, 1.5590338134765624, 1.56031591796875, 1.5590655517578125, 1.5592652587890625, 1.5588966064453125, 1.5590400390625, 1.5587420654296875, 1.5598909912109375, 1.559056396484375, 1.5592652587890625, 1.5594056396484375, 1.5589385986328126, 1.5587860107421876, 1.55886083984375, 1.5590543212890624, 1.559300048828125, 1.5590921630859376, 1.559215087890625, 1.5589744873046876, 1.5589600830078125, 1.5591177978515625, 1.5594957275390624, 1.5592960205078126, 1.558877197265625, 1.5592017822265625, 1.5588270263671875, 1.5589222412109376, 1.5591588134765626, 1.559298095703125, 1.5588055419921876, 1.5590748291015626, 1.559099365234375, 1.5595028076171875, 1.5594915771484374, 1.5594495849609376, 1.559300048828125, 1.55938916015625, 1.5596697998046876, 1.5595755615234375, 1.5595201416015625, 1.5593564453125, 1.5596134033203124, 1.55945263671875, 1.5595294189453126, 1.5592764892578126, 1.559214111328125, 1.55919873046875, 1.5595919189453125, 3.238277099609375, 1.5593380126953125, 1.558845458984375, 1.559089111328125, 1.5596502685546876, 1.559151611328125, 1.55905126953125, 1.5589949951171875, 1.558698974609375, 1.5589119873046875, 1.559118896484375, 1.5591055908203124, 1.5588433837890625, 1.55887109375, 1.559098388671875, 1.559488525390625, 1.5590921630859376, 1.5589212646484376, 1.5586201171875, 1.5587542724609376, 1.5588372802734376, 1.5583734130859375, 1.559034912109375, 1.5590389404296876, 1.5593184814453125, 1.559047119140625, 1.559078857421875, 1.5590113525390625, 1.5589908447265626, 1.5593114013671876, 1.559214111328125, 1.559314453125, 1.5593348388671875, 1.5594014892578125, 1.5593011474609375, 1.55919873046875, 1.5591455078125, 1.559130126953125, 1.5589488525390625, 1.55922021484375, 
1.5590164794921875, 1.55928369140625, 1.5593359375, 1.55940869140625, 1.5589334716796874, 1.5590072021484376, 1.5590543212890624, 1.5592960205078126, 1.5591453857421875, 1.5592960205078126, 1.55916796875, 1.55928271484375, 1.559329833984375, 1.5590318603515625, 1.5594649658203126, 1.5592314453125, 1.55926123046875, 1.5591290283203125, 1.5590205078125, 1.5590174560546874, 1.559034912109375, 1.5591658935546875, 1.559373779296875, 3.237329833984375, 1.558613037109375, 1.5583804931640626, 1.5587542724609376, 1.5585423583984375, 1.559004150390625, 1.5588382568359376, 1.55866015625, 1.5590809326171875, 1.558877197265625, 1.558772705078125, 1.558992919921875, 1.558794189453125, 1.559083984375, 1.55869287109375, 1.5590604248046875, 1.55894580078125, 1.5587255859375, 1.5587962646484375, 1.558877197265625, 1.5590615234375, 1.558513671875, 1.5592530517578125, 1.5587318115234374, 1.5593697509765625, 1.559119873046875, 1.5611954345703125, 1.559103515625, 1.5589058837890626, 1.5593779296875, 1.559484375, 1.5593184814453125, 1.5594208984375, 1.5591065673828124, 1.559189453125, 1.5593656005859375, 1.5593656005859375, 1.55962060546875, 1.5591669921875, 1.5591322021484375, 1.559208984375, 1.5593665771484375, 1.559109619140625, 1.5592222900390624, 1.5591434326171876, 1.559194580078125, 1.5592008056640625, 1.55911376953125, 1.55950390625, 1.5590687255859375, 1.559582763671875, 1.5593564453125, 1.5598284912109375, 1.5596851806640626, 1.55951513671875, 1.55947216796875, 1.5594691162109375, 1.5591895751953124, 1.5594127197265626, 1.55911572265625, 1.559299072265625, 1.5592508544921875, 1.5591474609375]",tokens/s,0.6316462606508407,,,,,,,, -4bit-awq-gemm-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,s,s,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 304, in hf_raise_for_status - response.raise_for_status() - File ""/usr/local/lib/python3.10/dist-packages/requests/models.py"", line 1024, in raise_for_status - raise HTTPError(http_error_msg, response=self) -requests.exceptions.HTTPError: 404 Client Error: Not Found for url: https://huggingface.co/s/resolve/main/config.json - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = hf_hub_download( - File 
""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1221, in hf_hub_download - return _hf_hub_download_to_cache_dir( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1325, in _hf_hub_download_to_cache_dir - _raise_on_head_call_error(head_call_error, force_download, local_files_only) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1823, in _raise_on_head_call_error - raise head_call_error - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1722, in _get_metadata_or_catch_error - metadata = get_hf_file_metadata(url=url, proxies=proxies, timeout=etag_timeout, headers=headers) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1645, in get_hf_file_metadata - r = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 372, in _request_wrapper - response = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 396, in _request_wrapper - hf_raise_for_status(response) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status - raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-66948d02-27681008592811d63d300b85;7f6b8ec3-25cb-4d45-b82b-b63da8dd38e8) - -Repository Not Found for url: https://huggingface.co/s/resolve/main/config.json. -Please make sure you specified the correct `repo_id` and `repo_type`. -If you are trying to access a private or gated repo, make sure you are authenticated. 
- -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 425, in cached_file - raise EnvironmentError( -OSError: s is not a local folder and is not a valid model identifier listed on 'https://huggingface.co/models' -If this is a private repository, make sure to pass a token having permission to this repo either by logging in with `huggingface-cli login` or by passing `token=` - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemm-flash_attention_2,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,EleutherAI/pythia-12b,EleutherAI/pythia-12b,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.40.2,,0.30.1,,,,1.19.2,,,,0.11.1,,,,,,,,,,,,,,,,,,,,,,,,,,,MB,1531.768832,9676.783616,0.0,9030.336512,8583.572992,s,10,9.471071899414062,0.9471071899414062,0.0007865321197485112,0.9470617980957031,0.9481417907714844,0.9482434417724609,0.9483247625732422,"[0.9481192016601563, 0.9483450927734375, 0.9464827270507813, 0.9463027954101563, 0.9464989624023438, 0.9459486694335938, 0.9466808471679687, 0.9474427490234375, 0.9474686889648437, 0.9477821655273437]",tokens/s,270.2967549172948,kWh,1.1175276083175583e-05,6.1235719211254035e-06,5.30962293456365e-05,7.039507734993748e-05,tokens/kWh,3636617.9232591945,MB,1531.768832,9676.783616,0.0,9030.336512,8872.966144,s,10,564.2868125,56.42868125,0.005902619636999738,56.430064453125,56.434701171875,56.4354833984375,56.4361091796875,"[56.42066796875, 56.42100390625, 56.427828125, 56.43380078125, 56.436265625, 56.4306328125, 56.43452734375, 56.42949609375, 56.41955859375, 
56.43303125]",tokens/s,1.116453523357858,kWh,0.0006659982151861745,0.00036502579988969956,0.003165337650954964,0.004196361666030838,tokens/kWh,15013.005316004865,,s,629,571.914466552734,0.9092439849804999,0.11250283527003516,0.8956805419921875,0.8961118286132812,0.8962535400390624,1.84229025390625,"[0.8955299682617187, 0.8956344604492188, 0.8952893676757813, 0.8954654541015625, 0.8952473754882813, 0.8954511108398437, 0.8954111938476562, 0.895388671875, 0.8953507690429687, 0.8954296264648437, 0.8954921264648438, 0.8958034057617188, 0.8957071533203125, 0.895331298828125, 0.89537841796875, 0.8954317016601563, 0.895688720703125, 0.8951869506835938, 0.89523095703125, 0.895341552734375, 0.8953385009765625, 0.89526171875, 0.8952105712890625, 0.8952554931640625, 0.8955637817382812, 0.8952135620117188, 0.895278076171875, 0.89569384765625, 0.8954439697265625, 0.8954368286132812, 0.8953395385742188, 0.895320068359375, 0.895224853515625, 0.8956580200195312, 0.8956251220703125, 0.895593505859375, 0.8955872802734375, 0.895393798828125, 0.89556591796875, 0.8955975341796875, 0.8953139038085938, 0.8952350463867188, 0.8954644775390626, 0.8952453002929688, 0.8950845336914063, 0.89560888671875, 0.8954244384765625, 0.8954317016601563, 0.8957972412109375, 0.8959805297851563, 0.8959723510742188, 0.896089111328125, 0.8958925170898437, 0.8958883666992188, 0.8961648559570312, 0.8958853149414062, 0.896374755859375, 0.896205810546875, 0.8961065063476562, 0.89630517578125, 0.89601025390625, 0.8961126098632812, 1.844611083984375, 0.8954337158203125, 0.89558837890625, 0.8955914306640625, 0.8959385375976563, 0.8956160278320312, 0.8950343627929688, 0.8949083862304688, 0.8952064208984375, 0.895373291015625, 0.8951787719726563, 0.895162353515625, 0.8951439208984375, 0.895182861328125, 0.895109130859375, 0.8953907470703125, 0.8957255859375, 0.8957091674804688, 0.8956385498046875, 0.8958136596679688, 0.8958320922851563, 0.8952658081054687, 0.8952637329101563, 0.8951807861328125, 0.8952391967773438, 0.8951900024414062, 0.8952842407226562, 0.8955750122070313, 0.89556787109375, 0.8954613647460937, 0.895562744140625, 0.8955679321289063, 0.8955903930664062, 0.8953548583984375, 0.8952647705078125, 0.8954501342773438, 0.8954869995117187, 0.89586181640625, 0.8957409057617187, 0.8956713256835938, 0.8955576171875, 0.8954132690429687, 0.8955822143554687, 0.89568359375, 0.8954531860351562, 0.8953220825195313, 0.89735986328125, 0.8953026733398437, 0.8954552612304687, 0.8961781616210938, 0.8958126220703125, 0.8957644653320312, 0.8956436767578125, 0.8958248901367187, 0.896047119140625, 0.8958515014648437, 0.8958351440429687, 0.8958453979492188, 0.895657958984375, 0.8957132568359375, 0.8961444091796875, 0.8957071533203125, 0.8954951782226562, 1.842377685546875, 0.895404052734375, 0.8950650634765625, 0.8951746826171875, 0.8952135620117188, 0.895152099609375, 0.8950364379882813, 0.8954869995117187, 0.8951756591796876, 0.8951070556640625, 0.89539794921875, 0.8955360717773437, 0.8956303100585937, 0.895278076171875, 0.8953108520507812, 0.895466552734375, 0.8955247802734375, 0.8954808349609376, 0.89577880859375, 0.8958197631835938, 0.8954214477539062, 0.8957296752929688, 0.8957849731445312, 0.8969154663085938, 0.8955699462890625, 0.8955740356445312, 0.8955402221679688, 0.8957010498046875, 0.8956046752929687, 0.8959457397460937, 0.89588427734375, 0.895657958984375, 0.8959969482421875, 0.8955279541015625, 0.89569384765625, 0.8956375122070312, 0.8958167114257812, 0.89558837890625, 0.895529052734375, 0.89546337890625, 0.8958197631835938, 
0.8958023681640624, 0.895668212890625, 0.895762451171875, 0.8955596923828125, 0.89588427734375, 0.8957982788085938, 0.896742431640625, 0.8959180297851562, 0.895889404296875, 0.8958699340820313, 0.8959774780273437, 0.8961423950195313, 0.8960029907226562, 0.895941650390625, 0.8961116333007813, 0.8960184326171875, 0.8960419921875, 0.8958904418945313, 0.8957849731445312, 0.8956774291992188, 0.8956559448242187, 0.8954009399414062, 1.84203369140625, 0.8953282470703126, 0.8955074462890625, 0.8958883666992188, 0.895805419921875, 0.895224853515625, 0.8955054321289062, 0.89569482421875, 0.8954593505859375, 0.895592529296875, 0.89609619140625, 0.89571533203125, 0.89652734375, 0.8958136596679688, 0.8957890625, 0.8957183837890625, 0.8957440185546875, 0.8959334106445312, 0.895752197265625, 0.8961761474609375, 0.8955248413085938, 0.896421875, 0.895963134765625, 0.895330322265625, 0.8955340576171875, 0.8957603759765626, 0.8954306640625, 0.8958146362304688, 0.895805419921875, 0.8954982299804688, 0.8955299682617187, 0.8953681640625, 0.8958136596679688, 0.8957706298828125, 0.89615869140625, 0.8956036987304687, 0.8954429321289062, 0.895578125, 0.8957962036132813, 0.8954183959960937, 0.8954849243164062, 0.89569384765625, 0.8957583618164062, 0.8959283447265625, 0.8975103759765625, 0.8956825561523437, 0.8957440185546875, 0.895594482421875, 0.8960829467773438, 0.8958392333984375, 0.8954531860351562, 0.8957625122070313, 0.8960316772460938, 0.8954849243164062, 0.8954644775390626, 0.8962181396484376, 0.895805419921875, 0.8961618041992188, 0.8958760986328125, 0.8960430297851563, 0.89603173828125, 0.8957348022460937, 0.8958709716796875, 1.8425006103515624, 0.8954132690429687, 0.8952770385742187, 0.8951552124023437, 0.8951265258789063, 0.8949452514648437, 0.8951756591796876, 0.8958238525390625, 0.895236083984375, 0.895140869140625, 0.8951900024414062, 0.8952207641601563, 0.895283203125, 0.8956876831054688, 0.8953272094726562, 0.89558837890625, 0.8958331298828125, 0.896849853515625, 0.8957357788085938, 0.8959354858398437, 0.8956190795898438, 0.8957183837890625, 0.89544189453125, 0.8957081298828125, 0.8961085205078125, 0.8960030517578125, 0.895741943359375, 0.8960706787109375, 0.8960758056640625, 0.8956846313476563, 0.8959539794921875, 0.8957930908203126, 0.8956805419921875, 0.8956405639648437, 0.8957777709960938, 0.895805419921875, 0.8960061645507813, 0.8956876831054688, 0.8957982788085938, 0.8960235595703125, 0.896294921875, 0.8963491821289062, 0.8959744262695313, 0.8960379028320312, 0.8958555908203125, 0.8957880249023438, 0.8960327758789063, 0.8959774780273437, 0.8963082275390625, 0.8961085205078125, 0.8957849731445312, 0.8961433715820313, 0.896015380859375, 0.8959569702148438, 0.89592626953125, 0.895857666015625, 0.89588427734375, 0.8977633056640625, 0.8960972900390625, 0.8961024169921875, 0.8960040893554687, 0.8959642333984374, 0.895754150390625, 1.8420654296875, 0.895541259765625, 0.8955330810546875, 0.895551513671875, 0.8954542846679687, 0.89588525390625, 0.8957655639648437, 0.8955913696289063, 0.8954900512695313, 0.895973388671875, 0.8957081298828125, 0.895446044921875, 0.8953692016601562, 0.8954695434570312, 0.8955350952148438, 0.8955064086914063, 0.8957685546875, 0.895510498046875, 0.8955617065429687, 0.89569384765625, 0.8958258666992187, 0.8956436767578125, 0.8955709228515625, 0.8953487548828125, 0.895425537109375, 0.89541015625, 0.8953661499023438, 0.8959344482421875, 0.8957112426757813, 0.8955719604492187, 0.8957173461914063, 0.8952852783203125, 0.895425537109375, 0.8956323852539062, 0.8953405151367188, 
0.8953170166015625, 0.8956170043945313, 0.89562109375, 0.8976466064453125, 0.8961249389648438, 0.8960225830078125, 0.8955401611328125, 0.8958443603515625, 0.8955822143554687, 0.8957726440429687, 0.8958177490234375, 0.8958822631835938, 0.8957255859375, 0.89603173828125, 0.8960040893554687, 0.8958167114257812, 0.8959232177734375, 0.8961556396484375, 0.895921142578125, 0.8957132568359375, 0.8960726928710937, 0.8958699340820313, 0.8956088256835938, 0.8956508178710938, 0.8958750610351562, 0.8959140014648438, 0.896184326171875, 0.8957747192382812, 1.8425538330078124, 0.8953139038085938, 0.8954296264648437, 0.8954317016601563, 0.8954019775390625, 0.8954296264648437, 0.895804443359375, 0.89565185546875, 0.8956016845703125, 0.8954736938476563, 0.8951910400390625, 0.8951807861328125, 0.8956160278320312, 0.8953865966796875, 0.895224853515625, 0.8951572265625, 0.895447021484375, 0.8951286010742188, 0.8952534790039063, 0.8959088745117187, 0.8953425903320312, 0.8954378051757812, 0.8953170166015625, 0.8966533203125, 0.8956589965820313, 0.8957174072265625, 0.8958781127929687, 0.8958924560546875, 0.89592626953125, 0.895657958984375, 0.8963993530273437, 0.8960133056640625, 0.8955350952148438, 0.895636474609375, 0.8958023681640624, 0.8959241943359375, 0.8960481567382812, 0.8959610595703125, 0.8962498779296875, 0.8960297241210937, 0.8959395751953125, 0.8961136474609375, 0.896173095703125, 0.8961300048828125, 0.8959856567382812, 0.8956856079101563, 0.8959078369140625, 0.8958648071289063, 0.8959365234375, 0.8959959106445312, 0.89580029296875, 0.8960338134765625, 0.8962744140625, 0.8961085205078125, 0.896195556640625, 0.8962867431640625, 0.8956589965820313, 0.896268310546875, 0.8962437133789063, 0.896083984375, 0.895952880859375, 0.8960859985351562, 0.8958607177734375, 1.8435286865234375, 0.8953579711914063, 0.8959723510742188, 0.895552490234375, 0.8962406616210937, 0.895710205078125, 0.8959600830078125, 0.895594482421875, 0.8955453491210937, 0.8953630981445313, 0.8957020263671875, 0.8955637817382812, 0.895478759765625, 0.8956026611328125, 0.89554736328125, 0.8955330810546875, 0.8961474609375, 0.895688720703125, 0.8956405639648437, 0.89556787109375, 0.8954163208007813, 0.8957962036132813, 0.8957061157226562, 0.8955453491210937, 0.895515625, 0.8955586547851563, 0.8955166625976563, 0.8954368286132812, 0.8956876831054688, 0.8957532348632813, 0.8957552490234375, 0.89554638671875, 0.8954439697265625, 0.8958248901367187, 0.8957378540039063, 0.8956109008789063, 0.895552490234375, 0.8955668334960938, 0.8956426391601563, 0.8957552490234375, 0.8958330688476562, 0.8957849731445312, 0.8957276000976563, 0.895825927734375, 0.8963133544921875, 0.8962559814453125, 0.89609423828125, 0.8956958618164063, 0.8961679077148438, 0.8956907348632812, 0.8954450073242187, 0.8954439697265625, 0.8957511596679687, 0.8955791625976562, 0.8954685668945312, 0.8961618041992188, 0.8959201049804687, 0.8955299682617187, 0.8956661987304687, 0.8956989135742187, 0.8956763916015625, 0.895662109375, 0.8958924560546875, 1.8434007568359374, 0.8952268676757813, 0.8956057739257812, 0.8958668823242187, 0.8953005981445312, 0.8950661010742188, 0.8951449584960938, 0.8954685668945312, 0.8951981811523437, 0.8952791137695313, 0.8951910400390625, 0.895224853515625, 0.8951367797851563, 0.8953661499023438, 0.8954818725585938, 0.8955637817382812, 0.8956846313476563, 0.8963553466796875, 0.8955187377929688, 0.89537841796875, 0.8953743286132813, 0.895267822265625, 0.8952647705078125, 0.8953159790039062, 0.895520751953125, 0.8957470703125, 0.8953702392578125, 
0.8953262329101562, 0.8954111938476562, 0.8956661987304687, 0.8953436279296875, 0.8954030151367187, 0.895283203125, 0.8952268676757813, 0.89514599609375, 0.8952166137695312, 0.8958832397460937, 0.89552587890625, 0.8954111938476562, 0.895561767578125, 0.8955135498046874, 0.895520751953125, 0.8956928100585938, 0.8956426391601563, 0.8954716186523437, 0.8955125732421875, 0.8955330810546875, 0.8961566772460937, 0.8961351928710938, 0.8959539184570312, 0.8957091674804688, 0.89584228515625, 0.8958197631835938, 0.8957470703125, 0.8957593383789062, 0.8957214965820313, 0.8955811767578125, 0.8955985717773437, 0.8961331176757813, 0.8957828979492187, 0.8960563354492187, 0.8956763916015625, 0.8957511596679687, 1.8439761962890624, 0.8954634399414062, 0.89548388671875, 0.8955084838867188, 0.89558837890625, 0.895562744140625, 0.8955801391601562, 0.895515625, 0.8953907470703125, 0.8954337158203125, 0.8955177001953125, 0.8955903930664062, 0.8959283447265625, 0.8955166625976563, 0.89547265625, 0.8954685668945312, 0.895752197265625, 0.895425537109375, 0.8954317016601563, 0.8954531860351562, 0.8953671875, 0.89560986328125, 0.8953211059570313, 0.8955269165039063, 0.8957849731445312, 0.8958924560546875, 0.8968284301757813, 0.8958668823242187, 0.8959989624023438, 0.8959027099609375, 0.8958955688476562, 0.8957982788085938, 0.8957511596679687, 0.895730712890625, 0.8959959106445312, 0.8958034057617188, 0.895724609375, 0.8958135375976563, 0.8960409545898438, 0.8961474609375, 0.8957860107421876, 0.8955709228515625, 0.8958484497070313, 0.89590576171875, 0.895847412109375, 0.895763427734375, 0.8960266723632813, 0.8961084594726563, 0.8961710205078125, 0.89615869140625, 0.8962139892578125, 0.895963134765625, 0.8957614135742188, 0.8957449951171875, 0.8958822631835938, 0.8960726928710937, 0.8957849731445312, 0.8957849731445312, 0.895599609375, 0.895626220703125, 0.8956661987304687, 0.8960829467773438, 0.8956692504882813]",tokens/s,1.09981480935664,,,,,,,, -4bit-awq-gemm-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,2,2,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 304, in hf_raise_for_status - response.raise_for_status() - File ""/usr/local/lib/python3.10/dist-packages/requests/models.py"", line 1024, in raise_for_status - raise HTTPError(http_error_msg, response=self) -requests.exceptions.HTTPError: 404 Client Error: Not Found for url: https://huggingface.co/2/resolve/main/config.json - 
-The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1221, in hf_hub_download - return _hf_hub_download_to_cache_dir( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1325, in _hf_hub_download_to_cache_dir - _raise_on_head_call_error(head_call_error, force_download, local_files_only) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1823, in _raise_on_head_call_error - raise head_call_error - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1722, in _get_metadata_or_catch_error - metadata = get_hf_file_metadata(url=url, proxies=proxies, timeout=etag_timeout, headers=headers) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1645, in get_hf_file_metadata - r = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 372, in _request_wrapper - response = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 396, in _request_wrapper - hf_raise_for_status(response) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status - raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-66949393-3d2fe20f66c6aa0258d7e342;1ceb2da6-1f33-4669-9748-fd7626e7e9fc) - -Repository Not Found for url: https://huggingface.co/2/resolve/main/config.json. -Please make sure you specified the correct `repo_id` and `repo_type`. -If you are trying to access a private or gated repo, make sure you are authenticated. 
- -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 425, in cached_file - raise EnvironmentError( -OSError: 2 is not a local folder and is not a valid model identifier listed on 'https://huggingface.co/models' -If this is a private repository, make sure to pass a token having permission to this repo either by logging in with `huggingface-cli login` or by passing `token=` - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemm-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,tiiuae/falcon-40b,tiiuae/falcon-40b,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File 
""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - self.load_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 559, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3704, in from_pretrained - config = cls._autoset_attn_implementation( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1481, in _autoset_attn_implementation - cls._check_and_enable_flash_attn_2( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1572, in _check_and_enable_flash_attn_2 - raise ValueError( -ValueError: FalconForCausalLM does not support Flash Attention 2.0 yet. Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmpdp6k6q32/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemm-flash_attention_2,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,EleutherAI/pythia-160m,EleutherAI/pythia-160m,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.40.2,,0.30.1,,,,1.19.2,,,,0.11.1,,,,,,,,,,,,,,,,,,,,,,,,,,,MB,1312.858112,1030.22592,0.0,383.778816,312.280064,s,10,0.275944766998291,0.0275944766998291,0.0013350624992567249,0.0271289119720459,0.027794796180725095,0.02969363794326782,0.031212711353302003,"[0.03159247970581055, 0.027166879653930665, 0.027137279510498047, 0.02737283134460449, 0.02710416030883789, 0.02712054443359375, 0.027108831405639647, 0.027081024169921874, 0.027172767639160156, 0.027087968826293947]",tokens/s,9277.218871905097,kWh,3.2455721808658853e-07,1.778424162839056e-07,7.707522954780324e-07,1.2731519298485266e-06,tokens/kWh,201075766.3702066,MB,1312.858112,1030.22592,0.0,383.778816,347.089408,s,10,16.864729003906252,1.6864729003906251,0.020323100201592524,1.6794135742187501,1.6910009521484375,1.7188808471679686,1.7411847631835937,"[1.7467607421875, 1.67984912109375, 1.6774495849609374, 1.678377685546875, 1.67897802734375, 1.6768824462890626, 1.6750291748046875, 1.684479248046875, 1.684805419921875, 1.6821175537109374]",tokens/s,37.356070165970515,kWh,1.9882176955741114e-05,1.0895638286132514e-05,4.512764553432279e-05,7.590546077619641e-05,tokens/kWh,829979.8111462949,,s,629,17.07843275070189,0.027151721384263763,0.003315017386101685,0.026595327377319337,0.0273623046875,0.02783129653930664,0.05417021270751953,"[0.029247488021850586, 0.027447296142578126, 0.028238847732543947, 0.029016063690185546, 0.02889727973937988, 0.028690431594848635, 0.028497919082641602, 0.029652992248535157, 0.028964864730834962, 0.02876518440246582, 
0.029016063690185546, 0.02860851287841797, 0.02900480079650879, 0.029263872146606446, 0.02835353660583496, 0.028241920471191406, 0.02977484893798828, 0.031457279205322264, 0.028201984405517577, 0.02774220848083496, 0.027778047561645508, 0.02750464057922363, 0.027625471115112304, 0.027877376556396483, 0.027576320648193358, 0.027623424530029295, 0.027853824615478515, 0.027623424530029295, 0.02755583953857422, 0.027467775344848632, 0.027467775344848632, 0.027464704513549806, 0.02751692771911621, 0.027671552658081053, 0.028444671630859376, 0.02751081657409668, 0.027527168273925783, 0.02693833541870117, 0.02656768035888672, 0.02656051254272461, 0.026586111068725587, 0.026575904846191406, 0.02656355285644531, 0.026597375869750976, 0.02668339157104492, 0.026562559127807618, 0.0265799674987793, 0.02705510330200195, 0.02689945602416992, 0.02673766326904297, 0.026646528244018555, 0.026635263442993166, 0.02674892807006836, 0.026631168365478516, 0.026613759994506835, 0.026697727203369142, 0.026650623321533205, 0.02710323143005371, 0.026925119400024414, 0.02659014320373535, 0.027423744201660157, 0.027077632904052733, 0.05460070419311523, 0.026619903564453123, 0.026565631866455077, 0.026629119873046874, 0.026554399490356446, 0.02660246467590332, 0.026628095626831053, 0.02656768035888672, 0.026635263442993166, 0.0266527042388916, 0.026569696426391603, 0.026598463058471678, 0.026578880310058593, 0.0265799674987793, 0.02658406448364258, 0.026687488555908204, 0.026688512802124024, 0.02656358337402344, 0.02654003143310547, 0.026614784240722656, 0.026625024795532228, 0.02652672004699707, 0.026619903564453123, 0.028004352569580077, 0.02753945541381836, 0.02736332893371582, 0.027281408309936524, 0.02712678337097168, 0.026574848175048828, 0.026603519439697267, 0.02654310417175293, 0.026565631866455077, 0.0265031681060791, 0.026557439804077147, 0.02653388786315918, 0.026556415557861326, 0.02654412841796875, 0.02655948829650879, 0.02668441581726074, 0.026625024795532228, 0.02655948829650879, 0.026588159561157225, 0.02654310417175293, 0.026572799682617186, 0.02655129623413086, 0.02656768035888672, 0.026604543685913085, 0.02655955123901367, 0.026596288681030273, 0.026562559127807618, 0.026596351623535155, 0.026594303131103517, 0.026558464050292968, 0.02655232048034668, 0.02659328079223633, 0.02651238441467285, 0.026549247741699217, 0.026588159561157225, 0.02657587242126465, 0.02659328079223633, 0.026556415557861326, 0.02650726318359375, 0.02656870460510254, 0.0541736946105957, 0.026597375869750976, 0.026587135314941408, 0.02655232048034668, 0.026550271987915038, 0.026600448608398438, 0.026652671813964843, 0.026689535140991212, 0.02655129623413086, 0.02653081512451172, 0.02658406448364258, 0.026677248001098632, 0.02651548767089844, 0.02671510314941406, 0.027616256713867186, 0.027520000457763674, 0.026843135833740234, 0.026626047134399415, 0.026536991119384765, 0.02662294387817383, 0.026621952056884765, 0.02655129623413086, 0.026534912109375, 0.026634239196777345, 0.026594303131103517, 0.02670182418823242, 0.0265799674987793, 0.02651136016845703, 0.02654003143310547, 0.02653388786315918, 0.02654412841796875, 0.026617855072021485, 0.026968063354492186, 0.02676736068725586, 0.026615808486938477, 0.02655539131164551, 0.02651545524597168, 0.026661888122558593, 0.026573823928833007, 0.026454015731811522, 0.02652470397949219, 0.026513376235961915, 0.0265031681060791, 0.02651852798461914, 0.026694656372070313, 0.026492927551269533, 0.026570751190185548, 0.026505216598510743, 0.026565631866455077, 0.026583040237426758, 
0.026574848175048828, 0.02650931167602539, 0.02651033592224121, 0.026664960861206056, 0.026570751190185548, 0.02651238441467285, 0.026582015991210937, 0.02654515266418457, 0.02649087905883789, 0.02654719924926758, 0.026638336181640625, 0.02657689666748047, 0.026598400115966796, 0.05530112075805664, 0.027283456802368163, 0.027225088119506836, 0.02709503936767578, 0.026944511413574217, 0.02655436706542969, 0.02654310417175293, 0.026695680618286134, 0.026656768798828126, 0.0265850887298584, 0.026588191986083986, 0.02655023956298828, 0.026674175262451173, 0.026564607620239256, 0.026580991744995116, 0.026611711502075194, 0.026570751190185548, 0.0265533447265625, 0.026611711502075194, 0.026570751190185548, 0.026529792785644532, 0.02669977569580078, 0.026549247741699217, 0.02660767936706543, 0.02659424018859863, 0.02671001625061035, 0.02650111961364746, 0.02655539131164551, 0.026587135314941408, 0.02655232048034668, 0.026657791137695314, 0.0265533447265625, 0.026541055679321288, 0.026558464050292968, 0.02651238441467285, 0.026514432907104493, 0.026467327117919923, 0.026565631866455077, 0.02692403221130371, 0.026690559387207033, 0.02655436706542969, 0.026600448608398438, 0.026719232559204102, 0.02657689666748047, 0.026662912368774414, 0.02651136016845703, 0.026564607620239256, 0.026537984848022462, 0.026505216598510743, 0.02651238441467285, 0.02656358337402344, 0.02652262306213379, 0.026663936614990235, 0.026497024536132813, 0.026646528244018555, 0.02652876853942871, 0.026637311935424804, 0.02657587242126465, 0.02652672004699707, 0.02657177543640137, 0.026604543685913085, 0.026583040237426758, 0.026484735488891603, 0.054171646118164066, 0.02660147285461426, 0.026550271987915038, 0.02656972885131836, 0.02654515266418457, 0.02658406448364258, 0.026532863616943358, 0.02650726318359375, 0.02657792091369629, 0.02667622375488281, 0.0265799674987793, 0.02656153678894043, 0.026580991744995116, 0.026556480407714845, 0.02659833526611328, 0.026598400115966796, 0.026611711502075194, 0.026615808486938477, 0.026514432907104493, 0.02654207992553711, 0.02656768035888672, 0.02656870460510254, 0.026633216857910157, 0.026589183807373046, 0.02657177543640137, 0.02655436706542969, 0.026660863876342773, 0.026614784240722656, 0.026558464050292968, 0.02655436706542969, 0.026617855072021485, 0.026604543685913085, 0.026554399490356446, 0.026611679077148436, 0.026650623321533205, 0.026488832473754883, 0.02629631996154785, 0.02631372833251953, 0.0275230712890625, 0.027797504425048827, 0.02753023910522461, 0.02730803108215332, 0.02698854446411133, 0.026604543685913085, 0.02653081512451172, 0.026624000549316407, 0.02660147285461426, 0.026764287948608398, 0.026658815383911134, 0.02656870460510254, 0.026557439804077147, 0.02654617691040039, 0.026639360427856446, 0.026580991744995116, 0.026534912109375, 0.02653593635559082, 0.026594303131103517, 0.026620927810668944, 0.026596351623535155, 0.02657587242126465, 0.026604543685913085, 0.026604543685913085, 0.02653388786315918, 0.05420851135253906, 0.0265677433013916, 0.02660960006713867, 0.02667519950866699, 0.026580991744995116, 0.026594303131103517, 0.026562559127807618, 0.026665983200073243, 0.026590208053588867, 0.02653900718688965, 0.026637311935424804, 0.026638336181640625, 0.02654617691040039, 0.026590208053588867, 0.026562559127807618, 0.026696704864501954, 0.026629119873046874, 0.02652774429321289, 0.02652057647705078, 0.02653081512451172, 0.0265533447265625, 0.02654207992553711, 0.026694656372070313, 0.02658406448364258, 0.026640384674072266, 0.02657792091369629, 
0.026534912109375, 0.02651545524597168, 0.026521600723266602, 0.02656153678894043, 0.02651852798461914, 0.026580991744995116, 0.02653388786315918, 0.026529792785644532, 0.0265799674987793, 0.026592256546020508, 0.026693632125854492, 0.02651545524597168, 0.026639360427856446, 0.027520000457763674, 0.02674380874633789, 0.026613759994506835, 0.026694656372070313, 0.026521600723266602, 0.026641408920288087, 0.026607616424560547, 0.026558464050292968, 0.026610687255859376, 0.026630144119262695, 0.02669260787963867, 0.026634271621704102, 0.02657072067260742, 0.02654003143310547, 0.02657587242126465, 0.026605567932128905, 0.026534912109375, 0.026697727203369142, 0.026587135314941408, 0.02652060890197754, 0.026654687881469727, 0.026598400115966796, 0.026714111328125, 0.02658406448364258, 0.054166526794433595, 0.02671615982055664, 0.026641408920288087, 0.026625024795532228, 0.0265994873046875, 0.026596288681030273, 0.02654719924926758, 0.026572799682617186, 0.026620927810668944, 0.026605567932128905, 0.02669977569580078, 0.02654316711425781, 0.026589120864868164, 0.026582015991210937, 0.026604543685913085, 0.026550271987915038, 0.026619903564453123, 0.02656051254272461, 0.026521600723266602, 0.026631168365478516, 0.02628505516052246, 0.02677350425720215, 0.026702848434448243, 0.02657177543640137, 0.02660966491699219, 0.02656358337402344, 0.02651545524597168, 0.0265533447265625, 0.026504192352294922, 0.02660147285461426, 0.02653900718688965, 0.026639360427856446, 0.0265850887298584, 0.026626047134399415, 0.02654003143310547, 0.026556447982788087, 0.026569696426391603, 0.026558464050292968, 0.02659328079223633, 0.026556415557861326, 0.02654617691040039, 0.02668339157104492, 0.026589183807373046, 0.026498048782348634, 0.026669055938720702, 0.026529792785644532, 0.026572799682617186, 0.02655129623413086, 0.0265164794921875, 0.026590208053588867, 0.02657792091369629, 0.026616832733154298, 0.026598400115966796, 0.026660863876342773, 0.026573823928833007, 0.026595327377319337, 0.026570751190185548, 0.0265164794921875, 0.02639366340637207, 0.026553279876708986, 0.02658406448364258, 0.026657791137695314, 0.02649497604370117, 0.05412044906616211, 0.026598400115966796, 0.026583040237426758, 0.026651647567749022, 0.02652262306213379, 0.026586111068725587, 0.02655129623413086, 0.026594303131103517, 0.02712678337097168, 0.02734489631652832, 0.026644479751586913, 0.026637311935424804, 0.026594303131103517, 0.026646528244018555, 0.026602495193481446, 0.027348991394042968, 0.027696128845214843, 0.027305984497070314, 0.027241472244262696, 0.02698854446411133, 0.026565631866455077, 0.026608640670776368, 0.027296768188476563, 0.027320320129394532, 0.027320320129394532, 0.027268096923828124, 0.02708684730529785, 0.026829824447631836, 0.026608640670776368, 0.02674073600769043, 0.02660147285461426, 0.026590208053588867, 0.02655129623413086, 0.02654207992553711, 0.02661075210571289, 0.0265860481262207, 0.02667622375488281, 0.026572799682617186, 0.026582015991210937, 0.026586111068725587, 0.02653081512451172, 0.026566656112670898, 0.02655232048034668, 0.026702848434448243, 0.026602495193481446, 0.02654617691040039, 0.026550271987915038, 0.026645503997802734, 0.026550271987915038, 0.026616832733154298, 0.02660966491699219, 0.026596351623535155, 0.026595327377319337, 0.026586111068725587, 0.02654617691040039, 0.02654617691040039, 0.026786815643310546, 0.02656153678894043, 0.026608640670776368, 0.026602495193481446, 0.026529792785644532, 0.026586111068725587, 0.026638336181640625, 0.05429862213134766, 0.026634239196777345, 
0.026537984848022462, 0.026639360427856446, 0.02672127914428711, 0.02692915153503418, 0.027014144897460936, 0.026755071640014647, 0.026573823928833007, 0.026562559127807618, 0.026572799682617186, 0.026690559387207033, 0.02657177543640137, 0.026617855072021485, 0.026610687255859376, 0.026606592178344726, 0.026598400115966796, 0.026573823928833007, 0.026597375869750976, 0.026514432907104493, 0.026582015991210937, 0.02659328079223633, 0.026673152923583986, 0.027404319763183593, 0.026896352767944335, 0.026578943252563478, 0.026537984848022462, 0.026552352905273437, 0.02658710479736328, 0.026514432907104493, 0.02651238441467285, 0.026514432907104493, 0.02652262306213379, 0.02651238441467285, 0.02653081512451172, 0.026626047134399415, 0.026565631866455077, 0.026716224670410155, 0.026623935699462892, 0.026580991744995116, 0.02654617691040039, 0.026556415557861326, 0.026479616165161132, 0.02652876853942871, 0.026670080184936523, 0.026643455505371092, 0.026827775955200195, 0.026611711502075194, 0.027028480529785157, 0.027226112365722657, 0.027289600372314454, 0.02755583953857422, 0.0273623046875, 0.0273623046875, 0.02728550338745117, 0.02791116714477539, 0.027173887252807616, 0.0267775993347168, 0.026597375869750976, 0.02654515266418457, 0.02656153678894043, 0.026599424362182617, 0.026617855072021485, 0.05418803024291992, 0.026620927810668944, 0.026647552490234375, 0.02657689666748047, 0.02651238441467285, 0.026595327377319337, 0.026607616424560547, 0.026603519439697267, 0.026594303131103517, 0.026619903564453123, 0.02672230339050293, 0.026630144119262695, 0.026639360427856446, 0.02667622375488281, 0.026677248001098632, 0.026631168365478516, 0.026617855072021485, 0.026565631866455077, 0.026587135314941408, 0.0265164794921875, 0.026616832733154298, 0.026588159561157225, 0.026612735748291014, 0.026612735748291014, 0.02656768035888672, 0.026558464050292968, 0.026629119873046874, 0.026508287429809572, 0.026702848434448243, 0.02671615982055664, 0.0265533447265625, 0.026586111068725587, 0.026626047134399415, 0.02655232048034668, 0.02658406448364258, 0.02654412841796875, 0.02653081512451172, 0.0265799674987793, 0.026612735748291014, 0.026603519439697267, 0.02635366439819336, 0.026727424621582032, 0.02656153678894043, 0.026595327377319337, 0.026583040237426758, 0.026492927551269533, 0.026572799682617186, 0.02653593635559082, 0.026678272247314453, 0.02653900718688965, 0.02690559959411621, 0.02734284782409668, 0.02688102340698242, 0.026624000549316407, 0.02656358337402344, 0.026578943252563478, 0.027030527114868166, 0.027415552139282227, 0.02731724739074707, 0.027236352920532225, 0.027314176559448244, 0.027259904861450194, 0.027412479400634765]",tokens/s,36.83007739537158,,,,,,,, -4bit-awq-gemm-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,meta-llama/Meta-Llama-3-70B,meta-llama/Meta-Llama-3-70B,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - 
benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - self.load_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3904, in from_pretrained - dispatch_model(model, **device_map_kwargs) - File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 489, in dispatch_model - model.to(device) - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 2796, in to - return super().to(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1173, in to - return self._apply(convert) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 779, in _apply - module._apply(fn) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 779, in _apply - module._apply(fn) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 779, in _apply - module._apply(fn) - [Previous line repeated 2 more times] - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 853, in _apply - self._buffers[key] = fn(buf) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1159, in convert - return t.to( -torch.cuda.OutOfMemoryError: CUDA out of memory. Tried to allocate 20.00 MiB. 
GPU - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemm-flash_attention_2,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,Qwen/Qwen1.5-14B,Qwen/Qwen1.5-14B,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.40.2,,0.30.1,,,,1.19.2,,,,0.11.1,,,,,,,,,,,,,,,,,,,,,,,,,,,MB,3927.220224,12732.33408,0.0,12085.886976,11337.364992,s,10,11.005286376953126,1.1005286376953127,0.00219951912246486,1.1001186523437498,1.1034800170898438,1.1035955383300782,1.1036879553222656,"[1.1009342041015624, 1.102259765625, 1.0977261962890625, 1.099001708984375, 1.0989405517578126, 1.0975576171875, 1.0993031005859375, 1.103454345703125, 1.1023978271484376, 1.1037110595703126]",tokens/s,232.615482443152,kWh,1.2971816278166244e-05,7.107321095700172e-06,6.386246775661464e-05,8.394160513048104e-05,tokens/kWh,3049739.156191579,MB,3927.220224,12732.33408,0.0,12085.886976,11686.79936,s,10,645.4452890625,64.54452890625001,0.006590572212624524,64.54356250000001,64.553694140625,64.5538998046875,64.5540643359375,"[64.5536484375, 64.5513828125, 64.538359375, 64.55410546875, 64.5438203125, 64.5392109375, 64.53455859375, 64.5433046875, 64.5480546875, 64.53884375]",tokens/s,0.9760703357445925,kWh,0.0007619009707371396,0.00041758989640207795,0.003727131037257969,0.004906621904397187,tokens/kWh,12839.791047184834,,s,629,654.3247843627931,1.0402619783192257,0.1307385835871094,1.0244464111328124,1.0251733154296874,1.0253723876953125,2.12362228515625,"[1.0239293212890626, 1.0247884521484374, 1.0239590454101561, 1.0238433227539063, 1.0245233154296876, 1.0249482421875, 1.0245938720703125, 1.024648193359375, 1.0243778076171874, 1.0244761962890625, 1.0242918701171875, 1.02411474609375, 1.0238392333984374, 1.02443310546875, 1.0240072021484374, 1.025123291015625, 1.0243983154296874, 1.0239702758789062, 1.0238289794921875, 1.024879638671875, 1.0246707763671874, 1.0245980224609375, 1.0249052734375, 1.0246092529296875, 1.0252646484375, 1.025154052734375, 1.0244208984375, 1.0245867919921876, 1.025153076171875, 1.0249769287109376, 1.025039306640625, 1.0247618408203125, 1.0240020751953125, 1.0246666259765624, 1.024626708984375, 1.024693359375, 1.0244617919921875, 1.024501708984375, 1.02472802734375, 1.0254991455078124, 1.024680908203125, 1.025005615234375, 1.0249237060546874, 1.0251907958984374, 1.0256405029296876, 1.0250250244140624, 1.025184814453125, 1.02483251953125, 1.024786376953125, 1.0252471923828126, 1.0244935302734375, 1.0253773193359375, 1.024436279296875, 1.025090576171875, 1.025364990234375, 1.0249923095703124, 1.0241658935546876, 1.024015380859375, 1.0247249755859376, 1.0251048583984375, 1.0238597412109376, 1.02401123046875, 2.12693603515625, 1.0248826904296875, 1.024394287109375, 1.02513671875, 1.02491650390625, 1.024427978515625, 1.02502294921875, 1.0245028076171876, 1.0245499267578124, 1.024806884765625, 1.024963623046875, 1.024541748046875, 1.02464306640625, 1.0238269653320313, 1.024679931640625, 1.025101806640625, 1.0239959106445313, 1.0239354858398437, 1.024395263671875, 1.0241566162109375, 
1.0247188720703124, 1.0248836669921875, 1.0245191650390626, 1.0240450439453126, 1.024563232421875, 1.0246707763671874, 1.0240665283203125, 1.02398974609375, 1.02415869140625, 1.024153564453125, 1.0244813232421874, 1.0248365478515624, 1.0246881103515626, 1.0250660400390625, 1.024818115234375, 1.02487451171875, 1.0241402587890625, 1.0241505126953125, 1.0248140869140625, 1.0247342529296875, 1.0252052001953125, 1.024384033203125, 1.0249267578125, 1.0247813720703125, 1.0250096435546876, 1.02519091796875, 1.0286304931640624, 1.0247679443359374, 1.0250260009765626, 1.02523291015625, 1.0252830810546876, 1.024384033203125, 1.0238484497070313, 1.0244556884765625, 1.0247547607421874, 1.0238433227539063, 1.0240604248046874, 1.0238505249023437, 1.0239692993164062, 1.02425390625, 1.0245499267578124, 1.0240184326171875, 1.0239478149414063, 2.123869140625, 1.024112548828125, 1.0244178466796876, 1.024606201171875, 1.024331787109375, 1.024970703125, 1.0241341552734375, 1.0240655517578126, 1.0246256103515625, 1.024216064453125, 1.02445361328125, 1.0246973876953125, 1.02462158203125, 1.0241884765625, 1.024816162109375, 1.0244024658203126, 1.0240604248046874, 1.0241331787109376, 1.02439013671875, 1.024101318359375, 1.02460107421875, 1.0241126708984376, 1.0239580078125, 1.0260316162109375, 1.024153564453125, 1.024626708984375, 1.025184814453125, 1.024384033203125, 1.0247044677734376, 1.0242744140625, 1.0245263671875, 1.0240440673828124, 1.0243983154296874, 1.0242017822265626, 1.025048583984375, 1.02445263671875, 1.0241658935546876, 1.024067626953125, 1.0245570068359375, 1.0243236083984375, 1.0241505126953125, 1.0240552978515625, 1.0239324340820313, 1.02377880859375, 1.0250537109375, 1.0241873779296875, 1.0242979736328126, 1.0260521240234375, 1.0243245849609375, 1.0240809326171876, 1.024226318359375, 1.02384228515625, 1.0239518432617187, 1.02460107421875, 1.0242713623046875, 1.023963134765625, 1.0244454345703125, 1.02419970703125, 1.0243572998046875, 1.025553466796875, 1.0245345458984374, 1.0243184814453126, 1.02401123046875, 2.12367041015625, 1.0241259765625, 1.0242652587890626, 1.0244208984375, 1.0240543212890625, 1.0244075927734375, 1.0240450439453126, 1.024143310546875, 1.024236572265625, 1.0248232421875, 1.0240543212890625, 1.0240615234375, 1.0252451171875, 1.0245201416015626, 1.02453857421875, 1.024142333984375, 1.024111572265625, 1.0240758056640624, 1.0245355224609376, 1.0249923095703124, 1.0250772705078126, 1.025503173828125, 1.024585693359375, 1.025353759765625, 1.0251724853515625, 1.025302490234375, 1.0245509033203124, 1.02447412109375, 1.025395751953125, 1.0254765625, 1.025158203125, 1.02477001953125, 1.025292236328125, 1.024942138671875, 1.024954345703125, 1.024868408203125, 1.025138671875, 1.0246287841796875, 1.0256414794921875, 1.025081298828125, 1.0248232421875, 1.023910888671875, 1.0245693359375, 1.0238443603515626, 1.027009521484375, 1.0242816162109376, 1.0239989624023438, 1.0243726806640625, 1.0255267333984375, 1.024779296875, 1.0242899169921875, 1.0245653076171874, 1.0248477783203125, 1.0242303466796876, 1.025016845703125, 1.024173095703125, 1.0242969970703124, 1.024927734375, 1.0247515869140624, 1.02410546875, 1.0243441162109375, 1.024101318359375, 1.024564208984375, 2.12349853515625, 1.0242017822265626, 1.024427978515625, 1.0245703125, 1.0245938720703125, 1.025349609375, 1.02506396484375, 1.02497900390625, 1.0247310791015625, 1.0254193115234376, 1.0243072509765625, 1.02436865234375, 1.02426416015625, 1.024543701171875, 1.0246932373046875, 1.0242508544921876, 1.0247291259765625, 
1.0247679443359374, 1.0248099365234375, 1.0243450927734374, 1.0237440185546876, 1.0239437255859376, 1.02409619140625, 1.0248201904296874, 1.024573486328125, 1.0249298095703125, 1.0247445068359375, 1.024711669921875, 1.0242037353515625, 1.025076171875, 1.0243707275390626, 1.0241719970703125, 1.0248785400390625, 1.0241934814453124, 1.0238074951171876, 1.0240870361328125, 1.0238863525390625, 1.024194580078125, 1.0243511962890626, 1.023699951171875, 1.0239385375976562, 1.0239354858398437, 1.0245355224609376, 1.0244464111328124, 1.02446484375, 1.0242958984375, 1.024153564453125, 1.0246614990234375, 1.0249881591796874, 1.0237716674804687, 1.02404296875, 1.0248468017578125, 1.0250352783203125, 1.024362548828125, 1.0239723510742187, 1.02413623046875, 1.0246624755859375, 1.029676025390625, 1.02432568359375, 1.0238546142578124, 1.0238064575195311, 1.0237429809570313, 1.02468603515625, 2.1217158203125, 1.0237695922851562, 1.024711669921875, 1.02485498046875, 1.0252984619140626, 1.0250035400390625, 1.0246727294921876, 1.024594970703125, 1.0253424072265624, 1.024754638671875, 1.0237081298828126, 1.024232421875, 1.024865234375, 1.0245396728515626, 1.02403173828125, 1.02407275390625, 1.02489599609375, 1.0252052001953125, 1.0250966796875, 1.023773681640625, 1.02441162109375, 1.02495947265625, 1.0239385375976562, 1.0248038330078124, 1.0244832763671874, 1.0240286865234376, 1.0238095092773438, 1.02410546875, 1.024607177734375, 1.0238873901367187, 1.0241719970703125, 1.0245919189453125, 1.0244515380859376, 1.0247874755859374, 1.0245714111328126, 1.02497998046875, 1.0246953125, 1.024564208984375, 1.02820654296875, 1.02466455078125, 1.02426220703125, 1.02436962890625, 1.0242867431640625, 1.0247608642578125, 1.023847412109375, 1.0239989624023438, 1.0239672241210938, 1.024320556640625, 1.0242447509765624, 1.02379931640625, 1.02389453125, 1.024280517578125, 1.0238443603515626, 1.0239979248046875, 1.02392626953125, 1.02389453125, 1.0241024169921875, 1.0241402587890625, 1.024089111328125, 1.0238341064453125, 1.0239702758789062, 1.0240225830078125, 1.02510595703125, 2.124275634765625, 1.02431640625, 1.0250086669921874, 1.0248448486328126, 1.0252420654296874, 1.024489501953125, 1.0244403076171875, 1.0246031494140626, 1.0247711181640624, 1.0237265625, 1.0236641235351562, 1.0239344482421875, 1.0241915283203125, 1.023867919921875, 1.02379931640625, 1.0237245483398438, 1.024006103515625, 1.023836181640625, 1.024227294921875, 1.02468603515625, 1.0239273071289063, 1.0240594482421874, 1.0241719970703125, 1.024067626953125, 1.023867919921875, 1.0238515014648437, 1.025666015625, 1.0245263671875, 1.024973876953125, 1.02382080078125, 1.0239006958007812, 1.0243861083984376, 1.0252000732421875, 1.025047607421875, 1.0245989990234374, 1.0247177734375, 1.023931396484375, 1.0238443603515626, 1.0247506103515625, 1.0237757568359376, 1.0241033935546875, 1.0239365844726562, 1.0244884033203125, 1.0237399291992189, 1.024362548828125, 1.024405517578125, 1.024089111328125, 1.0244864501953126, 1.02445263671875, 1.02447509765625, 1.024986083984375, 1.0248734130859376, 1.0243768310546875, 1.024090087890625, 1.024077880859375, 1.0242088623046874, 1.0244178466796876, 1.0243809814453124, 1.024343017578125, 1.024501708984375, 1.0247445068359375, 1.024690185546875, 1.0249298095703125, 2.128819091796875, 1.02439111328125, 1.0247823486328125, 1.0247977294921875, 1.02436767578125, 1.0251878662109375, 1.025364990234375, 1.0248714599609374, 1.024216064453125, 1.023978515625, 1.024204833984375, 1.024175048828125, 1.0241976318359376, 1.02432568359375, 
1.024288818359375, 1.024332763671875, 1.02521142578125, 1.0247762451171876, 1.0244351806640626, 1.0237880249023437, 1.0244085693359375, 1.024279541015625, 1.024522216796875, 1.0245447998046875, 1.0244136962890624, 1.02485302734375, 1.0252779541015624, 1.0244249267578125, 1.0243072509765625, 1.024364501953125, 1.0249554443359374, 1.02441064453125, 1.02491748046875, 1.025059814453125, 1.0243563232421875, 1.024710693359375, 1.0254488525390626, 1.0243583984375, 1.024216064453125, 1.0244423828125, 1.024673828125, 1.0236497802734374, 1.0240399169921874, 1.02411572265625, 1.0241024169921875, 1.0244669189453126, 1.024669677734375, 1.0245672607421874, 1.0250875244140625, 1.0249993896484375, 1.024100341796875, 1.0239754028320311, 1.0242611083984374, 1.0236723022460938, 1.0240758056640624, 1.025005615234375, 1.0246123046875, 1.024385009765625, 1.024511962890625, 1.0244013671875, 1.0243123779296874, 1.0240665283203125, 1.0242939453125, 2.1268427734375, 1.0253404541015625, 1.0243931884765625, 1.0240972900390626, 1.0241934814453124, 1.0252298583984376, 1.024215087890625, 1.0247833251953125, 1.024521240234375, 1.023973388671875, 1.0242017822265626, 1.0258052978515626, 1.024546875, 1.0249359130859375, 1.025333251953125, 1.0250526123046875, 1.02436962890625, 1.0253035888671875, 1.0246348876953124, 1.0245201416015626, 1.02516015625, 1.0252349853515625, 1.02502294921875, 1.024257080078125, 1.0243369140625, 1.0239395751953124, 1.02425390625, 1.0249779052734376, 1.024067626953125, 1.0241177978515625, 1.0244976806640624, 1.025036376953125, 1.02519189453125, 1.02483349609375, 1.0254356689453126, 1.0247762451171876, 1.024606201171875, 1.0247310791015625, 1.024310302734375, 1.0240450439453126, 1.024733154296875, 1.0244976806640624, 1.0241402587890625, 1.0240235595703124, 1.025154052734375, 1.023847412109375, 1.0244290771484375, 1.0241136474609376, 1.0241822509765626, 1.0238689575195312, 1.0249298095703125, 1.0245478515625, 1.0244771728515625, 1.0245509033203124, 1.024611328125, 1.0240921630859374, 1.024890869140625, 1.0240205078125, 1.024279541015625, 1.0245560302734376, 1.0247720947265626, 1.024206787109375, 1.0244300537109374, 2.128713623046875, 1.024427001953125, 1.0249287109375, 1.025007568359375, 1.0242989501953126, 1.025701904296875, 1.0250526123046875, 1.024421875, 1.02443115234375, 1.0251038818359375, 1.024236572265625, 1.0237388916015624, 1.0245765380859375, 1.023847412109375, 1.0237470703125, 1.0244495849609374, 1.024459716796875, 1.024206787109375, 1.02432666015625, 1.024251953125, 1.0239293212890626, 1.024359375, 1.02411669921875, 1.0238505249023437, 1.024151611328125, 1.025427490234375, 1.0240450439453126, 1.024953369140625, 1.02416796875, 1.0247802734375, 1.0241229248046875, 1.024373779296875, 1.024141357421875, 1.0242396240234375, 1.02449560546875, 1.0251766357421874, 1.024015380859375, 1.0237122802734375, 1.0239969482421876, 1.0238975830078125, 1.0237726440429689, 1.02384130859375, 1.02431640625, 1.0243133544921874, 1.0245919189453125, 1.024943115234375, 1.0246246337890625, 1.024611328125, 1.024716796875, 1.024035888671875, 1.0247413330078126, 1.024973876953125, 1.024796630859375, 1.0246318359375, 1.0259609375, 1.0238064575195311, 1.0238228759765624, 1.02377880859375, 1.02478955078125, 1.02438916015625, 1.0245723876953126, 1.0243604736328125, 1.02432568359375]",tokens/s,0.9612963088545465,,,,,,,, 
-4bit-awq-gemm-flash_attention_2,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,mistralai/Mistral-7B-v0.1,mistralai/Mistral-7B-v0.1,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.0,,0.30.1,,,,1.19.2,,,,0.11.1,,,,,,,,,,,,,,,,,,,,,,,,,,,MB,1859.03104,6201.802752,0.0,5555.355648,5273.986048,s,10,6.183414916992188,0.6183414916992188,0.0011184543236210387,0.618503173828125,0.6196270629882813,0.6198319030761719,0.6199957751464844,"[0.6192075805664062, 0.6172159423828125, 0.6163760986328125, 0.6177174072265625, 0.6180655517578125, 0.6173275146484375, 0.6189407958984375, 0.6189457397460938, 0.61958154296875, 0.6200367431640625]",tokens/s,414.010709998621,kWh,7.2943227158652426e-06,3.997002324692457e-06,3.5828705133529186e-05,4.7120030174086885e-05,tokens/kWh,5432933.70259309,MB,1859.03104,6201.802752,0.0,5555.355648,5324.908032,s,10,366.93543359375,36.693543359375,0.025425135768351863,36.68483984375,36.71502578125,36.739856640625,36.759721328125,"[36.6825078125, 36.6820625, 36.67597265625, 36.67723828125, 36.69059375, 36.7646875, 36.68741015625, 36.687171875, 36.7095078125, 36.67828125]",tokens/s,1.7169233121745884,kWh,0.0004334749158389039,0.00023758188798711938,0.002030957448294073,0.002702014252120096,tokens/kWh,23315.939192610833,,s,629,371.9142907714842,0.5912786816716763,0.07347096675901024,0.5822586669921875,0.5836932861328125,0.5840418579101562,1.2002971240234375,"[0.5831044921875, 0.5822699584960938, 0.5834168090820312, 0.58437939453125, 0.5840988159179688, 0.583689208984375, 0.5825567016601563, 0.5822218017578125, 0.5818173217773438, 0.5815992431640625, 0.5817159423828125, 0.5822648315429687, 0.5821982421875, 0.5821337890625, 0.5815695190429687, 0.5815643920898438, 0.582739990234375, 0.5819013061523437, 0.5815090942382812, 0.5818992919921875, 0.5820938110351562, 0.5823037719726563, 0.5815859375, 0.5816412353515625, 0.5814845581054687, 0.5814404907226562, 0.5816504516601563, 0.5815562133789063, 0.5829365844726563, 0.5825740966796875, 0.5817200927734375, 0.5818470458984375, 0.5818685302734375, 0.5846865844726562, 0.581960693359375, 0.5823938598632813, 0.582456298828125, 0.5820784912109375, 0.581928955078125, 0.5816002807617188, 0.5820436401367187, 0.5819269409179687, 0.581781494140625, 0.5828024291992188, 0.582518798828125, 0.583046142578125, 0.58231396484375, 0.58227197265625, 0.5819197387695313, 0.58180712890625, 0.5825986328125, 0.5820835571289062, 0.5818644409179687, 0.5822269287109375, 0.582245361328125, 0.5821522216796875, 0.5817988891601562, 0.5820927734375, 0.5820047607421875, 0.5820805053710938, 0.582076416015625, 0.5821747436523438, 1.2006451416015624, 0.5828607788085938, 0.5824737548828125, 0.5826815795898438, 0.5828382568359375, 0.582724609375, 0.5826375732421875, 0.5826047973632813, 0.5828505859375, 0.5823723754882812, 0.582371337890625, 0.5822054443359375, 0.5829539794921875, 0.5826570434570313, 0.5828075561523437, 0.582297607421875, 0.5822361450195312, 0.5817088012695313, 0.5831546630859376, 0.5821470947265625, 0.58178564453125, 0.5819739990234375, 0.5819053955078125, 0.581644287109375, 
0.5816033325195312, 0.58183984375, 0.5819576416015625, 0.5817825317382812, 0.5819330444335937, 0.5823958740234375, 0.5825853271484375, 0.5823047485351562, 0.5824429931640625, 0.5819678955078125, 0.5820170288085937, 0.58241845703125, 0.5822371826171875, 0.5820948486328125, 0.581960693359375, 0.5820579833984375, 0.5817855834960938, 0.5820006103515625, 0.5823180541992188, 0.5817886962890625, 0.5818910522460937, 0.5815715942382812, 0.5831065673828125, 0.5818674926757812, 0.5818327026367187, 0.5820958862304687, 0.5820795288085937, 0.5822504272460938, 0.5827584228515625, 0.58241845703125, 0.5819842529296875, 0.5818224487304687, 0.5830155029296875, 0.5818920288085937, 0.58241943359375, 0.5820692749023437, 0.5821624145507812, 0.58189208984375, 0.582134765625, 1.199605712890625, 0.5822044067382812, 0.58277685546875, 0.5820149536132813, 0.5822853393554688, 0.5823651733398437, 0.5825372314453126, 0.5818859252929688, 0.58189111328125, 0.5824112548828125, 0.5814589233398437, 0.5815828247070313, 0.5821522216796875, 0.5817999267578124, 0.5821306762695313, 0.5816801147460937, 0.5815941162109375, 0.5823600463867188, 0.5821112060546875, 0.5825054931640625, 0.5818009643554688, 0.5820743408203125, 0.5816873168945312, 0.58237646484375, 0.5829601440429687, 0.5822699584960938, 0.5826181030273437, 0.582033447265625, 0.5821142578125, 0.5820088500976562, 0.581875732421875, 0.5822617797851563, 0.581992431640625, 0.5820211181640625, 0.5821327514648438, 0.5825167236328125, 0.583541748046875, 0.5822965698242187, 0.5826570434570313, 0.5821788330078125, 0.5822116088867187, 0.5822228393554687, 0.582134765625, 0.5822730102539062, 0.5822904052734375, 0.5818532104492188, 0.581939208984375, 0.58212353515625, 0.5819115600585938, 0.581917724609375, 0.581796875, 0.5814108276367187, 0.5820999755859375, 0.581823486328125, 0.5824778442382812, 0.5824727172851563, 0.58199658203125, 0.5818797607421875, 0.582181884765625, 0.5825372314453126, 0.5822669067382813, 0.582498291015625, 0.582295654296875, 1.1999998779296874, 0.5822935180664063, 0.5821593627929688, 0.5819166870117187, 0.58216552734375, 0.5825269775390625, 0.5819852905273437, 0.5826549682617187, 0.58295703125, 0.5822156982421876, 0.5825126342773438, 0.5818338012695312, 0.5822903442382813, 0.5818214111328125, 0.5828648681640625, 0.5822689208984375, 0.5817630615234375, 0.5818245239257812, 0.5817979736328125, 0.582257568359375, 0.5819883422851563, 0.5817487182617187, 0.5817006225585938, 0.5819115600585938, 0.5828761596679688, 0.5822597045898438, 0.5822843017578125, 0.5820897216796875, 0.5826027221679687, 0.581818359375, 0.5817947998046875, 0.5823662109375, 0.5819771118164062, 0.5821470947265625, 0.581907470703125, 0.5817671508789063, 0.5819443359375, 0.5819330444335937, 0.5819166870117187, 0.5820835571289062, 0.5821214599609374, 0.5826129760742188, 0.5820989379882813, 0.5827706909179687, 0.5828003540039063, 0.5829396362304687, 0.5819617309570313, 0.5818541870117188, 0.5825341186523437, 0.5824102172851563, 0.582561767578125, 0.582096923828125, 0.58191259765625, 0.581517333984375, 0.5817364501953125, 0.5814691772460937, 0.5818265380859375, 0.5818490600585937, 0.5822433471679688, 0.5819934692382812, 0.5829918823242187, 0.5824635009765625, 0.5829898071289062, 1.201005615234375, 0.5828812866210937, 0.5822740478515624, 0.58203955078125, 0.58253515625, 0.58185009765625, 0.58208154296875, 0.5819259033203125, 0.58195458984375, 0.5821777954101562, 0.5819033813476563, 0.5821747436523438, 0.5821358032226562, 0.5823917846679687, 0.58222900390625, 0.5823723754882812, 0.5822586669921875, 
0.5819432983398437, 0.5823989868164062, 0.582287353515625, 0.5819432983398437, 0.5823098754882813, 0.5821470947265625, 0.58201904296875, 0.5821522216796875, 0.5817927856445313, 0.58224951171875, 0.5818756713867187, 0.5823150024414062, 0.5823600463867188, 0.5821900634765625, 0.5823775024414063, 0.5833564453125, 0.5822730102539062, 0.583024658203125, 0.5828505859375, 0.5830850830078125, 0.5825567016601563, 0.5824440307617188, 0.58309326171875, 0.5827573852539063, 0.58252392578125, 0.5825177612304687, 0.5825382690429688, 0.5826395874023438, 0.582086669921875, 0.5830287475585938, 0.5831290893554687, 0.5821757202148438, 0.5823723754882812, 0.5831546630859376, 0.5824204711914063, 0.5822750854492188, 0.5826437377929687, 0.582055908203125, 0.5820119018554688, 0.5823989868164062, 0.5829959716796875, 0.5821931762695313, 0.5819218139648438, 0.5824594116210937, 0.5828392944335937, 0.5823467407226562, 1.2004127197265626, 0.5823897705078125, 0.5821522216796875, 0.5828423461914063, 0.58172314453125, 0.5815429077148437, 0.5822146606445312, 0.5822965698242187, 0.5824727172851563, 0.5832130737304687, 0.5837701416015625, 0.5834608764648438, 0.58389404296875, 0.583257080078125, 0.5831526489257812, 0.5838325805664063, 0.5842800903320312, 0.5840598754882812, 0.5840148315429687, 0.5834495849609375, 0.5838704833984375, 0.5837393798828125, 0.58374658203125, 0.5832683715820313, 0.5839451904296875, 0.5835653076171875, 0.583952392578125, 0.5841285400390624, 0.5839216918945312, 0.584069091796875, 0.5833717651367187, 0.584079345703125, 0.5822791748046875, 0.58410595703125, 0.5837250366210938, 0.5839124755859375, 0.5839564819335937, 0.583910400390625, 0.5855027465820313, 0.5835325317382812, 0.5842186279296875, 0.5839196166992188, 0.5835909423828125, 0.5837138061523437, 0.5835827026367187, 0.5830758666992187, 0.5837998046875, 0.5836973876953125, 0.5838069458007813, 0.5841008911132812, 0.5842042846679687, 0.5843200073242187, 0.584690673828125, 0.5837639770507812, 0.5840875244140625, 0.58410595703125, 0.5839616088867188, 0.5835899047851563, 0.5849579467773437, 0.5842135009765625, 0.5833236694335937, 0.5823897705078125, 0.581928955078125, 1.2009288330078125, 0.582350830078125, 0.58395751953125, 0.5835161743164062, 0.5829293823242188, 0.5832315063476563, 0.5834116821289063, 0.5836349487304687, 0.5841828002929688, 0.5820057373046875, 0.5825576782226562, 0.5837035522460937, 0.5828607788085938, 0.5823723754882812, 0.5838561401367187, 0.58364111328125, 0.5828628540039062, 0.5813483276367187, 0.581317626953125, 0.5815889892578125, 0.58169140625, 0.5815162963867188, 0.58185009765625, 0.582002685546875, 0.5819638061523438, 0.5832847290039063, 0.5816729736328125, 0.5818470458984375, 0.5819771118164062, 0.5817507934570313, 0.5814476928710938, 0.5819402465820313, 0.5820078125, 0.581517333984375, 0.581465087890625, 0.5822945556640625, 0.5821337890625, 0.5815838623046875, 0.5820989990234375, 0.5824296875, 0.5820457153320312, 0.5824461059570313, 0.5824737548828125, 0.5818050537109375, 0.582128662109375, 0.5824798583984375, 0.5821214599609374, 0.5822095336914063, 0.5823784790039063, 0.582134765625, 0.5822146606445312, 0.5818245239257812, 0.5819381713867188, 0.5824696044921875, 0.5824706420898438, 0.5826283569335937, 0.58235595703125, 0.5820753784179687, 0.5823877563476563, 0.582424560546875, 0.58216552734375, 0.5818204345703125, 0.5826416625976563, 1.201427490234375, 0.5822935180664063, 0.582170654296875, 0.5820518188476562, 0.5819218139648438, 0.5819739990234375, 0.5826621704101562, 0.5824409790039062, 0.5830502319335937, 
0.582667236328125, 0.5825014038085937, 0.5823467407226562, 0.5828045043945312, 0.5823252563476562, 0.5821880493164062, 0.5829171142578125, 0.582118408203125, 0.5825382690429688, 0.5823733520507812, 0.5822125854492187, 0.5824266357421874, 0.5821091918945313, 0.5821173706054688, 0.5825208129882813, 0.5826007080078125, 0.5825556640625, 0.5820579833984375, 0.5825986328125, 0.582476806640625, 0.5827225341796874, 0.5821777954101562, 0.5822300415039062, 0.58294580078125, 0.5820938720703125, 0.5819207153320313, 0.5821747436523438, 0.582129638671875, 0.5825044555664063, 0.5818736572265625, 0.5816575927734375, 0.5821951904296875, 0.5822125854492187, 0.5821552734375, 0.5820139770507813, 0.5825054931640625, 0.5818880004882813, 0.5819913940429687, 0.5824952392578125, 0.5821358032226562, 0.5826898193359376, 0.582197265625, 0.5821798095703125, 0.582339599609375, 0.5823529052734375, 0.5823723754882812, 0.5820682373046875, 0.582150146484375, 0.5822054443359375, 0.5825587158203125, 0.5829376220703125, 0.5822269287109375, 0.5832161254882813, 0.5821911010742188, 1.2029603271484375, 0.5822730102539062, 0.5827010498046875, 0.5821941528320312, 0.5827747802734375, 0.5820374755859375, 0.582319091796875, 0.5819852905273437, 0.5823017578125, 0.5824746704101562, 0.5826314086914063, 0.582255615234375, 0.5823743896484375, 0.5821563110351563, 0.5829775390625, 0.5822258911132813, 0.5821051025390624, 0.582297607421875, 0.5837189331054687, 0.5839871826171875, 0.5838991088867187, 0.5836922607421875, 0.584005615234375, 0.5837537231445312, 0.583568359375, 0.5831055297851563, 0.5834229736328125, 0.5836431274414062, 0.5840794067382813, 0.5837383422851562, 0.58393701171875, 0.5841243896484375, 0.5823876953125, 0.5833635864257812, 0.58427392578125, 0.5820712890625, 0.5819453735351563, 0.5823057861328125, 0.582002685546875, 0.5819166870117187, 0.5829539794921875, 0.5820671997070312, 0.5822361450195312, 0.5820221557617188, 0.5820999755859375, 0.58199755859375, 0.5821737060546875, 0.583208984375, 0.5831434326171875, 0.5823364868164063, 0.5827174682617188, 0.5824962768554688, 0.5822781372070313, 0.5826682739257812, 0.5827501831054688, 0.5818245239257812, 0.5817589721679688, 0.5818521728515625, 0.58195556640625, 0.58195458984375, 0.582002685546875, 0.5821552734375, 0.5824307250976563, 1.2025272216796874, 0.5821696166992187, 0.5821286010742187, 0.582107177734375, 0.5820220947265625, 0.5823426513671875, 0.5817620239257812, 0.5823989868164062, 0.5820825805664063, 0.581781494140625, 0.5816698608398437, 0.581696533203125, 0.5820774536132812, 0.5819094848632812, 0.581712890625, 0.5817139282226562, 0.5817886962890625, 0.581992431640625, 0.5820774536132812, 0.5824501953125, 0.5818746948242187, 0.5817753295898438, 0.5824921875, 0.5816432495117188, 0.582413330078125, 0.58231396484375, 0.5824603881835938, 0.5824594116210937, 0.5828782348632813, 0.5824542846679688, 0.582302734375, 0.5820313720703125, 0.58241943359375, 0.5823294067382813, 0.5827665405273438, 0.5818951416015625, 0.5827501831054688, 0.5819842529296875, 0.5818511352539063, 0.5822074584960938, 0.5821696166992187, 0.5824000244140625, 0.582150146484375, 0.582055908203125, 0.5819166870117187, 0.5821051025390624, 0.58183984375, 0.581707763671875, 0.5821583251953125, 0.5820416259765625, 0.581970947265625, 0.5821788330078125, 0.582245361328125, 0.58267138671875, 0.5819248657226562, 0.58211328125, 0.5825842895507812, 0.5821829223632813, 0.5829417114257812, 0.582345703125, 0.5822811889648437, 0.5821439819335937, 0.5820999755859375]",tokens/s,1.6912498809745307,,,,,,,, 
-4bit-awq-gemm-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,google/gemma-7b,google/gemma-7b,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 304, in hf_raise_for_status - response.raise_for_status() - File ""/usr/local/lib/python3.10/dist-packages/requests/models.py"", line 1024, in raise_for_status - raise HTTPError(http_error_msg, response=self) -requests.exceptions.HTTPError: 403 Client Error: Forbidden for url: https://huggingface.co/google/gemma-7b/resolve/main/config.json - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1722, in _get_metadata_or_catch_error - metadata = get_hf_file_metadata(url=url, proxies=proxies, timeout=etag_timeout, headers=headers) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1645, in get_hf_file_metadata - r = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 372, in _request_wrapper - response = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 396, in _request_wrapper - hf_raise_for_status(response) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 367, in hf_raise_for_status - raise HfHubHTTPError(message, response=response) from e -huggingface_hub.utils._errors.HfHubHTTPError: (Request ID: Root=1-66948188-61698c3155d92a4641002213;cb1b519a-94d7-4b05-b855-768da720d205) - -403 Forbidden: Please enable access to public gated repositories in your fine-grained token settings to view this repository.. -Cannot access content at: https://huggingface.co/google/gemma-7b/resolve/main/config.json. -If you are trying to create or update content,make sure you have a token with the `write` role. 
- -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1221, in hf_hub_download - return _hf_hub_download_to_cache_dir( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1325, in _hf_hub_download_to_cache_dir - _raise_on_head_call_error(head_call_error, force_download, local_files_only) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1826, in _raise_on_head_call_error - raise LocalEntryNotFoundError( -huggingface_hub.utils._errors.LocalEntryNotFoundError: An error happened while trying to locate the file on the Hub and we cannot find the requested files in the local cache. Please check your connection and try again or make sure your Internet connection is on. - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 445, in cached_file - raise EnvironmentError( -OSError: We couldn't connect to 'https://huggingface.co' to load this file, couldn't find it in the cached files and it looks like google/gemma-7b is not the path to a directory containing a file named config.json. -Checkout your internet connection or see how to run the library in offline mode at 'https://huggingface.co/docs/transformers/installation#offline-mode'. 
- -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemm-flash_attention_2,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,Qwen/Qwen1.5-1.8B,Qwen/Qwen1.5-1.8B,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.40.2,,0.30.1,,,,1.19.2,,,,0.11.1,,,,,,,,,,,,,,,,,,,,,,,,,,,MB,1866.50624,3328.704512,0.0,2682.257408,2578.238464,s,10,1.4175592651367188,0.14175592651367186,0.0022402045159939975,0.14135794067382812,0.14292337188720702,0.14542544631958007,0.1474271058654785,"[0.14792752075195312, 0.14161701965332033, 0.1391898193359375, 0.1415839385986328, 0.14236735534667969, 0.14101449584960937, 0.13986236572265626, 0.1410111083984375, 0.14185369873046874, 0.14113194274902344]",tokens/s,1805.9209677932563,kWh,1.6547061226986076e-06,9.066846465884736e-07,6.786722250360737e-06,9.348113019647817e-06,tokens/kWh,27385205.919305906,MB,1866.50624,3328.704512,0.0,2682.257408,2667.0976,s,10,83.8020732421875,8.38020732421875,0.04772557025936409,8.37658544921875,8.433103320312501,8.4430912109375,8.4510815234375,"[8.4288447265625, 8.39970703125, 8.4144833984375, 8.4530791015625, 8.4308837890625, 8.3407841796875, 8.3356474609375, 8.31612109375, 8.32905859375, 8.3534638671875]",tokens/s,7.51771377038971,kWh,9.806880771561905e-05,5.3749043913185225e-05,0.00039216448579924195,0.0005439823374280462,tokens/kWh,115812.5837281126,,s,629,84.93340989685055,0.1350292685164556,0.016826908436194207,0.13322035217285155,0.13436354370117187,0.1346594787597656,0.27325439575195315,"[0.1383321533203125, 0.13691903686523438, 0.13555609130859375, 0.1336924133300781, 0.1340948486328125, 0.13436006164550782, 0.1343426513671875, 0.1342003173828125, 0.13426687622070313, 0.13448089599609375, 0.13400473022460938, 0.13385011291503907, 0.13449215698242187, 0.13411634826660157, 0.13182975769042968, 0.13270323181152344, 0.13469491577148437, 0.13397605895996093, 0.13359616088867188, 0.13410610961914063, 0.13234483337402345, 0.13145497131347655, 0.13187686157226564, 0.1315246124267578, 0.13338418579101563, 0.13427098083496095, 0.13404261779785157, 0.13425152587890626, 0.13394534301757813, 0.13425050354003906, 0.1328660430908203, 0.1342433319091797, 0.13415116882324218, 0.13333401489257812, 0.1342013397216797, 0.13399346923828126, 0.1340968933105469, 0.134508544921875, 0.13404876708984376, 0.1326940155029297, 0.1316812744140625, 0.13177658081054688, 0.13164845275878906, 0.13489971923828126, 0.13331251525878907, 0.1342248992919922, 0.13452799987792968, 0.13407334899902343, 0.13159321594238282, 0.13165260314941407, 0.13322035217285155, 0.13399244689941406, 0.13447474670410156, 0.13405696105957032, 0.13407948303222655, 0.13395046997070312, 0.13439283752441405, 0.1342300109863281, 0.13573222351074218, 0.1321625671386719, 0.13170687866210937, 0.13169970703125, 0.2737838134765625, 0.13152255249023437, 0.13275955200195313, 0.13433351135253907, 0.1345811767578125, 0.13403852844238281, 0.13368319702148437, 0.13302169799804686, 0.13411737060546874, 0.13280256652832031, 0.13474508666992188, 0.13335859680175782, 0.1340590057373047, 
0.13169459533691405, 0.13173248291015624, 0.13171405029296876, 0.13407743835449218, 0.1342689208984375, 0.13365248107910158, 0.13403340148925783, 0.1339299774169922, 0.13434367370605468, 0.1315061798095703, 0.13148466491699218, 0.13182156372070314, 0.1337518005371094, 0.1333534698486328, 0.1340999755859375, 0.13400575256347655, 0.13407852172851562, 0.1340497283935547, 0.13159117126464845, 0.13308108520507814, 0.13406515502929686, 0.13437747192382812, 0.1340518341064453, 0.1342433319091797, 0.13421466064453125, 0.13415834045410155, 0.1342986297607422, 0.13406617736816406, 0.13197926330566406, 0.1316177978515625, 0.13170687866210937, 0.13170687866210937, 0.13363917541503906, 0.13486598205566405, 0.13352549743652345, 0.13383468627929687, 0.13421055603027343, 0.13174989318847657, 0.13169973754882813, 0.13152662658691405, 0.13283226013183594, 0.13399142456054688, 0.13478297424316407, 0.13450035095214843, 0.1341460418701172, 0.1339115447998047, 0.13337496948242186, 0.134002685546875, 0.13234687805175782, 0.13289779663085938, 0.2724710388183594, 0.13155020141601562, 0.13163827514648438, 0.131810302734375, 0.1316618194580078, 0.13126144409179688, 0.13352549743652345, 0.13428326416015626, 0.13351628112792968, 0.13398326110839845, 0.1340497589111328, 0.1338419189453125, 0.13386444091796876, 0.13370777893066407, 0.13431910705566405, 0.1338470458984375, 0.13402931213378907, 0.13414399719238282, 0.13386752319335937, 0.13391973876953125, 0.13366067504882811, 0.13545677185058594, 0.13440921020507812, 0.13534104919433593, 0.13244415283203126, 0.134466552734375, 0.13180108642578126, 0.131599365234375, 0.13178880310058594, 0.13154098510742188, 0.13177754211425782, 0.13156147766113283, 0.13368832397460936, 0.134044677734375, 0.13403347778320313, 0.13381727600097656, 0.13421772766113282, 0.1335265350341797, 0.13435289001464842, 0.133959716796875, 0.13362889099121095, 0.1339842529296875, 0.13384602355957032, 0.13403443908691406, 0.13420748901367188, 0.13402316284179688, 0.1340590057373047, 0.13474713134765626, 0.13409791564941406, 0.13352960205078124, 0.13388697814941405, 0.1340088348388672, 0.13404978942871093, 0.13377023315429687, 0.13412864685058593, 0.13397708129882813, 0.13398016357421874, 0.13395968627929689, 0.1340712890625, 0.13423004150390624, 0.13353570556640626, 0.13389414978027345, 0.13325209045410155, 0.27623629760742185, 0.134403076171875, 0.13430271911621094, 0.13406105041503907, 0.1344286651611328, 0.1346447296142578, 0.13446553039550782, 0.13406207275390625, 0.13451161193847655, 0.13428941345214843, 0.1343856658935547, 0.13401190185546874, 0.134466552734375, 0.13412249755859376, 0.13459762573242187, 0.1339463653564453, 0.13446553039550782, 0.13476864624023438, 0.1342044219970703, 0.13403443908691406, 0.13416653442382812, 0.134276123046875, 0.13387055969238282, 0.13414707946777343, 0.13453106689453126, 0.13373338317871095, 0.13383680725097657, 0.13395046997070312, 0.133791748046875, 0.13404570007324218, 0.1341265869140625, 0.13388697814941405, 0.1346693115234375, 0.1340518341064453, 0.1335562286376953, 0.1343969268798828, 0.13446348571777345, 0.1351065673828125, 0.1340641326904297, 0.13445733642578125, 0.13435903930664062, 0.1340631103515625, 0.1340518341064453, 0.13444816589355468, 0.13536051940917967, 0.13402621459960937, 0.13327052307128906, 0.13432012939453125, 0.1344593963623047, 0.13412249755859376, 0.1338419189453125, 0.13387469482421874, 0.13338726806640624, 0.13416653442382812, 0.13386854553222657, 0.13441127014160156, 0.13412351989746094, 0.13347328186035157, 0.13382246398925782, 
0.13420338439941407, 0.1338275909423828, 0.13340467834472655, 0.1342750701904297, 0.27683428955078127, 0.13397196960449217, 0.1343129577636719, 0.1338665008544922, 0.13388082885742186, 0.1340958709716797, 0.1340712890625, 0.13391871643066405, 0.13356646728515625, 0.1333104705810547, 0.13409178161621094, 0.133396484375, 0.1351403503417969, 0.13372825622558593, 0.13417575073242188, 0.13519052124023437, 0.1343938903808594, 0.1338654327392578, 0.133359619140625, 0.1340712890625, 0.13396890258789063, 0.1339894104003906, 0.1338838653564453, 0.13374668884277344, 0.133538818359375, 0.13404876708984376, 0.13366169738769532, 0.1339351043701172, 0.13478195190429687, 0.13399859619140625, 0.13399449157714843, 0.13394432067871093, 0.13413682556152343, 0.1340262451171875, 0.13399449157714843, 0.13403546142578124, 0.13435289001464842, 0.1340712890625, 0.13406112670898437, 0.1335233917236328, 0.13358387756347656, 0.13382553100585937, 0.13504620361328126, 0.13462828063964843, 0.13439897155761718, 0.13393516540527345, 0.13389511108398439, 0.1334097900390625, 0.13408767700195312, 0.13458636474609376, 0.13346713256835938, 0.13414297485351562, 0.13160243225097656, 0.1317795867919922, 0.13214413452148438, 0.13376716613769532, 0.133718017578125, 0.13606605529785157, 0.13217485046386718, 0.13167718505859374, 0.13171302795410156, 0.13281280517578126, 0.13389926147460937, 0.27561163330078126, 0.1342044219970703, 0.13399655151367187, 0.13381837463378907, 0.13696818542480468, 0.13187277221679689, 0.13215437316894532, 0.13239193725585938, 0.1329521026611328, 0.13212156677246092, 0.13276876831054688, 0.132463623046875, 0.13214002990722656, 0.13234072875976563, 0.13270118713378906, 0.13262847900390626, 0.1321871337890625, 0.1320509490966797, 0.13238067626953126, 0.13276876831054688, 0.1321564178466797, 0.1327431640625, 0.1328158721923828, 0.13263360595703125, 0.1317580871582031, 0.1315215301513672, 0.13196083068847655, 0.1318225860595703, 0.13159117126464845, 0.13150003051757814, 0.13156454467773437, 0.13174887084960937, 0.1325496368408203, 0.1326755828857422, 0.13302169799804686, 0.13294898986816406, 0.1324881896972656, 0.13196800231933595, 0.13392588806152345, 0.1337159729003906, 0.13316915893554687, 0.1326755828857422, 0.13233255004882813, 0.13182054138183594, 0.13182566833496093, 0.13186253356933594, 0.13180519104003907, 0.1316822967529297, 0.13177548217773438, 0.13161984252929687, 0.1317611541748047, 0.131852294921875, 0.13162086486816407, 0.1321553955078125, 0.13262950134277343, 0.13191372680664062, 0.13178265380859375, 0.13150515747070313, 0.13170176696777344, 0.1315205078125, 0.13172735595703125, 0.13178778076171874, 0.13167103576660155, 0.2728120422363281, 0.13346815490722655, 0.1335142364501953, 0.1333534698486328, 0.13239910888671874, 0.13167616271972657, 0.13235302734375, 0.1338050537109375, 0.13347225952148437, 0.1334886474609375, 0.1320120391845703, 0.1320816650390625, 0.13306573486328124, 0.13341183471679688, 0.13176934814453126, 0.13158195495605468, 0.13284147644042968, 0.13212979125976562, 0.1316290588378906, 0.1317058563232422, 0.13171098327636718, 0.13157887268066407, 0.13160960388183593, 0.13298486328125, 0.13236732482910157, 0.13162188720703125, 0.13165260314941407, 0.13161062622070313, 0.13162495422363282, 0.13157376098632811, 0.13175910949707031, 0.13201100158691406, 0.13279539489746095, 0.13196493530273437, 0.13224140930175782, 0.13323365783691407, 0.13173248291015624, 0.13189529418945312, 0.133254150390625, 0.13235711669921876, 0.1316505584716797, 0.13167922973632812, 0.13156556701660158, 
0.13257522583007814, 0.1328158721923828, 0.13173350524902344, 0.13212261962890626, 0.13244415283203126, 0.13331968688964843, 0.1319720916748047, 0.13162188720703125, 0.1338275909423828, 0.13188710021972655, 0.13169049072265626, 0.1329100799560547, 0.13289677429199218, 0.13157273864746094, 0.13191987609863282, 0.1330063934326172, 0.13177952575683594, 0.1324830780029297, 0.1317232666015625, 0.132210693359375, 0.2737438659667969, 0.13398220825195312, 0.13256195068359375, 0.13281686401367188, 0.13316409301757812, 0.13165664672851562, 0.131557373046875, 0.1315246124267578, 0.1316116485595703, 0.13375692749023438, 0.13275033569335937, 0.13160755920410155, 0.13167514038085937, 0.1316546630859375, 0.13151744079589844, 0.1325260772705078, 0.13415525817871093, 0.13211955261230468, 0.13158604431152343, 0.13158706665039063, 0.131599365234375, 0.13191885375976561, 0.13164959716796876, 0.13163615417480468, 0.131557373046875, 0.1315635223388672, 0.13156454467773437, 0.13150822448730468, 0.13155328369140626, 0.131778564453125, 0.13244825744628907, 0.13156761169433595, 0.13152870178222656, 0.13237452697753907, 0.13321420288085936, 0.1316188507080078, 0.13167100524902345, 0.13171507263183593, 0.13173043823242186, 0.13229055786132812, 0.13326028442382812, 0.13208883666992188, 0.13159730529785157, 0.13276570129394533, 0.13298074340820312, 0.13150210571289062, 0.13231517028808593, 0.13168019104003906, 0.1316177978515625, 0.13166490173339843, 0.13228031921386718, 0.131704833984375, 0.13154815673828124, 0.1315246124267578, 0.13159117126464845, 0.13162803649902344, 0.13144985961914063, 0.13153074645996093, 0.13159423828125, 0.13154304504394532, 0.1314959411621094, 0.13150822448730468, 0.13215335083007812, 0.2743265380859375, 0.13167922973632812, 0.13160243225097656, 0.13170278930664062, 0.13161068725585937, 0.13170375061035156, 0.13167820739746094, 0.13153996276855467, 0.13159628295898437, 0.13154917907714844, 0.13170994567871094, 0.13149491882324219, 0.1311682586669922, 0.1336678466796875, 0.13168333435058593, 0.1315020751953125, 0.13163827514648438, 0.1326510009765625, 0.13166490173339843, 0.131599365234375, 0.1316054992675781, 0.1315574035644531, 0.1316433563232422, 0.13159321594238282, 0.13151744079589844, 0.13169664001464843, 0.13158604431152343, 0.13171609497070313, 0.13180723571777345, 0.13306982421875, 0.1319505920410156, 0.13152665710449218, 0.1316188201904297, 0.13160755920410155, 0.13153485107421875, 0.1316321258544922, 0.13152255249023437, 0.13150413513183593, 0.13162393188476562, 0.13158604431152343, 0.13178880310058594, 0.13175398254394532, 0.13282815551757812, 0.13279335021972657, 0.1332623291015625, 0.13260493469238283, 0.1335029754638672, 0.1333217315673828, 0.13349990844726561, 0.13284454345703126, 0.13330943298339842, 0.13328793334960937, 0.13312818908691407, 0.13345382690429688, 0.13288447570800782, 0.13335142517089843, 0.1332725830078125, 0.1326561279296875, 0.13411943054199219, 0.1333104705810547, 0.13202330017089844, 0.1328404541015625, 0.13351219177246093, 0.27342642211914064, 0.1317181396484375, 0.131778564453125, 0.1316300811767578, 0.13164851379394532, 0.13157376098632811, 0.13182464599609375, 0.1316505584716797, 0.1314580535888672, 0.1316853790283203, 0.13220352172851563, 0.13342617797851564, 0.13168435668945314, 0.1316259765625, 0.13177754211425782, 0.13159423828125, 0.13173356628417968, 0.1318133087158203, 0.1316444091796875, 0.1315082550048828, 0.13163209533691406, 0.13170994567871094, 0.13161062622070313, 0.13163929748535155, 0.13251788330078124, 0.13330738830566408, 
0.13480653381347657, 0.13196902465820312, 0.1324451904296875, 0.13310054016113282, 0.13354290771484376, 0.13340882873535156, 0.1336657257080078, 0.13366886901855468, 0.133396484375, 0.13281893920898438, 0.13362892150878905, 0.13327769470214842, 0.1322239990234375, 0.13191474914550783, 0.13346099853515625, 0.1332162628173828, 0.13334835815429688, 0.13336679077148436, 0.13285171508789062, 0.13309132385253905, 0.13161369323730468, 0.13310464477539063, 0.13307391357421874, 0.13365248107910158, 0.13310464477539063, 0.13244313049316406, 0.1326376953125, 0.13354908752441405, 0.13300936889648438, 0.13332179260253907, 0.13388691711425782, 0.13313023376464844, 0.1333780517578125, 0.13311077880859376, 0.13335551452636718, 0.13207347106933592, 0.1331865539550781]",tokens/s,7.405801801245284,,,,,,,, -4bit-awq-gemm-flash_attention_2,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,meta-llama/Meta-Llama-3-8B,meta-llama/Meta-Llama-3-8B,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.0,,0.30.1,,,,1.19.2,,,,0.11.1,,,,,,,,,,,,,,,,,,,,,,,,,,,MB,1644.60544,7583.82592,0.0,6937.378816,6314.17344,s,10,6.228646179199219,0.6228646179199219,0.003273798456462667,0.6227339782714845,0.6241754699707032,0.6279462249755859,0.6309628289794922,"[0.6317169799804687, 0.6232648315429687, 0.6204146728515625, 0.6228704833984375, 0.6219456176757813, 0.6190134887695312, 0.6202999877929688, 0.6233375244140625, 0.6231851196289062, 0.6225974731445313]",tokens/s,411.0042417482646,kWh,7.31462190548579e-06,4.008082885946565e-06,3.373723287211802e-05,4.505993766355038e-05,tokens/kWh,5681321.663413707,MB,1644.60544,7583.82592,0.0,6937.378816,6464.046592,s,10,368.19280468750003,36.81928046875,0.018694382665316225,36.824119140625,36.835055078125,36.8438986328125,36.8509734765625,"[36.82331640625, 36.826859375, 36.8276015625, 36.8527421875, 36.83308984375, 36.78996875, 36.79416796875, 36.7968203125, 36.823703125, 36.82453515625]",tokens/s,1.7110600532639584,kWh,0.00043422340434458523,0.00023799224410691,0.0019653583664030845,0.00263757401485458,tokens/kWh,23885.58563482566,,s,629,373.2052468261721,0.5933310760352495,0.07394405347043131,0.5842933959960938,0.5858826293945313,0.58612470703125,1.20537375,"[0.5849108276367188, 0.5836380004882813, 0.5857188110351562, 0.5863209228515625, 0.5851494140625, 0.5851401977539062, 0.5856481323242188, 0.5834137573242187, 0.584342529296875, 0.58520166015625, 0.58519140625, 0.5853235473632813, 0.5863526611328125, 0.5858436889648437, 0.5854269409179688, 0.5851904296875, 0.5858662109375, 0.5851248779296875, 0.58393701171875, 0.586082275390625, 0.5860935668945313, 0.5858037719726562, 0.5858048095703124, 0.5849139404296875, 0.5841203002929688, 0.5845104370117188, 0.5843281860351562, 0.58593896484375, 0.5835950317382812, 0.5853214721679687, 0.5840045776367188, 0.58496923828125, 0.5845125122070313, 0.5841981201171875, 0.583709716796875, 0.583604248046875, 0.585080810546875, 0.5840435180664062, 0.5833502807617188, 0.583204833984375, 0.5833809814453125, 0.583488525390625, 0.5837742309570313, 0.5834803466796875, 0.5833226318359375, 
0.5833451538085938, 0.5854525146484375, 0.5855877075195313, 0.5852989501953125, 0.5850931396484375, 0.5837567749023438, 0.5835980834960938, 0.583244873046875, 0.5846691284179687, 0.5844193115234375, 0.5830215454101563, 0.58328369140625, 0.5829970092773438, 0.5831393432617188, 0.5830184936523437, 0.583066650390625, 0.5831342163085937, 1.2092733154296875, 0.5848668212890625, 0.5853839721679688, 0.5835458374023438, 0.583478271484375, 0.5848524780273437, 0.5844121704101563, 0.5853265991210937, 0.5850194091796875, 0.5835120849609375, 0.5833359375, 0.5842094116210937, 0.5853716430664062, 0.5833635864257812, 0.583319580078125, 0.5834424438476562, 0.5833430786132813, 0.584900634765625, 0.5847019653320312, 0.5855590209960938, 0.585365478515625, 0.585439208984375, 0.5842821044921875, 0.5841654052734375, 0.5858457641601562, 0.5847449340820312, 0.585164794921875, 0.5848309936523437, 0.584369140625, 0.58404248046875, 0.5838244018554688, 0.583257080078125, 0.5842994995117188, 0.5846712036132813, 0.5851217651367188, 0.5858897705078125, 0.5863434448242187, 0.5853060913085938, 0.5856419677734375, 0.5848340454101563, 0.5839616088867188, 0.5841131591796875, 0.5834690551757813, 0.5832816772460937, 0.5835386962890625, 0.5836953735351562, 0.5837783203125, 0.5832489013671875, 0.5833348999023438, 0.5838141479492187, 0.58338818359375, 0.5840414428710937, 0.5849354248046875, 0.5850501098632812, 0.5841889038085938, 0.5833840942382813, 0.5846098022460937, 0.5854556274414062, 0.5855375366210938, 0.586071044921875, 0.586082275390625, 0.5858805541992187, 0.5854515380859375, 1.2056739501953124, 0.5838673706054688, 0.5836922607421875, 0.5847716064453125, 0.5860086059570313, 0.586287109375, 0.5854852905273438, 0.5859491577148438, 0.5838069458007813, 0.5842606201171875, 0.5834219360351562, 0.58568701171875, 0.5857372436523437, 0.5852446899414062, 0.58332568359375, 0.5833123779296875, 0.5834485473632812, 0.5832437744140625, 0.583193603515625, 0.5835929565429687, 0.5832130737304687, 0.5838284912109375, 0.58541259765625, 0.5862522583007812, 0.5836943359375, 0.5834444580078125, 0.5840834350585937, 0.5839165649414062, 0.5839165649414062, 0.585080810546875, 0.583625732421875, 0.5833779296875, 0.583635986328125, 0.5836810302734375, 0.585175048828125, 0.5848340454101563, 0.5836544189453124, 0.5838428344726563, 0.5849999389648437, 0.5866905517578125, 0.5863915405273438, 0.5861068725585937, 0.5854617309570312, 0.58519140625, 0.585206787109375, 0.5851094970703125, 0.5855795288085938, 0.5858048095703124, 0.5847920532226563, 0.58595947265625, 0.5849281616210937, 0.5841448974609375, 0.5848914184570313, 0.5851996459960938, 0.5850060424804687, 0.585902099609375, 0.5855529174804688, 0.5846098022460937, 0.5835745239257812, 0.5832652587890625, 0.5837926635742188, 0.5831690063476562, 0.5832499389648438, 1.207994384765625, 0.585818115234375, 0.5860628662109375, 0.5862369384765626, 0.5856450805664063, 0.5840588989257812, 0.5840947265625, 0.5850921020507812, 0.5846712036132813, 0.584900634765625, 0.5842124633789062, 0.585068603515625, 0.5859398803710938, 0.5848207397460937, 0.585101318359375, 0.5848822021484374, 0.5862144165039063, 0.5845022583007813, 0.58452685546875, 0.5850029907226563, 0.5852498168945313, 0.58498046875, 0.5852426147460937, 0.584975341796875, 0.5861365966796875, 0.5855333862304688, 0.5858826293945313, 0.5837987670898438, 0.5849784545898438, 0.584690673828125, 0.585865234375, 0.585797607421875, 0.58403125, 0.5845575561523437, 0.583736328125, 0.5857874145507812, 0.584921142578125, 0.5861068115234375, 0.5857382202148438, 
0.5839708251953125, 0.5849497680664062, 0.5862993774414063, 0.5846947631835937, 0.5865574340820312, 0.5856399536132812, 0.5843414916992188, 0.5835376586914063, 0.5834024658203125, 0.5844940795898438, 0.5834383544921875, 0.584827880859375, 0.5836144409179688, 0.5839237060546875, 0.5843804321289062, 0.5840660400390625, 0.5849600219726563, 0.5857423095703125, 0.583419921875, 0.5841243896484375, 0.5841254272460937, 0.5858836669921875, 0.586166259765625, 0.5862020874023437, 1.2080977783203124, 0.5857433471679687, 0.5837967529296875, 0.5849077758789063, 0.5842442016601562, 0.5845339965820312, 0.5844049682617187, 0.584521728515625, 0.585860107421875, 0.5852057495117188, 0.5834055786132812, 0.5845022583007813, 0.5843896484375, 0.584616943359375, 0.5853900756835938, 0.585218017578125, 0.584774658203125, 0.583119873046875, 0.5841193237304687, 0.583546875, 0.5849262084960938, 0.5859512329101563, 0.5857454223632812, 0.5856983032226563, 0.5860689697265625, 0.584722412109375, 0.585270263671875, 0.5859860229492188, 0.5858826293945313, 0.5849948120117188, 0.5863117065429687, 0.5867387084960938, 0.5861365966796875, 0.583546875, 0.583605224609375, 0.5836113891601562, 0.58324169921875, 0.5857771606445312, 0.5841275024414062, 0.583731201171875, 0.5836175537109375, 0.5853092041015625, 0.5840271606445312, 0.583151611328125, 0.5838561401367187, 0.5832478637695313, 0.5832324829101563, 0.5840947265625, 0.5859563598632812, 0.5858385620117188, 0.5836267700195312, 0.5830523071289062, 0.58344140625, 0.5830737915039063, 0.5846456298828125, 0.58616015625, 0.5856010131835937, 0.584089599609375, 0.5855467529296875, 0.5856133422851563, 0.5847756958007813, 0.583384033203125, 0.583277587890625, 1.204601806640625, 0.5862328491210937, 0.5855662231445312, 0.5857454223632812, 0.585776123046875, 0.5854013671875, 0.5835397338867188, 0.5845718994140625, 0.584806396484375, 0.5833901977539062, 0.58340966796875, 0.5848545532226562, 0.5836718139648438, 0.583673828125, 0.5833185424804688, 0.5834208984375, 0.5831044921875, 0.58376806640625, 0.5851791381835938, 0.584932373046875, 0.5834536743164063, 0.58336767578125, 0.5833983764648437, 0.583994384765625, 0.5848350830078125, 0.5854105834960938, 0.5858877563476562, 0.584690673828125, 0.5837537231445312, 0.5838837890625, 0.5847900390625, 0.5848023071289062, 0.5842175903320312, 0.5831762084960938, 0.5833820190429687, 0.5859891357421875, 0.5835069580078125, 0.5831127319335937, 0.5831157836914063, 0.5827809448242187, 0.58313623046875, 0.58292431640625, 0.5838295288085937, 0.5834178466796875, 0.5833871459960938, 0.5833328857421874, 0.5838223266601562, 0.5833430786132813, 0.58349462890625, 0.5832028198242187, 0.583478271484375, 0.5832816772460937, 0.5838448486328125, 0.58444287109375, 0.5835448608398438, 0.583488525390625, 0.5832765502929688, 0.5831597900390625, 0.5829530029296875, 0.5834905395507812, 0.5833994140625, 0.583025634765625, 0.5832243041992188, 1.203841064453125, 0.5830482177734375, 0.58326220703125, 0.5829181518554688, 0.5830656127929688, 0.5832324829101563, 0.5837772827148437, 0.5836277465820312, 0.5839247436523437, 0.5832898559570312, 0.5831966552734374, 0.5831823120117188, 0.5834014892578125, 0.5832069091796875, 0.58324169921875, 0.5832232666015625, 0.5835601806640625, 0.5835192260742188, 0.5839534301757813, 0.583889892578125, 0.5849528198242188, 0.583784423828125, 0.5851146240234375, 0.584163330078125, 0.5861038208007813, 0.5857628173828126, 0.5840619506835938, 0.5834321899414062, 0.5833062133789062, 0.5850715942382813, 0.5838827514648437, 0.5832939453125, 0.5836646118164063, 
0.5837035522460937, 0.5832540283203125, 0.58330419921875, 0.5831854248046875, 0.5831260375976562, 0.5833052368164062, 0.5834301147460937, 0.5840445556640625, 0.5837404174804688, 0.5842206420898437, 0.5848719482421875, 0.5832294311523437, 0.5834393310546875, 0.5849579467773437, 0.585996337890625, 0.5837516479492187, 0.5843865356445312, 0.5841224365234375, 0.58503466796875, 0.5842933959960938, 0.5842514038085938, 0.5854228515625, 0.5845718994140625, 0.5853296508789062, 0.58530712890625, 0.585987060546875, 0.5857822875976563, 0.5840445556640625, 0.5849928588867187, 0.5844633178710937, 1.2071761474609375, 0.5841336059570312, 0.5841234130859375, 0.5854689331054688, 0.5842175903320312, 0.5836083374023437, 0.5841305541992188, 0.5834137573242187, 0.583314453125, 0.5843251342773438, 0.584774658203125, 0.5859430541992188, 0.58572802734375, 0.5840137939453125, 0.583277587890625, 0.583404541015625, 0.58437939453125, 0.5849334106445313, 0.5850767211914063, 0.5838622436523437, 0.5835120849609375, 0.583267333984375, 0.5832949829101562, 0.5855610961914063, 0.583498779296875, 0.5834619140625, 0.58427392578125, 0.584437744140625, 0.5862256469726562, 0.5834854125976563, 0.5835243530273437, 0.5841622924804688, 0.58366259765625, 0.584537109375, 0.5853388671875, 0.5849989013671875, 0.5844951171875, 0.5839963989257813, 0.5834158325195312, 0.583077880859375, 0.5831526489257812, 0.5830082397460937, 0.5828720703125, 0.5832632446289062, 0.5835222778320313, 0.5850848999023438, 0.5834332275390625, 0.58313623046875, 0.5834127197265625, 0.58305126953125, 0.5829222412109375, 0.5836380004882813, 0.5834926147460937, 0.5847183227539062, 0.58433740234375, 0.5834075927734375, 0.583525390625, 0.585175048828125, 0.5840414428710937, 0.5833728637695312, 0.5833543090820312, 0.5860372314453125, 0.58665673828125, 1.209406494140625, 0.5842022705078125, 0.5846261596679687, 0.5852323608398438, 0.5844152221679687, 0.5856993408203125, 0.5855590209960938, 0.5855303955078125, 0.5856942138671875, 0.5849548950195312, 0.584184814453125, 0.583784423828125, 0.5838602905273438, 0.583943115234375, 0.5855211791992188, 0.5860454711914063, 0.5839083251953125, 0.5838970947265625, 0.58345166015625, 0.584595458984375, 0.5834035034179688, 0.583773193359375, 0.5848862915039063, 0.5850439453125, 0.5842882690429687, 0.5836083374023437, 0.5860198364257813, 0.5854996337890624, 0.5847982177734375, 0.58365234375, 0.5836318969726563, 0.5851351318359375, 0.5849569091796875, 0.5841541137695313, 0.5852262573242187, 0.5854945068359375, 0.58353662109375, 0.5831260375976562, 0.5857720336914063, 0.5833697509765625, 0.5847398681640625, 0.5836799926757813, 0.5841725463867188, 0.5835213012695313, 0.5834793090820313, 0.5834086303710937, 0.5851555786132813, 0.5856593627929687, 0.586039306640625, 0.586672119140625, 0.5860044555664062, 0.583857177734375, 0.5836093139648437, 0.5832765502929688, 0.583372802734375, 0.5853153076171875, 0.5847501220703125, 0.5838038330078125, 0.5834752197265625, 0.5835591430664062, 0.5838551635742187, 0.5840281372070313, 0.5836544189453124, 1.2069908447265625, 0.5845330200195312, 0.5855457153320313, 0.58528564453125, 0.5856204833984375, 0.5856962280273438, 0.5844951171875, 0.585575439453125, 0.5854146728515625, 0.5844561767578125, 0.5846773681640625, 0.5847982177734375, 0.5842196655273437, 0.5833287963867188, 0.5843486938476562, 0.5833656616210937, 0.5831823120117188, 0.5832847290039063, 0.5848411865234375, 0.585359375, 0.5844951171875, 0.5839882202148438, 0.58340966796875, 0.5836564331054688, 0.5833912353515625, 0.5853634643554687, 
0.5840670776367187, 0.5857935180664062, 0.584247314453125, 0.5839431762695313, 0.5854044189453125, 0.5834598388671876, 0.5832744750976563, 0.584158203125, 0.5852743530273438, 0.5853818969726563, 0.5850951538085938, 0.5850203857421875, 0.584026123046875, 0.5849088134765625, 0.5856348266601562, 0.5836636352539063, 0.5857105712890625, 0.58444287109375, 0.58351513671875, 0.5845309448242187, 0.5851401977539062, 0.5842554931640624, 0.5832396850585938, 0.583677978515625, 0.58317822265625, 0.5831372680664062, 0.5843138427734375, 0.5847398681640625, 0.5867151489257812, 0.5837537231445312, 0.58568603515625, 0.584394775390625, 0.5833359375, 0.5846251220703125, 0.5849108276367188, 0.5853767700195313, 0.5857874145507812]",tokens/s,1.6853996704204155,,,,,,,, -4bit-awq-gemm-flash_attention_2,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,microsoft/phi-1_5,microsoft/phi-1_5,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.0,,0.30.1,,,,1.19.2,,,,0.11.1,,,,,,,,,,,,,,,,,,,,,,,,,,,MB,1306.464256,2103.967744,0.0,1457.52064,1272.750592,s,10,1.329427551269531,0.13294275512695314,0.0012200886206429765,0.13265889739990233,0.1341803771972656,0.1350921875,0.1358216357421875,"[0.13600399780273437, 0.13269917297363282, 0.1317310791015625, 0.1318907470703125, 0.131806884765625, 0.1328501434326172, 0.1324745635986328, 0.13397775268554687, 0.13261862182617187, 0.1333745880126953]",tokens/s,1925.6408501202932,kWh,1.562583448681218e-06,8.560805149739424e-07,6.424628677710524e-06,8.843292641365686e-06,tokens/kWh,28948493.55120577,MB,1306.464256,2103.967744,0.0,1457.52064,1369.423872,s,10,78.31643701171875,7.831643701171875,0.021087870794415795,7.826485107421875,7.855575732421875,7.869772485351563,7.881129887695312,"[7.8200595703125, 7.8524208984375, 7.80727783203125, 7.828986328125, 7.81284423828125, 7.83669384765625, 7.82121484375, 7.825626953125, 7.82734326171875, 7.88396923828125]",tokens/s,8.044288326162373,kWh,9.24571096544203e-05,5.067334492555682e-05,0.00037095189544549136,0.0005140823500254685,tokens/kWh,122548.4593993139,,s,629,79.38599323272705,0.12620984615695877,0.015775713114662086,0.12392652893066407,0.12565974731445312,0.1260990447998047,0.2560962493896484,"[0.12357324981689453, 0.12395417785644532, 0.12677222442626954, 0.12635955047607422, 0.124906494140625, 0.12412108612060548, 0.12417945861816407, 0.12488601684570312, 0.12357427215576172, 0.12358553314208984, 0.12521676635742188, 0.12579737854003906, 0.1256099853515625, 0.12379033660888672, 0.12357119750976563, 0.12366028594970703, 0.12364492797851563, 0.12450201416015624, 0.12360806274414063, 0.12366643524169922, 0.12377497863769531, 0.12358348846435546, 0.12363878631591797, 0.12448051452636719, 0.12341043090820313, 0.12406886291503906, 0.1237401580810547, 0.12359986877441406, 0.12348108673095703, 0.12367359924316407, 0.12363571166992188, 0.12357324981689453, 0.12363468933105469, 0.12353740692138672, 0.12361625671386718, 0.12422758483886719, 0.12424396514892579, 0.12538982391357423, 0.12366336059570313, 0.12492594909667969, 0.12365926361083984, 0.12361727905273437, 
0.12350054168701172, 0.12435148620605468, 0.12661043548583983, 0.12594380950927733, 0.12404121398925781, 0.12383539581298827, 0.12378726196289062, 0.12356095886230468, 0.12356813049316406, 0.12374221038818359, 0.12462796783447265, 0.12367359924316407, 0.12347289276123047, 0.12355174255371094, 0.1236684799194336, 0.12365004730224609, 0.12358656311035156, 0.12423372650146484, 0.12397772979736328, 0.12430950164794922, 0.25706393432617186, 0.12518399810791014, 0.1238814697265625, 0.12411289978027344, 0.12383539581298827, 0.12371456146240234, 0.12595814514160156, 0.12542259216308593, 0.12580659484863282, 0.12499353790283203, 0.1239152603149414, 0.12502528381347655, 0.12599295806884767, 0.12452146911621094, 0.12406169891357421, 0.12400128173828125, 0.12455014038085938, 0.12355481719970703, 0.12359884643554687, 0.12353740692138672, 0.1237760009765625, 0.1253580780029297, 0.12455423736572266, 0.1236305923461914, 0.12355788421630859, 0.12357529449462891, 0.12357529449462891, 0.123683837890625, 0.12378419494628906, 0.12360908508300782, 0.12378828430175781, 0.12359474945068359, 0.12440064239501954, 0.12405043029785157, 0.12471295928955078, 0.12421119689941407, 0.12409037017822265, 0.12466687774658203, 0.12391321563720703, 0.1252689895629883, 0.1255536651611328, 0.12441907501220703, 0.12589260864257812, 0.12565299224853516, 0.12552909088134764, 0.12456448364257812, 0.12414975738525391, 0.12398387145996094, 0.1251430435180664, 0.12559257507324217, 0.1256447982788086, 0.12538572692871094, 0.12506521606445312, 0.12563251495361327, 0.12649676513671876, 0.12583424377441407, 0.1255536651611328, 0.12570214080810546, 0.12467916870117188, 0.12509900665283202, 0.12491264343261718, 0.12573184204101562, 0.12440268707275391, 0.2558197784423828, 0.12364800262451171, 0.123683837890625, 0.12350975799560547, 0.12375552368164063, 0.12458700561523438, 0.12379647827148438, 0.12382003021240234, 0.12372787475585938, 0.12350975799560547, 0.12364492797851563, 0.12362751770019531, 0.12420403289794922, 0.12373401641845704, 0.12366745758056641, 0.12346367645263671, 0.1236490249633789, 0.12343807983398437, 0.12373401641845704, 0.12360601806640625, 0.1237391357421875, 0.12377907562255859, 0.12418252563476563, 0.12495769500732422, 0.12483993530273438, 0.12376576232910157, 0.12357529449462891, 0.12388044738769531, 0.12398592376708985, 0.12371250915527343, 0.12381696319580078, 0.1249966049194336, 0.12406578826904296, 0.12373709106445313, 0.1236316146850586, 0.12356710052490234, 0.1236305923461914, 0.12532736206054687, 0.12533248138427736, 0.1239900131225586, 0.12350566101074219, 0.12367155456542969, 0.12361727905273437, 0.12359065246582031, 0.12348108673095703, 0.12358451080322265, 0.12368691253662109, 0.12367565155029298, 0.12437299346923827, 0.12555570983886719, 0.12554956817626953, 0.12386406707763672, 0.12385485076904297, 0.12392857360839844, 0.12358246612548827, 0.12377907562255859, 0.12381081390380859, 0.12436070251464844, 0.12358144378662109, 0.12363775634765625, 0.12359168243408203, 0.12367974090576171, 0.12394802856445312, 0.2562037658691406, 0.12365312194824218, 0.1237053451538086, 0.1237760009765625, 0.12487884521484376, 0.12573286437988282, 0.12605644989013673, 0.12572876739501954, 0.12546969604492186, 0.1246740493774414, 0.124980224609375, 0.12507443237304688, 0.12493721771240235, 0.12486860656738281, 0.1260953598022461, 0.12375142669677734, 0.12360294342041016, 0.12364800262451171, 0.12349849700927734, 0.12341350555419922, 0.12356915283203125, 0.12356813049316406, 0.12392652893066407, 0.12379750061035157, 
0.12378316497802734, 0.12362239837646484, 0.1237544937133789, 0.12356301116943359, 0.12365824127197265, 0.12351487731933594, 0.12436070251464844, 0.12455014038085938, 0.12394802856445312, 0.12437811279296875, 0.12415590667724609, 0.12446412658691407, 0.1244395523071289, 0.12447334289550781, 0.1255577621459961, 0.1261445083618164, 0.12384767913818359, 0.12500070190429688, 0.12377907562255859, 0.12363775634765625, 0.12484812927246093, 0.12531302642822265, 0.12550348663330077, 0.12423168182373047, 0.12353024291992187, 0.12354150390625, 0.12352819061279297, 0.12369817352294922, 0.12365004730224609, 0.12370738983154297, 0.12456755065917968, 0.12433100891113281, 0.1237739486694336, 0.12359168243408203, 0.12380569458007812, 0.12372684478759766, 0.12373299407958985, 0.12365721893310547, 0.12492594909667969, 0.2555535430908203, 0.12613938903808594, 0.12532940673828125, 0.12536217498779298, 0.1253396453857422, 0.12545536041259767, 0.12536831665039064, 0.12381081390380859, 0.12342476654052735, 0.12351283264160157, 0.12347801971435547, 0.12362035369873046, 0.12350669097900391, 0.12365414428710937, 0.12357324981689453, 0.12354252624511719, 0.12360192108154297, 0.12350975799560547, 0.12355379486083984, 0.12362751770019531, 0.12359065246582031, 0.12350259399414062, 0.12365414428710937, 0.12462079620361328, 0.12366643524169922, 0.12346572875976562, 0.12355891418457031, 0.12368691253662109, 0.12421836853027343, 0.12335001373291016, 0.12413645172119141, 0.12520652770996094, 0.12418355560302734, 0.1253570556640625, 0.12440064239501954, 0.1235230712890625, 0.1235968017578125, 0.12357529449462891, 0.12391321563720703, 0.12415590667724609, 0.12387635040283203, 0.12383539581298827, 0.12365106964111328, 0.12362649536132812, 0.12361727905273437, 0.12473446655273437, 0.12511949157714844, 0.12443341064453126, 0.12362854766845703, 0.12501708984375, 0.12398387145996094, 0.12350873565673828, 0.12383948516845703, 0.1234698257446289, 0.12409651184082031, 0.12359884643554687, 0.12379750061035157, 0.12368077087402343, 0.12469760131835937, 0.12350669097900391, 0.1235077133178711, 0.12355891418457031, 0.12358451080322265, 0.25665740966796874, 0.12402899169921874, 0.12411385345458985, 0.12439552307128907, 0.12551270294189454, 0.12451840209960938, 0.12435968017578125, 0.12583116912841796, 0.1254318084716797, 0.1253939208984375, 0.12529971313476562, 0.1254133758544922, 0.1254062042236328, 0.1256294403076172, 0.12560486602783202, 0.12544409942626952, 0.12538470458984374, 0.12542668914794922, 0.1252710418701172, 0.12386508941650391, 0.12361420440673829, 0.12358144378662109, 0.12357324981689453, 0.12350873565673828, 0.12352819061279297, 0.12368793487548828, 0.12353945922851563, 0.12357222747802735, 0.12369203186035156, 0.12346880340576172, 0.12355583953857421, 0.1235599365234375, 0.12362035369873046, 0.12436275482177735, 0.1252147216796875, 0.12532940673828125, 0.12448358154296875, 0.1234882583618164, 0.12387020874023437, 0.12536627197265626, 0.12422758483886719, 0.12369407653808594, 0.12347187042236328, 0.12357427215576172, 0.12363673400878906, 0.12348006439208985, 0.12376166534423828, 0.12611788940429688, 0.12559974670410157, 0.12563353729248047, 0.12546969604492186, 0.12542668914794922, 0.1254840316772461, 0.12547277069091797, 0.12400947570800781, 0.12405452728271485, 0.12358451080322265, 0.1235599365234375, 0.12348928070068359, 0.12358246612548827, 0.1234862060546875, 0.12351181030273438, 0.12364492797851563, 0.2565027770996094, 0.12444467163085937, 0.124115966796875, 0.12360499572753907, 0.12404326629638672, 
0.12441804504394531, 0.1250918426513672, 0.1257748489379883, 0.12563353729248047, 0.12372889709472656, 0.12562022399902345, 0.12486041259765625, 0.12480921936035157, 0.123720703125, 0.12351897430419922, 0.12377088165283204, 0.12374221038818359, 0.12360908508300782, 0.1239920654296875, 0.1237022705078125, 0.12362547302246094, 0.12358758544921875, 0.12353330993652344, 0.12495155334472656, 0.12379033660888672, 0.1237227554321289, 0.12362137603759765, 0.1235445785522461, 0.12405248260498047, 0.12381081390380859, 0.12356403350830078, 0.12472217559814452, 0.12582911682128906, 0.1260021743774414, 0.12565503692626953, 0.12548607635498046, 0.125338623046875, 0.12534374237060547, 0.12404428863525391, 0.12439449310302735, 0.12376576232910157, 0.12392550659179688, 0.12379135894775391, 0.1235384292602539, 0.12375961303710938, 0.12353228759765625, 0.12357427215576172, 0.12382310485839844, 0.1239767074584961, 0.12351385498046875, 0.12377088165283204, 0.12349235534667968, 0.12367667388916016, 0.1235568618774414, 0.12351283264160157, 0.12387225341796874, 0.12435968017578125, 0.12366745758056641, 0.12357734680175782, 0.12359986877441406, 0.12354560089111329, 0.12352819061279297, 0.12502630615234375, 0.26002227783203125, 0.12474163055419922, 0.12465561676025391, 0.12385485076904297, 0.12388966369628907, 0.12364697265625, 0.12477133178710938, 0.12527410888671875, 0.12405145263671875, 0.1237760009765625, 0.12372991943359375, 0.1253529586791992, 0.12522803497314453, 0.12430233764648438, 0.12380057525634766, 0.12450713348388671, 0.12450508880615234, 0.12496998596191407, 0.12366950225830078, 0.12387840270996094, 0.12420403289794922, 0.1244395523071289, 0.12422451019287109, 0.12519014739990234, 0.12686438751220702, 0.12572672271728516, 0.12376882934570313, 0.12486656188964844, 0.1239767074584961, 0.12363263702392578, 0.12419276428222656, 0.12381183624267578, 0.12427263641357422, 0.12527206420898437, 0.12414259338378907, 0.12360499572753907, 0.12352102661132812, 0.12354662322998047, 0.12364083099365235, 0.12379545593261719, 0.1257748489379883, 0.12477439880371094, 0.12361011505126954, 0.12374425506591796, 0.12376268768310547, 0.12368691253662109, 0.12373606109619141, 0.12365824127197265, 0.1240596466064453, 0.12369305419921875, 0.12396441650390624, 0.12373811340332032, 0.12371353912353515, 0.12357119750976563, 0.12355379486083984, 0.1238067169189453, 0.12372889709472656, 0.12425318145751953, 0.12379853057861329, 0.12396031951904297, 0.12386099243164063, 0.12410060882568359, 0.12376780700683594, 0.2569492492675781, 0.12346163177490234, 0.12355788421630859, 0.12358451080322265, 0.12365516662597656, 0.12385894775390625, 0.12412210845947266, 0.12367871856689452, 0.12457984161376953, 0.12387123107910156, 0.12360704040527344, 0.12351078033447266, 0.12353638458251953, 0.12353638458251953, 0.12359782409667969, 0.12352921295166015, 0.12678758239746094, 0.1258260498046875, 0.12560076904296874, 0.12596224212646484, 0.12543590545654296, 0.12542668914794922, 0.12532838439941407, 0.12374221038818359, 0.12540006256103517, 0.12414771270751954, 0.12379750061035157, 0.12367871856689452, 0.12370944213867187, 0.12368077087402343, 0.12369817352294922, 0.12355379486083984, 0.12357119750976563, 0.12395622253417969, 0.12402175903320313, 0.12356505584716797, 0.12348210906982422, 0.12368793487548828, 0.12353536224365234, 0.12395622253417969, 0.12403302764892578, 0.12700466918945313, 0.12573798370361328, 0.12478463745117188, 0.12397158050537109, 0.12360192108154297, 0.12377292633056641, 0.12353024291992187, 0.12364390563964844, 
0.12410163116455078, 0.12373197174072266, 0.123936767578125, 0.12391731262207031, 0.12367155456542969, 0.12363980865478516, 0.12377804565429687, 0.12557823944091798, 0.12434022521972657, 0.12393472290039062, 0.12455423736572266, 0.12541849517822265, 0.12554239654541016, 0.12558438110351564, 0.25772952270507815, 0.12436172485351563, 0.12393984222412109, 0.12354867553710938, 0.12373197174072266, 0.12357119750976563, 0.123578369140625, 0.12372889709472656, 0.12355379486083984, 0.12389990234375, 0.12362239837646484, 0.12408729553222657, 0.12435148620605468, 0.1244395523071289, 0.12433100891113281, 0.12407398223876953, 0.12391117095947266, 0.12426445007324219, 0.12396236419677735, 0.12403404998779297, 0.1263288345336914, 0.12405554962158204, 0.12396646118164062, 0.1240995864868164, 0.12518297576904297, 0.12740505981445313, 0.12441292572021484, 0.12416512298583984, 0.12624896240234376, 0.12561100769042968, 0.12505804443359375, 0.12423577880859375, 0.12659302520751953, 0.1257891845703125, 0.1247457275390625, 0.12536627197265626, 0.12574105834960939, 0.12544102478027344, 0.12642201232910155, 0.1264035873413086, 0.12703129577636718, 0.12545126342773438, 0.12710707092285156, 0.12490751647949219, 0.1260052490234375, 0.12597964477539061, 0.1264691162109375, 0.12679987335205078, 0.12610150146484375, 0.12555980682373047, 0.12536831665039064, 0.12553011322021485, 0.12543283081054687, 0.125591552734375, 0.12561817932128908, 0.12541849517822265, 0.1260789794921875, 0.12718182373046874, 0.1256980514526367, 0.12580147552490234, 0.1268490219116211, 0.1256785888671875, 0.12574310302734376]",tokens/s,7.92331209053505,,,,,,,, -4bit-awq-gemm-flash_attention_2,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,togethercomputer/RedPajama-INCITE-Base-3B-v1,togethercomputer/RedPajama-INCITE-Base-3B-v1,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.1,,0.30.1,,,,1.19.2,,,,0.11.1,,,,,,,,,,,,,,,,,,,,,,,,,,,MB,2293.178368,3364.356096,0.0,2717.908992,2483.907584,s,10,2.332126235961914,0.2332126235961914,0.0007025035151089087,0.23313691711425782,0.23428714752197263,0.2343353157043457,0.23437385025024415,"[0.2342764434814453, 0.23438348388671876, 0.23256541442871093, 0.2323934326171875, 0.23265362548828125, 0.23239085388183595, 0.23300813293457032, 0.2332657012939453, 0.2336903381347656, 0.23349880981445312]",tokens/s,1097.7107330316092,kWh,2.7429988588949646e-06,1.5025212867385563e-06,1.2985660641045106e-05,1.7231180786678625e-05,tokens/kWh,14856788.003635412,MB,2293.178368,3364.356096,0.0,2717.908992,2632.491008,s,10,135.513068359375,13.5513068359375,0.004490003668394658,13.54976904296875,13.55813466796875,13.559575634765624,13.560728408203124,"[13.557814453125, 13.5512802734375, 13.549751953125, 13.54661328125, 13.549029296875, 13.5610166015625, 13.5497861328125, 13.546083984375, 13.5524775390625, 
13.54921484375]",tokens/s,4.64899811971836,kWh,0.0001599563909630583,8.766855071097778e-05,0.0007514645898075666,0.0009990895314816027,tokens/kWh,63057.411788284844,,s,629,137.39515400695814,0.21843426710168204,0.027695203958367918,0.2150584259033203,0.21541007690429687,0.21553008728027342,0.44768570190429685,"[0.215546875, 0.21509120178222657, 0.21484748840332032, 0.2146693115234375, 0.2148485107421875, 0.21482701110839844, 0.21496627807617188, 0.2148546600341797, 0.2148341827392578, 0.2148423614501953, 0.2148730926513672, 0.21502566528320313, 0.21523968505859375, 0.21587353515625, 0.2149601287841797, 0.2150584259033203, 0.21547929382324219, 0.21505638122558593, 0.21523866271972655, 0.21525401306152345, 0.2153605194091797, 0.21535232543945312, 0.21514854431152344, 0.21529600524902343, 0.2149396514892578, 0.21511885070800782, 0.21522738647460937, 0.21515980529785156, 0.21498982238769532, 0.21505433654785155, 0.21510963439941405, 0.21512602233886718, 0.2155325469970703, 0.21535232543945312, 0.215225341796875, 0.21511270141601563, 0.21541171264648437, 0.21544345092773437, 0.2153164825439453, 0.21536767578125, 0.2153871307373047, 0.21529087829589844, 0.21537893676757813, 0.21553868103027343, 0.21545779418945313, 0.21531546020507814, 0.21509529113769532, 0.2152816619873047, 0.21531954956054689, 0.2148720703125, 0.215088134765625, 0.2149949493408203, 0.21501849365234374, 0.21520999145507813, 0.2153052215576172, 0.21509529113769532, 0.21530111694335938, 0.2151014404296875, 0.21536665344238282, 0.21565542602539062, 0.21544345092773437, 0.21517721557617187, 0.4497049560546875, 0.21492530822753905, 0.21484031677246093, 0.21474610900878907, 0.21518130493164062, 0.21497036743164064, 0.2149539794921875, 0.2148833312988281, 0.21501951599121094, 0.21493760681152344, 0.21485568237304686, 0.21516493225097658, 0.21500518798828125, 0.2150102996826172, 0.2150328369140625, 0.21521510314941406, 0.2148863983154297, 0.21485055541992187, 0.21527859497070312, 0.21493862915039064, 0.21490380859375, 0.2149171142578125, 0.21504307556152344, 0.21492428588867188, 0.21488844299316406, 0.21539634704589844, 0.21504307556152344, 0.2151526336669922, 0.21495603942871094, 0.21498675537109374, 0.21492019653320313, 0.21486285400390626, 0.21517312622070311, 0.2152755126953125, 0.21528883361816406, 0.21505433654785155, 0.21547929382324219, 0.21528985595703126, 0.21551922607421875, 0.21516184997558593, 0.21498265075683592, 0.21506048583984375, 0.21529702758789063, 0.21518438720703126, 0.21498880004882812, 0.21501747131347657, 0.21603123474121094, 0.21503077697753906, 0.21511576843261718, 0.21513523864746092, 0.21525605773925782, 0.21546394348144532, 0.2150963134765625, 0.21497445678710939, 0.2149396514892578, 0.2152611846923828, 0.21501338195800782, 0.2153605194091797, 0.21494989013671875, 0.2149775390625, 0.21501235961914061, 0.21545472717285155, 0.21544960021972656, 0.4477255554199219, 0.21488946533203124, 0.2147993621826172, 0.214830078125, 0.21490687561035157, 0.21492019653320313, 0.21471743774414062, 0.21540556335449218, 0.21555815124511718, 0.2154772491455078, 0.21546086120605468, 0.21494784545898438, 0.21496524047851562, 0.2147747802734375, 0.21512191772460937, 0.215014404296875, 0.214935546875, 0.21500314331054687, 0.21485055541992187, 0.21497445678710939, 0.2149775390625, 0.21503897094726562, 0.21483314514160157, 0.2149283905029297, 0.21502053833007811, 0.21528678894042969, 0.2153553924560547, 0.21509735107421876, 0.21505229187011718, 0.2149539794921875, 0.21523455810546874, 0.2149099578857422, 0.21484339904785157, 
0.21541888427734374, 0.21507379150390624, 0.21519564819335937, 0.2150328369140625, 0.21511065673828125, 0.21497343444824218, 0.2153123779296875, 0.21532876586914063, 0.21510963439941405, 0.21525709533691406, 0.21491506958007814, 0.2153175048828125, 0.21504818725585936, 0.2149048309326172, 0.21543218994140625, 0.21501951599121094, 0.21494886779785155, 0.21495603942871094, 0.21506764221191407, 0.21505433654785155, 0.21506150817871095, 0.21549261474609374, 0.2151208953857422, 0.21526220703125, 0.21505331420898438, 0.21497036743164064, 0.21506048583984375, 0.2150440979003906, 0.21503590393066407, 0.21496934509277343, 0.4474736633300781, 0.21475225830078126, 0.2146570281982422, 0.21483314514160157, 0.2149171142578125, 0.21528678894042969, 0.21490380859375, 0.21523353576660156, 0.2151004180908203, 0.214935546875, 0.21490176391601562, 0.21502362060546876, 0.21514854431152344, 0.21500006103515626, 0.21510861206054688, 0.21527040100097655, 0.21625958251953126, 0.2149365692138672, 0.2149539794921875, 0.21506150817871095, 0.21505638122558593, 0.21492326354980468, 0.21511167907714843, 0.21502362060546876, 0.21509120178222657, 0.21505433654785155, 0.21530931091308594, 0.2149396514892578, 0.2147993621826172, 0.21485568237304686, 0.2149283905029297, 0.214972412109375, 0.2149918670654297, 0.214898681640625, 0.21487820434570312, 0.21496421813964844, 0.21520281982421874, 0.214972412109375, 0.2149775390625, 0.21498675537109374, 0.2149212188720703, 0.21492941284179687, 0.21481471252441406, 0.21471128845214843, 0.21537791442871093, 0.21497856140136717, 0.21484442138671875, 0.21496421813964844, 0.21498573303222657, 0.215046142578125, 0.21490176391601562, 0.2149591064453125, 0.2149529571533203, 0.2151034851074219, 0.21514137268066405, 0.2150768585205078, 0.21502259826660156, 0.21512191772460937, 0.2151628875732422, 0.2150584259033203, 0.21498573303222657, 0.21492019653320313, 0.21532365417480467, 0.4478924865722656, 0.21483724975585938, 0.21483314514160157, 0.21480447387695312, 0.21494476318359376, 0.21484339904785157, 0.2147061767578125, 0.214793212890625, 0.21511167907714843, 0.2152048645019531, 0.2148833312988281, 0.21500518798828125, 0.21489459228515626, 0.21502464294433593, 0.2147235870361328, 0.21478810119628905, 0.2147553253173828, 0.21490176391601562, 0.2149591064453125, 0.21520588684082032, 0.21484646606445312, 0.2149109802246094, 0.21492633056640625, 0.21500006103515626, 0.21491302490234376, 0.21484133911132813, 0.2149908447265625, 0.21542912292480468, 0.2149601287841797, 0.21523455810546874, 0.21497958374023438, 0.21499288940429687, 0.21481471252441406, 0.21496524047851562, 0.21497343444824218, 0.21500518798828125, 0.21537484741210938, 0.21541786193847656, 0.21500723266601562, 0.21525605773925782, 0.21530111694335938, 0.21528472900390624, 0.21499903869628906, 0.21505126953125, 0.2150266876220703, 0.21561138916015626, 0.21505638122558593, 0.21511680603027344, 0.2150707244873047, 0.21535232543945312, 0.2149775390625, 0.21508709716796875, 0.21489561462402343, 0.21500825500488283, 0.21495603942871094, 0.21576191711425782, 0.2151034851074219, 0.21574656677246093, 0.21505946350097657, 0.21543321228027343, 0.21538304138183595, 0.21532261657714843, 0.21520895385742186, 0.44758322143554685, 0.21562471008300782, 0.2151034851074219, 0.2150707244873047, 0.21508096313476563, 0.21512396240234374, 0.2149396514892578, 0.21493350219726562, 0.21506355285644532, 0.21523353576660156, 0.21518130493164062, 0.21520281982421874, 0.21525914001464844, 0.2150891571044922, 0.21512294006347657, 0.21514035034179688, 
0.21520384216308594, 0.2151761932373047, 0.21500518798828125, 0.21573017883300782, 0.2152489013671875, 0.2153871307373047, 0.21496524047851562, 0.21487718200683595, 0.2150440979003906, 0.21491404724121094, 0.21495706176757812, 0.2153359375, 0.2152611846923828, 0.2150963134765625, 0.21533287048339844, 0.21510861206054688, 0.2150645751953125, 0.21524787902832032, 0.21535334777832033, 0.21540966796875, 0.2150584259033203, 0.21552639770507812, 0.21640089416503908, 0.21548133850097656, 0.2153297882080078, 0.21563392639160156, 0.21536256408691407, 0.21525299072265625, 0.21546290588378905, 0.21525094604492187, 0.21537382507324218, 0.2153553924560547, 0.2156943359375, 0.21538815307617187, 0.21539328002929686, 0.21541273498535157, 0.21524479675292968, 0.2151137237548828, 0.21520179748535156, 0.21540045166015626, 0.21538201904296875, 0.21517106628417967, 0.21503181457519532, 0.21518438720703126, 0.2152314910888672, 0.21534002685546874, 0.2152857666015625, 0.44831845092773437, 0.2153543701171875, 0.21508607482910155, 0.2153891906738281, 0.21522738647460937, 0.2152263641357422, 0.2151383056640625, 0.2148311004638672, 0.2147604522705078, 0.21474508666992187, 0.2151946258544922, 0.21490176391601562, 0.2149283905029297, 0.2148341827392578, 0.21475634765625, 0.21509426879882812, 0.21515367126464843, 0.2148720703125, 0.2148106231689453, 0.21511576843261718, 0.2154588165283203, 0.21494169616699219, 0.21514752197265624, 0.21526629638671874, 0.21540658569335938, 0.2152980499267578, 0.21505433654785155, 0.21525094604492187, 0.21514956665039062, 0.21498573303222657, 0.2155397186279297, 0.21500006103515626, 0.21513113403320314, 0.21502362060546876, 0.21486079406738282, 0.2149171142578125, 0.21485977172851561, 0.21519564819335937, 0.21494682312011718, 0.2151137237548828, 0.2152744903564453, 0.21504512023925781, 0.21480755615234376, 0.21494682312011718, 0.21507379150390624, 0.214935546875, 0.21486796569824218, 0.2152191925048828, 0.21546086120605468, 0.21495603942871094, 0.21510963439941405, 0.21517414855957032, 0.21489356994628905, 0.21515058898925782, 0.21519973754882812, 0.214898681640625, 0.21508709716796875, 0.2153492431640625, 0.2149160919189453, 0.21502873229980468, 0.21509735107421876, 0.21509426879882812, 0.21496217346191407, 0.4484382629394531, 0.21479731750488282, 0.2149713897705078, 0.2148546600341797, 0.21475328063964844, 0.21487615966796875, 0.21529498291015625, 0.21497856140136717, 0.2148106231689453, 0.21494169616699219, 0.21501849365234374, 0.21517312622070311, 0.21516390991210937, 0.2153175048828125, 0.21526835632324218, 0.21509222412109374, 0.21496832275390626, 0.2149171142578125, 0.21479423522949218, 0.21496421813964844, 0.21496115112304687, 0.21497958374023438, 0.21504205322265624, 0.2148106231689453, 0.21492633056640625, 0.21496832275390626, 0.2149160919189453, 0.2148863983154297, 0.21481266784667968, 0.21477171325683594, 0.21533900451660157, 0.21499903869628906, 0.21496115112304687, 0.2150102996826172, 0.2149918670654297, 0.2151761932373047, 0.21486285400390626, 0.215046142578125, 0.21498471069335937, 0.21511065673828125, 0.21503077697753906, 0.21505126953125, 0.2149713897705078, 0.21494169616699219, 0.2152499237060547, 0.21516184997558593, 0.21492428588867188, 0.21485261535644531, 0.215046142578125, 0.215372802734375, 0.21504512023925781, 0.215119873046875, 0.21498675537109374, 0.21502156066894532, 0.21487513732910157, 0.2149959716796875, 0.21509324645996095, 0.2150891571044922, 0.21487820434570312, 0.21541171264648437, 0.21502566528320313, 0.2151208953857422, 0.21514239501953125, 
0.4488765563964844, 0.21482086181640625, 0.21482496643066407, 0.21509735107421876, 0.21479014587402342, 0.21473484802246093, 0.2146693115234375, 0.21500518798828125, 0.21510758972167968, 0.21496217346191407, 0.2148106231689453, 0.21478604125976564, 0.21548748779296875, 0.21496627807617188, 0.2150328369140625, 0.21530624389648437, 0.21493862915039064, 0.2148843536376953, 0.21493145751953124, 0.21505126953125, 0.214908935546875, 0.21497958374023438, 0.21564518737792968, 0.21497343444824218, 0.21515367126464843, 0.21530931091308594, 0.21535334777832033, 0.21521817016601563, 0.21496934509277343, 0.21508096313476563, 0.2152929229736328, 0.21582028198242187, 0.21554074096679687, 0.2151751708984375, 0.2154168395996094, 0.21572300720214843, 0.21506253051757812, 0.215151611328125, 0.21505229187011718, 0.2149959716796875, 0.21501235961914061, 0.21538406372070312, 0.2150758361816406, 0.2152110137939453, 0.2150707244873047, 0.21545472717285155, 0.21499903869628906, 0.21496832275390626, 0.2150277099609375, 0.21533900451660157, 0.21492633056640625, 0.21502464294433593, 0.21509837341308594, 0.21507994079589843, 0.21537997436523437, 0.21541375732421875, 0.21506764221191407, 0.21498982238769532, 0.21500108337402343, 0.21577011108398436, 0.21502975463867188, 0.2149222412109375, 0.21507481384277344, 0.4486348876953125, 0.21474919128417969, 0.21470719909667968, 0.21471743774414062, 0.2151700439453125, 0.21507994079589843, 0.2148239288330078, 0.21487820434570312, 0.21485568237304686, 0.21500108337402343, 0.21522840881347657, 0.21483622741699218, 0.21483212280273437, 0.21519155883789062, 0.21522738647460937, 0.2150154266357422, 0.21518438720703126, 0.21501951599121094, 0.21515776062011718, 0.21541375732421875, 0.2148536376953125, 0.21507174682617186, 0.21507174682617186, 0.21522329711914062, 0.21565644836425782, 0.21497958374023438, 0.2154239959716797, 0.21499288940429687, 0.21505229187011718, 0.21485157775878908, 0.21501132202148437, 0.21518540954589843, 0.21500108337402343, 0.21493760681152344, 0.21507174682617186, 0.2150830078125, 0.2148730926513672, 0.21516697692871095, 0.21505946350097657, 0.21500314331054687, 0.21530419921875, 0.2153369598388672, 0.2151331787109375, 0.21504103088378906, 0.21514752197265624, 0.21487615966796875, 0.21510963439941405, 0.21521510314941406, 0.21491302490234376, 0.21509939575195314, 0.21494989013671875, 0.21516082763671876, 0.2149212188720703, 0.2151004180908203, 0.21516697692871095, 0.2149181365966797, 0.2149591064453125, 0.21501644897460936, 0.21512806701660156, 0.21530316162109375, 0.2154035186767578, 0.21518130493164062, 0.21506866455078125]",tokens/s,4.578036281892053,,,,,,,, -4bit-awq-gemm-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,Qwen/Qwen2-beta-72B,Qwen/Qwen2-beta-72B,cuda,0,42,,,True,,,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File 
""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run - report = scenario.run(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run - self.run_model_loading_tracking(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking - backend.load() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load - self.load_transformers_model() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model - self.load_transformers_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights - self.load_transformers_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained - self.pretrained_model = self.automodel_loader.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4034, in from_pretrained - dispatch_model(model, **device_map_kwargs) - File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 494, in dispatch_model - model.to(device) - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 2905, in to - return super().to(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1174, in to - return self._apply(convert) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply - module._apply(fn) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply - module._apply(fn) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply - module._apply(fn) - [Previous line repeated 2 more times] - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 854, in _apply - self._buffers[key] = fn(buf) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1160, in convert - return t.to( -torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 96.00 MiB. GPU 0 has a total capacity of 22.18 GiB of which 68.50 MiB is free. Process 127487 has 22.11 GiB memory in use. Of the allocated memory 21.86 GiB is allocated by PyTorch, and 10.74 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) - -",qwen2,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemm-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,meta-llama/Llama-2-70b-hf,meta-llama/Llama-2-70b-hf,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - self.load_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3904, in from_pretrained - dispatch_model(model, **device_map_kwargs) - File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 489, in dispatch_model - model.to(device) - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 2796, in to - return super().to(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1173, in to - return self._apply(convert) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 779, in _apply - module._apply(fn) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 779, in _apply - module._apply(fn) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 779, in _apply - module._apply(fn) - [Previous line repeated 2 more times] - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 853, in _apply - self._buffers[key] = fn(buf) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1159, in convert - return t.to( -torch.cuda.OutOfMemoryError: CUDA out of memory. 
Tried to allocate 112.00 MiB. GPU - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemm-flash_attention_2,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,meta-llama/Llama-2-7b-hf,meta-llama/Llama-2-7b-hf,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.0,,0.30.1,,,,1.19.2,,,,0.11.1,,,,,,,,,,,,,,,,,,,,,,,,,,,MB,2029.023232,5480.382464,0.0,4833.93536,4503.282688,s,10,5.706882629394531,0.5706882629394532,0.0016266379572605122,0.5707381896972656,0.5725719726562499,0.5726634887695312,0.5727367016601562,"[0.5727550048828125, 0.5716815185546875, 0.5690197143554687, 0.56878515625, 0.5694818725585937, 0.5685383911132813, 0.5697948608398438, 0.5724900512695312, 0.571784423828125, 0.5725516357421875]",tokens/s,448.58115476462876,kWh,6.716496138660996e-06,3.680318861643172e-06,3.126274105956043e-05,4.16595560598646e-05,tokens/kWh,6145048.680598735,MB,2029.023232,5480.382464,0.0,4833.93536,4688.699392,s,10,334.41517578125,33.441517578125,0.005471176707679699,33.439892578125,33.450382421875,33.4504509765625,33.4505058203125,"[33.44433203125, 33.4401328125, 33.436796875, 33.43383203125, 33.43801953125, 33.4503671875, 33.45051953125, 33.436765625, 33.4447578125, 33.43965234375]",tokens/s,1.8838857971329028,kWh,0.0003948281717245225,0.0002164000882931032,0.0018034092266768405,0.0024146374866944663,tokens/kWh,26090.872997355913,,s,629,339.0184661254883,0.5389800733314599,0.06780085317370947,0.5307340698242188,0.5313611816406251,0.5316872192382812,1.099913408203125,"[0.5303726196289062, 0.5304791259765625, 0.5303756713867187, 0.5306491088867188, 0.5304463500976563, 0.5305681762695312, 0.5304903564453125, 0.530681884765625, 0.5307289428710937, 0.530619384765625, 0.5308334350585937, 0.53085693359375, 0.5303705444335938, 0.5305958251953125, 0.5302774047851563, 0.5305548706054688, 0.5302968139648437, 0.530682861328125, 0.5306572875976563, 0.53058251953125, 0.5303828735351562, 0.5305906982421875, 0.53028759765625, 0.5303807983398438, 0.5304647827148438, 0.5309767456054687, 0.5310658569335938, 0.5310986328125, 0.5302282104492188, 0.5309931640625, 0.5306439819335937, 0.5309173583984375, 0.53104638671875, 0.5319270629882813, 0.531589111328125, 0.5317703857421875, 0.5316536254882812, 0.5319987182617187, 0.53172021484375, 0.53193115234375, 0.5317007446289063, 0.5311692504882812, 0.5306941528320313, 0.5313402709960937, 0.5311119384765625, 0.5320499267578125, 0.5308436279296875, 0.5309603881835937, 0.53089794921875, 0.5311702880859375, 0.5305272216796875, 0.5305426025390625, 0.5305640869140625, 0.5306859741210938, 0.5304647827148438, 0.5306275634765625, 0.5305589599609375, 0.5307914428710937, 0.5308579711914062, 0.530808837890625, 0.5308436279296875, 0.5311334228515625, 1.1034122314453125, 0.531583984375, 0.5308897094726562, 0.5307801513671875, 0.530830322265625, 0.5305958251953125, 0.5306961669921875, 0.5304053955078125, 0.53068798828125, 0.5302650756835937, 0.5304074096679687, 0.5301115112304687, 0.5304862670898437, 0.5300991821289063, 0.5306500854492188, 0.5302988891601562, 
0.5305692138671875, 0.530218994140625, 0.5305047607421876, 0.5306592407226562, 0.5311631469726562, 0.5304155883789062, 0.5312952270507812, 0.5305374755859374, 0.530766845703125, 0.5304176635742187, 0.5306275634765625, 0.5305128784179688, 0.5309767456054687, 0.5305927734375, 0.5308180541992188, 0.5308170166015626, 0.5308436279296875, 0.530608154296875, 0.5307197265625, 0.5304832153320312, 0.5310269165039062, 0.530788330078125, 0.531056640625, 0.5312041015625, 0.5311282958984375, 0.5308856201171875, 0.5310484619140625, 0.5306941528320313, 0.5308334350585937, 0.5305599975585937, 0.5322680053710938, 0.530998291015625, 0.5309849853515625, 0.5308098754882813, 0.5307525024414063, 0.53083544921875, 0.5311477661132813, 0.5306255493164063, 0.5309736938476562, 0.531083251953125, 0.5314232177734375, 0.5308733520507812, 0.531041259765625, 0.530524169921875, 0.531251220703125, 0.5304913330078125, 0.5308006591796876, 1.0999736328125, 0.530502685546875, 0.5307003173828125, 0.5304801025390625, 0.5305569458007813, 0.5305374755859374, 0.5308016357421875, 0.5303275756835938, 0.5305835571289063, 0.5303971557617188, 0.5308845825195313, 0.5308651733398437, 0.5319014282226563, 0.5306552124023437, 0.5313453979492188, 0.5305538330078124, 0.5305252075195312, 0.5303152465820312, 0.5307760620117188, 0.5310126342773438, 0.5308641357421875, 0.5306593017578125, 0.530639892578125, 0.5305968627929688, 0.5306808471679687, 0.5308436279296875, 0.5307340698242188, 0.53047705078125, 0.5306808471679687, 0.5303255004882812, 0.53110986328125, 0.5310003051757812, 0.5306552124023437, 0.5305599975585937, 0.5308487548828125, 0.5304658203125, 0.5309296875, 0.5307279663085938, 0.5307545776367187, 0.5304330444335937, 0.5307012939453125, 0.530713623046875, 0.5310167236328125, 0.5309470825195313, 0.5307689208984375, 0.5305252075195312, 0.5307125854492187, 0.5308221435546875, 0.5308416137695312, 0.530892822265625, 0.5315543212890625, 0.5305845947265625, 0.531040283203125, 0.530492431640625, 0.5305466918945313, 0.5306337280273438, 0.530735107421875, 0.5310238647460938, 0.5306429443359375, 0.5305426025390625, 0.5307340698242188, 0.530535400390625, 0.5311631469726562, 1.09973193359375, 0.53024560546875, 0.53064501953125, 0.530534423828125, 0.5306911010742188, 0.5307279663085938, 0.5305538330078124, 0.5307115478515625, 0.5306583251953125, 0.5304094848632812, 0.5305528564453125, 0.5303408813476562, 0.530830322265625, 0.5302855834960938, 0.5308221435546875, 0.5302835083007813, 0.5305282592773437, 0.5302476806640625, 0.5306634521484375, 0.5304013061523437, 0.5309204711914063, 0.5303469848632812, 0.5309839477539062, 0.5302742919921875, 0.53121435546875, 0.5306419067382813, 0.5312420043945313, 0.5304524536132813, 0.5306388549804687, 0.5301882934570312, 0.5307074584960938, 0.5305753784179688, 0.5308630981445313, 0.530572265625, 0.5305016479492187, 0.5303705444335938, 0.5305282592773437, 0.5302476806640625, 0.5306654663085938, 0.5310330810546875, 0.531125244140625, 0.530513916015625, 0.530820068359375, 0.5313054809570312, 0.5315963134765626, 0.5308518676757813, 0.5307258911132813, 0.5309255981445312, 0.5307606811523438, 0.5307371215820312, 0.53144677734375, 0.5305640869140625, 0.5306583251953125, 0.5305569458007813, 0.53089794921875, 0.5304473876953125, 0.53081396484375, 0.5305446166992187, 0.5308446655273438, 0.5305866088867187, 0.5311211547851562, 0.5311539306640625, 0.5308313598632812, 1.1005552978515625, 0.5304330444335937, 0.5307955322265625, 0.530313232421875, 0.5304658203125, 0.5306838989257813, 0.5304944458007812, 0.5302742919921875, 
0.5305692138671875, 0.530356201171875, 0.53049755859375, 0.5303019409179688, 0.5304391479492188, 0.5303101196289063, 0.5306808471679687, 0.5305149536132813, 0.5315686645507812, 0.530671630859375, 0.5310003051757812, 0.5307750244140625, 0.5311651611328125, 0.5306009521484375, 0.5306388549804687, 0.5303910522460937, 0.5305743408203125, 0.53049755859375, 0.5307238159179688, 0.5304903564453125, 0.5306675415039063, 0.5307310180664062, 0.5306060791015625, 0.5304954833984376, 0.5306849365234375, 0.5304647827148438, 0.5310084838867187, 0.531357666015625, 0.5308907470703125, 0.5313607788085938, 0.5305436401367187, 0.5303142700195312, 0.5307197265625, 0.5305866088867187, 0.5307258911132813, 0.5306911010742188, 0.530746337890625, 0.530513916015625, 0.5311181030273437, 0.5308856201171875, 0.5307627563476562, 0.5306132202148437, 0.53115185546875, 0.5308733520507812, 0.5311539306640625, 0.5306583251953125, 0.5317621459960937, 0.5305640869140625, 0.5311201171875, 0.5317539672851562, 0.5310535888671875, 0.5312788696289062, 0.5310422973632812, 0.5307166748046875, 0.5311488037109375, 1.099758544921875, 0.5304155883789062, 0.5309942016601562, 0.5306316528320313, 0.5310044555664063, 0.5309592895507812, 0.5309296875, 0.531293212890625, 0.5310361328125, 0.5315983276367188, 0.5309173583984375, 0.53089892578125, 0.5309368286132813, 0.5309173583984375, 0.5310187377929687, 0.53051904296875, 0.5308784790039063, 0.53064501953125, 0.530935791015625, 0.5305640869140625, 0.53085595703125, 0.5304842529296875, 0.5306941528320313, 0.5305200805664062, 0.5306286010742187, 0.5310453491210938, 0.5314888916015625, 0.5309019165039063, 0.5314559936523438, 0.5304483642578125, 0.5311907958984375, 0.5310607299804687, 0.53081396484375, 0.530460693359375, 0.5306798095703125, 0.5305108642578125, 0.5307340698242188, 0.5307310180664062, 0.5322465209960937, 0.5307340698242188, 0.5311016845703125, 0.5303255004882812, 0.53104541015625, 0.5311344604492187, 0.5315880737304688, 0.5312041015625, 0.5313812255859375, 0.5315963134765626, 0.5314437255859376, 0.5312255859375, 0.5312286987304687, 0.5308856201171875, 0.5311661987304688, 0.5306419067382813, 0.5310013427734375, 0.5312112426757812, 0.5314898071289063, 0.5305784301757812, 0.5307023315429688, 0.5308221435546875, 0.5309501342773437, 0.5306531982421875, 0.5310136108398438, 1.101791259765625, 0.5311979370117188, 0.5310494995117188, 0.5309296875, 0.5306951904296875, 0.5305620727539062, 0.53075146484375, 0.5304832153320312, 0.5304832153320312, 0.5304678344726562, 0.5305579223632813, 0.5303460083007813, 0.530535400390625, 0.53030810546875, 0.5306685180664062, 0.530313232421875, 0.530567138671875, 0.5305947875976562, 0.5310105590820312, 0.5304862670898437, 0.5315010375976562, 0.5307781372070313, 0.5309634399414063, 0.5304596557617187, 0.5305436401367187, 0.5304074096679687, 0.5306552124023437, 0.5303705444335938, 0.5305252075195312, 0.5304309692382813, 0.530587646484375, 0.5305272216796875, 0.5306224365234375, 0.5303101196289063, 0.5306122436523437, 0.5303859252929688, 0.53157373046875, 0.5317867431640625, 0.5306838989257813, 0.531019775390625, 0.5308037109375, 0.5315563354492188, 0.5318829956054687, 0.5318727416992187, 0.531968017578125, 0.5313167114257813, 0.5311610717773437, 0.5309112548828125, 0.5310955810546875, 0.5309736938476562, 0.531399658203125, 0.5308375244140625, 0.5309644775390625, 0.5308323974609375, 0.5311262817382812, 0.5311344604492187, 0.5315696411132812, 0.5313095703125, 0.5322188720703125, 0.5319075927734375, 0.5321328735351563, 0.53180517578125, 0.5310341186523437, 
1.1037552490234375, 0.5302538452148438, 0.53066650390625, 0.5303644409179687, 0.530545654296875, 0.5308262329101563, 0.5305938110351562, 0.5302937622070313, 0.5309501342773437, 0.5304954833984376, 0.5306388549804687, 0.5307566528320312, 0.531390380859375, 0.5309030151367188, 0.5311232299804688, 0.5304494018554687, 0.5305374755859374, 0.5301544799804687, 0.5307739868164062, 0.5302302856445312, 0.5306542358398437, 0.5302691650390625, 0.5304893188476563, 0.530271240234375, 0.5304873046875, 0.530249755859375, 0.5306429443359375, 0.5304688720703125, 0.5305733032226563, 0.5302630615234375, 0.5308067626953125, 0.5304801025390625, 0.5307535400390625, 0.5306204223632812, 0.5305897216796875, 0.5304873046875, 0.5308692626953125, 0.5305548706054688, 0.531114013671875, 0.5308733520507812, 0.531294189453125, 0.5308948364257813, 0.53117236328125, 0.5310709838867187, 0.5314703369140625, 0.5308497924804687, 0.5314765014648437, 0.530946044921875, 0.5310259399414062, 0.5308795166015625, 0.5310228271484375, 0.5311436767578125, 0.530756591796875, 0.5304422607421875, 0.5306869506835937, 0.5305169677734375, 0.53072998046875, 0.5304780883789062, 0.5309317016601562, 0.5314334716796875, 0.5309531860351563, 0.5307371215820312, 0.5311385498046876, 1.1027271728515624, 0.5304412231445312, 0.5307801513671875, 0.5308590087890624, 0.5311181030273437, 0.5311273193359375, 0.5310904541015625, 0.53079345703125, 0.5309389038085938, 0.5304013061523437, 0.5307391967773437, 0.5304248046875, 0.530746337890625, 0.5302958374023438, 0.531314697265625, 0.5305169677734375, 0.5308467407226563, 0.5305415649414063, 0.5307044067382812, 0.5304258422851562, 0.5306500854492188, 0.530608154296875, 0.5310791625976562, 0.5308784790039063, 0.5317447509765625, 0.530534423828125, 0.5315819702148438, 0.5309255981445312, 0.531177490234375, 0.53104638671875, 0.5305394897460938, 0.530318359375, 0.5304934692382812, 0.5302620239257813, 0.5305006103515625, 0.5303418579101562, 0.530535400390625, 0.5304043579101563, 0.5306634521484375, 0.5306224365234375, 0.5308016357421875, 0.5306849365234375, 0.5312051391601562, 0.53136279296875, 0.5315717163085938, 0.5321359252929687, 0.5316669311523438, 0.5309634399414063, 0.5312890625, 0.5310740356445313, 0.5311795043945312, 0.531040283203125, 0.531083251953125, 0.53062451171875, 0.5307965698242187, 0.5304965209960938, 0.5307801513671875, 0.530714599609375, 0.5308016357421875, 0.5307023315429688, 0.5310259399414062, 0.5310658569335938, 0.5317560424804687, 1.103698974609375, 0.5303838500976562, 0.5308118896484375, 0.5313546142578125, 0.5313822631835937, 0.5304340209960937, 0.5307258911132813, 0.5303450317382813, 0.530662353515625, 0.5305333862304688, 0.5306746826171875, 0.5304586181640625, 0.5306736450195313, 0.5304258422851562, 0.5309183959960937, 0.53065625, 0.5313136596679687, 0.5311395874023438, 0.5308251953125, 0.5308211059570312, 0.530787353515625, 0.530513916015625, 0.5309204711914063, 0.5309265747070312, 0.5309603881835937, 0.5305927734375, 0.5307289428710937, 0.5303971557617188, 0.5306982421875, 0.5306787719726562, 0.531040283203125, 0.5307310180664062, 0.530882568359375, 0.5309470825195313, 0.5306675415039063, 0.5309173583984375, 0.5313720092773437, 0.5311057739257813, 0.5313310546875, 0.5306521606445312, 0.5313228759765625, 0.5306849365234375, 0.530766845703125, 0.5306306762695312, 0.5308231811523437, 0.5304586791992187, 0.5306050415039063, 0.530514892578125, 0.5307535400390625, 0.5304391479492188, 0.5307156372070313, 0.530545654296875, 0.5310863647460937, 0.5305620727539062, 0.530904052734375, 
0.53064599609375, 0.5309317016601562, 0.53065625, 0.5311386108398437, 0.5306500244140625, 0.5308600463867188, 0.5306286010742187, 0.5308651733398437]",tokens/s,1.8553561615347953,,,,,,,, -4bit-awq-gemm-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,r,r,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 304, in hf_raise_for_status - response.raise_for_status() - File ""/usr/local/lib/python3.10/dist-packages/requests/models.py"", line 1024, in raise_for_status - raise HTTPError(http_error_msg, response=self) -requests.exceptions.HTTPError: 404 Client Error: Not Found for url: https://huggingface.co/r/resolve/main/config.json - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1221, in hf_hub_download - return _hf_hub_download_to_cache_dir( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1325, in _hf_hub_download_to_cache_dir - _raise_on_head_call_error(head_call_error, force_download, local_files_only) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1823, in _raise_on_head_call_error - raise head_call_error - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1722, in _get_metadata_or_catch_error - metadata = get_hf_file_metadata(url=url, proxies=proxies, timeout=etag_timeout, headers=headers) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1645, in get_hf_file_metadata - r = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 372, in _request_wrapper - response = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 396, in _request_wrapper - hf_raise_for_status(response) - File 
""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status - raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-6694912c-60e453442697ac4940744e43;3a3af6a6-109c-419c-b4f2-14f3bfc8c669) - -Repository Not Found for url: https://huggingface.co/r/resolve/main/config.json. -Please make sure you specified the correct `repo_id` and `repo_type`. -If you are trying to access a private or gated repo, make sure you are authenticated. - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 425, in cached_file - raise EnvironmentError( -OSError: r is not a local folder and is not a valid model identifier listed on 'https://huggingface.co/models' -If this is a private repository, make sure to pass a token having permission to this repo either by logging in with `huggingface-cli login` or by passing `token=` - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemm-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,google/recurrentgemma-7b,google/recurrentgemma-7b,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File 
""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 304, in hf_raise_for_status - response.raise_for_status() - File ""/usr/local/lib/python3.10/dist-packages/requests/models.py"", line 1024, in raise_for_status - raise HTTPError(http_error_msg, response=self) -requests.exceptions.HTTPError: 404 Client Error: Not Found for url: https://huggingface.co/google/recurrentgemma-7b/resolve/main/config.json - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1221, in hf_hub_download - return _hf_hub_download_to_cache_dir( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1325, in _hf_hub_download_to_cache_dir - _raise_on_head_call_error(head_call_error, force_download, local_files_only) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1823, in _raise_on_head_call_error - raise head_call_error - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1722, in _get_metadata_or_catch_error - metadata = get_hf_file_metadata(url=url, proxies=proxies, timeout=etag_timeout, headers=headers) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1645, in get_hf_file_metadata - r = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 372, in _request_wrapper - response = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 396, in _request_wrapper - hf_raise_for_status(response) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status - raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-66948268-289bb5d42b45f4406907f61a;96ba6bd8-fd65-47cc-9087-9f12eebaae6f) - -Repository Not Found for url: https://huggingface.co/google/recurrentgemma-7b/resolve/main/config.json. -Please make sure you specified the correct `repo_id` and `repo_type`. -If you are trying to access a private or gated repo, make sure you are authenticated. 
- -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 425, in cached_file - raise EnvironmentError( -OSError: google/recurrentgemma-7b is not a local folder and is not a valid model identifier listed on 'https://huggingface.co/models' -If this is a private repository, make sure to pass a token having permission to this repo either by logging in with `huggingface-cli login` or by passing `token=` - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemm-flash_attention_2,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,huggyllama/llama-13b,huggyllama/llama-13b,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.0,,0.30.1,,,,1.19.2,,,,0.11.1,,,,,,,,,,,,,,,,,,,,,,,,,,,MB,3013.357568,9259.450368,0.0,8613.003264,8211.364864,s,10,10.943852783203125,1.0943852783203125,0.00185809213941572,1.0943612060546875,1.0968373901367188,1.0970477233886717,1.0972159899902343,"[1.097258056640625, 1.09552197265625, 1.0923255615234375, 1.0922135009765626, 1.09405712890625, 1.091574951171875, 1.0937421875, 1.094665283203125, 1.0957034912109376, 1.0967906494140625]",tokens/s,233.92127532354476,kWh,1.2894171526034674e-05,7.065532383967365e-06,5.862768579100175e-05,7.858738970100378e-05,tokens/kWh,3257520.080180627,MB,3013.357568,9330.753536,0.0,8684.306432,8503.627264,s,10,640.7681367187499,64.07681367187499,0.020824056597130436,64.070240234375,64.11088984375,64.113944921875,64.116388984375,"[64.1102109375, 64.0801015625, 64.117, 64.0658984375, 64.065828125, 64.06840234375, 64.058859375, 64.04678125, 64.072078125, 
64.0829765625]",tokens/s,0.9831949560196744,kWh,0.000756398006098138,0.0004145729534449856,0.003549850923211795,0.004720821882754919,tokens/kWh,13345.133869620864,,s,629,649.5949702148431,1.032742400977494,0.1300197569102036,1.016932373046875,1.018054443359375,1.0185316284179688,2.1104683203125,"[1.0186362915039062, 1.0175692749023437, 1.0181939086914062, 1.0167439575195312, 1.0171248779296875, 1.016279052734375, 1.0170603637695312, 1.0164469604492188, 1.019283447265625, 1.0177188110351563, 1.0175784912109376, 1.0175529174804687, 1.0177136840820313, 1.0184437866210938, 1.0184273681640625, 1.0175098876953126, 1.0180792236328124, 1.0184560546875, 1.0168545532226563, 1.0170511474609376, 1.0169282836914062, 1.0172200927734374, 1.0169989013671874, 1.016431640625, 1.01659033203125, 1.0177269897460937, 1.0169927978515625, 1.0175907592773437, 1.0173204345703124, 1.0170286254882812, 1.0172518310546874, 1.0171300048828125, 1.0176942749023437, 1.0188645629882813, 1.0181181640625, 1.0181754760742188, 1.0174935302734376, 1.0177156982421875, 1.0180280151367187, 1.0169384765625, 1.0188124389648439, 1.0189732055664062, 1.01850830078125, 1.0174976196289063, 1.0177904663085937, 1.0171678466796874, 1.0175887451171874, 1.0177188110351563, 1.017942138671875, 1.019806640625, 1.0175037231445312, 1.0165176391601562, 1.0163671264648437, 1.018239990234375, 1.0174945068359376, 1.0187335815429688, 1.0170091552734375, 1.017196533203125, 1.0169682006835938, 1.0169784545898437, 1.0177904663085937, 1.0165442504882813, 2.11431005859375, 1.017186279296875, 1.0167265014648437, 1.0168094482421874, 1.016437744140625, 1.0170368041992188, 1.0164561767578124, 1.0164777221679688, 1.01673779296875, 1.0177433471679687, 1.0164715576171874, 1.0170480346679687, 1.0178303833007813, 1.0165759887695311, 1.0164664306640625, 1.0171217651367188, 1.0163681030273437, 1.0164623413085938, 1.0167490844726563, 1.016791015625, 1.0163056640625, 1.0171576538085938, 1.0169917602539063, 1.017670654296875, 1.0177689819335938, 1.0164049682617187, 1.0167510986328125, 1.0171494140625, 1.0164398193359374, 1.0172323608398437, 1.0166558837890625, 1.0169886474609375, 1.01707470703125, 1.0176522216796875, 1.0164090576171876, 1.0175907592773437, 1.016195068359375, 1.0165288696289063, 1.0176030883789062, 1.0171300048828125, 1.0172190551757812, 1.01783447265625, 1.0167275390625, 1.0168729858398438, 1.016838134765625, 1.01656884765625, 1.0176798706054688, 1.0164961547851563, 1.0169405517578125, 1.0169313354492187, 1.0181314697265624, 1.0185051879882812, 1.0174259033203126, 1.0190386962890625, 1.0177259521484374, 1.0175364990234375, 1.01764404296875, 1.0178928833007812, 1.0174586791992188, 1.01879296875, 1.0175662231445313, 1.0174638061523438, 1.0168340454101563, 2.110869384765625, 1.0163967895507813, 1.0164827880859375, 1.0182993774414062, 1.017069580078125, 1.0174013671875, 1.017512939453125, 1.0179420166015625, 1.0180894775390625, 1.0180361938476563, 1.0178211669921875, 1.0178641967773439, 1.0179297485351562, 1.019236328125, 1.0181181640625, 1.0165933837890626, 1.0170029907226563, 1.0190684814453126, 1.0187539672851562, 1.0192598876953125, 1.0180260009765625, 1.018945556640625, 1.0177413330078124, 1.0181325073242187, 1.0181212158203126, 1.018977294921875, 1.0179379272460938, 1.0190817260742187, 1.0184038696289062, 1.0187530517578125, 1.0190274658203125, 1.0183782348632813, 1.0174586791992188, 1.0177177734375, 1.0169108276367187, 1.0171514892578124, 1.0180515747070313, 1.0181908569335938, 1.0179368896484375, 1.0182778930664063, 1.0189833984375, 
1.0178088989257812, 1.0176030883789062, 1.0174474487304688, 1.01793994140625, 1.0195138549804688, 1.0180485229492187, 1.0181898193359376, 1.0169937744140625, 1.0169609985351562, 1.016900634765625, 1.0166210327148437, 1.0164254760742188, 1.0164019165039062, 1.0165575561523437, 1.0168330078125, 1.016764404296875, 1.016880126953125, 1.0165565185546874, 1.0169978637695312, 1.016406005859375, 1.0169456787109374, 1.0166640625, 2.10943701171875, 1.0166231079101562, 1.0164449462890626, 1.0163446044921876, 1.0166394653320312, 1.0180321044921874, 1.0164285278320313, 1.016468505859375, 1.0164756469726564, 1.0164132080078125, 1.0169036865234375, 1.0166927490234374, 1.0170050659179688, 1.0168923950195312, 1.0167388305664062, 1.0166712036132812, 1.016595458984375, 1.0170153198242187, 1.016637451171875, 1.0168944702148437, 1.0178826293945313, 1.0176430053710936, 1.017217041015625, 1.0173972778320313, 1.0166896362304687, 1.0168361206054688, 1.0170224609375, 1.0166179809570313, 1.0168811645507811, 1.0171064453125, 1.0169569091796875, 1.017037841796875, 1.0164387817382812, 1.0170715942382813, 1.0171002807617187, 1.0174781494140626, 1.01707470703125, 1.0163302612304688, 1.0170009765625, 1.0169763793945312, 1.0165155639648438, 1.0176727294921875, 1.0179563598632813, 1.0177484741210938, 1.0178109741210937, 1.016742919921875, 1.0165545043945312, 1.016642578125, 1.0165452880859376, 1.0169088134765625, 1.0167982177734376, 1.01705419921875, 1.0169876708984376, 1.0166353759765625, 1.01646337890625, 1.016869873046875, 1.0170398559570313, 1.0165657958984375, 1.016586181640625, 1.0168739624023437, 1.0175160522460938, 1.0169579467773437, 1.0165821533203125, 2.1117490234375, 1.016916015625, 1.017007080078125, 1.0167859497070313, 1.0164111328125, 1.016964111328125, 1.0174464111328125, 1.016838134765625, 1.016722412109375, 1.01697021484375, 1.0166876220703125, 1.0165493774414063, 1.0166353759765625, 1.0168402099609375, 1.0164602661132813, 1.016764404296875, 1.0166251220703124, 1.0166599731445312, 1.0170848999023439, 1.0168576049804687, 1.01642138671875, 1.0169517822265626, 1.01652685546875, 1.016585205078125, 1.0163035888671874, 1.0168361206054688, 1.0171637573242187, 1.01675927734375, 1.0166220703125, 1.0165278930664063, 1.0162329711914062, 1.0164930419921876, 1.0165626831054688, 1.0164500732421875, 1.0164182739257812, 1.016795166015625, 1.0171268920898437, 1.0172815551757812, 1.0167838745117188, 1.0167255249023437, 1.0174822387695313, 1.0175631103515625, 1.0172815551757812, 1.0165330200195313, 1.0170941162109375, 1.0168494262695313, 1.0168780517578124, 1.0167992553710938, 1.0165073852539062, 1.01684326171875, 1.0168442993164062, 1.0172507934570312, 1.0167459716796876, 1.0169251708984375, 1.017218017578125, 1.0175836181640625, 1.0206064453125, 1.0173880615234374, 1.017280517578125, 1.0169978637695312, 1.0173460693359375, 1.0168074340820312, 1.0165514526367188, 2.109128662109375, 1.0168524780273438, 1.01806591796875, 1.0175098876953126, 1.0182809448242187, 1.0176317138671875, 1.016975341796875, 1.0173204345703124, 1.0172610473632813, 1.0172006225585937, 1.017169921875, 1.0174801635742188, 1.0167971801757814, 1.018260498046875, 1.01722314453125, 1.0176788330078126, 1.0169375, 1.0171392211914063, 1.01722314453125, 1.01747509765625, 1.017091064453125, 1.0181314697265624, 1.0181068725585938, 1.0168790893554687, 1.0172938232421875, 1.016933349609375, 1.0170153198242187, 1.0169262084960937, 1.0168422241210937, 1.0174187622070312, 1.016806396484375, 1.0169446411132812, 1.0168545532226563, 1.0171279296875, 1.0165452880859376, 
1.0166732788085937, 1.0163753051757813, 1.0164019165039062, 1.0169395141601563, 1.016479736328125, 1.0163753051757813, 1.0167285766601561, 1.016975341796875, 1.0166343383789063, 1.016521728515625, 1.0164971313476563, 1.01644287109375, 1.0162514038085937, 1.0162565307617188, 1.0164049682617187, 1.0166456298828126, 1.0162913208007813, 1.0164541625976562, 1.0166067504882812, 1.0164879150390624, 1.0165186767578125, 1.0164029541015625, 1.0165452880859376, 1.0164561767578124, 1.016764404296875, 1.0167705688476563, 1.0167193603515625, 1.016838134765625, 2.111257568359375, 1.0166507568359375, 1.0164859008789062, 1.0166497192382813, 1.0162554931640626, 1.0165023803710938, 1.0166967163085938, 1.016848388671875, 1.0164193115234375, 1.0167725830078125, 1.01661181640625, 1.0165176391601562, 1.017017333984375, 1.0167285766601561, 1.0165155639648438, 1.0169896850585938, 1.0168555297851563, 1.016900634765625, 1.0165892944335937, 1.0169630737304687, 1.0168340454101563, 1.0165022583007812, 1.01639990234375, 1.0166077270507812, 1.0167879638671875, 1.0163414916992188, 1.016711181640625, 1.016896484375, 1.0175538940429687, 1.0173655395507812, 1.01714013671875, 1.0171791381835937, 1.016394775390625, 1.0165196533203125, 1.0164090576171876, 1.0165565185546874, 1.0166087646484374, 1.0164295654296875, 1.0164623413085938, 1.016205322265625, 1.0160199584960938, 1.0161571655273438, 1.0178303833007813, 1.0175641479492188, 1.0178211669921875, 1.0177904663085937, 1.0168576049804687, 1.0169139404296874, 1.016748046875, 1.0167572631835937, 1.0166681518554688, 1.0166241455078124, 1.0170941162109375, 1.0179061889648438, 1.0177638549804688, 1.0172477416992187, 1.01701220703125, 1.017195556640625, 1.0166302490234376, 1.0166353759765625, 1.0164111328125, 1.0163353881835937, 1.0165473022460938, 2.111709228515625, 1.016605712890625, 1.0165575561523437, 1.0163179321289062, 1.0160752563476563, 1.0163681030273437, 1.0162769775390625, 1.01686376953125, 1.0165380859375, 1.01652685546875, 1.0169149169921874, 1.0168729858398438, 1.016648681640625, 1.0167265014648437, 1.0163302612304688, 1.0163599243164063, 1.0167684936523438, 1.0168627319335937, 1.0168145751953126, 1.0170839233398437, 1.0169886474609375, 1.016784912109375, 1.0166917114257812, 1.016859619140625, 1.0169098510742187, 1.0174157104492187, 1.0167152709960938, 1.0165821533203125, 1.0165196533203125, 1.0169415893554687, 1.01663232421875, 1.016784912109375, 1.0165791015625, 1.0167817993164063, 1.0162913208007813, 1.016732666015625, 1.0160650024414062, 1.0161500244140624, 1.0161346435546874, 1.0164756469726564, 1.0165278930664063, 1.0166435546875, 1.0165442504882813, 1.0163240966796876, 1.0163547973632812, 1.0165545043945312, 1.016332275390625, 1.0163927001953126, 1.0162432250976563, 1.0166128540039063, 1.01661181640625, 1.0168319702148438, 1.0166835327148438, 1.0168361206054688, 1.0165135498046876, 1.0166527709960937, 1.0165084228515624, 1.0164992065429688, 1.016553466796875, 1.0167490844726563, 1.0172364501953124, 1.0167316284179688, 1.0162698364257812, 2.113395751953125, 1.017417724609375, 1.0168319702148438, 1.016943603515625, 1.0170357666015626, 1.01726513671875, 1.016616943359375, 1.0171105346679687, 1.0168176879882813, 1.0170306396484374, 1.0163834838867187, 1.0166005859375, 1.0165791015625, 1.0170603637695312, 1.0170880126953126, 1.0176962280273438, 1.0171422729492188, 1.0168514404296876, 1.0166578979492187, 1.0166784057617186, 1.01640087890625, 1.0165043334960937, 1.0165616455078126, 1.0163988647460938, 1.016511474609375, 1.0165104370117188, 1.0166610107421874, 
1.01673779296875, 1.016742919921875, 1.0167920532226562, 1.0162626342773438, 1.0163783569335938, 1.0163292236328125, 1.0166128540039063, 1.0167019653320313, 1.0171371459960938, 1.0175958862304688, 1.0170101928710937, 1.0169866333007813, 1.0172344360351562, 1.0172160034179687, 1.016932373046875, 1.0171678466796874, 1.0173429565429688, 1.0172846069335937, 1.0185523071289062, 1.0170470581054687, 1.0176522216796875, 1.017080810546875, 1.0174044189453124, 1.0174361572265624, 1.017650146484375, 1.0165667724609375, 1.0164510498046875, 1.0166917114257812, 1.0181375732421876, 1.0173368530273437, 1.0173501586914062, 1.0173982543945312, 1.0175170288085937, 1.0172129516601562, 1.0174832763671875, 1.017628662109375, 2.11508642578125, 1.01734912109375, 1.0171422729492188, 1.0165667724609375, 1.0170706176757813, 1.0171514892578124, 1.0170203857421876, 1.0170941162109375, 1.017049072265625, 1.016974365234375, 1.0170296020507812, 1.0174832763671875, 1.0167408447265625, 1.01798193359375, 1.0171473999023437, 1.016896484375, 1.0165084228515624, 1.0163824462890625, 1.016531982421875, 1.016742919921875, 1.0175529174804687, 1.0176614379882813, 1.0177628173828126, 1.0172733154296876, 1.017101318359375, 1.0169559326171875, 1.0170614013671875, 1.0167142333984376, 1.01680126953125, 1.0172303466796875, 1.0184417114257813, 1.0178109741210937, 1.0175836181640625, 1.0166937866210937, 1.016395751953125, 1.0165514526367188, 1.016395751953125, 1.0163200073242187, 1.0162093505859375, 1.0166200561523437, 1.0170992431640624, 1.0166538696289062, 1.0170900268554688, 1.0167030029296875, 1.0169343872070313, 1.0181693725585939, 1.018203125, 1.0185471801757813, 1.0176788330078126, 1.0179194946289063, 1.0182062377929688, 1.0176983032226563, 1.0169210815429688, 1.0170449829101562, 1.0173613891601563, 1.016896484375, 1.0173850708007812, 1.0171493530273437, 1.0174464111328125, 1.0179911499023437, 1.0175467529296875, 1.017291748046875, 1.0167869262695313]",tokens/s,0.9682956747524812,,,,,,,, -4bit-awq-gemm-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,Deci/DeciCoder-1b,Deci/DeciCoder-1b,cuda,0,42,,,True,,,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run - report = scenario.run(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run - self.run_model_loading_tracking(backend) - File 
""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking - backend.load() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load - self.load_transformers_model() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model - self.load_transformers_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights - self.load_transformers_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained - self.pretrained_model = self.automodel_loader.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 559, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained - model = cls(config, *model_args, **model_kwargs) - File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 248, in __init__ - self.model = DeciCoderModel(config) - File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 215, in __init__ - self.layers = nn.ModuleList([DeciCoderDecoderLayer(config) for _ in range(config.num_hidden_layers)]) - File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 215, in - self.layers = nn.ModuleList([DeciCoderDecoderLayer(config) for _ in range(config.num_hidden_layers)]) - File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 181, in __init__ - self.self_attn = DeciCoderAttention(config=config) - File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 54, in __init__ - self._init_rope() - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1729, in __getattr__ - raise AttributeError(f""'{type(self).__name__}' object has no attribute '{name}'"") -AttributeError: 'DeciCoderAttention' object has no attribute '_init_rope' - -",llama,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemm-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,google/recurrentgemma-2b,google/recurrentgemma-2b,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 
47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 304, in hf_raise_for_status - response.raise_for_status() - File ""/usr/local/lib/python3.10/dist-packages/requests/models.py"", line 1024, in raise_for_status - raise HTTPError(http_error_msg, response=self) -requests.exceptions.HTTPError: 403 Client Error: Forbidden for url: https://huggingface.co/google/recurrentgemma-2b/resolve/main/config.json - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1722, in _get_metadata_or_catch_error - metadata = get_hf_file_metadata(url=url, proxies=proxies, timeout=etag_timeout, headers=headers) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1645, in get_hf_file_metadata - r = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 372, in _request_wrapper - response = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 396, in _request_wrapper - hf_raise_for_status(response) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 367, in hf_raise_for_status - raise HfHubHTTPError(message, response=response) from e -huggingface_hub.utils._errors.HfHubHTTPError: (Request ID: Root=1-669481ee-1af78b1168dc6d375af06261;fc66bf74-8e04-43ac-b584-4ba86c5f87a0) - -403 Forbidden: Please enable access to public gated repositories in your fine-grained token settings to view this repository.. -Cannot access content at: https://huggingface.co/google/recurrentgemma-2b/resolve/main/config.json. -If you are trying to create or update content,make sure you have a token with the `write` role. - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1221, in hf_hub_download - return _hf_hub_download_to_cache_dir( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1325, in _hf_hub_download_to_cache_dir - _raise_on_head_call_error(head_call_error, force_download, local_files_only) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1826, in _raise_on_head_call_error - raise LocalEntryNotFoundError( -huggingface_hub.utils._errors.LocalEntryNotFoundError: An error happened while trying to locate the file on the Hub and we cannot find the requested files in the local cache. Please check your connection and try again or make sure your Internet connection is on. 
- -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 445, in cached_file - raise EnvironmentError( -OSError: We couldn't connect to 'https://huggingface.co' to load this file, couldn't find it in the cached files and it looks like google/recurrentgemma-2b is not the path to a directory containing a file named config.json. -Checkout your internet connection or see how to run the library in offline mode at 'https://huggingface.co/docs/transformers/installation#offline-mode'. 
- -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemm-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,v,v,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 304, in hf_raise_for_status - response.raise_for_status() - File ""/usr/local/lib/python3.10/dist-packages/requests/models.py"", line 1024, in raise_for_status - raise HTTPError(http_error_msg, response=self) -requests.exceptions.HTTPError: 404 Client Error: Not Found for url: https://huggingface.co/v/resolve/main/config.json - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1221, in hf_hub_download - return _hf_hub_download_to_cache_dir( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1325, in _hf_hub_download_to_cache_dir - _raise_on_head_call_error(head_call_error, force_download, local_files_only) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1823, in _raise_on_head_call_error - raise head_call_error - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1722, in _get_metadata_or_catch_error - metadata = get_hf_file_metadata(url=url, proxies=proxies, timeout=etag_timeout, headers=headers) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1645, in get_hf_file_metadata - r = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 372, in _request_wrapper - response = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 396, in _request_wrapper - hf_raise_for_status(response) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status - raise 
RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-669494a6-18dccbfa372ab65852cd8251;5c29abb0-3b1f-43f3-b115-1af2e4d377dc) - -Repository Not Found for url: https://huggingface.co/v/resolve/main/config.json. -Please make sure you specified the correct `repo_id` and `repo_type`. -If you are trying to access a private or gated repo, make sure you are authenticated. - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 425, in cached_file - raise EnvironmentError( -OSError: v is not a local folder and is not a valid model identifier listed on 'https://huggingface.co/models' -If this is a private repository, make sure to pass a token having permission to this repo either by logging in with `huggingface-cli login` or by passing `token=<your_token>` - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemm-flash_attention_2,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,EleutherAI/pythia-2.7b,EleutherAI/pythia-2.7b,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.40.2,,0.30.1,,,,1.19.2,,,,0.11.1,,,,,,,,,,,,,,,,,,,,,,,,,,,MB,2221.408256,3330.801664,0.0,2684.35456,2447.595008,s,10,2.311121505737305,0.2311121505737305,0.0008602880707090342,0.23093637084960938,0.23197196044921875,0.23249637908935547,0.23291591400146486,"[0.2330207977294922, 0.23185542297363282, 0.23051023864746092, 0.23036329650878906, 0.2304237823486328, 0.2300004119873047, 0.23067584228515625, 0.2311968994140625, 0.231619384765625, 
0.23145542907714844]",tokens/s,1107.6873256749418,kWh,2.719861554979074e-06,1.4903567604051204e-06,1.2427491002590687e-05,1.6637709317974884e-05,tokens/kWh,15386733.54050159,MB,2222.546944,3330.801664,0.0,2684.35456,2597.68064,s,10,135.98273144531248,13.598273144531248,0.005626597165862265,13.59927783203125,13.6021646484375,13.60570927734375,13.60854498046875,"[13.60925390625, 13.5985986328125, 13.5982265625, 13.6012080078125, 13.59995703125, 13.5903544921875, 13.5950361328125, 13.600318359375, 13.601376953125, 13.5884013671875]",tokens/s,4.63294120734267,kWh,0.00016051297066488652,8.797306729544289e-05,0.0007245357930120089,0.0009730218309723382,tokens/kWh,64746.74873126358,,s,629,137.8431466979981,0.21914649713513207,0.027476947243743017,0.21579168701171875,0.2161471496582031,0.2163871795654297,0.4466235400390625,"[0.2167510986328125, 0.21612850952148438, 0.2157936706542969, 0.21595852661132814, 0.21579776000976564, 0.21578341674804688, 0.21598515319824219, 0.21577626037597655, 0.21600563049316407, 0.21667738342285156, 0.2159656982421875, 0.21651968383789064, 0.21600767517089844, 0.21581004333496093, 0.21593600463867188, 0.21574656677246093, 0.2157589111328125, 0.21606707763671876, 0.2157782440185547, 0.21572096252441406, 0.2157322235107422, 0.21579058837890625, 0.21588172912597656, 0.21599232482910155, 0.2161080322265625, 0.21578341674804688, 0.21588172912597656, 0.21578341674804688, 0.21624217224121095, 0.21602406311035155, 0.2159482879638672, 0.21581619262695312, 0.21592169189453125, 0.21585302734375, 0.2157742004394531, 0.21580697631835938, 0.2158233642578125, 0.21585305786132813, 0.216163330078125, 0.2158858184814453, 0.21590425109863282, 0.2159718475341797, 0.2159482879638672, 0.21587660217285157, 0.2160394287109375, 0.21591244506835938, 0.2157660217285156, 0.21589913940429686, 0.21592576599121094, 0.21606399536132812, 0.21596365356445313, 0.2158428192138672, 0.21607833862304687, 0.21651866149902343, 0.21638246154785157, 0.21635379028320312, 0.21680844116210937, 0.21638656616210938, 0.21633331298828126, 0.21572709655761718, 0.21570457458496095, 0.21581517028808594, 0.448078857421875, 0.21560012817382812, 0.21530323791503905, 0.21556626892089845, 0.21550796508789063, 0.2154915771484375, 0.21565951538085937, 0.21555711364746094, 0.21552742004394532, 0.21577728271484375, 0.2157127685546875, 0.21635789489746093, 0.21575782775878907, 0.21568818664550782, 0.2154915771484375, 0.2156810302734375, 0.21565440368652344, 0.21555404663085936, 0.21562060546875, 0.21577728271484375, 0.2157557830810547, 0.21564006042480469, 0.21555815124511718, 0.21571583557128907, 0.2157373504638672, 0.21600973510742189, 0.21567079162597655, 0.2159482879638672, 0.21565235900878907, 0.2156820526123047, 0.21579469299316406, 0.21556941223144532, 0.215583740234375, 0.21569842529296876, 0.21576191711425782, 0.21580294799804686, 0.21571781921386718, 0.215583740234375, 0.21599334716796875, 0.2168739776611328, 0.21618482971191405, 0.21625753784179688, 0.21591142272949218, 0.21590835571289063, 0.21615206909179688, 0.21587557983398437, 0.21748121643066406, 0.21573324584960937, 0.21585816955566406, 0.21574758911132813, 0.21579161071777345, 0.21602613830566406, 0.21589602661132812, 0.21573837280273436, 0.21600767517089844, 0.2161459197998047, 0.216195068359375, 0.21681561279296874, 0.21595545959472656, 0.2159800262451172, 0.21614183044433594, 0.21579263305664062, 0.21579168701171875, 0.44681414794921875, 0.21564927673339843, 0.21552024841308592, 0.2156093444824219, 0.21581517028808594, 0.21569024658203126, 
0.21543324279785156, 0.2153850555419922, 0.21555917358398438, 0.21604454040527343, 0.2156441650390625, 0.21566259765625, 0.2157373504638672, 0.21574348449707031, 0.2156165771484375, 0.2157229461669922, 0.21567692565917967, 0.21569024658203126, 0.2158540802001953, 0.21572198486328126, 0.2158008270263672, 0.21594009399414063, 0.21594009399414063, 0.2173450164794922, 0.21601280212402343, 0.21582643127441406, 0.21562265014648438, 0.216121337890625, 0.21586329650878905, 0.21575065612792968, 0.21581721496582032, 0.21577626037597655, 0.21595135498046875, 0.21587046813964844, 0.2158223419189453, 0.21579263305664062, 0.21578854370117187, 0.21617765808105469, 0.21579571533203126, 0.2159615936279297, 0.2160906219482422, 0.2161459197998047, 0.21604864501953125, 0.2158551025390625, 0.2156943359375, 0.21593498229980468, 0.2158254089355469, 0.21669273376464843, 0.21583258056640625, 0.21568307495117187, 0.21580288696289063, 0.21581210327148437, 0.21588070678710938, 0.21590118408203124, 0.2158018493652344, 0.2157803497314453, 0.2158192596435547, 0.2158192596435547, 0.2157670440673828, 0.21574758911132813, 0.21588890075683595, 0.2157137908935547, 0.21567079162597655, 0.44661248779296875, 0.215657470703125, 0.21567999267578125, 0.21604351806640626, 0.2158858184814453, 0.21600665283203124, 0.21593600463867188, 0.2156145324707031, 0.21560723876953125, 0.21591346740722656, 0.21581619262695312, 0.21606809997558593, 0.216342529296875, 0.21595750427246094, 0.21586636352539063, 0.21600154113769532, 0.2158540802001953, 0.21593907165527343, 0.21594522094726562, 0.21608345031738282, 0.21599334716796875, 0.21589401245117187, 0.21608038330078125, 0.21600665283203124, 0.21591448974609376, 0.21582131958007814, 0.21565542602539062, 0.2157004852294922, 0.21572921752929688, 0.21575570678710937, 0.215878662109375, 0.21577113342285156, 0.2156390380859375, 0.21572709655761718, 0.21576191711425782, 0.215878662109375, 0.21596774291992188, 0.21590835571289063, 0.21587660217285157, 0.21585305786132813, 0.2158356475830078, 0.21571685791015624, 0.21579263305664062, 0.21578956604003907, 0.21685247802734375, 0.2158223419189453, 0.21567692565917967, 0.21570661926269533, 0.21589503479003908, 0.21621554565429688, 0.2161643524169922, 0.21638758850097656, 0.215878662109375, 0.21575885009765625, 0.21584588623046874, 0.21589401245117187, 0.2158745574951172, 0.21597900390625, 0.21583769226074218, 0.21566361999511718, 0.2158182373046875, 0.21573017883300782, 0.21579776000976564, 0.44662783813476564, 0.21568511962890624, 0.2158233642578125, 0.21589605712890625, 0.2156615753173828, 0.21600154113769532, 0.21570355224609375, 0.2157639617919922, 0.21581312561035157, 0.21569024658203126, 0.215552001953125, 0.21569638061523438, 0.21593087768554686, 0.2157076416015625, 0.21581004333496093, 0.21580902099609375, 0.21563699340820314, 0.21653094482421875, 0.21587660217285157, 0.21565440368652344, 0.21584588623046874, 0.2159288330078125, 0.21573426818847657, 0.21571583557128907, 0.21572096252441406, 0.21561036682128906, 0.21595750427246094, 0.21581210327148437, 0.215841796875, 0.21570559692382812, 0.21621452331542967, 0.2162554931640625, 0.216015869140625, 0.21625958251953126, 0.21600767517089844, 0.2159052734375, 0.2159800262451172, 0.21584077453613282, 0.21585714721679689, 0.2158612518310547, 0.21581517028808594, 0.2157496337890625, 0.21570252990722658, 0.21573837280273436, 0.21577215576171874, 0.21601689147949218, 0.21586534118652342, 0.21566053771972657, 0.21617971801757813, 0.215804931640625, 0.2159964141845703, 0.215910400390625, 0.21567692565917967, 
0.21574143981933594, 0.215993408203125, 0.21583967590332032, 0.21577626037597655, 0.21723341369628907, 0.2157936706542969, 0.21567181396484375, 0.21573017883300782, 0.2158602294921875, 0.21591346740722656, 0.44625918579101564, 0.2154598388671875, 0.21547109985351562, 0.215942138671875, 0.2155018310546875, 0.2154967041015625, 0.21560525512695314, 0.21534719848632813, 0.2155335693359375, 0.21555609130859374, 0.21548646545410155, 0.21554893493652344, 0.21556838989257812, 0.2154239959716797, 0.21581210327148437, 0.21597080993652343, 0.2159831085205078, 0.21576191711425782, 0.21572813415527345, 0.21552543640136718, 0.21562054443359374, 0.21571994018554688, 0.21557862854003906, 0.21559500122070313, 0.21570867919921874, 0.21560115051269532, 0.21548851013183593, 0.21564210510253906, 0.21561856079101563, 0.21550079345703124, 0.2157137908935547, 0.21574348449707031, 0.21571481323242186, 0.21626162719726563, 0.21590016174316407, 0.2157127685546875, 0.21570867919921874, 0.21562982177734374, 0.21705215454101562, 0.21569229125976563, 0.21560012817382812, 0.2157373504638672, 0.21579058837890625, 0.21566464233398439, 0.21566361999511718, 0.21576191711425782, 0.21585101318359376, 0.21570867919921874, 0.21573529052734375, 0.21557554626464845, 0.2158305206298828, 0.2157742004394531, 0.2156083221435547, 0.2155704345703125, 0.21572813415527345, 0.21567082214355468, 0.21573321533203124, 0.2156513214111328, 0.21562777709960937, 0.21566265869140624, 0.21600965881347656, 0.21582028198242187, 0.21637837219238282, 0.4470773620605469, 0.2156072998046875, 0.21567079162597655, 0.21566566467285156, 0.21550079345703124, 0.21559091186523438, 0.21565542602539062, 0.21559500122070313, 0.2156697540283203, 0.21570661926269533, 0.21575474548339843, 0.21560426330566407, 0.2156011199951172, 0.21565235900878907, 0.21573939514160156, 0.2161090545654297, 0.2160199737548828, 0.21577523803710938, 0.21591552734375, 0.21555815124511718, 0.2156441650390625, 0.21569842529296876, 0.21553868103027343, 0.21665382385253906, 0.21579571533203126, 0.21555711364746094, 0.2155888671875, 0.21576499938964844, 0.215762939453125, 0.21582028198242187, 0.2158582458496094, 0.2157608337402344, 0.21596368408203126, 0.21594313049316408, 0.2157445068359375, 0.2157639617919922, 0.21581414794921874, 0.21576502990722657, 0.21569635009765625, 0.215762939453125, 0.2158223419189453, 0.21592268371582032, 0.21613157653808593, 0.21586329650878905, 0.21606501770019532, 0.21634661865234375, 0.21581517028808594, 0.21568716430664062, 0.215804931640625, 0.21572096252441406, 0.215689208984375, 0.21578239440917968, 0.21577523803710938, 0.21575167846679688, 0.21579776000976564, 0.21582028198242187, 0.21593600463867188, 0.2157178955078125, 0.21569024658203126, 0.21573324584960937, 0.21574758911132813, 0.21596876525878905, 0.21589605712890625, 0.44737637329101565, 0.21560012817382812, 0.2154035186767578, 0.21556224060058593, 0.21559091186523438, 0.21557760620117186, 0.2155878448486328, 0.2156195831298828, 0.21551922607421875, 0.21568511962890624, 0.21572813415527345, 0.21568818664550782, 0.21565542602539062, 0.21562265014648438, 0.21565440368652344, 0.21580799865722655, 0.21636402893066406, 0.2157178955078125, 0.2158018493652344, 0.21578341674804688, 0.21576499938964844, 0.21569740295410156, 0.21566464233398439, 0.21570970153808594, 0.2157936706542969, 0.21613772583007812, 0.21595237731933595, 0.21639680480957033, 0.21639474487304688, 0.21601791381835939, 0.2160025634765625, 0.21584999084472656, 0.2159288330078125, 0.21633024597167969, 0.21584793090820312, 0.2159964141845703, 
0.21607936096191407, 0.2160148468017578, 0.21588992309570312, 0.21595852661132814, 0.21601689147949218, 0.21620632934570314, 0.21618380737304688, 0.216595458984375, 0.21603021240234374, 0.21584690856933594, 0.21581619262695312, 0.21571994018554688, 0.21570252990722658, 0.21567692565917967, 0.21569638061523438, 0.21571072387695311, 0.2158970947265625, 0.2156810302734375, 0.2158981170654297, 0.2158602294921875, 0.21611932373046874, 0.21608546447753907, 0.21588479614257813, 0.21581004333496093, 0.21603225708007812, 0.21623910522460937, 0.21606809997558593, 0.44815972900390627, 0.21594316101074218, 0.21585305786132813, 0.21599130249023438, 0.21577932739257813, 0.21586534118652342, 0.21586431884765625, 0.21613055419921876, 0.21584895324707032, 0.21639578247070312, 0.21571994018554688, 0.215657470703125, 0.21565951538085937, 0.2156748809814453, 0.21572607421875, 0.21584690856933594, 0.21561138916015626, 0.21668966674804688, 0.2158008270263672, 0.21577113342285156, 0.21580902099609375, 0.2158673858642578, 0.21579673767089844, 0.21592063903808595, 0.21624319458007812, 0.21564927673339843, 0.21612953186035155, 0.21587046813964844, 0.2156810302734375, 0.21577830505371093, 0.2156943359375, 0.2155827178955078, 0.21565542602539062, 0.21575372314453126, 0.21602610778808592, 0.2159052734375, 0.2158039093017578, 0.21560421752929687, 0.21574656677246093, 0.2161326141357422, 0.21603021240234374, 0.21574041748046874, 0.21587251281738282, 0.21601791381835939, 0.21597900390625, 0.2160343017578125, 0.21582028198242187, 0.21569126892089843, 0.2157424621582031, 0.21565235900878907, 0.21565440368652344, 0.2158602294921875, 0.21615206909179688, 0.21636915588378905, 0.2160148468017578, 0.21692825317382813, 0.21595960998535157, 0.21593696594238282, 0.21585101318359376, 0.21585101318359376, 0.21587251281738282, 0.2156134338378906, 0.21572813415527345, 0.44772250366210936, 0.2154598388671875, 0.21554896545410157, 0.21563491821289063, 0.21536154174804686, 0.2154977264404297, 0.21561651611328125, 0.2155335693359375, 0.21565338134765624, 0.21574867248535157, 0.21558367919921875, 0.2155714874267578, 0.21574960327148437, 0.2156390380859375, 0.21553868103027343, 0.21560838317871095, 0.21563385009765626, 0.2156810302734375, 0.2155704345703125, 0.2155704345703125, 0.21559603881835937, 0.21553663635253906, 0.21553152465820313, 0.21560525512695314, 0.21560421752929687, 0.21559397888183593, 0.21667225646972657, 0.21568818664550782, 0.21559910583496095, 0.21570457458496095, 0.2156513214111328, 0.21557554626464845, 0.21559193420410155, 0.21564210510253906, 0.21565338134765624, 0.21612953186035155, 0.2163251190185547, 0.21550592041015626, 0.215910400390625, 0.21579469299316406, 0.2157455291748047, 0.215762939453125, 0.21571891784667968, 0.21559295654296876, 0.21559091186523438, 0.2155847625732422, 0.21548442077636717, 0.21573426818847657, 0.21578341674804688, 0.21575270080566405, 0.21571583557128907, 0.2156134338378906, 0.2155878448486328, 0.21560012817382812, 0.21608242797851562, 0.2159646759033203, 0.2156615753173828, 0.21582438659667968, 0.21574348449707031, 0.215689208984375, 0.21561856079101563, 0.21556121826171876, 0.2156380157470703]",tokens/s,4.563157582132701,,,,,,,, 
-4bit-awq-gemm-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,Qwen/Qwen-7B,Qwen/Qwen-7B,cuda,0,42,,,True,,,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run - report = scenario.run(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run - self.run_model_loading_tracking(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking - backend.load() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load - self.load_transformers_model() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model - self.load_transformers_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights - self.load_transformers_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained - self.pretrained_model = self.automodel_loader.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 551, in from_pretrained - model_class = get_class_from_dynamic_module( - File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 502, in get_class_from_dynamic_module - final_module = get_cached_module_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 327, in get_cached_module_file - modules_needed = check_imports(resolved_module_file) - File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 182, in check_imports - raise ImportError( -ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. 
Run `pip install transformers_stream_generator` - -",qwen,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemm-flash_attention_2,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,microsoft/rho-math-1b-v0.1,microsoft/rho-math-1b-v0.1,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.0,,0.30.1,,,,1.19.2,,,,0.11.1,,,,,,,,,,,,,,,,,,,,,,,,,,,MB,1535.045632,1705.508864,0.0,1059.06176,901.251072,s,10,1.2508666152954102,0.125086661529541,0.0018104018213424833,0.12444574356079102,0.126299910736084,0.12811838493347166,0.12957316429138183,"[0.12993685913085937, 0.12397126770019531, 0.12358041381835938, 0.124216064453125, 0.1243848648071289, 0.12450662231445313, 0.12540995025634766, 0.1256360321044922, 0.1233287353515625, 0.12589580535888673]",tokens/s,2046.5811211976581,kWh,1.4727642404526186e-06,8.07004418028454e-07,5.46138982864209e-06,7.741158487123162e-06,tokens/kWh,33069985.639208503,MB,1535.045632,1705.508864,0.0,1059.06176,931.976704,s,10,74.43358642578124,7.443358642578124,0.00946750701010035,7.4411630859375,7.453821142578125,7.459902514648437,7.464767612304687,"[7.4524697265625, 7.437251953125, 7.43481591796875, 7.44107763671875, 7.44202734375, 7.4486748046875, 7.46598388671875, 7.43750244140625, 7.4325341796875, 7.44124853515625]",tokens/s,8.463921063755025,kWh,8.849523470459816e-05,4.850131187912767e-05,0.0003201225928605506,0.0004571191394442764,tokens/kWh,137819.65042327833,,s,629,75.43097544097903,0.11992205952460892,0.01485084367631047,0.11795148468017579,0.11891920471191407,0.11957084197998047,0.24228786743164063,"[0.1226967010498047, 0.12151910400390625, 0.12056473541259766, 0.11847885131835938, 0.11765760040283203, 0.1177681884765625, 0.11789107513427734, 0.11814604949951171, 0.11932160186767578, 0.11832012939453125, 0.1177927703857422, 0.11863961791992188, 0.117970947265625, 0.11791155242919922, 0.1177927703857422, 0.11780095672607421, 0.11787673950195313, 0.11762687683105469, 0.11790541076660156, 0.11779481506347657, 0.117823486328125, 0.1177896957397461, 0.11771392059326172, 0.11764736175537109, 0.1177733154296875, 0.11770368194580078, 0.11773951721191406, 0.118076416015625, 0.11775590515136719, 0.11770674896240234, 0.11798528289794921, 0.1176780776977539, 0.117775390625, 0.1183753890991211, 0.11995954895019531, 0.11985305786132812, 0.1180579833984375, 0.11781836700439453, 0.11756646728515625, 0.11805900573730468, 0.11770265960693359, 0.11834572601318359, 0.11778355407714844, 0.11779481506347657, 0.11785932922363282, 0.11764838409423828, 0.11807437133789063, 0.11778662109375, 0.11772621154785157, 0.11770982360839843, 0.11952230072021484, 0.11834982299804687, 0.11924479675292969, 0.1183662109375, 0.11791871643066407, 0.11783168029785156, 0.11791667175292969, 0.11793510437011719, 0.11775590515136719, 0.11848908996582032, 0.11845222473144532, 0.11795967864990234, 0.24229991149902344, 0.11855974578857421, 0.11810508728027344, 0.1181112289428711, 0.11786239624023437, 0.11781222534179688, 0.11777536010742187, 0.11783475494384765, 0.1178050537109375, 
0.11783270263671874, 0.11801292419433594, 0.11765145874023437, 0.11773747253417968, 0.11818905639648437, 0.11795558166503907, 0.11861913299560548, 0.11793202972412109, 0.11773951721191406, 0.11792384338378906, 0.11784806060791016, 0.11804672241210938, 0.11847372436523437, 0.11889459228515625, 0.11787366485595703, 0.11765760040283203, 0.11804364776611329, 0.1181839370727539, 0.11801395416259766, 0.11794841766357422, 0.11845529937744141, 0.11901849365234375, 0.11806412506103516, 0.11792793273925781, 0.1178419189453125, 0.11779174041748047, 0.1181470718383789, 0.11795148468017579, 0.11775794982910157, 0.11785420989990235, 0.11785318756103516, 0.11791769409179688, 0.1176842269897461, 0.11780198669433593, 0.11803135681152344, 0.11790847778320312, 0.11800678253173828, 0.11986029052734375, 0.11766470336914063, 0.11762483215332031, 0.11772927856445313, 0.11784601593017578, 0.11774259185791015, 0.1181296615600586, 0.11775794982910157, 0.11769548797607422, 0.11781222534179688, 0.11974041748046875, 0.11866316986083984, 0.11849625396728515, 0.11808665466308593, 0.11798220825195313, 0.11786956787109375, 0.11778559875488281, 0.2420991973876953, 0.11768319702148437, 0.11785318756103516, 0.1178757095336914, 0.11764019012451171, 0.11813990020751954, 0.11782553863525391, 0.11776204681396485, 0.11799756622314453, 0.11830271911621094, 0.118508544921875, 0.11805184173583984, 0.11785523223876954, 0.11794841766357422, 0.11769241333007813, 0.11777126312255859, 0.11797299194335938, 0.11789926147460937, 0.1177528305053711, 0.11770880126953125, 0.11770674896240234, 0.11790438079833984, 0.1180794906616211, 0.11948953247070312, 0.11772211456298828, 0.11786239624023437, 0.11781734466552735, 0.11803340911865234, 0.11814604949951171, 0.11789823913574218, 0.11793920135498047, 0.11805184173583984, 0.11791667175292969, 0.11798834991455077, 0.11809382629394531, 0.1178818588256836, 0.11811328125, 0.11793817901611328, 0.11791053009033203, 0.11808255767822265, 0.11819929504394532, 0.11768319702148437, 0.11867545318603516, 0.11808870697021484, 0.11771084594726562, 0.11781324768066406, 0.11782860565185548, 0.11991654205322265, 0.11770674896240234, 0.117823486328125, 0.11866214752197266, 0.11856486511230468, 0.11797299194335938, 0.11779686737060546, 0.11797913360595703, 0.1178941421508789, 0.11789619445800781, 0.11790847778320312, 0.11780812835693359, 0.11815628814697265, 0.11777945709228516, 0.11793202972412109, 0.11774361419677734, 0.24225689697265626, 0.11883827209472657, 0.11843174743652343, 0.11770162963867188, 0.11769651031494141, 0.11803648376464844, 0.11810921478271484, 0.11791152191162109, 0.11775180816650391, 0.1179842529296875, 0.11786649322509765, 0.11789823913574218, 0.11888333129882812, 0.11870105743408203, 0.11781120300292969, 0.11810918426513672, 0.11798937225341796, 0.11783577728271484, 0.11785113525390625, 0.11797196960449219, 0.11792281341552735, 0.11785011291503907, 0.11803040313720703, 0.11771692657470703, 0.117897216796875, 0.11816140747070313, 0.11904307556152344, 0.11791974639892579, 0.11789311981201171, 0.11902464294433594, 0.11833856201171875, 0.12005171203613281, 0.11821363067626953, 0.11774566650390625, 0.11858534240722657, 0.11854847717285157, 0.11799961853027344, 0.11858636474609376, 0.11794432067871094, 0.11810918426513672, 0.11841535949707031, 0.11808563232421875, 0.11770777893066406, 0.11799142456054687, 0.11910041809082031, 0.11799552154541015, 0.11794534301757813, 0.11774361419677734, 0.11787366485595703, 0.11796173095703125, 0.11805900573730468, 0.11948339080810547, 0.1178265609741211, 
0.11779071807861329, 0.11765350341796875, 0.11782860565185548, 0.11791667175292969, 0.11784909057617188, 0.11769856262207032, 0.11790643310546875, 0.11776000213623047, 0.11783065795898437, 0.11767705535888671, 0.2425384979248047, 0.11768013000488281, 0.11795558166503907, 0.11765964508056641, 0.11810201263427735, 0.11820953369140624, 0.1178265609741211, 0.11962060546875, 0.11832319641113281, 0.11826278686523438, 0.11800371551513672, 0.11801292419433594, 0.11777126312255859, 0.11808979034423828, 0.1176882553100586, 0.11781632232666016, 0.11816345977783203, 0.11939839935302735, 0.11851058959960938, 0.11840716552734375, 0.11813273620605469, 0.11791053009033203, 0.1183477783203125, 0.11780403137207031, 0.11925299072265624, 0.11806310272216797, 0.11794226837158203, 0.11788288116455078, 0.11790847778320312, 0.11780812835693359, 0.11756851196289063, 0.11803648376464844, 0.11778457641601563, 0.11857817840576172, 0.11874816131591796, 0.11857202911376953, 0.11783372497558593, 0.11794432067871094, 0.11835801696777344, 0.11801395416259766, 0.11805388641357421, 0.11840716552734375, 0.11800678253173828, 0.11772723388671875, 0.11773849487304687, 0.11781427001953125, 0.11807030487060546, 0.11788591766357422, 0.1177896957397461, 0.11810610961914063, 0.11781529235839844, 0.118150146484375, 0.11891506958007812, 0.11792588806152343, 0.1180979232788086, 0.11776000213623047, 0.11800064086914062, 0.12033126068115234, 0.11792998504638671, 0.1180200958251953, 0.1176995849609375, 0.11785523223876954, 0.11783270263671874, 0.24298291015625, 0.1177528305053711, 0.11772723388671875, 0.11784909057617188, 0.11783782196044922, 0.11776409912109374, 0.1176258544921875, 0.11822592163085938, 0.11832115173339844, 0.11806719970703125, 0.11768729400634766, 0.11852082824707032, 0.11796377563476562, 0.11766067504882813, 0.11783372497558593, 0.11778457641601563, 0.11765452575683594, 0.11768319702148437, 0.11780812835693359, 0.11886284637451172, 0.11925299072265624, 0.11837849426269531, 0.11823616027832032, 0.11786137390136718, 0.11829452514648438, 0.11799961853027344, 0.11810099029541016, 0.11921817779541016, 0.11947929382324218, 0.11808767700195312, 0.11842867279052735, 0.11818086242675781, 0.11845017242431641, 0.11814604949951171, 0.11810406494140625, 0.11812249755859375, 0.118150146484375, 0.11768627166748047, 0.12074086761474609, 0.11790029144287109, 0.1178757095336914, 0.11833753967285156, 0.11824742126464843, 0.1190297622680664, 0.11828121948242187, 0.11830067443847657, 0.11778559875488281, 0.1179842529296875, 0.11804879760742187, 0.11841123199462891, 0.11772108459472656, 0.11835801696777344, 0.11804057312011719, 0.11791462707519532, 0.11991449737548829, 0.1183846435546875, 0.11778867340087891, 0.11815936279296875, 0.11795558166503907, 0.11824639892578125, 0.11795558166503907, 0.11841535949707031, 0.11960320281982421, 0.2442239990234375, 0.11785625457763672, 0.11795455932617188, 0.1188362274169922, 0.1178050537109375, 0.11798834991455077, 0.11827916717529297, 0.11871334075927735, 0.11938304138183593, 0.1188136978149414, 0.11833753967285156, 0.11944652557373046, 0.11828736114501953, 0.1178757095336914, 0.1189969940185547, 0.11945779418945313, 0.11926118469238281, 0.11858534240722657, 0.1181296615600586, 0.1187041244506836, 0.11901542663574219, 0.11876454162597656, 0.11830989074707031, 0.12047666931152344, 0.11891817474365235, 0.11892630767822265, 0.11849318695068359, 0.11825049591064453, 0.1181839370727539, 0.11833241271972657, 0.11835699462890625, 0.11903282928466796, 0.11840614318847656, 0.11857100677490234, 
0.11871027374267579, 0.11851980590820313, 0.11851673889160157, 0.11861606597900391, 0.11881267547607421, 0.1186324462890625, 0.12009369659423828, 0.11938201904296875, 0.11857305908203125, 0.11787980651855469, 0.11817676544189454, 0.11832524871826172, 0.11826790618896485, 0.11835801696777344, 0.120447998046875, 0.11851776123046875, 0.11879526519775391, 0.11777536010742187, 0.11783372497558593, 0.11804057312011719, 0.11810304260253907, 0.11771186828613281, 0.11789823913574218, 0.11804160308837891, 0.11768934631347656, 0.11759923553466797, 0.11778559875488281, 0.11754393768310546, 0.11777740478515625, 0.24373759460449218, 0.11779174041748047, 0.1175920639038086, 0.11776306915283204, 0.1174999008178711, 0.11786547088623046, 0.11793408203125, 0.11796479797363281, 0.11785113525390625, 0.11759923553466797, 0.12032717132568359, 0.11938098907470703, 0.11796991729736328, 0.11770674896240234, 0.11779891204833984, 0.11797913360595703, 0.11979264068603515, 0.11813990020751954, 0.11899801635742188, 0.11855462646484374, 0.11808153533935548, 0.1180231704711914, 0.11797913360595703, 0.11860889434814453, 0.11787366485595703, 0.11785113525390625, 0.11788492584228516, 0.11816242980957031, 0.11869491577148437, 0.1198888931274414, 0.11808051300048829, 0.11794739532470704, 0.11787468719482422, 0.11797196960449219, 0.11788288116455078, 0.11782246398925782, 0.11781017303466797, 0.11793612670898437, 0.11752345275878906, 0.11755213165283203, 0.11759308624267578, 0.1179535369873047, 0.11776102447509766, 0.11792486572265624, 0.11769344329833985, 0.11778150177001953, 0.11779993438720703, 0.11772313690185547, 0.11796889495849609, 0.11796275329589843, 0.11774463653564453, 0.11760639953613282, 0.11799858856201172, 0.11777126312255859, 0.11769139099121094, 0.1178757095336914, 0.11771699523925781, 0.1177733154296875, 0.11792588806152343, 0.11762687683105469, 0.11798834991455077, 0.11797913360595703, 0.11765555572509766, 0.24334541320800782, 0.11772621154785157, 0.11778457641601563, 0.11770777893066406, 0.11806208038330078, 0.11857817840576172, 0.11794944000244141, 0.11780403137207031, 0.1176094741821289, 0.11765350341796875, 0.11788390350341797, 0.11832118225097656, 0.1179985580444336, 0.11776409912109374, 0.11778253173828125, 0.11771392059326172, 0.11762995147705078, 0.11899801635742188, 0.11768831634521484, 0.11782860565185548, 0.1177343978881836, 0.11791974639892579, 0.11767910766601562, 0.1176975326538086, 0.11781222534179688, 0.1176842269897461, 0.11812044525146484, 0.11786444854736328, 0.11779481506347657, 0.11779280090332031, 0.11815216064453125, 0.11984896087646485, 0.11806412506103516, 0.11796889495849609, 0.11819007873535156, 0.11776102447509766, 0.11791974639892579, 0.11807334136962891, 0.1183795166015625, 0.11832832336425782, 0.11772211456298828, 0.11752243041992187, 0.1177364501953125, 0.117718017578125, 0.11764736175537109, 0.11770265960693359, 0.11787673950195313, 0.11846963500976562, 0.11805900573730468, 0.11839180755615235, 0.11926834869384766, 0.11823616027832032, 0.11819110107421875, 0.11771186828613281, 0.11779891204833984, 0.11797606658935547, 0.11803033447265625, 0.11784806060791016, 0.11762380981445313, 0.11781938934326172, 0.11753062438964844, 0.1183846435546875, 0.11802214050292968, 0.24430592346191407, 0.11781120300292969, 0.11784294128417969, 0.11759410858154297, 0.11747020721435547, 0.11764940643310547, 0.11776000213623047, 0.1176657943725586, 0.11789209747314452, 0.11810304260253907, 0.11752448272705078, 0.11766681671142579, 0.11787366485595703, 0.11762278747558594, 0.11758796691894531, 
0.11786854553222656, 0.11814297485351563, 0.11897650909423828, 0.11819315338134766, 0.11778355407714844, 0.11794944000244141, 0.1182208023071289, 0.1181665267944336, 0.11872051239013671, 0.11848806762695313, 0.11986124420166015, 0.12019916534423829, 0.11816754913330078, 0.1177927703857422, 0.11773849487304687, 0.11777740478515625, 0.11786956787109375, 0.11777843475341797, 0.11820543670654297, 0.1184686050415039, 0.11819417572021484, 0.1180917739868164, 0.11786137390136718, 0.11804774475097657, 0.11794329833984375, 0.11819213104248047, 0.11829759979248047, 0.11792998504638671, 0.11812147521972656, 0.11761766052246093, 0.11775590515136719, 0.11777228546142578, 0.11778253173828125, 0.11822898864746094, 0.11784601593017578, 0.11788082885742188, 0.11892332458496094, 0.11907987213134766, 0.11818086242675781, 0.11776000213623047, 0.11779891204833984, 0.1183272933959961, 0.11851878356933594, 0.11775590515136719, 0.11797401428222656, 0.11782144165039063, 0.11866828918457031, 0.11815936279296875]",tokens/s,8.338749384093028,,,,,,,, -4bit-awq-gemm-flash_attention_2,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,01-ai/Yi-6B,,cuda,0,42,,,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.0,217063f5c507ed7cc255df7e1f64c4333a0b4dfe,4.40.2,,0.30.1,,,,1.19.2,,,,0.10.0,,,,,,,,,,,,,,,,,,,,,,,,,,,MB,1608.957952,5448.925184,0.0,4802.47808,4489.12128,s,10,5.066485656738281,0.5066485656738281,0.0011948895817140703,0.5068220977783202,0.5079840667724609,0.5080279983520508,0.5080631436157227,"[0.5075723266601563, 0.5061219482421875, 0.504515380859375, 0.5053372497558594, 0.5061441345214843, 0.5055963745117188, 0.5075000610351562, 0.5076519470214844, 0.5080719299316406, 0.5079743041992187]",tokens/s,505.2812093912223,kWh,5.964234835571714e-06,3.2679267622643234e-06,2.7223952334698433e-05,3.6456113932534466e-05,tokens/kWh,7022141.758547072,MB,1610.637312,5448.925184,0.0,4802.47808,4557.793792,s,10,299.1010234375,29.910102343749998,0.01023537902845777,29.913578125,29.921135546875,29.9219037109375,29.9225182421875,"[29.899591796875, 29.901322265625, 29.913013671875, 29.890712890625, 29.920396484375, 29.922671875, 29.92096484375, 29.9152421875, 29.90296484375, 29.914142578125]",tokens/s,2.106311749654192,kWh,0.00035296389131082433,0.00019345485010443554,0.0015722178549955057,0.0021186365964107655,tokens/kWh,29736.104864198915,,s,629,303.18425177001956,0.4820099392210167,0.06023141623810366,0.4746004333496094,0.4757639221191406,0.4761423828125,0.98077423828125,"[0.474365966796875, 0.47431475830078124, 0.4748533630371094, 0.47396148681640626, 0.47435162353515625, 0.47414068603515624, 0.47463320922851565, 0.47518923950195313, 0.47532952880859375, 0.4745502624511719, 0.4740556945800781, 0.47391131591796876, 0.47444989013671873, 0.47430245971679685, 0.4739420166015625, 0.4737402954101563, 0.473987060546875, 0.47486770629882813, 0.4739993591308594, 0.475114501953125, 0.47402597045898437, 0.4752353210449219, 0.47403109741210936, 0.4739563598632813, 0.4750960693359375, 0.4755916748046875, 0.4742686767578125, 0.47484210205078126, 0.4754851989746094, 0.4751022033691406, 
0.4755619812011719, 0.4745973815917969, 0.47523736572265624, 0.474829833984375, 0.474850341796875, 0.47421026611328126, 0.4759234619140625, 0.47534796142578123, 0.4750888977050781, 0.4741591186523437, 0.4745441284179687, 0.473744384765625, 0.4741099548339844, 0.473807861328125, 0.47429119873046877, 0.4740597839355469, 0.4742881164550781, 0.47400347900390627, 0.4744253234863281, 0.47619073486328123, 0.4757626953125, 0.4742860717773438, 0.47489535522460935, 0.47489950561523436, 0.47460345458984377, 0.4742686767578125, 0.4739921875, 0.4739420166015625, 0.47433624267578123, 0.4748810119628906, 0.4744110107421875, 0.47424615478515625, 0.980806640625, 0.4736993408203125, 0.47429733276367186, 0.4741478271484375, 0.4741099548339844, 0.47420416259765624, 0.4742000732421875, 0.47553228759765626, 0.4755077209472656, 0.4747817077636719, 0.47485235595703124, 0.4747202453613281, 0.4752384033203125, 0.4742318115234375, 0.4740137023925781, 0.4751011962890625, 0.47453387451171875, 0.4759981994628906, 0.4747898864746094, 0.4742266845703125, 0.47406185913085935, 0.4743239440917969, 0.4744366149902344, 0.47420416259765624, 0.47499981689453125, 0.47433114624023437, 0.47446322631835935, 0.47578317260742187, 0.47489739990234375, 0.4754503784179688, 0.4746977233886719, 0.47437005615234373, 0.4743680114746094, 0.47590194702148436, 0.47595211791992187, 0.47569818115234375, 0.474777587890625, 0.474113037109375, 0.4756346740722656, 0.4741652526855469, 0.47452056884765625, 0.4742778930664063, 0.474176513671875, 0.47388363647460935, 0.473754638671875, 0.47433624267578123, 0.474029052734375, 0.47412841796875, 0.4740433654785156, 0.47558758544921875, 0.4756910400390625, 0.4747693786621094, 0.47467111206054685, 0.4742625427246094, 0.4742799377441406, 0.473807861328125, 0.474387451171875, 0.47398092651367185, 0.4740495300292969, 0.47483187866210935, 0.4742236022949219, 0.4746342468261719, 0.4739717102050781, 0.98069091796875, 0.4739993591308594, 0.4743341979980469, 0.4740321350097656, 0.4741048278808594, 0.47378021240234375, 0.4756019287109375, 0.4752998352050781, 0.47476327514648436, 0.47476531982421877, 0.47458303833007814, 0.474603515625, 0.4745164794921875, 0.47447552490234374, 0.4745482177734375, 0.47400344848632814, 0.4754913330078125, 0.47445709228515626, 0.4743076171875, 0.4742041320800781, 0.47417855834960937, 0.4741171264648438, 0.47425332641601564, 0.47636785888671873, 0.4741949462890625, 0.4747110290527344, 0.4743659973144531, 0.47549435424804687, 0.47543499755859375, 0.4749854736328125, 0.4751646728515625, 0.4754985046386719, 0.4756643981933594, 0.475114501953125, 0.4750315551757813, 0.47576882934570314, 0.47484927368164065, 0.47597467041015623, 0.4747806701660156, 0.47479296875, 0.47538177490234373, 0.4749906005859375, 0.47488409423828126, 0.4746322021484375, 0.4749168701171875, 0.47477862548828126, 0.4759582824707031, 0.47688909912109373, 0.47617330932617186, 0.47571865844726563, 0.4744898681640625, 0.4741591186523437, 0.47446939086914064, 0.4745513000488281, 0.4750899047851562, 0.4740403137207031, 0.474461181640625, 0.4741929016113281, 0.47546676635742186, 0.47404339599609374, 0.474176513671875, 0.47427685546875, 0.47412841796875, 0.979821533203125, 0.4742574157714844, 0.4741734313964844, 0.47405874633789064, 0.47411508178710937, 0.47479910278320314, 0.476015625, 0.47458816528320313, 0.4742225952148437, 0.47418679809570313, 0.4743782043457031, 0.47417138671875, 0.4748810119628906, 0.47444174194335936, 0.4738058166503906, 0.4744478759765625, 0.47413861083984377, 0.47446221923828125, 0.47416116333007813, 
0.47388262939453124, 0.47408843994140626, 0.47393484497070315, 0.4740771789550781, 0.47400857543945313, 0.47406695556640627, 0.4738518981933594, 0.4751022033691406, 0.4763402099609375, 0.47449191284179687, 0.4742512512207031, 0.4740157470703125, 0.4743352355957031, 0.47394509887695313, 0.473849853515625, 0.47430963134765625, 0.4738713684082031, 0.47423590087890627, 0.4743987121582031, 0.47423590087890627, 0.4738887634277344, 0.4744407043457031, 0.4742297668457031, 0.4740741271972656, 0.47423590087890627, 0.47649996948242185, 0.4742901611328125, 0.4741949462890625, 0.47464346313476563, 0.476073974609375, 0.47489022827148436, 0.4758292541503906, 0.47497113037109373, 0.47431890869140625, 0.4741651916503906, 0.4740843505859375, 0.47435162353515625, 0.47437310791015624, 0.4743291015625, 0.47532852172851564, 0.474313720703125, 0.474640380859375, 0.47469473266601564, 0.4747908630371094, 0.9814691772460937, 0.4747683715820312, 0.4747376708984375, 0.4743690185546875, 0.4749127807617187, 0.4760391540527344, 0.4749434814453125, 0.4753387451171875, 0.4744488830566406, 0.4743935852050781, 0.4743935852050781, 0.47374334716796873, 0.47430349731445315, 0.47411404418945313, 0.47499264526367185, 0.47437823486328123, 0.4740597839355469, 0.4755292053222656, 0.474429443359375, 0.4746004333496094, 0.47424716186523436, 0.4757176208496094, 0.474365966796875, 0.4744038391113281, 0.47409765625, 0.47461581420898435, 0.47587124633789063, 0.47467724609375, 0.47449700927734373, 0.4752076721191406, 0.47526400756835935, 0.4758815002441406, 0.4752086791992188, 0.4745318298339844, 0.4740362548828125, 0.47401980590820314, 0.474777587890625, 0.47447760009765627, 0.47450723266601563, 0.4745646057128906, 0.4748257141113281, 0.4746987609863281, 0.4746670227050781, 0.4746844177246094, 0.4750899047851562, 0.474967041015625, 0.4748011474609375, 0.47588555908203123, 0.47625112915039064, 0.4750325622558594, 0.4751790161132812, 0.47611801147460936, 0.4751523742675781, 0.4750223388671875, 0.4746065979003906, 0.4746875, 0.47461068725585936, 0.47841998291015625, 0.4753489990234375, 0.47590911865234375, 0.4750417785644531, 0.474998779296875, 0.47562240600585937, 0.9817487182617187, 0.47520870971679685, 0.4753449096679688, 0.47469158935546873, 0.4756121520996094, 0.47580160522460935, 0.47535000610351563, 0.47563983154296874, 0.47536431884765623, 0.4756459655761719, 0.4758149108886719, 0.47476327514648436, 0.47486669921875, 0.4744960021972656, 0.4750315551757813, 0.4745902099609375, 0.4752404479980469, 0.4754586181640625, 0.47481234741210937, 0.47426150512695314, 0.4745635986328125, 0.4747796630859375, 0.4742758483886719, 0.4749291381835937, 0.4753070068359375, 0.4757319641113281, 0.47559988403320314, 0.4744366149902344, 0.4746649475097656, 0.47449087524414063, 0.47499365234375, 0.47450009155273437, 0.47397579956054686, 0.47451034545898435, 0.47416116333007813, 0.4750878601074219, 0.47417752075195313, 0.47466085815429687, 0.4771686401367187, 0.4746567687988281, 0.47440997314453126, 0.4744816589355469, 0.4743618469238281, 0.47448269653320313, 0.47479705810546874, 0.47491070556640624, 0.47615179443359373, 0.4757442626953125, 0.4753827819824219, 0.4749906005859375, 0.4746219482421875, 0.4746925964355469, 0.4743485412597656, 0.4746967163085937, 0.47494552612304686, 0.4743270263671875, 0.47532647705078124, 0.47462603759765626, 0.4751585388183594, 0.4746270751953125, 0.47493734741210936, 0.47487387084960936, 0.4749823913574219, 0.9828372192382813, 0.4750807189941406, 0.4744366149902344, 0.4755558471679687, 0.47547698974609376, 
0.47533465576171874, 0.4759132080078125, 0.4759879760742188, 0.47586407470703124, 0.4761640930175781, 0.4759255065917969, 0.4748451843261719, 0.474919921875, 0.476189697265625, 0.4745257263183594, 0.4750560913085938, 0.475325439453125, 0.474986572265625, 0.4742686157226563, 0.4745994262695313, 0.4747817077636719, 0.4749609069824219, 0.4750120849609375, 0.47644467163085935, 0.47719833374023435, 0.47524453735351563, 0.4750530700683594, 0.4744356384277344, 0.47469769287109376, 0.4745891418457031, 0.47447756958007814, 0.4750274658203125, 0.47433831787109376, 0.47436083984375, 0.4751431579589844, 0.4747120666503906, 0.474977294921875, 0.47543295288085935, 0.4742215576171875, 0.47395327758789063, 0.474292236328125, 0.473849853515625, 0.4742840270996094, 0.4745062255859375, 0.47460455322265627, 0.475863037109375, 0.4749680786132813, 0.4751790161132812, 0.4745482177734375, 0.47489227294921876, 0.4745994873046875, 0.47445599365234375, 0.47442022705078124, 0.47415603637695314, 0.47442739868164063, 0.474777587890625, 0.47418060302734377, 0.4745369567871094, 0.47485952758789063, 0.4748001708984375, 0.474187744140625, 0.47446221923828125, 0.47530703735351565, 0.9833133544921875, 0.4750899047851562, 0.4764405822753906, 0.47526705932617186, 0.4746055603027344, 0.47418368530273436, 0.4742369384765625, 0.4743475341796875, 0.474524658203125, 0.474323974609375, 0.4742778930664063, 0.47433010864257813, 0.4754462585449219, 0.47468850708007815, 0.474829833984375, 0.4747304992675781, 0.47638424682617186, 0.4761231994628906, 0.47679379272460937, 0.4750878601074219, 0.4745164794921875, 0.47426150512695314, 0.4747745361328125, 0.47573504638671876, 0.4750520324707031, 0.474893310546875, 0.4743792724609375, 0.4743096923828125, 0.474462158203125, 0.47433831787109376, 0.4749609069824219, 0.4743250427246094, 0.4742419738769531, 0.47488204956054686, 0.47455438232421876, 0.4743075866699219, 0.4756408386230469, 0.47438339233398436, 0.4743741760253906, 0.47458706665039063, 0.47492404174804687, 0.47545547485351564, 0.47435980224609375, 0.47457485961914064, 0.4752353210449219, 0.47622964477539065, 0.47467416381835936, 0.47408126831054687, 0.47500799560546875, 0.47463320922851565, 0.4743915405273437, 0.4742758483886719, 0.47516363525390626, 0.47539404296875, 0.4750796813964844, 0.4738518981933594, 0.4742266845703125, 0.477939697265625, 0.474281982421875, 0.47408126831054687, 0.4742758483886719, 0.4744366149902344, 0.4743372802734375, 0.9826785278320312, 0.4751769714355469, 0.47564697265625, 0.47452569580078124, 0.4744765319824219, 0.47413861083984377, 0.47415090942382815, 0.4741754760742187, 0.4749885559082031, 0.47412017822265623, 0.4737423400878906, 0.4744488830566406, 0.4739051513671875, 0.4738478088378906, 0.4745400390625, 0.47486770629882813, 0.47466085815429687, 0.47589376831054686, 0.47535205078125, 0.4755548095703125, 0.4743424072265625, 0.47426663208007813, 0.47488409423828126, 0.47611911010742186, 0.47500384521484373, 0.4744591369628906, 0.47539813232421874, 0.47478475952148436, 0.47490567016601565, 0.47446112060546874, 0.475104248046875, 0.4745164794921875, 0.47489535522460935, 0.47453900146484373, 0.4745594787597656, 0.47434445190429686, 0.47417752075195313, 0.47448269653320313, 0.4744724731445312, 0.4748072509765625, 0.47473458862304685, 0.4740843505859375, 0.4747591552734375, 0.4748912658691406, 0.4760514526367188, 0.47554150390625, 0.4754565124511719, 0.47558041381835936, 0.4751912841796875, 0.47503564453125, 0.47414990234375, 0.4742850646972656, 0.47414886474609375, 0.47481036376953123, 0.47404852294921873, 
0.4742584228515625, 0.4739358825683594, 0.4744171447753906, 0.4741263427734375, 0.47414169311523435, 0.4739491882324219, 0.47383755493164065, 0.4742522888183594, 0.983693359375, 0.47566241455078123, 0.47446722412109377, 0.4746977233886719, 0.4751247253417969, 0.47474688720703123, 0.47455032348632814, 0.4745143737792969, 0.474745849609375, 0.4746485900878906, 0.4754575500488281, 0.47418881225585935, 0.474029052734375, 0.47392666625976565, 0.4740771789550781, 0.4740321350097656, 0.4739317626953125, 0.4744591369628906, 0.47454925537109377, 0.47516058349609375, 0.47410687255859374, 0.4743935852050781, 0.4761661376953125, 0.4749885559082031, 0.47419699096679685, 0.47460250854492186, 0.47660543823242185, 0.4745430908203125, 0.4743280639648437, 0.47440179443359376, 0.4743813171386719, 0.47454721069335937, 0.4742799377441406, 0.4744407043457031, 0.4743854064941406, 0.47423898315429686, 0.47517593383789064, 0.4753879089355469, 0.47501516723632814, 0.47492202758789065, 0.4744867858886719, 0.47409762573242187, 0.47553741455078125, 0.4761282653808594, 0.47627365112304687, 0.47592141723632814, 0.474745849609375, 0.47454931640625, 0.47427374267578126, 0.47429937744140627, 0.47435775756835935, 0.47429937744140627, 0.475188232421875, 0.4755568542480469, 0.4755281982421875, 0.4754483337402344, 0.4758005676269531, 0.47536639404296877, 0.4753592224121094, 0.4763494567871094, 0.47446011352539064, 0.4744376220703125, 0.4739686279296875]",tokens/s,2.074646015839662,,,,,,main,False,False -4bit-awq-gemm-flash_attention_2,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,EleutherAI/gpt-neo-2.7B,,cuda,0,42,,,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.0,217063f5c507ed7cc255df7e1f64c4333a0b4dfe,4.40.2,,0.30.1,,,,1.19.2,,,,0.10.0,,,,,,,,,,,,,,,,,,,,,,,,,,,MB,2234.441728,2932.342784,0.0,2285.89568,2082.575872,s,10,2.433214202880859,0.24332142028808593,0.0008617435568583196,0.24325077056884764,0.24459763488769531,0.24462198333740234,0.24464146209716797,"[0.24380015563964844, 0.24204917907714843, 0.24258213806152343, 0.24336614990234376, 0.24313539123535155, 0.24226588439941407, 0.24288755798339845, 0.24388919067382814, 0.24464633178710937, 0.24459222412109374]",tokens/s,1052.1063032465574,kWh,2.856464352872637e-06,1.5652168180297718e-06,1.2995936322662033e-05,1.7417617493564442e-05,tokens/kWh,14697762.199370166,MB,2234.441728,2959.60576,0.0,2313.158656,2180.684288,s,10,139.734884765625,13.9734884765625,0.01146198033243705,13.9710908203125,13.98807626953125,13.992370166015625,13.995805283203126,"[13.9871220703125, 13.9770771484375, 13.9966640625, 13.980609375, 13.95928515625, 13.96146875, 13.9670087890625, 13.9751728515625, 13.965376953125, 13.965099609375]",tokens/s,4.508537728833344,kWh,0.00016483188493384256,9.03412226583766e-05,0.0007461166431891384,0.0010012897507813577,tokens/kWh,62918.85036358144,,s,629,141.7022607421874,0.22528181358058424,0.02898003334479797,0.22168063354492187,0.2224162872314453,0.22265180053710937,0.4647246643066406,"[0.22235647583007812, 0.22139903259277344, 0.22141644287109374, 0.22147891235351563, 0.2213693389892578, 0.22170726013183595, 
0.22163967895507813, 0.2214256591796875, 0.22199398803710937, 0.22159564208984375, 0.22243431091308594, 0.22190284729003906, 0.22202674865722657, 0.2210723876953125, 0.22141133117675782, 0.22209945678710938, 0.22192127990722657, 0.2224189453125, 0.22194073486328125, 0.22234317016601562, 0.22204518127441406, 0.22228582763671875, 0.2221793212890625, 0.22186904907226562, 0.222603271484375, 0.22207487487792968, 0.22233599853515626, 0.22205238342285155, 0.22235133361816406, 0.2235494384765625, 0.22263296508789063, 0.22236671447753906, 0.22127410888671875, 0.2215219268798828, 0.2213519287109375, 0.22189056396484375, 0.22161100769042968, 0.22196940612792967, 0.22203187561035156, 0.22196531677246092, 0.22140007019042968, 0.2213396453857422, 0.2219325408935547, 0.2218577880859375, 0.22200933837890624, 0.22186495971679687, 0.22244557189941405, 0.22216499328613282, 0.22224179077148437, 0.221955078125, 0.2217943115234375, 0.2224179229736328, 0.22189776611328124, 0.2225540771484375, 0.22166732788085938, 0.22277632141113282, 0.22537420654296875, 0.22221209716796875, 0.22146047973632813, 0.22176870727539064, 0.22216294860839844, 0.22160383605957032, 0.46420379638671877, 0.2221997833251953, 0.22188236999511718, 0.22198272705078126, 0.2217697296142578, 0.22201139831542968, 0.2215034942626953, 0.22119526672363282, 0.22219879150390626, 0.22249369812011718, 0.22189773559570314, 0.22246092224121095, 0.22249574279785156, 0.22148197937011718, 0.22124134826660155, 0.221802490234375, 0.22128536987304687, 0.2213744659423828, 0.22234214782714845, 0.22216806030273437, 0.22141746520996095, 0.22162637329101562, 0.22157516479492187, 0.22105191040039063, 0.2215782470703125, 0.2218741760253906, 0.2216898498535156, 0.22149427795410156, 0.2215536651611328, 0.2222725067138672, 0.22197760009765624, 0.22207795715332032, 0.2216847381591797, 0.22156083679199218, 0.2215741424560547, 0.2220595245361328, 0.22160179138183594, 0.22255001831054688, 0.22156492614746093, 0.22203904724121093, 0.22160179138183594, 0.22140109252929688, 0.22128640747070313, 0.22133660888671874, 0.2213908233642578, 0.22154035949707032, 0.22508236694335937, 0.22183013916015626, 0.22202879333496095, 0.2222407684326172, 0.2216785888671875, 0.22169293212890626, 0.22160691833496093, 0.22218649291992187, 0.2214686737060547, 0.22141439819335937, 0.222487548828125, 0.2225991668701172, 0.22195301818847657, 0.22147276306152344, 0.22199909973144533, 0.22176666259765626, 0.22219879150390626, 0.4654428100585937, 0.2218987579345703, 0.22210765075683594, 0.2219008026123047, 0.22165504455566407, 0.22182707214355468, 0.22211891174316406, 0.22192640686035156, 0.221770751953125, 0.22130482482910158, 0.22165913391113282, 0.22189263916015625, 0.2217932434082031, 0.22167552185058595, 0.22172467041015625, 0.22150143432617186, 0.22293606567382812, 0.22162535095214844, 0.2228019256591797, 0.22176255798339845, 0.22248243713378907, 0.22172979736328124, 0.22229402160644532, 0.22310092163085937, 0.22198988342285156, 0.22210150146484375, 0.22238616943359374, 0.22228480529785155, 0.22175334167480468, 0.22205337524414062, 0.22236058044433593, 0.2222407684326172, 0.2220369873046875, 0.2219929656982422, 0.2221854705810547, 0.2224127960205078, 0.22233395385742188, 0.22231858825683593, 0.22264627075195312, 0.22218751525878908, 0.22241587829589843, 0.22255718994140625, 0.2218741760253906, 0.22225920104980468, 0.22183935546875, 0.22192332458496095, 0.22207795715332032, 0.223752197265625, 0.22219673156738282, 0.22203289794921874, 0.22212197875976564, 0.2223206329345703, 0.22238616943359374, 
0.22253575134277342, 0.22210047912597655, 0.22248442077636718, 0.2219356231689453, 0.2220912628173828, 0.22202572631835937, 0.22224896240234376, 0.22215168762207033, 0.22308761596679688, 0.22262168884277345, 0.46642996215820315, 0.22230323791503906, 0.22220390319824218, 0.22247935485839843, 0.22223155212402343, 0.22242303466796876, 0.22273023986816406, 0.2226411590576172, 0.22237493896484375, 0.22229808044433594, 0.2220185546875, 0.22259507751464844, 0.22249984741210938, 0.22244659423828125, 0.22167654418945312, 0.22273228454589844, 0.2222581787109375, 0.22126182556152343, 0.2230200653076172, 0.22316950988769532, 0.22224179077148437, 0.22140518188476563, 0.22184857177734374, 0.22160281372070312, 0.22178099060058593, 0.22235443115234374, 0.2213928985595703, 0.22145535278320314, 0.22126591491699218, 0.22138368225097657, 0.22140518188476563, 0.22138983154296876, 0.22134886169433593, 0.22174208068847656, 0.22144720458984374, 0.22132118225097655, 0.22146354675292967, 0.2214001007080078, 0.22144508361816406, 0.2214297637939453, 0.22163763427734376, 0.221517822265625, 0.22244044494628906, 0.22324838256835938, 0.22501274108886718, 0.22167861938476563, 0.22164067077636718, 0.2215004119873047, 0.22143283081054688, 0.22134579467773438, 0.22140518188476563, 0.22115122985839844, 0.22140415954589843, 0.2217195587158203, 0.22167449951171875, 0.2215034942626953, 0.22143795776367187, 0.2215854034423828, 0.22149530029296874, 0.22169497680664063, 0.22140931701660158, 0.22172157287597657, 0.22152806091308594, 0.4667914123535156, 0.22145228576660156, 0.2217512969970703, 0.2216468505859375, 0.22147789001464843, 0.221444091796875, 0.22145126342773438, 0.22144717407226563, 0.22157005310058595, 0.22128947448730468, 0.22147071838378907, 0.22156903076171874, 0.22139801025390626, 0.2215116729736328, 0.2211778564453125, 0.22171034240722656, 0.22124748229980468, 0.22265548706054689, 0.22149836730957032, 0.22151065063476563, 0.22136422729492186, 0.22120550537109376, 0.22162431335449218, 0.22184754943847657, 0.2219530487060547, 0.22183625793457032, 0.22151065063476563, 0.22187826538085936, 0.2214799346923828, 0.22132736206054687, 0.22136524963378906, 0.22137344360351563, 0.22129356384277343, 0.22121778869628905, 0.22129254150390626, 0.2211164093017578, 0.22158746337890625, 0.22140415954589843, 0.22128128051757812, 0.22147071838378907, 0.22140313720703125, 0.22121273803710936, 0.22131808471679687, 0.2216048583984375, 0.22186810302734375, 0.22218540954589844, 0.221623291015625, 0.22134477233886718, 0.22152088928222657, 0.22141746520996095, 0.22138368225097657, 0.22134169006347656, 0.22129458618164063, 0.22138983154296876, 0.22134169006347656, 0.2212956085205078, 0.221159423828125, 0.2240184326171875, 0.22193971252441405, 0.2216089630126953, 0.22141952514648439, 0.22154853820800782, 0.2216837158203125, 0.46434201049804685, 0.22140415954589843, 0.22150758361816406, 0.22161817932128905, 0.2220298309326172, 0.22168780517578124, 0.2222530517578125, 0.22144613647460937, 0.2217840576171875, 0.22155264282226564, 0.22125669860839844, 0.22169197082519532, 0.22176358032226562, 0.22181779479980468, 0.22157926940917969, 0.2215782470703125, 0.2214246368408203, 0.22152294921875, 0.22142771911621092, 0.22154547119140625, 0.22132429504394532, 0.22135606384277343, 0.22152394104003906, 0.22135398864746095, 0.22151577758789062, 0.2220185546875, 0.22214349365234376, 0.22169088745117188, 0.2212464599609375, 0.22148403930664062, 0.221370361328125, 0.22125363159179687, 0.22161305236816406, 0.22148197937011718, 0.22166015625, 0.2215977020263672, 
0.22134783935546876, 0.2212351989746094, 0.2241535949707031, 0.22163558959960938, 0.2216785888671875, 0.22160691833496093, 0.221549560546875, 0.22167141723632813, 0.22142054748535156, 0.2214297637939453, 0.22139903259277344, 0.2217943115234375, 0.22149119567871095, 0.221876220703125, 0.22163250732421874, 0.221765625, 0.2215004119873047, 0.2217400360107422, 0.22149427795410156, 0.2213939208984375, 0.2211758117675781, 0.22143487548828125, 0.22141644287109374, 0.22124339294433593, 0.22147071838378907, 0.22166630554199218, 0.22171136474609374, 0.46487347412109375, 0.22141133117675782, 0.22127206420898438, 0.221338623046875, 0.2219622344970703, 0.22154342651367187, 0.22202265930175782, 0.22145330810546876, 0.22258995056152345, 0.22142874145507813, 0.22155264282226564, 0.22175334167480468, 0.2215741424560547, 0.22162739562988282, 0.22164378356933595, 0.2218803253173828, 0.22163250732421874, 0.22160076904296874, 0.22158848571777343, 0.2215116729736328, 0.2215679931640625, 0.22120550537109376, 0.22134477233886718, 0.22316543579101564, 0.22167039489746093, 0.22168063354492187, 0.22168678283691406, 0.2215116729736328, 0.2215188751220703, 0.22155363464355468, 0.22205235290527345, 0.22159359741210938, 0.22261351013183595, 0.2219059143066406, 0.22219161987304686, 0.22189773559570314, 0.22165504455566407, 0.22153114318847655, 0.2218076171875, 0.22219570922851561, 0.22192445373535155, 0.22225091552734375, 0.22204415893554688, 0.22189260864257812, 0.22191104125976563, 0.2215352325439453, 0.22143385314941405, 0.22180557250976562, 0.22147584533691406, 0.22139698791503906, 0.22178713989257812, 0.22213325500488282, 0.22149221801757812, 0.22222848510742188, 0.22150860595703126, 0.2214686737060547, 0.22136627197265624, 0.22127001953125, 0.22115020751953124, 0.22117478942871094, 0.22150143432617186, 0.22140211486816405, 0.22120550537109376, 0.46591384887695314, 0.22151065063476563, 0.22132838439941407, 0.22122496032714845, 0.22145433044433593, 0.22153318786621093, 0.2213519287109375, 0.22116044616699218, 0.22144717407226563, 0.22135296630859375, 0.2212833251953125, 0.22198477172851563, 0.22165811157226561, 0.22226739501953124, 0.22169804382324218, 0.22206361389160156, 0.22276095581054686, 0.22171136474609374, 0.22175949096679687, 0.22171034240722656, 0.22183833312988283, 0.2217021484375, 0.22223257446289063, 0.221876220703125, 0.22166323852539063, 0.22185369873046876, 0.22207693481445312, 0.22190386962890624, 0.2217379913330078, 0.22185165405273438, 0.22234214782714845, 0.22208717346191406, 0.22299136352539062, 0.22185061645507811, 0.22382899475097656, 0.22139187622070314, 0.22159461975097655, 0.22185369873046876, 0.2214072265625, 0.22193971252441405, 0.22215577697753905, 0.22185267639160156, 0.22164480590820312, 0.22156594848632813, 0.2215188751220703, 0.22147990417480468, 0.22186189270019532, 0.22172262573242188, 0.22174310302734376, 0.22215577697753905, 0.22219366455078124, 0.2219448699951172, 0.2216007385253906, 0.22184959411621094, 0.22170930480957032, 0.22189164733886718, 0.2221639404296875, 0.22170518493652344, 0.22201344299316406, 0.2217830352783203, 0.22169395446777343, 0.22200831604003907, 0.22163148498535157, 0.46692044067382815, 0.2217400360107422, 0.22165196228027345, 0.22197555541992187, 0.22193458557128906, 0.22203904724121093, 0.22154655456542968, 0.22138873291015626, 0.22145330810546876, 0.22146354675292967, 0.22145024108886718, 0.22175640869140625, 0.22180351257324218, 0.22217727661132813, 0.22153727722167968, 0.221896728515625, 0.2214164123535156, 0.2225797119140625, 0.22176051330566407, 
0.22177381896972656, 0.22154649353027345, 0.221233154296875, 0.2214615020751953, 0.22134681701660155, 0.22129356384277343, 0.22127615356445313, 0.22128640747070313, 0.22148095703125, 0.22134169006347656, 0.22124339294433593, 0.22114816284179686, 0.2213191680908203, 0.2215188446044922, 0.22131814575195313, 0.22146969604492187, 0.22117990112304686, 0.22134375, 0.22164889526367187, 0.22181581115722657, 0.222171142578125, 0.22175640869140625, 0.22172979736328124, 0.22134066772460936, 0.22151986694335937, 0.22140313720703125, 0.22141030883789062, 0.22220594787597656, 0.22215887451171876, 0.22227349853515624, 0.22183116149902343, 0.22192640686035156, 0.22173184204101562, 0.2216898498535156, 0.22144717407226563, 0.22147584533691406, 0.22146354675292967, 0.22156903076171874, 0.22176051330566407, 0.22247833251953125, 0.221802490234375, 0.22252543640136718, 0.22219468688964844, 0.22166015625, 0.46707403564453126, 0.22175640869140625, 0.22192536926269532, 0.22187315368652344, 0.22208204650878907, 0.22162535095214844, 0.22142361450195314, 0.22128025817871094, 0.22170008850097656, 0.2215782470703125, 0.22144613647460937, 0.22178201293945313, 0.22167654418945312, 0.22178816223144532, 0.2213816375732422, 0.2216816711425781, 0.22158131408691406, 0.22198066711425782, 0.22218853759765625, 0.221770751953125, 0.22207693481445312, 0.2219069366455078, 0.22215782165527342, 0.2219069366455078, 0.2217830352783203, 0.2216417236328125, 0.2232033233642578, 0.22143283081054688, 0.22127719116210937, 0.22148300170898438, 0.22127104187011717, 0.2214615020751953, 0.22147584533691406, 0.22175538635253905, 0.22177690124511718, 0.2215290832519531, 0.22144717407226563, 0.22135296630859375, 0.22129356384277343, 0.22165196228027345, 0.22160076904296874, 0.22165402221679686, 0.22142771911621092, 0.22134783935546876, 0.22133555603027344, 0.22152088928222657, 0.22161509704589843, 0.2215034942626953, 0.22159257507324218, 0.22131712341308593, 0.22130586242675782, 0.22127410888671875, 0.22141850280761718, 0.22161305236816406, 0.22188954162597657, 0.22163456726074218, 0.2218014678955078, 0.2217902069091797, 0.22162124633789063, 0.22154444885253907, 0.22161305236816406, 0.2218956756591797, 0.22184857177734374]",tokens/s,4.438884720014453,,,,,,main,False,False -4bit-awq-gemm-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,facebook/opt-66b,facebook/opt-66b,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target 
- report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - self.load_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3904, in from_pretrained - dispatch_model(model, **device_map_kwargs) - File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 489, in dispatch_model - model.to(device) - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 2796, in to - return super().to(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1173, in to - return self._apply(convert) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 779, in _apply - module._apply(fn) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 779, in _apply - module._apply(fn) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 779, in _apply - module._apply(fn) - [Previous line repeated 2 more times] - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 853, in _apply - self._buffers[key] = fn(buf) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1159, in convert - return t.to( -torch.cuda.OutOfMemoryError: CUDA out of memory. Tried to allocate 162.00 MiB. 
GPU - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemm-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,0,0,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 304, in hf_raise_for_status - response.raise_for_status() - File ""/usr/local/lib/python3.10/dist-packages/requests/models.py"", line 1024, in raise_for_status - raise HTTPError(http_error_msg, response=self) -requests.exceptions.HTTPError: 404 Client Error: Not Found for url: https://huggingface.co/0/resolve/main/config.json - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1221, in hf_hub_download - return _hf_hub_download_to_cache_dir( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1325, in _hf_hub_download_to_cache_dir - _raise_on_head_call_error(head_call_error, force_download, local_files_only) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1823, in _raise_on_head_call_error - raise head_call_error - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1722, in _get_metadata_or_catch_error - metadata = get_hf_file_metadata(url=url, proxies=proxies, timeout=etag_timeout, headers=headers) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1645, in get_hf_file_metadata - r = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 372, in _request_wrapper - response = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 396, in _request_wrapper - hf_raise_for_status(response) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status - raise 
RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-66949500-07e242b56a1f32ec76792080;e3fc2d0e-9368-41ac-96e0-04d21929e4fe) - -Repository Not Found for url: https://huggingface.co/0/resolve/main/config.json. -Please make sure you specified the correct `repo_id` and `repo_type`. -If you are trying to access a private or gated repo, make sure you are authenticated. - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 425, in cached_file - raise EnvironmentError( -OSError: 0 is not a local folder and is not a valid model identifier listed on 'https://huggingface.co/models' -If this is a private repository, make sure to pass a token having permission to this repo either by logging in with `huggingface-cli login` or by passing `token=` - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemm-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,facebook/xglm-4.5B,facebook/xglm-4.5B,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) 
-ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - self.load_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3704, in from_pretrained - config = cls._autoset_attn_implementation( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1481, in _autoset_attn_implementation - cls._check_and_enable_flash_attn_2( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1572, in _check_and_enable_flash_attn_2 - raise ValueError( -ValueError: XGLMForCausalLM does not support Flash Attention 2.0 yet. Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmp8cm3__3l/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemm-flash_attention_2,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,TencentARC/Mistral_Pro_8B_v0.1,TencentARC/Mistral_Pro_8B_v0.1,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.0,,0.30.1,,,,1.19.2,,,,0.11.1,,,,,,,,,,,,,,,,,,,,,,,,,,,MB,2454.69184,7298.613248,0.0,6652.166144,6323.221504,s,10,7.735312255859376,0.7735312255859375,0.0028429451285762273,0.7729794311523437,0.7762465698242188,0.7783619323730468,0.7800542224121094,"[0.780477294921875, 0.7757764892578125, 0.7711934204101563, 0.770524658203125, 0.7719502563476562, 0.7706873168945313, 0.7725225219726563, 0.7734363403320312, 0.7744036254882812, 0.77434033203125]",tokens/s,330.9497943099635,kWh,9.102796527565035e-06,4.9879214771863185e-06,4.3937086431680494e-05,5.802780443643185e-05,tokens/kWh,4411678.202997362,MB,2454.69184,7298.613248,0.0,6652.166144,6382.564864,s,10,458.5464296875,45.85464296875,0.0073453057278341505,45.852935546875,45.866698828124996,45.8672068359375,45.8676132421875,"[45.8568515625, 45.857828125, 45.85469140625, 45.8511796875, 45.845234375, 45.8491953125, 45.85046484375, 45.84668359375, 45.8665859375, 
45.86771484375]",tokens/s,1.3739066738112995,kWh,0.0005414656486152074,0.00029677022395511814,0.002569976393586324,0.003408212266156649,tokens/kWh,18484.764175514054,,s,629,464.77344433593714,0.7389084965595191,0.09178579233429768,0.727773193359375,0.7285446655273438,0.7288094848632812,1.4987512353515624,"[0.72828515625, 0.7276810302734374, 0.7278029174804688, 0.72793701171875, 0.7272877807617187, 0.727383056640625, 0.7272335205078125, 0.7274444580078125, 0.7278551025390625, 0.727720947265625, 0.7276083374023438, 0.7281663818359375, 0.7275867919921875, 0.72804248046875, 0.72800048828125, 0.7276687622070312, 0.7281151733398438, 0.7279267578125, 0.7276728515625, 0.7276615600585937, 0.7278981323242187, 0.7287255249023438, 0.7286405029296875, 0.7275847778320312, 0.7281510620117188, 0.7281069946289063, 0.7279093627929687, 0.7283015747070313, 0.7283804321289062, 0.7276973876953124, 0.7277127685546875, 0.7276400756835938, 0.7282882690429687, 0.7277936401367188, 0.7284705200195313, 0.727784423828125, 0.727952392578125, 0.727930908203125, 0.728369140625, 0.728827880859375, 0.7278551025390625, 0.72924365234375, 0.7275161743164062, 0.7274977416992188, 0.7274014892578125, 0.727947265625, 0.7273707275390625, 0.7273738403320312, 0.7270891723632813, 0.72770458984375, 0.727604248046875, 0.7274721069335938, 0.728226806640625, 0.7281571655273438, 0.7279820556640625, 0.7282749633789063, 0.7275151977539063, 0.7279564208984375, 0.7273912353515625, 0.7277537231445312, 0.7282349853515625, 0.7278714599609375, 1.503824951171875, 0.72740966796875, 0.7277650146484375, 0.7272868041992188, 0.7274219360351563, 0.7278981323242187, 0.7287039794921875, 0.7276185302734375, 0.7274321899414062, 0.7271383056640625, 0.72722021484375, 0.7271024780273437, 0.7274373168945313, 0.7272499389648438, 0.7280332641601562, 0.7276113891601562, 0.7276943359375, 0.7272652587890625, 0.7283251342773438, 0.7285104370117188, 0.7281961059570312, 0.7280137939453125, 0.7283834838867187, 0.7276124267578125, 0.7279022216796875, 0.7284910278320312, 0.7285155639648437, 0.728142822265625, 0.7280394287109375, 0.7274547119140625, 0.7279277954101563, 0.7275509643554687, 0.7277987670898437, 0.7278591918945313, 0.728226806640625, 0.7301795654296875, 0.726887451171875, 0.727372802734375, 0.7274393310546875, 0.7276984252929688, 0.72789404296875, 0.7279503173828125, 0.7277659912109375, 0.7282677612304688, 0.7285678100585937, 0.7279226684570312, 0.7286671142578125, 0.7283712158203125, 0.7282360229492187, 0.727741455078125, 0.7277588500976563, 0.7276482543945313, 0.727857177734375, 0.7283681030273438, 0.7278796997070313, 0.7283937377929688, 0.727920654296875, 0.7278295288085938, 0.728015869140625, 0.7276452026367187, 0.7281694946289062, 0.7286343383789062, 0.7274926147460937, 1.4987724609375, 0.7273533325195313, 0.727457763671875, 0.7278960571289063, 0.7272560424804687, 0.7277005004882813, 0.7277772827148438, 0.7281305541992188, 0.7275653076171875, 0.727499755859375, 0.7276851196289063, 0.7280271606445312, 0.7270768432617187, 0.7270553588867188, 0.7272847290039063, 0.7274874877929688, 0.7273748779296875, 0.727520263671875, 0.727920654296875, 0.7277373657226562, 0.7273338623046876, 0.728322021484375, 0.7278253784179688, 0.7282452392578125, 0.72855859375, 0.7283507080078125, 0.7274547119140625, 0.727931884765625, 0.7276277465820312, 0.7273011474609375, 0.7271905517578126, 0.727736328125, 0.7286302490234375, 0.728036376953125, 0.727710693359375, 0.7286405029296875, 0.728036376953125, 0.7283230590820312, 0.72854833984375, 0.7284182739257813, 0.7284367065429688, 
0.729618408203125, 0.7277322387695313, 0.7279042358398438, 0.7280332641601562, 0.7283035888671875, 0.7280281372070313, 0.7276656494140625, 0.7277639770507812, 0.7281858520507812, 0.7287337036132813, 0.7282145385742187, 0.7282606201171875, 0.7274495849609375, 0.727203857421875, 0.7272243041992188, 0.727235595703125, 0.7272662963867188, 0.7274495849609375, 0.727531494140625, 0.7289282836914063, 0.7277404174804688, 0.7281520385742187, 1.498346435546875, 0.7277035522460937, 0.7284224243164062, 0.7275243530273438, 0.727741455078125, 0.727731201171875, 0.7279708251953125, 0.7275581665039063, 0.7280773315429687, 0.7281356811523437, 0.7283568725585937, 0.7276912841796875, 0.7285411987304687, 0.7277701416015625, 0.7284715576171875, 0.72749462890625, 0.7280516967773437, 0.7273963623046875, 0.7273779296875, 0.7273543701171875, 0.7274270629882813, 0.7274116821289063, 0.727689208984375, 0.7282974853515625, 0.7277066040039063, 0.7288258666992188, 0.72807421875, 0.727720947265625, 0.728501220703125, 0.7287817993164063, 0.728057861328125, 0.7277598876953125, 0.72751513671875, 0.7276728515625, 0.7286773681640625, 0.7274321899414062, 0.7279042358398438, 0.727920654296875, 0.727573486328125, 0.7276513061523437, 0.7273072509765625, 0.7275028686523437, 0.7276226806640625, 0.727731201171875, 0.7275745239257813, 0.727709716796875, 0.7274833984375, 0.7272919311523437, 0.7273953247070313, 0.7276328735351563, 0.7272529907226563, 0.7279380493164063, 0.7277659912109375, 0.7278837890625, 0.7274598388671875, 0.7278622436523438, 0.727636962890625, 0.7282718505859375, 0.7279830932617187, 0.7275888671875, 0.72747314453125, 0.7274137573242188, 0.7276810302734374, 1.500190673828125, 0.7274137573242188, 0.727794677734375, 0.72776806640625, 0.7275325317382813, 0.728131591796875, 0.7274035034179688, 0.7273656616210937, 0.727741455078125, 0.7269683227539062, 0.7274475708007813, 0.727498779296875, 0.7274158325195312, 0.7276226806640625, 0.7277803344726562, 0.7272796020507812, 0.7277967529296875, 0.7278120727539062, 0.7281182861328125, 0.7282227172851562, 0.729248779296875, 0.7279073486328125, 0.727394287109375, 0.7274772338867187, 0.7273768920898438, 0.7279462280273438, 0.7280506591796875, 0.7277485961914063, 0.7276800537109375, 0.7273492431640625, 0.7277352905273438, 0.7280148315429688, 0.7277557983398437, 0.7279503173828125, 0.72796875, 0.727677978515625, 0.7273717651367188, 0.7274014892578125, 0.7277393798828125, 0.7273564453125, 0.7273922729492187, 0.7276431274414062, 0.7273267211914063, 0.727183349609375, 0.7276964111328125, 0.7270574340820313, 0.7279892578125, 0.7274506225585937, 0.7283281860351563, 0.7276236572265625, 0.7274864501953126, 0.7277271118164063, 0.7274741821289062, 0.7276431274414062, 0.727984130859375, 0.7277352905273438, 0.727568359375, 0.7290419311523437, 0.7279063110351562, 0.7274669799804687, 0.7279380493164063, 0.7273656616210937, 0.727736328125, 1.4986966552734375, 0.7277783203125, 0.7279697875976563, 0.7273380126953125, 0.7279759521484375, 0.7274772338867187, 0.7276113891601562, 0.72751513671875, 0.727520263671875, 0.7276339111328125, 0.7279185791015625, 0.7278212890625, 0.7281500244140625, 0.7277086791992188, 0.7276032104492187, 0.7277926635742188, 0.7276728515625, 0.7275827026367188, 0.7275140991210938, 0.7280179443359375, 0.7281879272460937, 0.7278212890625, 0.727699462890625, 0.72736767578125, 0.7276728515625, 0.7279083251953125, 0.7273850708007813, 0.7275069580078125, 0.7274024658203125, 0.7273922729492187, 0.7274700927734375, 0.7275847778320312, 0.7282288818359375, 0.7278090209960938, 
0.7279667358398437, 0.7283240966796874, 0.7274004516601562, 0.7275899047851563, 0.729038818359375, 0.728173583984375, 0.727530517578125, 0.7272581176757813, 0.727572509765625, 0.7277127685546875, 0.72736767578125, 0.7276503295898438, 0.7283455810546875, 0.72766259765625, 0.7285360717773437, 0.7277281494140625, 0.72745166015625, 0.7276943359375, 0.7278653564453125, 0.7278253784179688, 0.7273615112304688, 0.7273717651367188, 0.7274547119140625, 0.7275899047851563, 0.7274557495117188, 0.7279779663085938, 0.7282565307617187, 0.728158203125, 0.7288606567382813, 1.500291015625, 0.7285186767578125, 0.7282554931640625, 0.727962646484375, 0.72707275390625, 0.7270717163085938, 0.7278837890625, 0.7273738403320312, 0.7277557983398437, 0.7276339111328125, 0.728394775390625, 0.7282606201171875, 0.728369140625, 0.727878662109375, 0.7280169067382812, 0.727794677734375, 0.7282175903320313, 0.7275038452148438, 0.7273502807617187, 0.7271946411132812, 0.7275499267578125, 0.7277998046875, 0.7273502807617187, 0.7283394775390625, 0.7281551513671874, 0.7276728515625, 0.729017333984375, 0.7274383544921875, 0.7284019165039063, 0.7275847778320312, 0.727709716796875, 0.7274024658203125, 0.7274024658203125, 0.7272263793945313, 0.7275448608398437, 0.727203857421875, 0.7272734985351562, 0.7281172485351562, 0.72766259765625, 0.7277854614257813, 0.7278192749023438, 0.7277168579101563, 0.7283128051757812, 0.7274014892578125, 0.7279892578125, 0.7273103637695313, 0.7277905883789062, 0.72785302734375, 0.7278776245117188, 0.727984130859375, 0.7275591430664062, 0.7280732421875, 0.728015869140625, 0.7281817626953125, 0.7277322387695313, 0.72749365234375, 0.7281449584960937, 0.7275447387695313, 0.7287470092773437, 0.727857177734375, 0.727446533203125, 0.7272703857421875, 0.7274813232421875, 1.5015628662109375, 0.7280670776367187, 0.7274475708007813, 0.7279002075195312, 0.7276564331054688, 0.727930908203125, 0.7274649658203125, 0.7278028564453125, 0.7277875366210937, 0.7275980834960938, 0.7279329223632812, 0.7276553955078126, 0.7276932983398438, 0.7277240600585938, 0.7281275024414062, 0.7278305053710937, 0.7283988647460937, 0.727973876953125, 0.7275120849609376, 0.7283138427734375, 0.7280885620117188, 0.728431640625, 0.7279882202148438, 0.7285555419921875, 0.727773193359375, 0.7274137573242188, 0.727572509765625, 0.7277168579101563, 0.7279124755859375, 0.7280169067382812, 0.7274721069335938, 0.7271577758789063, 0.7273502807617187, 0.7277578125, 0.72791552734375, 0.7280302124023438, 0.7272263793945313, 0.727066650390625, 0.7275479125976563, 0.7274383544921875, 0.727099365234375, 0.7272929077148438, 0.7277578125, 0.728215576171875, 0.727783447265625, 0.7276656494140625, 0.72743115234375, 0.7274690551757812, 0.7273277587890625, 0.7281571655273438, 0.7275397338867188, 0.727414794921875, 0.7275867919921875, 0.7271116943359375, 0.727257080078125, 0.727667724609375, 0.7276851196289063, 0.7272703857421875, 0.7279493408203125, 0.7277250366210938, 0.7286610107421875, 0.7273246459960937, 0.7279380493164063, 1.50266162109375, 0.72736767578125, 0.7276441650390625, 0.727141357421875, 0.7274188842773438, 0.7278018798828125, 0.727625732421875, 0.727541748046875, 0.7280783081054687, 0.7277342529296875, 0.72876953125, 0.728369140625, 0.7288411865234375, 0.7279749145507812, 0.7283046264648437, 0.72797900390625, 0.7282718505859375, 0.7281930541992188, 0.728658935546875, 0.7289108276367188, 0.7287183227539062, 0.7283169555664063, 0.7286814575195313, 0.7278428344726563, 0.7285463256835938, 0.72789404296875, 0.72875830078125, 0.72804248046875, 
0.7291146240234375, 0.7289763793945313, 0.7286661376953125, 0.728784912109375, 0.728363037109375, 0.7279595336914062, 0.7274700927734375, 0.7275233154296875, 0.7272816772460937, 0.7279974365234375, 0.7284090576171875, 0.72762060546875, 0.728300537109375, 0.727457763671875, 0.7274711303710938, 0.727183349609375, 0.7278960571289063, 0.7272171630859375, 0.7289682006835938, 0.7285933837890625, 0.7276564331054688, 0.7285442504882812, 0.7269273681640624, 0.72795751953125, 0.7275642700195313, 0.7277936401367188, 0.7290480346679687, 0.72764208984375, 0.727804931640625, 0.7280322265625, 0.7278858032226563, 0.727383056640625, 0.7281940307617187, 0.7273421020507812, 0.72789404296875, 1.502066650390625, 0.7280179443359375, 0.7290101928710937, 0.7292303466796874, 0.7280660400390625, 0.728395751953125, 0.72835791015625, 0.72821142578125, 0.7285330200195312, 0.7277250366210938, 0.728616943359375, 0.7284172973632812, 0.7285718994140625, 0.728131591796875, 0.7278837890625, 0.7286558837890625, 0.7279124755859375, 0.7291678466796875, 0.728326171875, 0.7273584594726562, 0.7271588134765625, 0.7275980834960938, 0.7273738403320312, 0.7279912719726562, 0.72772607421875, 0.7297515258789062, 0.7277824096679687, 0.7272632446289062, 0.7278909301757812, 0.7273318481445312, 0.7274424438476562, 0.7272509155273438, 0.7275796508789063, 0.7279483032226562, 0.7278059692382812, 0.7278919677734375, 0.7276881713867187, 0.7284940795898438, 0.7285985107421875, 0.7283128051757812, 0.7282175903320313, 0.7282974853515625, 0.728426513671875, 0.7279165649414062, 0.7284940795898438, 0.7287070922851563, 0.7288955078125, 0.7288780517578125, 0.7277035522460937, 0.72800048828125, 0.72765234375, 0.728056884765625, 0.7274495849609375, 0.7280240478515625, 0.727583740234375, 0.727520263671875, 0.7282698364257812, 0.7273543701171875, 0.7287091064453125, 0.7278960571289063, 0.7277035522460937, 0.7276973876953124, 0.727141357421875]",tokens/s,1.3533475452727455,,,,,,,, -4bit-awq-gemm-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,Salesforce/codegen-6B-nl,Salesforce/codegen-6B-nl,cuda,0,42,,,True,,,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run - report = scenario.run(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run - self.run_model_loading_tracking(backend) - File 
""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking - backend.load() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load - self.load_transformers_model() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model - self.load_transformers_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights - self.load_transformers_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained - self.pretrained_model = self.automodel_loader.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3826, in from_pretrained - config = cls._autoset_attn_implementation( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1556, in _autoset_attn_implementation - cls._check_and_enable_flash_attn_2( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1647, in _check_and_enable_flash_attn_2 - raise ValueError( -ValueError: CodeGenForCausalLM does not support Flash Attention 2.0 yet. Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmp6imzu_68/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new - -",codegen,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemm-flash_attention_2,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,facebook/opt-125m,facebook/opt-125m,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.0,,0.30.1,,,,1.19.2,,,,0.11.1,,,,,,,,,,,,,,,,,,,,,,,,,,,MB,1207.41888,879.230976,0.0,232.783872,169.719808,s,10,0.3329092750549316,0.03329092750549316,0.001090057705435725,0.0329870719909668,0.033882015228271486,0.0350985279083252,0.036071738052368164,"[0.03631504058837891, 0.03281526565551758, 0.03245974349975586, 0.03361167907714844, 0.03344601440429688, 0.033440895080566406, 0.032671966552734376, 0.033158878326416015, 0.03245904159545898, 0.0325307502746582]",tokens/s,7689.782748100329,kWh,3.9167167718924527e-07,2.1461758953218489e-07,8.304311279209363e-07,1.4367203946423665e-06,tokens/kWh,178183591.57052574,MB,1207.41888,879.230976,0.0,232.783872,199.792128,s,10,20.506330566406252,2.050633056640625,0.01795433438952847,2.0542513427734375,2.07115517578125,2.0720841796875,2.0728273828125,"[2.05074072265625, 2.0363714599609377, 2.0684541015625, 2.054240966796875, 2.07094873046875, 2.07301318359375, 2.05426171875, 2.0559150390625, 2.0203577880859376, 
2.02202685546875]",tokens/s,30.72222004613905,kWh,2.4533342500768937e-05,1.3444361220557862e-05,5.009960961606961e-05,8.80773133373964e-05,tokens/kWh,715280.6734540922,,s,629,20.768719875335666,0.03301863255220301,0.003999927492502233,0.03273830413818359,0.033139096832275394,0.033337548065185546,0.06495629302978514,"[0.033740798950195314, 0.034253822326660154, 0.0341104621887207, 0.03412275314331055, 0.0335206413269043, 0.03323699188232422, 0.03351859283447266, 0.03322982406616211, 0.033159168243408206, 0.03280998229980469, 0.03333222579956055, 0.033258495330810545, 0.032756736755371094, 0.03297075271606445, 0.03364352035522461, 0.03362918472290039, 0.033584129333496096, 0.03300249481201172, 0.03202867126464844, 0.031971328735351565, 0.031821823120117186, 0.031848447799682614, 0.03192934417724609, 0.03202560043334961, 0.03206041717529297, 0.032089088439941404, 0.03197644805908203, 0.03203379058837891, 0.03201638412475586, 0.031941631317138675, 0.03214438247680664, 0.03217407989501953, 0.03201638412475586, 0.031987712860107424, 0.03198054313659668, 0.03194572830200195, 0.03189145660400391, 0.03199590492248535, 0.03189043235778809, 0.03177676773071289, 0.032366592407226565, 0.03329228973388672, 0.0331960334777832, 0.0321341438293457, 0.032966655731201173, 0.03283865737915039, 0.03283456039428711, 0.032866302490234374, 0.032574462890625, 0.03194470405578613, 0.032132095336914065, 0.032039936065673826, 0.03172454452514648, 0.03170099258422852, 0.03186278343200684, 0.03212492752075195, 0.031920127868652344, 0.032054271697998044, 0.03213926315307617, 0.03209830474853516, 0.03199795150756836, 0.03198873519897461, 0.06604799652099609, 0.031955968856811526, 0.03163852882385254, 0.03178700828552246, 0.03185971260070801, 0.03197337532043457, 0.03201945495605469, 0.031987712860107424, 0.032059391021728514, 0.03199795150756836, 0.03196211242675781, 0.03191500854492187, 0.03182694435119629, 0.031764480590820314, 0.031714303970336914, 0.0317573127746582, 0.031927295684814457, 0.03189145660400391, 0.03197235107421875, 0.03200921630859375, 0.03191500854492187, 0.03215462493896484, 0.03199795150756836, 0.03201126480102539, 0.031936511993408204, 0.03203071975708008, 0.03208294296264649, 0.031936511993408204, 0.032161792755126956, 0.03201331329345703, 0.031562751770019534, 0.03183923149108887, 0.0325928955078125, 0.03195187187194824, 0.03276287841796875, 0.03203276824951172, 0.03358924865722656, 0.032881664276123046, 0.033119232177734374, 0.03294617462158203, 0.03283251190185547, 0.03209011077880859, 0.032072704315185545, 0.03290934371948242, 0.03280073547363281, 0.032763904571533206, 0.03282329559326172, 0.03290419387817383, 0.032740352630615234, 0.032478206634521486, 0.03298611068725586, 0.03275059127807617, 0.033102848052978515, 0.03289395141601562, 0.03265331268310547, 0.032115745544433597, 0.03197641563415527, 0.03259494400024414, 0.03297894287109375, 0.03300044631958008, 0.03283865737915039, 0.03288780975341797, 0.03242803192138672, 0.06496460723876953, 0.03309568023681641, 0.03194470405578613, 0.03273830413818359, 0.03288678359985352, 0.03281919860839844, 0.03297689437866211, 0.032912384033203124, 0.032787487030029296, 0.033037311553955076, 0.03294307327270508, 0.032863231658935545, 0.03278847885131836, 0.03306496047973633, 0.032745471954345705, 0.03307212829589844, 0.03291852951049805, 0.03285094451904297, 0.03298406219482422, 0.03316223907470703, 0.03301683044433594, 0.0328611831665039, 0.03299532699584961, 0.03296051025390625, 0.03287449645996094, 0.0329615364074707, 0.03278745651245117, 
0.032949249267578126, 0.033023998260498046, 0.03287449645996094, 0.032909313201904294, 0.03280179214477539, 0.0328089599609375, 0.03198464012145996, 0.032054271697998044, 0.03240140914916992, 0.03320729446411133, 0.033165313720703124, 0.03290726470947265, 0.032584705352783204, 0.032707584381103515, 0.032249855041503905, 0.03290521621704102, 0.0328458251953125, 0.032791553497314455, 0.03254579162597656, 0.03258879852294922, 0.03239424133300781, 0.03282227325439453, 0.032894977569580076, 0.03285811233520508, 0.03289708709716797, 0.032930816650390625, 0.03297683334350586, 0.033140735626220705, 0.032868350982666016, 0.03292364883422851, 0.032985088348388675, 0.033018878936767575, 0.0329431037902832, 0.032927745819091796, 0.032976993560791014, 0.03305363082885742, 0.06707913970947266, 0.03323494338989258, 0.03279257583618164, 0.033137664794921876, 0.0331776008605957, 0.03303014373779297, 0.033274879455566404, 0.0330967025756836, 0.033137664794921876, 0.03282534408569336, 0.03309056091308594, 0.033081344604492184, 0.03303424072265625, 0.033309696197509765, 0.03319500732421875, 0.03301171112060547, 0.033345535278320314, 0.03285094451904297, 0.03292876815795898, 0.03282124710083008, 0.03200307083129883, 0.0319866886138916, 0.032074752807617186, 0.031974399566650394, 0.03197644805908203, 0.03283967971801758, 0.03339263916015625, 0.033058815002441407, 0.032753662109375, 0.03252326583862305, 0.03210553741455078, 0.031921152114868165, 0.03210540771484375, 0.0323768310546875, 0.03219968032836914, 0.0321976318359375, 0.03212799835205078, 0.03213516616821289, 0.03212799835205078, 0.03211775970458984, 0.032985088348388675, 0.034108417510986325, 0.033040382385253905, 0.03294617462158203, 0.03294105529785156, 0.032906238555908206, 0.032069633483886716, 0.031744064331054686, 0.03175724792480469, 0.031899648666381834, 0.03206655883789063, 0.03191296005249023, 0.03192934417724609, 0.032126976013183595, 0.03269740676879883, 0.03293075180053711, 0.03260313415527344, 0.03196211242675781, 0.031904767990112305, 0.03212287902832031, 0.03203788757324219, 0.03282944107055664, 0.03291033554077148, 0.06675456237792969, 0.033050624847412106, 0.0328007698059082, 0.03285299301147461, 0.0328089599609375, 0.03288883209228516, 0.033067008972167966, 0.03282022476196289, 0.03274649429321289, 0.03173785591125488, 0.032020481109619144, 0.03192428779602051, 0.03253753662109375, 0.03285606384277344, 0.033326080322265625, 0.03281919860839844, 0.033073150634765625, 0.03273830413818359, 0.03295129776000977, 0.03292364883422851, 0.03303628921508789, 0.03282841491699219, 0.032919551849365236, 0.03291961669921875, 0.032923583984375, 0.032797695159912106, 0.03295948791503906, 0.03298303985595703, 0.032982078552246094, 0.03352467346191406, 0.032589824676513675, 0.033331199645996096, 0.03296255874633789, 0.032672767639160154, 0.032740352630615234, 0.03300864028930664, 0.03226521682739258, 0.03242803192138672, 0.03284275054931641, 0.03410636901855469, 0.03410943984985351, 0.032530433654785154, 0.032048126220703126, 0.032909313201904294, 0.03273625564575195, 0.03282022476196289, 0.033040382385253905, 0.03312844848632813, 0.033032222747802736, 0.03294512176513672, 0.032824321746826174, 0.032772159576416014, 0.032817089080810546, 0.03304959869384766, 0.032717823028564456, 0.0328007698059082, 0.03302195358276367, 0.03298713684082031, 0.03286937713623047, 0.032884735107421875, 0.032917503356933595, 0.03312947082519531, 0.0328243522644043, 0.06682825469970703, 0.032939006805419925, 0.03280998229980469, 0.0324136962890625, 0.032930816650390625, 
0.03313868713378906, 0.03297382354736328, 0.032863231658935545, 0.03299327850341797, 0.03307212829589844, 0.03317145538330078, 0.033142784118652346, 0.032942081451416014, 0.033363967895507815, 0.03327897644042969, 0.03276595306396484, 0.03296255874633789, 0.033056766510009765, 0.03283967971801758, 0.03291648101806641, 0.03299225616455078, 0.032946239471435546, 0.032924606323242185, 0.03313868713378906, 0.03290521621704102, 0.03295641708374023, 0.03341516876220703, 0.0333199348449707, 0.03459379196166992, 0.03333631896972656, 0.032895999908447264, 0.03294412612915039, 0.03291648101806641, 0.03218227386474609, 0.032626686096191404, 0.0323061752319336, 0.03225804901123047, 0.03283148956298828, 0.033329151153564454, 0.03292879867553711, 0.03304956817626953, 0.03294822311401367, 0.03261030578613281, 0.03294105529785156, 0.032903167724609376, 0.03311206436157227, 0.03312025451660156, 0.03289190292358399, 0.03233280181884766, 0.032524288177490236, 0.03305779266357422, 0.03231129455566406, 0.0324505615234375, 0.03278950500488281, 0.03302707290649414, 0.03288678359985352, 0.032996353149414064, 0.03286937713623047, 0.03303014373779297, 0.03281203079223633, 0.03252633666992188, 0.032178176879882815, 0.03199084854125977, 0.065176513671875, 0.03205017471313477, 0.03209011077880859, 0.03212287902832031, 0.03207987213134766, 0.03216588973999023, 0.03170816040039062, 0.03165286445617676, 0.03220684814453125, 0.03201228713989258, 0.03200614547729492, 0.03260927963256836, 0.03219046401977539, 0.03249868774414062, 0.03227340698242188, 0.03234201431274414, 0.03262464141845703, 0.03326873779296875, 0.03293286514282227, 0.032851966857910156, 0.032830463409423825, 0.03336908721923828, 0.03311824035644531, 0.03295331192016602, 0.032176128387451174, 0.03207680130004883, 0.032168991088867185, 0.032793567657470706, 0.03295641708374023, 0.032803871154785155, 0.03245257568359375, 0.03248025512695313, 0.03281612777709961, 0.03295129776000977, 0.033175552368164066, 0.03283359909057617, 0.03288671875, 0.03277721786499024, 0.033051647186279294, 0.033258495330810545, 0.03284275054931641, 0.03292364883422851, 0.03303014373779297, 0.03285504150390625, 0.03174399948120117, 0.03240857696533203, 0.033037311553955076, 0.03285299301147461, 0.032894977569580076, 0.032917503356933595, 0.0321341438293457, 0.03308031845092774, 0.032146430969238284, 0.03266559982299805, 0.03310899353027344, 0.032740352630615234, 0.03243212890625, 0.03290828704833984, 0.032982017517089846, 0.032979969024658204, 0.032942081451416014, 0.032345088958740234, 0.03220172882080078, 0.06660710144042968, 0.032917537689208985, 0.032905185699462894, 0.032873470306396486, 0.033800193786621094, 0.0329615364074707, 0.03261542510986328, 0.03257753753662109, 0.0329881591796875, 0.03291545486450195, 0.032912384033203124, 0.032797695159912106, 0.03293183898925781, 0.033181697845458984, 0.03299020767211914, 0.03279564666748047, 0.03290726470947265, 0.03288883209228516, 0.03297177505493164, 0.03243929672241211, 0.032846847534179685, 0.03225600051879883, 0.03281510543823242, 0.033083393096923826, 0.0330332145690918, 0.032736320495605466, 0.032817089080810546, 0.03186278343200684, 0.03185766410827637, 0.03318483352661133, 0.03217606353759766, 0.03176038360595703, 0.032323585510253904, 0.03279052734375, 0.0322949104309082, 0.032471038818359374, 0.03288780975341797, 0.03274140930175781, 0.03304035186767578, 0.03282841491699219, 0.031971328735351565, 0.03230003356933594, 0.03287551879882813, 0.032753662109375, 0.03289907073974609, 0.03249356842041016, 0.03194367980957031, 
0.0321710090637207, 0.03300352096557617, 0.03274649429321289, 0.0328540153503418, 0.03268710327148437, 0.03202764892578125, 0.032950271606445314, 0.03294515228271484, 0.03292671966552734, 0.03265740966796875, 0.03194675254821777, 0.03184127998352051, 0.03206860733032227, 0.03196108818054199, 0.03163750457763672, 0.031665151596069335, 0.0648058853149414, 0.03185663986206055, 0.031904767990112305, 0.031893503189086916, 0.032007167816162106, 0.0315729923248291, 0.031927295684814457, 0.03198259162902832, 0.0319682559967041, 0.03189967918395996, 0.03205014419555664, 0.031955968856811526, 0.03200614547729492, 0.03189657592773437, 0.03188121604919433, 0.03202150344848633, 0.0321638412475586, 0.031991840362548825, 0.03199894332885742, 0.03193343925476074, 0.03197542381286621, 0.0318525447845459, 0.03199078369140625, 0.03202252960205078, 0.03216281509399414, 0.03228876876831055, 0.032105472564697264, 0.0329697265625, 0.03331584167480469, 0.031987712860107424, 0.03201638412475586, 0.03187820816040039, 0.03171219253540039, 0.032074752807617186, 0.03211372756958008, 0.03188115119934082, 0.031936511993408204, 0.032328704833984374, 0.032024574279785153, 0.03211980819702148, 0.031898624420166014, 0.03198566436767578, 0.03184332847595215, 0.032132095336914065, 0.031905792236328126, 0.031921152114868165, 0.03194777679443359, 0.03189145660400391, 0.03162112045288086, 0.03145011138916016, 0.03202867126464844, 0.03241164779663086, 0.03273932647705078, 0.03275980758666992, 0.0327086067199707, 0.032747520446777346, 0.032508926391601564, 0.031954944610595705, 0.03173785591125488, 0.03213926315307617, 0.03191910362243652, 0.032194561004638675, 0.03189452743530274, 0.0649349136352539, 0.032023551940917966, 0.03198566436767578, 0.03202560043334961, 0.03201126480102539, 0.03183616065979004, 0.031854591369628905, 0.0317573127746582, 0.031936511993408204, 0.031903743743896484, 0.03194675254821777, 0.03200511932373047, 0.031974399566650394, 0.03211161422729492, 0.031942655563354495, 0.0321638412475586, 0.03193446350097656, 0.03189145660400391, 0.03212799835205078, 0.032, 0.03198361587524414, 0.031937536239624024, 0.032105472564697264, 0.03191193580627441, 0.031916032791137694, 0.03271680068969727, 0.0333383674621582, 0.03406950378417969, 0.03317657470703125, 0.03253247833251953, 0.03198975944519043, 0.03277926254272461, 0.03240140914916992, 0.03290521621704102, 0.03309465789794922, 0.032075775146484374, 0.03243110275268555, 0.03205017471313477, 0.03197747230529785, 0.03197235107421875, 0.031908863067626955, 0.03199283218383789, 0.03201126480102539, 0.03172659111022949, 0.03145113563537598, 0.031459327697753905, 0.031643648147583005, 0.03180441665649414, 0.032189441680908204, 0.031916032791137694, 0.031955968856811526, 0.031971328735351565, 0.03196723175048828, 0.03189657592773437, 0.031959039688110355, 0.03171327972412109, 0.031732736587524416, 0.03177369689941406, 0.032056320190429685, 0.03193343925476074, 0.03195084762573242, 0.03191910362243652, 0.03195084762573242]",tokens/s,30.28593017651422,,,,,,,, -4bit-awq-gemm-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,Qwen/Qwen1.5-110B,Qwen/Qwen1.5-110B,cuda,0,42,,,True,,,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 
7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run - report = scenario.run(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run - self.run_model_loading_tracking(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking - backend.load() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load - self.load_transformers_model() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model - self.load_transformers_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights - self.load_transformers_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained - self.pretrained_model = self.automodel_loader.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4034, in from_pretrained - dispatch_model(model, **device_map_kwargs) - File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 494, in dispatch_model - model.to(device) - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 2905, in to - return super().to(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1174, in to - return self._apply(convert) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply - module._apply(fn) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply - module._apply(fn) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply - module._apply(fn) - [Previous line repeated 2 more times] - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 854, in _apply - self._buffers[key] = fn(buf) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1160, in convert - return t.to( -torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 32.00 MiB. GPU 0 has a total capacity of 22.18 GiB of which 8.50 MiB is free. Process 89903 has 22.17 GiB memory in use. Of the allocated memory 21.91 GiB is allocated by PyTorch, and 17.87 MiB is reserved by PyTorch but unallocated. 
If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) - -",qwen2,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemm-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,B,B,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 304, in hf_raise_for_status - response.raise_for_status() - File ""/usr/local/lib/python3.10/dist-packages/requests/models.py"", line 1024, in raise_for_status - raise HTTPError(http_error_msg, response=self) -requests.exceptions.HTTPError: 404 Client Error: Not Found for url: https://huggingface.co/B/resolve/main/config.json - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1221, in hf_hub_download - return _hf_hub_download_to_cache_dir( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1325, in _hf_hub_download_to_cache_dir - _raise_on_head_call_error(head_call_error, force_download, local_files_only) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1823, in _raise_on_head_call_error - raise head_call_error - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1722, in _get_metadata_or_catch_error - metadata = get_hf_file_metadata(url=url, proxies=proxies, timeout=etag_timeout, headers=headers) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1645, in get_hf_file_metadata - r = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 372, in _request_wrapper - response = _request_wrapper( - File 
""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 396, in _request_wrapper - hf_raise_for_status(response) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status - raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-669493f0-68e8bdad2c7e64af22b921fd;f188bfd6-b853-4b0d-a4e7-5240103fdc64) - -Repository Not Found for url: https://huggingface.co/B/resolve/main/config.json. -Please make sure you specified the correct `repo_id` and `repo_type`. -If you are trying to access a private or gated repo, make sure you are authenticated. - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 425, in cached_file - raise EnvironmentError( -OSError: B is not a local folder and is not a valid model identifier listed on 'https://huggingface.co/models' -If this is a private repository, make sure to pass a token having permission to this repo either by logging in with `huggingface-cli login` or by passing `token=` - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemm-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,t,t,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File 
""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 304, in hf_raise_for_status - response.raise_for_status() - File ""/usr/local/lib/python3.10/dist-packages/requests/models.py"", line 1024, in raise_for_status - raise HTTPError(http_error_msg, response=self) -requests.exceptions.HTTPError: 404 Client Error: Not Found for url: https://huggingface.co/t/resolve/main/config.json - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1221, in hf_hub_download - return _hf_hub_download_to_cache_dir( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1325, in _hf_hub_download_to_cache_dir - _raise_on_head_call_error(head_call_error, force_download, local_files_only) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1823, in _raise_on_head_call_error - raise head_call_error - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1722, in _get_metadata_or_catch_error - metadata = get_hf_file_metadata(url=url, proxies=proxies, timeout=etag_timeout, headers=headers) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1645, in get_hf_file_metadata - r = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 372, in _request_wrapper - response = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 396, in _request_wrapper - hf_raise_for_status(response) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status - raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-669490db-788bfdd346c246970b272408;6726043a-18bf-4853-8e91-9bfb259d80f9) - -Repository Not Found for url: https://huggingface.co/t/resolve/main/config.json. -Please make sure you specified the correct `repo_id` and `repo_type`. -If you are trying to access a private or gated repo, make sure you are authenticated. 
- -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 425, in cached_file - raise EnvironmentError( -OSError: t is not a local folder and is not a valid model identifier listed on 'https://huggingface.co/models' -If this is a private repository, make sure to pass a token having permission to this repo either by logging in with `huggingface-cli login` or by passing `token=` - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemm-flash_attention_2,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,stabilityai/stablelm-base-alpha-3b,stabilityai/stablelm-base-alpha-3b,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.1,,0.30.1,,,,1.19.2,,,,0.11.1,,,,,,,,,,,,,,,,,,,,,,,,,,,MB,1386.098688,4467.458048,0.0,3821.010944,3588.539392,s,10,2.8072667541503904,0.28072667541503904,0.0013891453980565372,0.2802557830810547,0.2808915802001953,0.2828903091430664,0.2844892922973633,"[0.2848890380859375, 0.28023013305664063, 0.2801990051269531, 0.28025711059570313, 0.2802708740234375, 0.2801849365234375, 0.2804474182128906, 0.280294189453125, 0.28023959350585936, 0.28025445556640627]",tokens/s,911.9190387643711,kWh,3.3119122141305312e-06,1.8147647802379651e-06,1.6291780008722804e-05,2.1418457003091298e-05,tokens/kWh,11952308.234110976,MB,1386.098688,4467.458048,0.0,3821.010944,3698.945536,s,10,163.89264257812502,16.389264257812503,0.003489083519208589,16.3881943359375,16.392421484375,16.3953728515625,16.3977339453125,"[16.39832421875, 16.389771484375, 16.3869296875, 16.391765625, 16.388423828125, 16.38619921875, 16.38796484375, 16.390015625, 
16.3857890625, 16.387458984375]",tokens/s,3.8439797546109427,kWh,0.00019345068814485898,0.00010602696284692681,0.000949664353866678,0.0012491420048584639,tokens/kWh,50434.61812585377,,s,629,166.15247476196274,0.264153377999941,0.03329400953216876,0.260126708984375,0.26031758422851564,0.26039378051757817,0.540367294921875,"[0.2604298095703125, 0.26013287353515624, 0.26008575439453124, 0.26007962036132815, 0.26021786499023436, 0.2603202514648438, 0.2601820068359375, 0.26012875366210936, 0.2602219543457031, 0.26012368774414063, 0.2601778869628906, 0.26016357421875, 0.2602014770507812, 0.2604083251953125, 0.26030694580078123, 0.26017074584960936, 0.26030694580078123, 0.2600079345703125, 0.2600693664550781, 0.25997927856445313, 0.2600478820800781, 0.2602444763183594, 0.2602977294921875, 0.2602588195800781, 0.26022711181640623, 0.26007958984375, 0.26012161254882815, 0.26005709838867186, 0.26022503662109375, 0.26006732177734376, 0.2602147827148438, 0.2602352600097656, 0.2603049011230469, 0.26016973876953126, 0.260178955078125, 0.26042266845703127, 0.2604431457519531, 0.2602874755859375, 0.26027420043945315, 0.2602597961425781, 0.26024856567382815, 0.26021273803710937, 0.2602014770507812, 0.26022091674804687, 0.26042776489257813, 0.26031103515625, 0.2603397216796875, 0.2603745422363281, 0.2603683776855469, 0.2603018798828125, 0.26036627197265627, 0.2605783081054687, 0.2604216613769531, 0.26033352661132814, 0.260316162109375, 0.2602147827148438, 0.26032333374023436, 0.2602229614257813, 0.2604031982421875, 0.26052197265625, 0.26033868408203126, 0.260274169921875, 0.5405133056640625, 0.25998745727539063, 0.25996185302734376, 0.25998541259765623, 0.2601246643066406, 0.2599372863769531, 0.26018405151367185, 0.25999563598632813, 0.25993011474609373, 0.2600499267578125, 0.25996185302734376, 0.2602874755859375, 0.2600693664550781, 0.25998745727539063, 0.2600140686035156, 0.26008270263671873, 0.259999755859375, 0.26005197143554687, 0.2600048522949219, 0.2604021911621094, 0.2602219543457031, 0.26024551391601564, 0.26012161254882815, 0.2601471862792969, 0.26012057495117186, 0.260178955078125, 0.26022091674804687, 0.26046771240234373, 0.2601615295410156, 0.26024038696289065, 0.26021273803710937, 0.2602147827148438, 0.26016973876953126, 0.2602188720703125, 0.2601195373535156, 0.26058139038085937, 0.260274169921875, 0.2602301330566406, 0.26009088134765623, 0.26012875366210936, 0.2600478820800781, 0.260136962890625, 0.2601390075683594, 0.26003045654296875, 0.2600919189453125, 0.2601973876953125, 0.26009088134765623, 0.2600837097167969, 0.26009292602539064, 0.260305908203125, 0.26029464721679685, 0.26030899047851563, 0.2602496032714844, 0.26015640258789063, 0.26017791748046876, 0.2601666564941406, 0.26009088134765623, 0.2602219543457031, 0.26042266845703127, 0.26024346923828123, 0.2601379699707031, 0.2602014770507812, 0.2602086486816406, 0.5403678588867188, 0.2603028564453125, 0.2600447998046875, 0.2601502685546875, 0.2600048522949219, 0.26019326782226565, 0.2600621948242188, 0.2599915466308594, 0.2599700622558594, 0.2602352600097656, 0.2600621948242188, 0.26012774658203125, 0.2601891784667969, 0.26008984375, 0.2599700622558594, 0.26002740478515624, 0.25998849487304687, 0.26010009765625, 0.2601195373535156, 0.2601656188964844, 0.26014004516601563, 0.260073486328125, 0.26001715087890626, 0.2600939636230469, 0.2603970642089844, 0.26008779907226565, 0.26007040405273435, 0.26021786499023436, 0.2600970153808594, 0.2600345458984375, 0.2600058898925781, 0.2600550537109375, 0.2601164855957031, 0.260126708984375, 
0.26001715087890626, 0.2600202331542969, 0.26008779907226565, 0.2600335388183594, 0.2601031799316406, 0.26019021606445314, 0.26009292602539064, 0.26026190185546877, 0.26016973876953126, 0.26018405151367185, 0.260126708984375, 0.2599700622558594, 0.260210693359375, 0.26018405151367185, 0.2603049011230469, 0.26017279052734377, 0.2600960998535156, 0.26008053588867186, 0.2600447998046875, 0.26002328491210935, 0.2600110168457031, 0.2601195373535156, 0.2600560607910156, 0.2601257019042969, 0.2600007629394531, 0.26009292602539064, 0.2601082763671875, 0.26012057495117186, 0.26048614501953127, 0.5407078247070313, 0.26011749267578127, 0.26015640258789063, 0.260136962890625, 0.260284423828125, 0.26017074584960936, 0.26010015869140624, 0.26020858764648436, 0.2601666564941406, 0.26003662109375, 0.2600970153808594, 0.26017587280273435, 0.26000897216796875, 0.26008575439453124, 0.2603550720214844, 0.26025164794921873, 0.2600058898925781, 0.2601666564941406, 0.2600345458984375, 0.2600980529785156, 0.2600058898925781, 0.2600386657714844, 0.26025982666015623, 0.2601533508300781, 0.26014617919921873, 0.2602496032714844, 0.2600744934082031, 0.2601820068359375, 0.2601922607421875, 0.2602076110839844, 0.26013592529296875, 0.26038885498046876, 0.26029364013671874, 0.26032333374023436, 0.26011239624023436, 0.26018508911132815, 0.2602854309082031, 0.2602239990234375, 0.2601471862792969, 0.2601748352050781, 0.26018405151367185, 0.260453369140625, 0.2603192443847656, 0.2602230224609375, 0.26013180541992187, 0.260316162109375, 0.2601594848632813, 0.2601441345214844, 0.26007244873046875, 0.26016973876953126, 0.2601164855957031, 0.26031103515625, 0.26038681030273436, 0.2602608642578125, 0.260073486328125, 0.26023934936523435, 0.26009906005859373, 0.2601584777832031, 0.2600621948242188, 0.26027008056640627, 0.26026904296875, 0.26036532592773437, 0.26014004516601563, 0.5404119262695313, 0.25996902465820315, 0.25991064453125, 0.2599966735839844, 0.2600755310058594, 0.2599987182617188, 0.2600939636230469, 0.26010726928710937, 0.25997927856445313, 0.26001202392578127, 0.26005197143554687, 0.26035198974609375, 0.26012261962890626, 0.2601553955078125, 0.26011544799804687, 0.26016256713867186, 0.2600048522949219, 0.26005093383789063, 0.259989501953125, 0.26000384521484377, 0.260421630859375, 0.26011749267578127, 0.26003250122070315, 0.2600284118652344, 0.2600130615234375, 0.26020556640625, 0.26025982666015623, 0.2602270812988281, 0.2600611877441406, 0.2601308288574219, 0.2600919189453125, 0.26027008056640627, 0.26002532958984376, 0.2602301330566406, 0.2600560607910156, 0.26042266845703127, 0.26021786499023436, 0.26033355712890627, 0.2601820068359375, 0.2600202331542969, 0.26016461181640627, 0.26014105224609374, 0.2602024841308594, 0.2601257019042969, 0.26028338623046876, 0.2601830749511719, 0.26015127563476564, 0.2601236572265625, 0.2601257019042969, 0.26051788330078124, 0.26029876708984373, 0.26014004516601563, 0.2600867919921875, 0.26011444091796876, 0.2600611877441406, 0.26010726928710937, 0.26002227783203125, 0.26012261962890626, 0.2603407287597656, 0.2602506103515625, 0.26011239624023436, 0.26010726928710937, 0.26004376220703124, 0.540248046875, 0.2601922607421875, 0.2599966735839844, 0.2599495544433594, 0.25990451049804686, 0.26010531616210936, 0.2599730224609375, 0.25991168212890625, 0.2599587707519531, 0.2602270812988281, 0.2600191955566406, 0.2600007629394531, 0.26014617919921873, 0.2600867919921875, 0.25993011474609373, 0.26002944946289064, 0.2599649353027344, 0.260105224609375, 0.2600478820800781, 
0.2600663146972656, 0.25986868286132814, 0.25998541259765623, 0.2599813232421875, 0.2601082763671875, 0.2602352600097656, 0.2601041870117187, 0.2601082763671875, 0.2601973876953125, 0.26011749267578127, 0.2600663146972656, 0.2600140686035156, 0.2602147827148438, 0.2602076110839844, 0.2603673706054688, 0.26010009765625, 0.2600714111328125, 0.2600816650390625, 0.26017587280273435, 0.26004583740234377, 0.26028851318359375, 0.26024652099609374, 0.26011239624023436, 0.26015435791015623, 0.2601615295410156, 0.2600407104492187, 0.2600172119140625, 0.26012051391601565, 0.2601451416015625, 0.2603325500488281, 0.2601820068359375, 0.2600765380859375, 0.2600335388183594, 0.26003762817382814, 0.26007962036132815, 0.2600447998046875, 0.2601666564941406, 0.260105224609375, 0.26013388061523435, 0.26012261962890626, 0.26009906005859373, 0.2600611877441406, 0.260178955078125, 0.260537353515625, 0.5406893920898438, 0.25999563598632813, 0.26002944946289064, 0.26001202392578127, 0.2599638977050781, 0.25995263671875, 0.2599413757324219, 0.2603120727539063, 0.2600202331542969, 0.25998541259765623, 0.2599034729003906, 0.2599710693359375, 0.25996902465820315, 0.26002227783203125, 0.26024038696289065, 0.26017587280273435, 0.2601492614746094, 0.2600837097167969, 0.2600110168457031, 0.2600202331542969, 0.26000384521484377, 0.26016357421875, 0.2602168273925781, 0.2602137451171875, 0.26002740478515624, 0.2602188720703125, 0.2599925842285156, 0.2601041870117187, 0.26011651611328124, 0.26032635498046874, 0.2602158203125, 0.2603356018066406, 0.26014617919921873, 0.26028033447265625, 0.2600919189453125, 0.26025164794921873, 0.26034381103515625, 0.26021786499023436, 0.2602567749023437, 0.26020965576171873, 0.2601257019042969, 0.26006427001953125, 0.2600478820800781, 0.26008062744140625, 0.2602711181640625, 0.2601513061523438, 0.2601615295410156, 0.26016259765625, 0.260000732421875, 0.2599966735839844, 0.26008474731445314, 0.2601062316894531, 0.2602567749023437, 0.26017074584960936, 0.26014208984375, 0.2601134033203125, 0.26016461181640627, 0.2600837097167969, 0.2601308288574219, 0.2603345947265625, 0.26025369262695314, 0.260274169921875, 0.2600980529785156, 0.5405234985351562, 0.26009292602539064, 0.2600345458984375, 0.2600663146972656, 0.26004376220703124, 0.2600058898925781, 0.26000997924804686, 0.26011444091796876, 0.260068359375, 0.26003250122070315, 0.25992703247070315, 0.26030694580078123, 0.2600396728515625, 0.2602669982910156, 0.2600560607910156, 0.26017691040039065, 0.260063232421875, 0.2601308288574219, 0.260136962890625, 0.2602291259765625, 0.26010931396484377, 0.26014822387695313, 0.2599966735839844, 0.260052978515625, 0.26002740478515624, 0.2600663146972656, 0.2601533508300781, 0.260232177734375, 0.26017074584960936, 0.26018817138671874, 0.2603263854980469, 0.26019021606445314, 0.26006527709960936, 0.26017587280273435, 0.26023934936523435, 0.2603714599609375, 0.2601922607421875, 0.2601615295410156, 0.26031716918945313, 0.26016461181640627, 0.26007244873046875, 0.2601922607421875, 0.26012875366210936, 0.2601185302734375, 0.26021786499023436, 0.26016973876953126, 0.26021273803710937, 0.260136962890625, 0.2601031799316406, 0.260379638671875, 0.2602291259765625, 0.26033050537109376, 0.26011544799804687, 0.2603222961425781, 0.26023934936523435, 0.26016973876953126, 0.2601441345214844, 0.26020660400390627, 0.26016461181640627, 0.2604021911621094, 0.2601748352050781, 0.26020965576171873, 0.26023834228515624, 0.5403658447265625, 0.26022808837890626, 0.26013491821289064, 0.26002227783203125, 0.2600079345703125, 
0.2600202331542969, 0.25998028564453124, 0.26002328491210935, 0.26002532958984376, 0.260173828125, 0.26014208984375, 0.2601799621582031, 0.2599915466308594, 0.26002944946289064, 0.25997720336914065, 0.26005914306640626, 0.2599342041015625, 0.25996185302734376, 0.26002944946289064, 0.2600068969726563, 0.25997515869140625, 0.26023321533203125, 0.25999563598632813, 0.26012161254882815, 0.26036224365234373, 0.26008575439453124, 0.2602362976074219, 0.2601871337890625, 0.26005810546875, 0.2601082763671875, 0.2600693664550781, 0.2600611877441406, 0.2601021423339844, 0.2602342529296875, 0.2601605224609375, 0.2600130615234375, 0.25995367431640626, 0.2600386657714844, 0.2599915466308594, 0.2601390075683594, 0.2602229614257813, 0.260105224609375, 0.2600284118652344, 0.2600478820800781, 0.25997927856445313, 0.2600284118652344, 0.260094970703125, 0.260094970703125, 0.26026190185546877, 0.26019021606445314, 0.2601257019042969, 0.26014004516601563, 0.26012161254882815, 0.2601113586425781, 0.26005197143554687, 0.2601246643066406, 0.2601041870117187, 0.26011239624023436, 0.2600488891601562, 0.2601082763671875, 0.26012979125976565, 0.2600478820800781, 0.2603417663574219, 0.5405787963867188, 0.2601574401855469, 0.2599966735839844, 0.2600396728515625, 0.2600345458984375, 0.26000180053710936, 0.260031494140625, 0.26007244873046875, 0.2600499267578125, 0.26014935302734377, 0.26003753662109375, 0.26024551391601564, 0.26003762817382814, 0.2600888366699219, 0.26008984375, 0.26002740478515624, 0.26008575439453124, 0.2600960083007812, 0.25997515869140625, 0.26009906005859373, 0.2599413757324219, 0.25995672607421877, 0.2602147827148438, 0.26008270263671873, 0.2601390075683594, 0.260126708984375, 0.25999462890625, 0.260052978515625, 0.26000384521484377, 0.26012261962890626, 0.26016256713867186, 0.26027621459960937, 0.2600560607910156, 0.26024755859375, 0.2600263671875, 0.26005093383789063, 0.26024139404296875, 0.2604267578125, 0.26019021606445314, 0.2602567749023437, 0.26005093383789063, 0.2601103210449219, 0.2600130615234375, 0.2600140686035156, 0.26001715087890626, 0.2600120849609375, 0.2602270202636719, 0.26012979125976565, 0.26007040405273435, 0.26004376220703124, 0.26005810546875, 0.260284423828125, 0.26034585571289065, 0.2601922607421875, 0.26010726928710937, 0.26014617919921873, 0.26003662109375, 0.26012057495117186, 0.2601257019042969, 0.2601308288574219, 0.2602854309082031, 0.2605189208984375, 0.2602649536132812]",tokens/s,3.7856793941897777,,,,,,,, -4bit-awq-gemm-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,Salesforce/codegen-16B-nl,Salesforce/codegen-16B-nl,cuda,0,42,,,True,,,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File 
""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run - report = scenario.run(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run - self.run_model_loading_tracking(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking - backend.load() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load - self.load_transformers_model() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model - self.load_transformers_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights - self.load_transformers_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained - self.pretrained_model = self.automodel_loader.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3826, in from_pretrained - config = cls._autoset_attn_implementation( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1556, in _autoset_attn_implementation - cls._check_and_enable_flash_attn_2( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1647, in _check_and_enable_flash_attn_2 - raise ValueError( -ValueError: CodeGenForCausalLM does not support Flash Attention 2.0 yet. 
Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmpyulsjyz5/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new - -",codegen,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemm-flash_attention_2,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,facebook/opt-6.7b,facebook/opt-6.7b,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.0,,0.30.1,,,,1.19.2,,,,0.11.1,,,,,,,,,,,,,,,,,,,,,,,,,,,MB,2034.978816,5539.10272,0.0,4892.655616,4542.610432,s,10,5.622600402832031,0.5622600402832031,0.0013230028627895694,0.5620163879394531,0.5633294921875001,0.564493603515625,0.565424892578125,"[0.56565771484375, 0.5620866088867188, 0.5614910888671875, 0.5609406127929687, 0.5618070678710938, 0.5607069091796875, 0.5619461669921875, 0.5621692504882813, 0.5627241821289063, 0.56307080078125]",tokens/s,455.30534211724546,kWh,6.621943920114895e-06,3.6285156736691226e-06,3.075088879823785e-05,4.100134839202187e-05,tokens/kWh,6243697.098747441,MB,2035.28192,5539.10272,0.0,4892.655616,4726.279168,s,10,331.3280078125,33.13280078125,0.008409894982587748,33.12982421875,33.142355468750004,33.145203125,33.14748125,"[33.14805078125, 33.124625, 33.1231640625, 33.1260859375, 33.14172265625, 33.1400703125, 33.12555859375, 33.12609375, 33.13908203125, 33.1335546875]",tokens/s,1.9014390125344305,kWh,0.00039121345305516403,0.00021441890828907464,0.0018001254370127661,0.0024057577983570048,tokens/kWh,26187.174803309546,,s,629,335.85907128906234,0.5339571880589229,0.06674281532897068,0.5258792724609375,0.5263243408203125,0.5265016845703125,1.0872363134765626,"[0.5258577880859375, 0.5256417236328125, 0.5264219970703125, 0.5263196411132812, 0.5257113647460937, 0.5258464965820312, 0.525486083984375, 0.5262151489257813, 0.5262940063476562, 0.52614453125, 0.5258035278320312, 0.52607080078125, 0.5257379760742188, 0.5260421142578126, 0.5259489135742188, 0.526060546875, 0.5258331909179688, 0.5260062866210937, 0.5258424072265625, 0.52600732421875, 0.5258045654296875, 0.5261404418945312, 0.5261957397460938, 0.5260748901367187, 0.5259468994140625, 0.5261731567382812, 0.5258363037109375, 0.5262284545898438, 0.5258250122070313, 0.5260421142578126, 0.526023681640625, 0.526244873046875, 0.52615576171875, 0.5261332397460937, 0.52611376953125, 0.5259898681640625, 0.526482421875, 0.526271484375, 0.5264271240234375, 0.5263646850585938, 0.5265366821289063, 0.52632373046875, 0.5263370361328125, 0.5259735107421875, 0.5261414184570312, 0.526107666015625, 0.5262571411132813, 0.5263308715820313, 0.5263267822265625, 0.5261199340820313, 0.52626025390625, 0.5261434936523437, 0.526482421875, 0.5264199829101562, 0.5262622680664063, 0.5264076538085938, 0.5264937133789063, 0.5264998168945313, 0.5264230346679688, 0.5268643798828125, 0.5268623657226562, 0.526497802734375, 1.0878065185546875, 0.5256908569335937, 0.5257564086914063, 0.5259059448242187, 0.5258055419921875, 0.5255567626953125, 0.52566015625, 
0.5258875122070312, 0.526002197265625, 0.5259949951171875, 0.5256663208007812, 0.5261107177734375, 0.5259857788085938, 0.5258189086914062, 0.525576171875, 0.525970458984375, 0.5256294555664063, 0.5262294921875, 0.5257728271484375, 0.5257666625976563, 0.525465576171875, 0.5258946533203125, 0.5259468994140625, 0.5260144653320312, 0.5255526123046875, 0.5257932739257812, 0.5255004272460938, 0.5259530029296875, 0.52562841796875, 0.5257584838867188, 0.5257738037109375, 0.525844482421875, 0.5255690307617188, 0.5256724243164063, 0.5258956909179687, 0.5257461547851563, 0.5255751953125, 0.5257482299804688, 0.5255536499023438, 0.5257677001953125, 0.5256314697265625, 0.5257533569335937, 0.5254666137695313, 0.52566015625, 0.5253980102539062, 0.5257482299804688, 0.5259049072265625, 0.5260809936523437, 0.5258383178710937, 0.5262018432617187, 0.5256294555664063, 0.525750244140625, 0.525849609375, 0.5261281127929688, 0.5256796264648438, 0.5259478759765625, 0.5256181640625, 0.5258219604492187, 0.5256539916992188, 0.525886474609375, 0.5259243774414063, 0.5260851440429688, 0.5258331909179688, 1.087382568359375, 0.526060546875, 0.5257789306640624, 0.5258936157226562, 0.5258137817382813, 0.5256263427734374, 0.525675537109375, 0.5257164916992187, 0.525739013671875, 0.525718505859375, 0.5260646362304687, 0.5256406860351562, 0.5256908569335937, 0.5256744995117187, 0.525433837890625, 0.525717529296875, 0.5254287109375, 0.5256908569335937, 0.5254031372070312, 0.5255137329101562, 0.5256263427734374, 0.525770751953125, 0.5254891357421875, 0.5259683837890625, 0.5255454711914063, 0.5256990966796875, 0.5255372924804688, 0.5261035766601563, 0.5256632080078125, 0.5258690795898437, 0.5257625732421874, 0.525728759765625, 0.5260155029296875, 0.5258189086914062, 0.5256539916992188, 0.5256417236328125, 0.5256273803710938, 0.5258055419921875, 0.5257083129882812, 0.525971435546875, 0.5258916015625, 0.52600830078125, 0.5256673583984375, 0.5257686767578125, 0.5254942626953125, 0.52560791015625, 0.5257482299804688, 0.5258803100585937, 0.5259755249023438, 0.5260676879882813, 0.5256151123046875, 0.5258536987304687, 0.5258526611328125, 0.5258741455078125, 0.5255966796875, 0.5259386596679687, 0.5257666625976563, 0.5259059448242187, 0.5259223022460937, 0.5260523681640625, 0.52583935546875, 0.5260933227539063, 0.5256048583984375, 1.0868602294921874, 0.5259120483398437, 0.5255465087890625, 0.5258884887695312, 0.5255741577148437, 0.525613037109375, 0.5256007690429687, 0.5258076171875, 0.5256734619140625, 0.5257328491210937, 0.5254573974609374, 0.525791259765625, 0.525454345703125, 0.5258527221679687, 0.5255577392578125, 0.525912109375, 0.5255772094726563, 0.525727783203125, 0.5255833129882812, 0.5263431396484375, 0.5257984008789063, 0.525792236328125, 0.525896728515625, 0.52569189453125, 0.5255977172851563, 0.52600732421875, 0.525549560546875, 0.5257738037109375, 0.5256058959960938, 0.5258147583007813, 0.52556494140625, 0.5260534057617188, 0.5259837646484375, 0.5260646362304687, 0.5258803100585937, 0.5260369873046875, 0.5255034790039063, 0.5258485717773438, 0.5255608520507813, 0.52600830078125, 0.52562841796875, 0.52608203125, 0.5257349243164062, 0.5257574462890625, 0.5256406860351562, 0.5257717895507813, 0.5257625732421874, 0.5258875122070312, 0.525686767578125, 0.5260492553710937, 0.5258639526367187, 0.5261209716796875, 0.5259990844726562, 0.5260728149414062, 0.5258884887695312, 0.5258946533203125, 0.52564990234375, 0.5258731689453126, 0.5256837158203125, 0.5259202270507812, 0.5259827270507812, 0.5264097290039063, 0.526213134765625, 
1.0886195068359374, 0.5266964721679688, 0.5265223388671875, 0.52691455078125, 0.5266104125976563, 0.5267548217773438, 0.5260175170898438, 0.5263544311523437, 0.5261270751953125, 0.5263964233398437, 0.5262694702148437, 0.5260482788085937, 0.5259909057617187, 0.5260492553710937, 0.5259929809570313, 0.5264066772460938, 0.525787109375, 0.5257963256835938, 0.5255126953125, 0.5257431030273437, 0.5255669555664062, 0.5256345825195312, 0.5254779052734375, 0.5258198852539062, 0.5258219604492187, 0.5260687255859375, 0.5260800170898438, 0.525802490234375, 0.5258506469726563, 0.525781005859375, 0.5256857299804687, 0.5258485717773438, 0.52577587890625, 0.5258373413085937, 0.5261895751953125, 0.52682958984375, 0.526614501953125, 0.52650390625, 0.5255485229492187, 0.5258424072265625, 0.525549560546875, 0.5258352661132812, 0.5255639038085937, 0.525822998046875, 0.5257195434570312, 0.525739013671875, 0.5257267456054687, 0.5263533935546875, 0.5261547241210938, 0.5263810424804688, 0.5262264404296875, 0.526551025390625, 0.5261895751953125, 0.5266165771484375, 0.5261998291015625, 0.5261404418945312, 0.5261486206054687, 0.5264189453125, 0.52577587890625, 0.5259304809570312, 0.5256898803710938, 0.5258782958984375, 0.5257636108398438, 1.08657666015625, 0.5256980590820313, 0.52586083984375, 0.5259304809570312, 0.5256939697265625, 0.5258782958984375, 0.5258782958984375, 0.52608203125, 0.5260462036132812, 0.5260513305664063, 0.525549560546875, 0.5258875122070312, 0.5257984008789063, 0.5262018432617187, 0.5257328491210937, 0.5260298461914062, 0.5255608520507813, 0.5259683837890625, 0.52613427734375, 0.52609228515625, 0.5256028442382813, 0.52607080078125, 0.5256304931640625, 0.5257891845703125, 0.525643798828125, 0.5260062866210937, 0.5256611938476563, 0.5261844482421875, 0.5262510375976562, 0.5261025390625, 0.5262673950195312, 0.5262622680664063, 0.5260779418945313, 0.5261721801757813, 0.5261353149414062, 0.5261588745117187, 0.5259642944335937, 0.52661865234375, 0.52625, 0.5263790283203125, 0.5262643432617188, 0.5265131225585937, 0.5259069213867188, 0.526286865234375, 0.5260626220703125, 0.5262888793945313, 0.52619775390625, 0.526224365234375, 0.5260155029296875, 0.526613525390625, 0.526581787109375, 0.5259366455078125, 0.5256642456054688, 0.5257686767578125, 0.5256632080078125, 0.5258198852539062, 0.5258588256835938, 0.5261209716796875, 0.525928466796875, 0.5265029296875, 0.5259878540039062, 0.5262069702148438, 0.5264834594726563, 1.088301025390625, 0.5262653198242188, 0.5256724243164063, 0.5255659790039062, 0.525475830078125, 0.5257636108398438, 0.5255300903320312, 0.5257246704101562, 0.5256058959960938, 0.5256345825195312, 0.5254676513671875, 0.5258137817382813, 0.5256581420898437, 0.5258731689453126, 0.526055419921875, 0.526497802734375, 0.5257297973632813, 0.5260103759765625, 0.5256406860351562, 0.5259428100585938, 0.5259376831054687, 0.5261486206054687, 0.5255321655273437, 0.5259735107421875, 0.5258168334960938, 0.52605029296875, 0.5256571044921875, 0.5257420654296875, 0.5257471923828125, 0.525717529296875, 0.525749267578125, 0.5258168334960938, 0.5255885009765625, 0.5259366455078125, 0.5256325073242187, 0.5258721313476562, 0.5255454711914063, 0.5259120483398437, 0.5255874633789063, 0.5258823852539063, 0.5260584716796874, 0.5260093383789063, 0.525638671875, 0.5257984008789063, 0.5254819946289062, 0.5257584838867188, 0.5255413818359375, 0.5256263427734374, 0.5254993896484375, 0.5260482788085937, 0.5255536499023438, 0.5256744995117187, 0.5257471923828125, 0.5260912475585937, 0.5257963256835938, 0.526045166015625, 
0.5258997802734375, 0.5258854370117187, 0.5258956909179687, 0.526097412109375, 0.5258956909179687, 0.52596630859375, 0.525486083984375, 1.0877613525390626, 0.525769775390625, 0.5254829711914063, 0.5259612426757813, 0.525929443359375, 0.5262827758789063, 0.5257748413085938, 0.5258516235351562, 0.5254768676757813, 0.5256775512695312, 0.5254871215820313, 0.5258475341796875, 0.525770751953125, 0.5260123901367187, 0.5260103759765625, 0.52575537109375, 0.525591552734375, 0.5260431518554688, 0.5255874633789063, 0.5257769165039062, 0.52560693359375, 0.5258577880859375, 0.5256089477539062, 0.5257195434570312, 0.52571337890625, 0.5260534057617188, 0.5259540405273437, 0.526023681640625, 0.5256468505859375, 0.5257000732421875, 0.5256878051757813, 0.5258270874023437, 0.5255669555664062, 0.5259325561523438, 0.5256478881835938, 0.5259059448242187, 0.5256539916992188, 0.5259171752929688, 0.525533203125, 0.5258956909179687, 0.5256396484375, 0.5259151611328124, 0.5259089965820313, 0.5258045654296875, 0.525470703125, 0.526087158203125, 0.525570068359375, 0.526012451171875, 0.5258239135742188, 0.526266357421875, 0.5257000732421875, 0.5258916015625, 0.5262540893554688, 0.525970458984375, 0.5258168334960938, 0.5259356079101563, 0.5257216186523438, 0.5257953491210937, 0.5261250610351562, 0.5259458618164062, 0.5255300903320312, 0.5259765625, 0.5258004760742188, 1.0886134033203125, 0.52569189453125, 0.5255536499023438, 0.5258383178710937, 0.5257799682617188, 0.5257584838867188, 0.5256837158203125, 0.5262305297851563, 0.5256099853515624, 0.525781005859375, 0.5258270874023437, 0.5262387084960938, 0.5258424072265625, 0.5259765625, 0.5260114135742188, 0.5261588745117187, 0.5259622192382812, 0.5260318603515625, 0.5257083129882812, 0.526166015625, 0.5256325073242187, 0.52587109375, 0.5258260498046875, 0.5261178588867188, 0.5256458129882813, 0.525897705078125, 0.5261752319335937, 0.5257431030273437, 0.5257799682617188, 0.526298095703125, 0.5260574951171875, 0.52619775390625, 0.52611279296875, 0.526271484375, 0.5262151489257813, 0.5269708862304687, 0.5263012084960937, 0.52657666015625, 0.5264937133789063, 0.5267752685546875, 0.5261527099609375, 0.5262315673828125, 0.5261690673828125, 0.5258997802734375, 0.5257728271484375, 0.5265807495117187, 0.525681640625, 0.5258721313476562, 0.5259049072265625, 0.525970458984375, 0.525633544921875, 0.5261414184570312, 0.5260697631835938, 0.5258782958984375, 0.5258792724609375, 0.52611376953125, 0.5257523193359375, 0.5261752319335937, 0.5259192504882813, 0.52607080078125, 0.5259898681640625, 0.5263278198242187, 0.5262356567382812, 1.089292236328125, 0.5257205810546876, 0.5256959838867188, 0.5258516235351562, 0.525675537109375, 0.525707275390625, 0.525644775390625, 0.5258741455078125, 0.5256427612304687, 0.52611376953125, 0.5256857299804687, 0.5256929321289062, 0.525865966796875, 0.5259274291992188, 0.5254942626953125, 0.52583935546875, 0.5259089965820313, 0.5260984497070312, 0.5255659790039062, 0.52596630859375, 0.5255341796875, 0.5257932739257812, 0.5256161499023437, 0.5257615356445312, 0.5255608520507813, 0.5256908569335937, 0.5256058959960938, 0.5258137817382813, 0.525822998046875, 0.5260390625, 0.5258168334960938, 0.526033935546875, 0.5262326049804688, 0.5259100341796875, 0.5258065795898438, 0.52625, 0.5258567504882813, 0.52642919921875, 0.5261015014648438, 0.525955078125, 0.5256345825195312, 0.5259990844726562, 0.5259017944335938, 0.525970458984375, 0.5258311767578125, 0.5259765625, 0.5258895263671876, 0.5263216552734375, 0.5260062866210937, 0.5263206176757812, 0.5261220092773438, 
0.5258639526367187, 0.5263790283203125, 0.52634521484375, 0.52650390625, 0.5263104248046875, 0.5256294555664063, 0.5261782836914063, 0.5257164916992187, 0.526087158203125, 0.526256103515625, 0.52645068359375, 0.5261045532226563]",tokens/s,1.8728093232254581,,,,,,,, -4bit-awq-gemm-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,.,.,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 373, in cached_file - raise EnvironmentError( -OSError: . does not appear to have a file named config.json. Checkout 'https://huggingface.co/./tree/None' for available files. 
- -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemm-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,l,l,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 304, in hf_raise_for_status - response.raise_for_status() - File ""/usr/local/lib/python3.10/dist-packages/requests/models.py"", line 1024, in raise_for_status - raise HTTPError(http_error_msg, response=self) -requests.exceptions.HTTPError: 404 Client Error: Not Found for url: https://huggingface.co/l/resolve/main/config.json - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1221, in hf_hub_download - return _hf_hub_download_to_cache_dir( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1325, in _hf_hub_download_to_cache_dir - _raise_on_head_call_error(head_call_error, force_download, local_files_only) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1823, in _raise_on_head_call_error - raise head_call_error - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1722, in _get_metadata_or_catch_error - metadata = get_hf_file_metadata(url=url, proxies=proxies, timeout=etag_timeout, headers=headers) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1645, in get_hf_file_metadata - r = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 372, in _request_wrapper - response = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 396, in _request_wrapper - hf_raise_for_status(response) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status - raise RepositoryNotFoundError(message, response) from 
e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-6694919b-5b27d2c22e7a588847b7b749;69efa352-6180-4411-ab7d-4ccc409e27f8) - -Repository Not Found for url: https://huggingface.co/l/resolve/main/config.json. -Please make sure you specified the correct `repo_id` and `repo_type`. -If you are trying to access a private or gated repo, make sure you are authenticated. - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 425, in cached_file - raise EnvironmentError( -OSError: l is not a local folder and is not a valid model identifier listed on 'https://huggingface.co/models' -If this is a private repository, make sure to pass a token having permission to this repo either by logging in with `huggingface-cli login` or by passing `token=` - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemm-eager,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,Qwen/Qwen1.5-32B,Qwen/Qwen1.5-32B,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.40.2,,0.30.1,,,,1.19.2,,,,0.11.1,,,,,,,,,,,,,,,,,,,,,,,,,,,MB,4421.566464,24111.480832,0.0,23465.033728,21691.057664,s,10,26.201972412109374,2.6201972412109376,0.0014392080916773324,2.620363525390625,2.621498388671875,2.622221728515625,2.622800400390625,"[2.62030126953125, 2.620416259765625, 2.620310791015625, 2.621337646484375, 2.620593017578125, 2.619945556640625, 2.622945068359375, 2.618053466796875, 2.6175732421875, 
2.62049609375]",tokens/s,97.70256833095829,kWh,3.092599951558643e-05,1.6948578058490964e-05,0.00015162839908040572,0.00019950297665448312,tokens/kWh,1283188.874135765,MB,4421.566464,24111.480832,0.0,23465.033728,21890.213376,s,10,1554.3346406249998,155.43346406249998,0.013784362336340894,155.428859375,155.4548328125,155.45787734375,155.46031296875,"[155.44153125, 155.42890625, 155.45415625, 155.426875, 155.41909375, 155.416984375, 155.4288125, 155.42403125, 155.460921875, 155.433328125]",tokens/s,0.4053181236099044,kWh,0.0018348197203377882,0.0010056435987538315,0.008919082301926796,0.011759545621018413,tokens/kWh,5357.349852650515,,s,629,1575.449455322266,2.504689118159405,0.3111804707409938,2.46706884765625,2.4683868164062504,2.4691709960937502,5.086480859375,"[2.467560546875, 2.468384765625, 2.46702587890625, 2.467464111328125, 2.46763427734375, 2.468833251953125, 2.467610595703125, 2.46744580078125, 2.467310546875, 2.4678994140625, 2.46620263671875, 2.466724853515625, 2.46737109375, 2.46790234375, 2.466193359375, 2.46736279296875, 2.4661279296875, 2.467852294921875, 2.469380126953125, 2.468475830078125, 2.467101806640625, 2.46930322265625, 2.4674384765625, 2.46818505859375, 2.46793115234375, 2.468096923828125, 2.466873291015625, 2.466653076171875, 2.466144287109375, 2.467576904296875, 2.466427978515625, 2.46687744140625, 2.467862548828125, 2.468832275390625, 2.46677001953125, 2.46746533203125, 2.4666767578125, 2.467177490234375, 2.4664228515625, 2.466610107421875, 2.466058349609375, 2.46723388671875, 2.470784912109375, 2.466979736328125, 2.466400146484375, 2.4673740234375, 2.467078125, 2.466891845703125, 2.46717041015625, 2.467664794921875, 2.4676455078125, 2.467311767578125, 2.4667421875, 2.46812060546875, 2.467968017578125, 2.467040283203125, 2.466512939453125, 2.4665068359375, 2.467083251953125, 2.466418701171875, 2.465967041015625, 2.466157470703125, 5.087494140625, 2.466189208984375, 2.466884521484375, 2.46639501953125, 2.46648828125, 2.466607177734375, 2.467287109375, 2.46839599609375, 2.46702587890625, 2.467313720703125, 2.467323974609375, 2.467330078125, 2.466697265625, 2.46641455078125, 2.4657080078125, 2.46809912109375, 2.46658984375, 2.467029052734375, 2.46753076171875, 2.468769775390625, 2.467987548828125, 2.468222900390625, 2.4690595703125, 2.469578857421875, 2.46847900390625, 2.4680283203125, 2.46656298828125, 2.46801513671875, 2.46639013671875, 2.4670986328125, 2.46615234375, 2.46773974609375, 2.4667216796875, 2.46727685546875, 2.4663193359375, 2.46763525390625, 2.46688671875, 2.467874755859375, 2.46723779296875, 2.46824853515625, 2.466218017578125, 2.466986083984375, 2.46668505859375, 2.4669931640625, 2.4665302734375, 2.465965087890625, 2.468173828125, 2.467091552734375, 2.4666787109375, 2.466904052734375, 2.466720703125, 2.466922607421875, 2.46651806640625, 2.466711669921875, 2.46662158203125, 2.4666767578125, 2.466606201171875, 2.466504638671875, 2.46702294921875, 2.467114990234375, 2.46698291015625, 2.466469970703125, 2.46719384765625, 5.0873857421875, 2.46711181640625, 2.4676669921875, 2.4682373046875, 2.46677294921875, 2.465900634765625, 2.466285400390625, 2.46675244140625, 2.4686142578125, 2.467852294921875, 2.468147216796875, 2.469138427734375, 2.468021240234375, 2.46839501953125, 2.467322998046875, 2.46803857421875, 2.4694794921875, 2.467284912109375, 2.467267578125, 2.46729736328125, 2.471918701171875, 2.4674765625, 2.467620849609375, 2.468518798828125, 2.46965966796875, 2.468581298828125, 2.467287109375, 2.4676025390625, 2.46909130859375, 2.4678798828125, 
2.466585693359375, 2.46625390625, 2.468111328125, 2.466431884765625, 2.4665302734375, 2.46625390625, 2.46766796875, 2.46662646484375, 2.46632763671875, 2.466314208984375, 2.468085693359375, 2.467567626953125, 2.47077490234375, 2.466901123046875, 2.46776416015625, 2.466997314453125, 2.468117431640625, 2.471785400390625, 2.468166748046875, 2.46658154296875, 2.467800048828125, 2.4673310546875, 2.46755029296875, 2.466908203125, 2.4666357421875, 2.4657724609375, 2.4668466796875, 2.46647509765625, 2.46641455078125, 2.4653935546875, 2.46607861328125, 2.465657958984375, 2.46717041015625, 5.08862353515625, 2.46618017578125, 2.46677001953125, 2.465965087890625, 2.4666962890625, 2.46651806640625, 2.466512939453125, 2.4657919921875, 2.46704833984375, 2.466091064453125, 2.4665322265625, 2.466198486328125, 2.466378662109375, 2.4663388671875, 2.466697265625, 2.4661689453125, 2.467324951171875, 2.46691748046875, 2.46706884765625, 2.46692041015625, 2.466785400390625, 2.467284912109375, 2.467230712890625, 2.4668681640625, 2.4700732421875, 2.46681396484375, 2.466840576171875, 2.46628857421875, 2.4691845703125, 2.469140380859375, 2.468798583984375, 2.46765869140625, 2.4666748046875, 2.46800390625, 2.466842529296875, 2.46696044921875, 2.46841552734375, 2.46970166015625, 2.468170654296875, 2.467095458984375, 2.46616259765625, 2.468423583984375, 2.46754296875, 2.467493896484375, 2.469568603515625, 2.467686279296875, 2.467119140625, 2.46765576171875, 2.4675615234375, 2.46753076171875, 2.46786962890625, 2.467504150390625, 2.466217041015625, 2.46670751953125, 2.466275390625, 2.46649951171875, 2.465642578125, 2.4665927734375, 2.466114501953125, 2.46563134765625, 2.466107421875, 2.466840576171875, 2.46594775390625, 5.0872607421875, 2.466586669921875, 2.467085205078125, 2.466087890625, 2.46624658203125, 2.468030517578125, 2.466228271484375, 2.46631201171875, 2.465871826171875, 2.466620361328125, 2.466470947265625, 2.4659375, 2.465469482421875, 2.467073974609375, 2.467386474609375, 2.466375732421875, 2.46649658203125, 2.469547119140625, 2.46750634765625, 2.466642822265625, 2.46601123046875, 2.466673583984375, 2.466423828125, 2.466227294921875, 2.466232421875, 2.467124267578125, 2.466873291015625, 2.46641162109375, 2.466154541015625, 2.46636962890625, 2.466863037109375, 2.46698291015625, 2.467325927734375, 2.46662744140625, 2.4677744140625, 2.466999267578125, 2.467263427734375, 2.466723876953125, 2.46856201171875, 2.46679150390625, 2.466046875, 2.46635107421875, 2.46673828125, 2.467541015625, 2.4671416015625, 2.467443603515625, 2.466511962890625, 2.467031982421875, 2.46706787109375, 2.4669921875, 2.468369384765625, 2.467056640625, 2.46666455078125, 2.467033203125, 2.468075439453125, 2.468128662109375, 2.466417724609375, 2.471729248046875, 2.467071044921875, 2.466891845703125, 2.46647509765625, 2.466440185546875, 2.46868798828125, 5.08691357421875, 2.466028564453125, 2.4677333984375, 2.467010498046875, 2.467205078125, 2.4664013671875, 2.4672451171875, 2.4667392578125, 2.46585546875, 2.466406494140625, 2.46685693359375, 2.4661708984375, 2.4663828125, 2.46592822265625, 2.466884521484375, 2.466239501953125, 2.46583203125, 2.465574951171875, 2.46738330078125, 2.467092529296875, 2.4664677734375, 2.467158935546875, 2.467737548828125, 2.4660244140625, 2.4675830078125, 2.46628662109375, 2.467053466796875, 2.4666142578125, 2.46684375, 2.466788330078125, 2.467851318359375, 2.46658056640625, 2.468338623046875, 2.467275634765625, 2.46681201171875, 2.466661376953125, 2.466303955078125, 2.46763720703125, 2.473092041015625, 
2.46727685546875, 2.467389404296875, 2.467167236328125, 2.467786865234375, 2.4663837890625, 2.4663388671875, 2.465721435546875, 2.467124267578125, 2.4679638671875, 2.46670751953125, 2.46626416015625, 2.467136474609375, 2.466255859375, 2.4674990234375, 2.466595947265625, 2.466490478515625, 2.4665712890625, 2.46690087890625, 2.466754638671875, 2.4670341796875, 2.467210205078125, 2.467547119140625, 2.467244140625, 2.466747314453125, 5.09081201171875, 2.467222412109375, 2.46835205078125, 2.467926025390625, 2.4673740234375, 2.467116943359375, 2.46747021484375, 2.467093505859375, 2.466975830078125, 2.46742626953125, 2.4675400390625, 2.467567626953125, 2.4670546875, 2.467200927734375, 2.467986328125, 2.46847900390625, 2.468297607421875, 2.466345947265625, 2.467420166015625, 2.468263916015625, 2.46740283203125, 2.466755615234375, 2.467157958984375, 2.471689208984375, 2.467622802734375, 2.4668671875, 2.467239013671875, 2.46835205078125, 2.467516357421875, 2.46651904296875, 2.466069580078125, 2.46681591796875, 2.466124755859375, 2.465919921875, 2.465881103515625, 2.46696240234375, 2.46622607421875, 2.46639208984375, 2.465967041015625, 2.46714990234375, 2.466946044921875, 2.466486328125, 2.465672119140625, 2.466734130859375, 2.466515869140625, 2.46664404296875, 2.4666142578125, 2.4672265625, 2.466231201171875, 2.46652001953125, 2.4661728515625, 2.46692041015625, 2.466157470703125, 2.46626611328125, 2.46582470703125, 2.466809814453125, 2.467745849609375, 2.4677080078125, 2.466193359375, 2.4672470703125, 2.467043212890625, 2.47162060546875, 2.4657490234375, 5.08459130859375, 2.46683544921875, 2.46649755859375, 2.467766357421875, 2.467400634765625, 2.467850341796875, 2.467080078125, 2.46782470703125, 2.466931640625, 2.46681298828125, 2.466908203125, 2.467493896484375, 2.466670654296875, 2.46761767578125, 2.467306396484375, 2.46744873046875, 2.46968310546875, 2.467335205078125, 2.466291748046875, 2.46672900390625, 2.4669912109375, 2.468328369140625, 2.466957275390625, 2.46684375, 2.467378173828125, 2.4669912109375, 2.467179443359375, 2.46784521484375, 2.468820068359375, 2.466711669921875, 2.465966064453125, 2.465700927734375, 2.4670791015625, 2.46738427734375, 2.467485595703125, 2.4666328125, 2.4676455078125, 2.466926513671875, 2.46613720703125, 2.46624755859375, 2.4671220703125, 2.46669921875, 2.465594482421875, 2.46590771484375, 2.466947021484375, 2.466668701171875, 2.466886474609375, 2.46719189453125, 2.46757080078125, 2.466482177734375, 2.46595068359375, 2.46605712890625, 2.467263427734375, 2.466189208984375, 2.467197998046875, 2.46523095703125, 2.466417724609375, 2.466122802734375, 2.467242919921875, 2.466663330078125, 2.46723388671875, 2.466820068359375, 2.472427490234375, 5.0853681640625, 2.46607568359375, 2.46763818359375, 2.466788330078125, 2.467725341796875, 2.466460693359375, 2.467099609375, 2.46757177734375, 2.46760546875, 2.466769775390625, 2.46808056640625, 2.46717236328125, 2.468547607421875, 2.46778369140625, 2.4672236328125, 2.466810791015625, 2.467812255859375, 2.468862060546875, 2.467946533203125, 2.467600341796875, 2.46891015625, 2.46693994140625, 2.46805810546875, 2.46829052734375, 2.469150634765625, 2.468052978515625, 2.4687646484375, 2.467143798828125, 2.467037109375, 2.466817138671875, 2.468148193359375, 2.467322998046875, 2.467407958984375, 2.468336669921875, 2.468115478515625, 2.4671845703125, 2.4663388671875, 2.4676884765625, 2.467588134765625, 2.466388916015625, 2.466503662109375, 2.466964599609375, 2.4676494140625, 2.466975830078125, 2.466547607421875, 2.46734033203125, 
2.467566650390625, 2.467642333984375, 2.466969482421875, 2.468360107421875, 2.467146728515625, 2.466821044921875, 2.466783203125, 2.46778466796875, 2.46734130859375, 2.467453857421875, 2.467220458984375, 2.468514892578125, 2.467493896484375, 2.4675439453125, 2.467715087890625, 2.47457275390625, 2.468958251953125, 5.0886162109375, 2.467864501953125, 2.469477294921875, 2.468490234375, 2.4670166015625, 2.467751953125, 2.46676171875, 2.466572265625, 2.467084228515625, 2.467091552734375, 2.4673525390625, 2.4678388671875, 2.46633154296875, 2.467618896484375, 2.46672900390625, 2.466831298828125, 2.4672685546875, 2.467834716796875, 2.46698193359375, 2.467683349609375, 2.46766796875, 2.46748974609375, 2.46799560546875, 2.4667822265625, 2.4664052734375, 2.467306396484375, 2.469897216796875, 2.46721630859375, 2.46658251953125, 2.46739453125, 2.467197998046875, 2.468125732421875, 2.466786376953125, 2.467189697265625, 2.467324951171875, 2.467589111328125, 2.46772314453125, 2.468556884765625, 2.466018310546875, 2.466438232421875, 2.467070068359375, 2.466093017578125, 2.466754638671875, 2.46706884765625, 2.46609521484375, 2.467577880859375, 2.46765771484375, 2.468675537109375, 2.46646875, 2.466482177734375, 2.466712646484375, 2.465977294921875, 2.46544384765625, 2.466545654296875, 2.466736083984375, 2.467407958984375, 2.466906005859375, 2.466165771484375, 2.46822900390625, 2.4675185546875, 2.466716552734375, 2.4658984375, 2.466937744140625]",tokens/s,0.3992511456810495,,,,,,,, -4bit-awq-gemm-eager,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,openai-community/gpt2,openai-community/gpt2,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.0,,0.30.1,,,,1.19.2,,,,0.11.1,,,,,,,,,,,,,,,,,,,,,,,,,,,MB,1260.384256,1005.060096,0.0,358.612992,318.913024,s,20,0.1778504323959351,0.008892521619796753,0.00029352442906368934,0.008754608154296874,0.00910248613357544,0.009169697380065919,0.009785504264831543,"[0.009939455986022949, 0.008732735633850098, 0.008714752197265625, 0.009129183769226074, 0.008936767578125, 0.00906287956237793, 0.008935744285583497, 0.008770496368408202, 0.008699071884155274, 0.00863708782196045, 0.009064096450805664, 0.009099519729614258, 0.009006879806518554, 0.008716704368591309, 0.008663392066955567, 0.009004608154296875, 0.008722240447998048, 0.008636159896850586, 0.008738719940185546, 0.008639936447143554]",tokens/s,28788.234760102965,kWh,1.0174500456220489e-07,5.575151129166093e-08,2.1683886616880275e-07,3.743353820226686e-07,tokens/kWh,683878714.9019684,MB,1260.384256,1005.060096,0.0,358.612992,328.809472,s,20,10.067610046386722,0.5033805023193361,0.00995087263393504,0.5025519256591797,0.5155266967773438,0.5156778381347656,0.5160263879394532,"[0.499431884765625, 0.5143662109375, 0.516113525390625, 0.5138157348632812, 0.515512451171875, 0.5115390319824219, 0.49984701538085935, 0.4923589782714844, 0.49069613647460936, 0.5090225524902344, 0.5124207153320313, 0.5156549072265625, 0.4935624084472656, 0.49096063232421877, 0.5048639221191407, 0.5133187255859375, 0.49067050170898435, 0.5002399291992188, 0.491751953125, 
0.4914628295898438]",tokens/s,125.15383434544286,kWh,5.837131539873614e-06,3.1984816083411113e-06,9.866002533436888e-06,1.8901615681651614e-05,tokens/kWh,3333048.4050184162,,s,1259,10.226770932674393,0.008122931638343454,0.0011308170692653316,0.007988224029541016,0.008203213119506836,0.00825251808166504,0.016815533714294433,"[0.008956928253173829, 0.009018367767333984, 0.008947711944580078, 0.009010175704956054, 0.009335807800292969, 0.008112128257751466, 0.007797760009765625, 0.007737343788146973, 0.007766016006469726, 0.007806975841522217, 0.007930880069732665, 0.007813119888305664, 0.0077608962059021, 0.007756800174713135, 0.007740416049957275, 0.007768064022064209, 0.00773529577255249, 0.007709760189056397, 0.007721920013427735, 0.007746560096740723, 0.00775270414352417, 0.00799232006072998, 0.00784281587600708, 0.008203264236450195, 0.008260607719421387, 0.008042495727539062, 0.008059904098510743, 0.007980031967163086, 0.007763967990875244, 0.007751679897308349, 0.007723008155822754, 0.007699456214904785, 0.007764992237091065, 0.007706624031066894, 0.007740416049957275, 0.00783462381362915, 0.007773183822631836, 0.007748608112335205, 0.0077199358940124516, 0.007729152202606201, 0.007729152202606201, 0.007724031925201416, 0.007738368034362793, 0.007704576015472412, 0.0077199358940124516, 0.007832575798034667, 0.007963647842407226, 0.007987199783325195, 0.007905280113220215, 0.0077916159629821775, 0.007706624031066894, 0.007709695816040039, 0.007702527999877929, 0.007707647800445557, 0.007707647800445557, 0.007699456214904785, 0.007712768077850342, 0.007755775928497314, 0.007730175971984863, 0.007721983909606934, 0.007711743831634522, 0.007840799808502196, 0.01680175971984863, 0.007746560096740723, 0.0077547521591186525, 0.00769536018371582, 0.00773632001876831, 0.008111104011535645, 0.008331263542175293, 0.008501248359680176, 0.008590335845947266, 0.008962047576904298, 0.008557600021362305, 0.007902175903320312, 0.007857151985168457, 0.008179712295532226, 0.008134655952453614, 0.008121343612670898, 0.008165375709533691, 0.008182784080505372, 0.00821452808380127, 0.00810086441040039, 0.00818073558807373, 0.008142848014831543, 0.008248319625854492, 0.0081725435256958, 0.008225791931152344, 0.008181759834289551, 0.008154111862182617, 0.008155136108398438, 0.008208383560180664, 0.008145952224731445, 0.008171487808227539, 0.008196096420288086, 0.008184831619262695, 0.008133631706237793, 0.008138751983642578, 0.008140800476074218, 0.008171520233154296, 0.008120320320129394, 0.008166399955749512, 0.008168448448181152, 0.008184831619262695, 0.008118271827697754, 0.00819711971282959, 0.008182784080505372, 0.008133631706237793, 0.008169471740722656, 0.008173567771911621, 0.008208383560180664, 0.008178688049316407, 0.008148991584777832, 0.008138751983642578, 0.008224767684936523, 0.00830668830871582, 0.008169471740722656, 0.008211456298828124, 0.008210432052612305, 0.008233983993530274, 0.008142848014831543, 0.008143872261047362, 0.008169471740722656, 0.008142848014831543, 0.008184831619262695, 0.008094719886779785, 0.016881664276123046, 0.007815167903900147, 0.008175616264343261, 0.008177696228027343, 0.0081561279296875, 0.008162303924560547, 0.008211456298828124, 0.008211456298828124, 0.0081725435256958, 0.008175616264343261, 0.008283136367797851, 0.008204287528991699, 0.008184831619262695, 0.008186880111694337, 0.008223744392395019, 0.008196096420288086, 0.008203264236450195, 0.008229887962341309, 0.008166399955749512, 0.008252415657043457, 0.008163328170776368, 0.008248319625854492, 
0.008264703750610352, 0.00818892765045166, 0.00819814395904541, 0.008241151809692383, 0.008165439605712891, 0.0082042236328125, 0.008162303924560547, 0.008232959747314453, 0.008151040077209473, 0.008152128219604492, 0.008161215782165528, 0.008182784080505372, 0.008133631706237793, 0.008158207893371582, 0.008237055778503418, 0.008202239990234375, 0.008193023681640625, 0.008161279678344726, 0.008217599868774414, 0.008156160354614257, 0.00819711971282959, 0.008166399955749512, 0.008308735847473145, 0.008174592018127442, 0.00821350383758545, 0.00821555233001709, 0.008237055778503418, 0.008190976142883302, 0.00820633602142334, 0.008167424201965333, 0.008225791931152344, 0.008162303924560547, 0.008162303924560547, 0.008168448448181152, 0.00820633602142334, 0.00830463981628418, 0.008229887962341309, 0.008415231704711914, 0.0081725435256958, 0.008143872261047362, 0.008152064323425292, 0.01761075210571289, 0.008202239990234375, 0.008141823768615723, 0.008071167945861817, 0.007961599826812745, 0.008018943786621094, 0.008046591758728027, 0.008005632400512695, 0.00840294361114502, 0.008124416351318359, 0.00820531177520752, 0.008201215744018555, 0.00818073558807373, 0.008185855865478516, 0.00819200038909912, 0.008175616264343261, 0.008318976402282715, 0.008154111862182617, 0.00820531177520752, 0.008134655952453614, 0.008143872261047362, 0.008136704444885253, 0.008194047927856446, 0.008137727737426758, 0.008143872261047362, 0.008171520233154296, 0.008257599830627441, 0.008203200340270996, 0.008194047927856446, 0.00821555233001709, 0.008195072174072266, 0.008143872261047362, 0.007998464107513427, 0.008034303665161132, 0.007986176013946533, 0.007982079982757568, 0.008018943786621094, 0.008036352157592774, 0.008148991584777832, 0.008135680198669434, 0.008142848014831543, 0.008236031532287597, 0.008143872261047362, 0.008142848014831543, 0.008132608413696289, 0.00819200038909912, 0.008179712295532226, 0.008140800476074218, 0.008145983695983888, 0.008236991882324218, 0.0081725435256958, 0.008146944046020508, 0.00818892765045166, 0.008175616264343261, 0.008162303924560547, 0.0081725435256958, 0.00821555233001709, 0.008113151550292968, 0.008209407806396484, 0.008227840423583984, 0.008196096420288086, 0.008126463890075684, 0.008157183647155761, 0.017464319229125978, 0.008184831619262695, 0.00818073558807373, 0.008165375709533691, 0.00821555233001709, 0.008167424201965333, 0.008129535675048828, 0.008302592277526855, 0.008182784080505372, 0.00830361557006836, 0.008194047927856446, 0.008250368118286134, 0.008151040077209473, 0.008257535934448243, 0.008231936454772949, 0.008444928169250488, 0.008429568290710449, 0.00829849624633789, 0.008167424201965333, 0.008171520233154296, 0.008143903732299805, 0.008130528450012208, 0.008302592277526855, 0.008113151550292968, 0.008168448448181152, 0.008145919799804687, 0.008143872261047362, 0.008110079765319824, 0.008140800476074218, 0.008116224288940429, 0.008160256385803222, 0.008153087615966797, 0.00813158416748047, 0.008157183647155761, 0.008136704444885253, 0.008124416351318359, 0.008236031532287597, 0.008158207893371582, 0.008129535675048828, 0.008147968292236327, 0.008122367858886719, 0.008253439903259278, 0.008128512382507324, 0.00810905647277832, 0.008117247581481933, 0.008118271827697754, 0.008119296073913575, 0.008129535675048828, 0.008126463890075684, 0.008141823768615723, 0.008165375709533691, 0.00819200038909912, 0.008229887962341309, 0.008130559921264649, 0.008218624114990235, 0.008133631706237793, 0.008155136108398438, 0.00820531177520752, 0.00811520004272461, 
0.008147968292236327, 0.008153087615966797, 0.008163328170776368, 0.008157247543334962, 0.01752979278564453, 0.008169471740722656, 0.008134655952453614, 0.008151040077209473, 0.008130559921264649, 0.008232959747314453, 0.00812339210510254, 0.008141823768615723, 0.008133631706237793, 0.008141823768615723, 0.008154175758361816, 0.00810591983795166, 0.008169471740722656, 0.008153087615966797, 0.008141823768615723, 0.008200223922729493, 0.008241120338439942, 0.008136704444885253, 0.008132608413696289, 0.008174592018127442, 0.008144895553588867, 0.008146944046020508, 0.008145919799804687, 0.008146976470947266, 0.008175583839416503, 0.007980031967163086, 0.007983104228973388, 0.00800051212310791, 0.007997439861297608, 0.00799948787689209, 0.008101887702941894, 0.00801689624786377, 0.008174592018127442, 0.0081397762298584, 0.008117247581481933, 0.008122367858886719, 0.008126463890075684, 0.008122367858886719, 0.008138751983642578, 0.008155136108398438, 0.008119296073913575, 0.008136704444885253, 0.00807423973083496, 0.007927807807922363, 0.007958528041839599, 0.007939072132110595, 0.008014847755432129, 0.007986176013946533, 0.007945216178894043, 0.007932928085327149, 0.007987199783325195, 0.008179712295532226, 0.008140800476074218, 0.008185855865478516, 0.008138751983642578, 0.008184831619262695, 0.008138751983642578, 0.008258560180664062, 0.008273920059204102, 0.008167424201965333, 0.008194047927856446, 0.008145919799804687, 0.008177663803100586, 0.01745510482788086, 0.008202239990234375, 0.008157183647155761, 0.00818380832672119, 0.00870195198059082, 0.008116224288940429, 0.00800153636932373, 0.008036352157592774, 0.007977983951568603, 0.00793497610092163, 0.007962624073028564, 0.007925759792327881, 0.00794316816329956, 0.007910399913787843, 0.007746560096740723, 0.007692287921905518, 0.007689216136932373, 0.0076871681213378906, 0.007707647800445557, 0.007682047843933106, 0.0076912641525268555, 0.007675903797149658, 0.0076984319686889645, 0.007768064022064209, 0.007718912124633789, 0.007701504230499268, 0.007781375885009765, 0.007845888137817383, 0.007806975841522217, 0.007817215919494629, 0.007786496162414551, 0.00787660789489746, 0.007799808025360107, 0.0077814397811889644, 0.007800767898559571, 0.007912447929382324, 0.007824384212493896, 0.007813119888305664, 0.007792640209197998, 0.007946239948272706, 0.007815167903900147, 0.007805952072143555, 0.00778547191619873, 0.007804927825927735, 0.007789567947387695, 0.007865344047546387, 0.007772160053253174, 0.007777279853820801, 0.007775231838226319, 0.007806975841522217, 0.007789567947387695, 0.007932928085327149, 0.007971839904785156, 0.009268223762512207, 0.00830463981628418, 0.008198207855224609, 0.008211392402648926, 0.008195072174072266, 0.008028160095214844, 0.008003583908081055, 0.008034303665161132, 0.008128512382507324, 0.008179712295532226, 0.016935935974121095, 0.00780185604095459, 0.007796735763549805, 0.007805952072143555, 0.007813119888305664, 0.007988224029541016, 0.008045568466186523, 0.007916543960571289, 0.007796735763549805, 0.007798783779144287, 0.007809023857116699, 0.007804927825927735, 0.007792640209197998, 0.007812096118927002, 0.00781824016571045, 0.00780083179473877, 0.007870463848114014, 0.007792640209197998, 0.007804927825927735, 0.007790656089782715, 0.007819200038909912, 0.007790592193603516, 0.007794688224792481, 0.007809023857116699, 0.007795711994171142, 0.007768064022064209, 0.007797760009765625, 0.007795711994171142, 0.007795711994171142, 0.007813119888305664, 0.007798783779144287, 0.007802879810333252, 
0.007819263935089112, 0.007811071872711181, 0.007840767860412597, 0.007812096118927002, 0.007788544178009033, 0.007775231838226319, 0.0077916159629821775, 0.007782400131225586, 0.00781004810333252, 0.00780185604095459, 0.007778304100036621, 0.007794688224792481, 0.007767039775848389, 0.007796735763549805, 0.0077916159629821775, 0.007797760009765625, 0.007821311950683594, 0.0077844481468200685, 0.007780352115631104, 0.007840767860412597, 0.0077844481468200685, 0.007767136096954346, 0.0077935681343078616, 0.007836671829223632, 0.007762944221496582, 0.007809023857116699, 0.007853055953979492, 0.007787519931793213, 0.007774208068847656, 0.007829504013061523, 0.00781824016571045, 0.01681612777709961, 0.007815199851989746, 0.007830495834350587, 0.007776256084442139, 0.007788544178009033, 0.007783423900604248, 0.007771135807037354, 0.007827455997467042, 0.007795711994171142, 0.007773183822631836, 0.007782400131225586, 0.007808000087738037, 0.00780083179473877, 0.007798783779144287, 0.007873536109924317, 0.007805952072143555, 0.00780083179473877, 0.007847936153411865, 0.0077844481468200685, 0.007780352115631104, 0.0077916159629821775, 0.007790592193603516, 0.007822336196899414, 0.007794688224792481, 0.007888895988464355, 0.007833600044250488, 0.007797760009765625, 0.007749631881713868, 0.007869440078735352, 0.007769087791442871, 0.007812096118927002, 0.00780083179473877, 0.007803904056549072, 0.007812096118927002, 0.007794688224792481, 0.007811071872711181, 0.007814144134521485, 0.007817215919494629, 0.007795711994171142, 0.007819263935089112, 0.007755775928497314, 0.007792640209197998, 0.007717887878417969, 0.007688223838806152, 0.007795680046081543, 0.007675903797149658, 0.007663616180419922, 0.007669760227203369, 0.007685120105743408, 0.007670783996582031, 0.007699456214904785, 0.007779327869415284, 0.007753791809082031, 0.00776800012588501, 0.007794688224792481, 0.007774208068847656, 0.0077916479110717776, 0.0077833919525146485, 0.007789567947387695, 0.0077506561279296875, 0.007809023857116699, 0.007776256084442139, 0.007787519931793213, 0.016747520446777343, 0.007830527782440186, 0.00781004810333252, 0.007762944221496582, 0.007786496162414551, 0.00778547191619873, 0.007792640209197998, 0.007819263935089112, 0.007789567947387695, 0.007815167903900147, 0.0077844481468200685, 0.007814144134521485, 0.007829504013061523, 0.007815167903900147, 0.008204287528991699, 0.008175616264343261, 0.008138751983642578, 0.008138751983642578, 0.008143872261047362, 0.008132608413696289, 0.008127488136291505, 0.008151040077209473, 0.008156160354614257, 0.008147968292236327, 0.00820736026763916, 0.008129535675048828, 0.008166399955749512, 0.008173600196838379, 0.00812335968017578, 0.008141823768615723, 0.008167424201965333, 0.008120320320129394, 0.008144895553588867, 0.008153087615966797, 0.008212479591369629, 0.008155136108398438, 0.008140800476074218, 0.008129535675048828, 0.008145919799804687, 0.00811520004272461, 0.008171520233154296, 0.008128512382507324, 0.008159232139587403, 0.008104960441589355, 0.008134655952453614, 0.008156160354614257, 0.008217599868774414, 0.008228863716125488, 0.008134655952453614, 0.008118271827697754, 0.008137727737426758, 0.008124447822570801, 0.008145888328552247, 0.00819711971282959, 0.008179743766784668, 0.008160223960876465, 0.008161279678344726, 0.008132608413696289, 0.00820531177520752, 0.008125439643859863, 0.008132608413696289, 0.008161312103271484, 0.008129504203796387, 0.017510400772094727, 0.00813161563873291, 0.008154080390930176, 0.008121343612670898, 
0.008201215744018555, 0.008152095794677735, 0.008171487808227539, 0.008169471740722656, 0.008168448448181152, 0.008137727737426758, 0.008147968292236327, 0.008148991584777832, 0.008154111862182617, 0.008139840126037598, 0.008128447532653809, 0.00813158416748047, 0.008137727737426758, 0.008121343612670898, 0.008233983993530274, 0.008128512382507324, 0.008145919799804687, 0.008157183647155761, 0.00813158416748047, 0.008204287528991699, 0.008161279678344726, 0.007993343830108643, 0.00791756820678711, 0.007980031967163086, 0.007956480026245117, 0.007964672088623047, 0.007927807807922363, 0.008011775970458984, 0.008147968292236327, 0.008133631706237793, 0.00818380832672119, 0.008170495986938477, 0.008175616264343261, 0.008127488136291505, 0.008137727737426758, 0.008146944046020508, 0.008140800476074218, 0.008151040077209473, 0.008162303924560547, 0.008179712295532226, 0.008148991584777832, 0.008124416351318359, 0.008137727737426758, 0.008125439643859863, 0.008153087615966797, 0.008114175796508789, 0.008143872261047362, 0.008122367858886719, 0.008156160354614257, 0.008160256385803222, 0.008135680198669434, 0.008086527824401855, 0.008153087615966797, 0.008133631706237793, 0.0081725435256958, 0.0081725435256958, 0.00819200038909912, 0.008133631706237793, 0.008163328170776368, 0.017588224411010742, 0.008179712295532226, 0.008193023681640625, 0.00818380832672119, 0.008154111862182617, 0.008151040077209473, 0.008155136108398438, 0.008173567771911621, 0.008159232139587403, 0.008130559921264649, 0.008146944046020508, 0.008140800476074218, 0.008162336349487305, 0.008166367530822755, 0.008170495986938477, 0.008184831619262695, 0.008157183647155761, 0.008153087615966797, 0.00813158416748047, 0.008167424201965333, 0.008148991584777832, 0.008136704444885253, 0.008152064323425292, 0.008177663803100586, 0.008147968292236327, 0.008144895553588867, 0.008175616264343261, 0.008174592018127442, 0.008225791931152344, 0.008175616264343261, 0.008150015830993652, 0.008158207893371582, 0.008162303924560547, 0.008164352416992187, 0.008138751983642578, 0.008152064323425292, 0.008166399955749512, 0.008134655952453614, 0.008158207893371582, 0.008156160354614257, 0.008137727737426758, 0.00818073558807373, 0.008144895553588867, 0.008691712379455567, 0.008336383819580078, 0.008157183647155761, 0.008369152069091796, 0.00819814395904541, 0.008178688049316407, 0.008175616264343261, 0.008190976142883302, 0.008163328170776368, 0.008146944046020508, 0.00818182373046875, 0.008277952194213867, 0.008161279678344726, 0.00819814395904541, 0.008154111862182617, 0.008211456298828124, 0.008153087615966797, 0.008143872261047362, 0.00818380832672119, 0.008178688049316407, 0.017306623458862306, 0.007987199783325195, 0.00799948787689209, 0.007954432010650634, 0.007939072132110595, 0.008583168029785156, 0.008274944305419921, 0.008042495727539062, 0.008291328430175781, 0.008148991584777832, 0.007989247798919678, 0.007942143917083741, 0.007837696075439453, 0.007702527999877929, 0.007700479984283447, 0.007751679897308349, 0.007745535850524903, 0.007712768077850342, 0.007682047843933106, 0.007650303840637207, 0.007663616180419922, 0.007663616180419922, 0.007679999828338623, 0.00780083179473877, 0.007711743831634522, 0.007683072090148926, 0.007688191890716553, 0.007677951812744141, 0.007696415901184082, 0.007775199890136719, 0.007783423900604248, 0.007798783779144287, 0.007811071872711181, 0.007773183822631836, 0.007781375885009765, 0.0077844481468200685, 0.007805952072143555, 0.007782400131225586, 0.007778304100036621, 0.007778304100036621, 
0.0078438401222229, 0.007774208068847656, 0.007861248016357422, 0.007822336196899414, 0.00785203218460083, 0.007787519931793213, 0.00779366397857666, 0.007774208068847656, 0.007798783779144287, 0.0077608962059021, 0.00780185604095459, 0.0077916159629821775, 0.00780083179473877, 0.007935999870300293, 0.007792640209197998, 0.007762944221496582, 0.007783423900604248, 0.007781375885009765, 0.007789567947387695, 0.007780352115631104, 0.00781004810333252, 0.0077844481468200685, 0.007790592193603516, 0.01681510353088379, 0.00780185604095459, 0.0077916159629821775, 0.007786496162414551, 0.00780185604095459, 0.007774208068847656, 0.007781375885009765, 0.007767039775848389, 0.007797760009765625, 0.008054783821105957, 0.007769120216369629, 0.007801824092864991, 0.007887872219085693, 0.007806975841522217, 0.00780185604095459, 0.007796735763549805, 0.007758848190307617, 0.007686143875122071, 0.007758848190307617, 0.007764992237091065, 0.007665664196014404, 0.007762944221496582, 0.007830527782440186, 0.0076984319686889645, 0.007705599784851074, 0.007775231838226319, 0.007783423900604248, 0.0077506561279296875, 0.007763967990875244, 0.007799808025360107, 0.007774208068847656, 0.0077844481468200685, 0.007889920234680176, 0.007813119888305664, 0.00785203218460083, 0.007798816204071045, 0.007802847862243652, 0.0077619199752807615, 0.0077608962059021, 0.007812096118927002, 0.007770112037658691, 0.007820288181304931, 0.007809023857116699, 0.007751679897308349, 0.007801919937133789, 0.007790527820587158, 0.00778547191619873, 0.007769087791442871, 0.007772160053253174, 0.007780352115631104, 0.007766016006469726, 0.007772160053253174, 0.007797760009765625, 0.007746560096740723, 0.007767039775848389, 0.007792640209197998, 0.007804927825927735, 0.00780188798904419, 0.007841760158538819, 0.007790592193603516, 0.007799808025360107, 0.00780185604095459, 0.007794688224792481, 0.016740352630615234, 0.007783423900604248, 0.00780185604095459, 0.007804927825927735, 0.007759871959686279, 0.007790592193603516, 0.007788544178009033, 0.007780352115631104, 0.007831615924835205, 0.007797696113586426, 0.007781407833099365, 0.007782368183135986, 0.007770112037658691, 0.007808000087738037, 0.007779327869415284, 0.007780352115631104, 0.007817279815673829, 0.007812032222747803, 0.008190976142883302, 0.008184831619262695, 0.008161279678344726, 0.008177727699279784, 0.008145855903625489, 0.0081397762298584, 0.008135680198669434, 0.008153087615966797, 0.008148991584777832, 0.008147968292236327, 0.0081397762298584, 0.008167424201965333, 0.008143903732299805, 0.008128479957580566, 0.008156160354614257, 0.008138751983642578, 0.008133631706237793, 0.008155136108398438, 0.008153087615966797, 0.008138751983642578, 0.008173567771911621, 0.008208383560180664, 0.008002559661865234, 0.00800972843170166, 0.007980031967163086, 0.008076288223266602, 0.00808448028564453, 0.007964672088623047, 0.008034303665161132, 0.007988224029541016, 0.007964672088623047, 0.007987199783325195, 0.00799948787689209, 0.007969791889190675, 0.007956480026245117, 0.007990272045135497, 0.00797388792037964, 0.00794316816329956, 0.008023039817810058, 0.008061951637268066, 0.008179743766784668, 0.008191967964172363, 0.0081725435256958, 0.008150015830993652, 0.008185855865478516, 0.017484800338745117, 0.008162303924560547, 0.008151040077209473, 0.008169471740722656, 0.008161279678344726, 0.008167424201965333, 0.008156160354614257, 0.008194047927856446, 0.008224767684936523, 0.008291328430175781, 0.008148991584777832, 0.008154111862182617, 0.008160256385803222, 
0.00816438388824463, 0.008163295745849609, 0.008171520233154296, 0.008177663803100586, 0.0081725435256958, 0.008231936454772949, 0.008153087615966797, 0.008132608413696289, 0.008309760093688966, 0.008117247581481933, 0.007990272045135497, 0.007979008197784423, 0.007970816135406494, 0.007997439861297608, 0.00799232006072998, 0.00801587200164795, 0.007982079982757568, 0.008145983695983888, 0.008135616302490234, 0.008164352416992187, 0.008163328170776368, 0.008165375709533691, 0.008184831619262695, 0.008158207893371582, 0.008155136108398438, 0.008176639556884765, 0.008182784080505372, 0.008145919799804687, 0.008140800476074218, 0.008156160354614257, 0.008150015830993652, 0.008135680198669434, 0.0081397762298584, 0.008142848014831543, 0.008136704444885253, 0.0081080322265625, 0.008219648361206054, 0.008170495986938477, 0.008157183647155761, 0.008315903663635254, 0.00819711971282959, 0.008167424201965333, 0.008058879852294922, 0.007974912166595459, 0.008057855606079101, 0.00821350383758545, 0.008169471740722656, 0.008144895553588867, 0.008160256385803222, 0.008122367858886719, 0.01683046340942383, 0.00781004810333252, 0.007808000087738037, 0.00778547191619873, 0.007855103969573975, 0.007806975841522217, 0.007795711994171142, 0.007780352115631104, 0.007798783779144287, 0.007795711994171142, 0.0077916159629821775, 0.007814144134521485, 0.007811071872711181, 0.007796735763549805, 0.007827455997467042, 0.007817215919494629, 0.007875584125518798, 0.007839744091033935, 0.007795711994171142, 0.007805952072143555, 0.007778304100036621, 0.0077916159629821775, 0.007786496162414551, 0.007789567947387695, 0.007786496162414551, 0.0077916159629821775, 0.007804927825927735, 0.007798783779144287, 0.007790592193603516, 0.007811071872711181, 0.007797760009765625, 0.007786496162414551, 0.007812096118927002, 0.007651328086853027, 0.007665664196014404, 0.007666687965393066, 0.007665664196014404, 0.007659520149230957, 0.007679999828338623, 0.007675903797149658, 0.007781375885009765, 0.007795711994171142, 0.007799808025360107, 0.007756800174713135, 0.007774208068847656, 0.0077578239440917966, 0.007971839904785156, 0.007782400131225586, 0.007790592193603516, 0.007787519931793213, 0.007797760009765625, 0.007782400131225586, 0.00779366397857666, 0.007764992237091065, 0.007797760009765625, 0.007774208068847656, 0.007792640209197998, 0.007769087791442871, 0.007784512042999268, 0.007765952110290527, 0.007788544178009033, 0.007747583866119385, 0.007840767860412597, 0.016747520446777343, 0.007811071872711181, 0.007816192150115966, 0.007752768039703369, 0.007791552066802978, 0.007802879810333252, 0.007780352115631104, 0.0077916159629821775, 0.007798783779144287, 0.00780185604095459, 0.007831552028656007, 0.007783423900604248, 0.007788544178009033, 0.007796735763549805, 0.007817215919494629, 0.007794688224792481, 0.007841792106628418, 0.007812096118927002, 0.007792640209197998, 0.007789567947387695, 0.007776256084442139, 0.00780185604095459, 0.007771135807037354, 0.007783423900604248, 0.007767039775848389, 0.007773183822631836, 0.007803904056549072, 0.007847936153411865, 0.007794688224792481, 0.007759903907775879, 0.007892960071563721, 0.007931903839111328, 0.007790592193603516, 0.00786636781692505, 0.007941120147705078, 0.008167424201965333, 0.007970816135406494, 0.007970816135406494, 0.007945216178894043, 0.007967743873596191, 0.007945216178894043, 0.007947264194488525, 0.007963647842407226, 0.007942143917083741, 0.008156160354614257, 0.008130559921264649, 0.008107071876525878, 0.00812947177886963, 0.008143872261047362, 
0.008111104011535645, 0.008150015830993652, 0.008143872261047362, 0.008136704444885253, 0.008132608413696289, 0.008125439643859863, 0.008134655952453614, 0.008170495986938477, 0.00810905647277832, 0.008399871826171875, 0.008129535675048828, 0.008162303924560547, 0.008132608413696289, 0.008134655952453614, 0.01683251190185547, 0.007788544178009033, 0.007817215919494629, 0.007799808025360107, 0.007805952072143555, 0.007803904056549072, 0.007779327869415284, 0.0077916159629821775, 0.007804927825927735, 0.007817215919494629, 0.007822336196899414, 0.007809023857116699, 0.007855103969573975, 0.007806975841522217, 0.007815167903900147, 0.00784281587600708, 0.007825407981872558, 0.007775231838226319, 0.007829504013061523, 0.007797760009765625, 0.007817215919494629, 0.0077916159629821775, 0.00780185604095459, 0.007789567947387695, 0.0077844481468200685, 0.007764992237091065, 0.007797760009765625, 0.007780352115631104, 0.007777279853820801, 0.007769087791442871, 0.007795711994171142, 0.007806975841522217, 0.007773183822631836, 0.007780416011810303, 0.007806911945343018, 0.007809023857116699, 0.007812096118927002, 0.007888895988464355, 0.007832575798034667, 0.007770112037658691, 0.00778547191619873, 0.007788544178009033, 0.007790592193603516, 0.007799808025360107, 0.007798783779144287, 0.007837696075439453, 0.007821311950683594, 0.00779366397857666, 0.00778547191619873, 0.0077844481468200685, 0.007777279853820801, 0.007804992198944091, 0.007788479804992676, 0.007777279853820801, 0.00781004810333252, 0.007784480094909668, 0.007799776077270508, 0.007783423900604248, 0.007840767860412597, 0.007831552028656007, 0.007805952072143555, 0.007767039775848389, 0.007812096118927002, 0.016696319580078126, 0.007795711994171142, 0.007797760009765625, 0.007806975841522217, 0.007748640060424804, 0.0077833919525146485, 0.007792640209197998, 0.007804927825927735, 0.007803904056549072, 0.007797760009765625, 0.0077742719650268554, 0.007868351936340333, 0.007813119888305664, 0.007794688224792481, 0.007806975841522217, 0.00780083179473877, 0.007832575798034667, 0.007783423900604248, 0.007816192150115966, 0.007777279853820801, 0.007782400131225586, 0.007778304100036621, 0.007790592193603516, 0.007867392063140868, 0.0077916159629821775, 0.007799808025360107, 0.007783423900604248, 0.007789567947387695, 0.007783423900604248, 0.007789567947387695, 0.00778547191619873, 0.007780352115631104, 0.007875584125518798, 0.007797760009765625, 0.00778547191619873, 0.007756800174713135, 0.007773183822631836, 0.007787519931793213, 0.007767039775848389, 0.007811071872711181, 0.00779366397857666, 0.007770112037658691, 0.007783423900604248, 0.00781004810333252, 0.007797760009765625, 0.007773183822631836, 0.007773183822631836, 0.007782400131225586, 0.007844863891601562, 0.007785535812377929, 0.007792575836181641, 0.007806975841522217, 0.00781824016571045, 0.007796735763549805, 0.00778547191619873, 0.007778304100036621, 0.007828479766845703, 0.007777279853820801, 0.007858176231384278, 0.007815167903900147, 0.0077608962059021, 0.00781004810333252, 0.007812096118927002]",tokens/s,123.10826245041925,,,,,,,, 
-4bit-awq-gemm-eager,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,Qwen/Qwen1.5-4B,Qwen/Qwen1.5-4B,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.40.2,,0.30.1,,,,1.19.2,,,,0.11.1,,,,,,,,,,,,,,,,,,,,,,,,,,,MB,3203.223552,5128.060928,0.0,4481.613824,4276.256768,s,10,3.1983958740234373,0.31983958740234375,0.001538131138588488,0.31976911926269536,0.32137343139648433,0.3217927795410156,0.3221282580566406,"[0.31961990356445313, 0.3212218933105469, 0.3187176513671875, 0.3199183349609375, 0.3183829040527344, 0.3169226989746094, 0.31905938720703125, 0.32106072998046875, 0.32128024291992185, 0.32221212768554686]",tokens/s,800.4012326277909,kWh,3.750147018581629e-06,2.054515901244258e-06,1.6957018773937002e-05,2.276168169376289e-05,tokens/kWh,11246972.14574214,MB,3203.223552,5128.060928,0.0,4481.613824,4465.662976,s,10,186.97800781249998,18.69780078125,0.017296888320766722,18.693263671875002,18.720369140625,18.729755859375,18.737265234375002,"[18.718283203125, 18.689103515625, 18.695333984375, 18.678984375, 18.739142578125, 18.694673828125, 18.691853515625, 18.6855234375, 18.701947265625, 18.683162109375]",tokens/s,3.369380214125283,kWh,0.00022053018352016812,0.00012086672241809653,0.0009756264419588673,0.001317023347897132,tokens/kWh,47835.14286257035,,s,629,189.55983416748046,0.3013669859578386,0.038019298519587436,0.29649102783203124,0.2979925964355469,0.29949728393554687,0.6154752392578126,"[0.29722113037109377, 0.2962104187011719, 0.2994401550292969, 0.3005357666015625, 0.30041702270507814, 0.2999255065917969, 0.30030029296875, 0.3005870056152344, 0.30005453491210937, 0.3003166809082031, 0.2982318115234375, 0.29764404296875, 0.29678591918945313, 0.2977320861816406, 0.2972508239746094, 0.2988697509765625, 0.29594931030273436, 0.29598825073242185, 0.29595953369140626, 0.2961408081054687, 0.29663333129882813, 0.2959800415039063, 0.2984028015136719, 0.29736856079101565, 0.29788876342773435, 0.2962769775390625, 0.2963660888671875, 0.2973767700195312, 0.2967070617675781, 0.2959728698730469, 0.2965258178710938, 0.29611212158203126, 0.2976409606933594, 0.2969599914550781, 0.2967398376464844, 0.2962391052246094, 0.29641522216796873, 0.2959902648925781, 0.29629336547851565, 0.29671832275390625, 0.29604147338867187, 0.2959872131347656, 0.2960855102539062, 0.2966855773925781, 0.29605990600585935, 0.29604147338867187, 0.29675726318359374, 0.29607012939453126, 0.2964500427246094, 0.29650942993164064, 0.2962022399902344, 0.2960516967773438, 0.29594830322265625, 0.296783935546875, 0.296870849609375, 0.29710232543945314, 0.29665484619140625, 0.29788363647460936, 0.29600152587890627, 0.29640191650390624, 0.29616229248046877, 0.2960711669921875, 0.6185267333984374, 0.29601278686523436, 0.29589199829101565, 0.29706646728515623, 0.29594418334960937, 0.29600460815429686, 0.29592166137695314, 0.2975078430175781, 0.2961551208496094, 0.29602200317382815, 0.2963240966796875, 0.2960445556640625, 0.2958970947265625, 0.2962135009765625, 0.29608447265625, 0.29607730102539065, 0.29639883422851565, 0.29659750366210935, 0.29723443603515626, 0.29676031494140626, 
0.2970972290039062, 0.296911865234375, 0.29686373901367186, 0.29711358642578123, 0.29724774169921875, 0.29758157348632813, 0.29762457275390625, 0.2963056640625, 0.29703271484375, 0.29604864501953126, 0.2962616271972656, 0.296153076171875, 0.2962769775390625, 0.2961919860839844, 0.2968739929199219, 0.29633843994140624, 0.2965504150390625, 0.29617355346679686, 0.29583154296875, 0.2962042846679688, 0.2970060729980469, 0.2965278625488281, 0.29644491577148435, 0.29606298828125, 0.29650225830078125, 0.2965186462402344, 0.3018721313476562, 0.29692620849609375, 0.2963681640625, 0.29674798583984374, 0.296806396484375, 0.29622067260742185, 0.2960076904296875, 0.2968924255371094, 0.2960343017578125, 0.2965831604003906, 0.2970480651855469, 0.29702047729492187, 0.296217529296875, 0.29626266479492186, 0.29842022705078125, 0.29761843872070315, 0.29686373901367186, 0.6160445556640625, 0.2960199890136719, 0.2965903015136719, 0.2964684753417969, 0.29644390869140624, 0.2965688171386719, 0.2976983032226562, 0.29753759765625, 0.29599737548828126, 0.29653094482421877, 0.2970838928222656, 0.2961479797363281, 0.29596878051757813, 0.29587353515625, 0.2960445556640625, 0.2966005859375, 0.2975467529296875, 0.2962995300292969, 0.2958847961425781, 0.2957864990234375, 0.2960732116699219, 0.295973876953125, 0.2962944030761719, 0.29851339721679687, 0.295984130859375, 0.29607730102539065, 0.2961203308105469, 0.2960425109863281, 0.29593295288085936, 0.29733270263671874, 0.29703067016601564, 0.29734808349609376, 0.29631283569335937, 0.296816650390625, 0.29681048583984376, 0.296489990234375, 0.29625653076171876, 0.29661590576171876, 0.29632000732421876, 0.2966937561035156, 0.2978652038574219, 0.2965299072265625, 0.2963240966796875, 0.29751806640625, 0.29830859375, 0.297481201171875, 0.29719039916992185, 0.29953536987304685, 0.29737368774414064, 0.29692108154296876, 0.296585205078125, 0.296052734375, 0.29637939453125, 0.29625753784179687, 0.29655654907226564, 0.29799423217773435, 0.2968268737792969, 0.29685043334960937, 0.29692108154296876, 0.29714944458007814, 0.29722930908203127, 0.2967623596191406, 0.297270263671875, 0.6168934326171875, 0.29688626098632814, 0.296279052734375, 0.29670297241210936, 0.2962923583984375, 0.29618795776367185, 0.2966793518066406, 0.2960650329589844, 0.29615924072265626, 0.29594418334960937, 0.2973972473144531, 0.29612338256835935, 0.2971064453125, 0.2982297668457031, 0.29685043334960937, 0.2960855102539062, 0.296089599609375, 0.29661489868164065, 0.2965350341796875, 0.29597695922851563, 0.2961705017089844, 0.29597695922851563, 0.29592276000976564, 0.29643975830078123, 0.29586328125, 0.29596466064453125, 0.2959923095703125, 0.2958981018066406, 0.29643777465820315, 0.2959923095703125, 0.29621759033203127, 0.2963650512695313, 0.29609677124023437, 0.29788980102539064, 0.2977914733886719, 0.29713613891601565, 0.2964951171875, 0.29727642822265626, 0.2966794128417969, 0.2964142150878906, 0.2965821533203125, 0.2960855102539062, 0.2957701416015625, 0.2966097717285156, 0.29938388061523435, 0.29615609741210935, 0.29603640747070314, 0.2962728271484375, 0.29596978759765624, 0.2960455627441406, 0.296300537109375, 0.29595040893554686, 0.29649197387695314, 0.29635379028320313, 0.296342529296875, 0.29667330932617186, 0.2967008972167969, 0.29659442138671877, 0.29601690673828124, 0.29651455688476563, 0.29645925903320314, 0.2973665466308594, 0.29616226196289064, 0.615267333984375, 0.29617767333984374, 0.29771673583984376, 0.29707980346679685, 0.298039306640625, 0.297238525390625, 0.2971115417480469, 
0.29701837158203126, 0.29682891845703124, 0.2972641296386719, 0.2971668395996094, 0.29752627563476564, 0.2972252197265625, 0.29701632690429686, 0.29692620849609375, 0.29619302368164063, 0.2964234619140625, 0.2987468566894531, 0.296131591796875, 0.2958981018066406, 0.29623501586914064, 0.29586944580078123, 0.295846923828125, 0.29605377197265625, 0.29625753784179687, 0.29595443725585935, 0.29606195068359376, 0.29590631103515624, 0.29597491455078123, 0.29599948120117187, 0.296310791015625, 0.2964664306640625, 0.29622885131835935, 0.2963630065917969, 0.29594317626953126, 0.2961397705078125, 0.29812017822265624, 0.3001466979980469, 0.3003924560546875, 0.3000289306640625, 0.29983538818359373, 0.29985791015625, 0.29986712646484376, 0.29981594848632814, 0.30056344604492186, 0.29986407470703125, 0.2999111633300781, 0.29782833862304686, 0.2960773315429687, 0.2960137939453125, 0.29763687133789063, 0.2979921875, 0.2981877746582031, 0.2967510986328125, 0.29627288818359376, 0.29777716064453125, 0.2970634155273438, 0.30057470703125, 0.2982738037109375, 0.2991800231933594, 0.30042315673828124, 0.2963568725585938, 0.2961418151855469, 0.6139617309570312, 0.29619406127929687, 0.29618585205078124, 0.29586431884765624, 0.296079345703125, 0.29643060302734375, 0.2977740783691406, 0.2977832946777344, 0.2964633483886719, 0.29604147338867187, 0.2960609130859375, 0.29627084350585936, 0.29602822875976564, 0.2961315307617188, 0.29630160522460935, 0.29599432373046874, 0.2961469421386719, 0.296531982421875, 0.2960977783203125, 0.2960373840332031, 0.2965381164550781, 0.2960506896972656, 0.29609063720703127, 0.2962063293457031, 0.2960169677734375, 0.29643975830078123, 0.2966312866210937, 0.2963077087402344, 0.2960865173339844, 0.2961162109375, 0.2960281677246094, 0.2961766357421875, 0.296352783203125, 0.2976133117675781, 0.29756414794921876, 0.29727435302734373, 0.29644595336914065, 0.2974146423339844, 0.30141543579101565, 0.2976788330078125, 0.2980526123046875, 0.2980894775390625, 0.29788058471679685, 0.2963497009277344, 0.29696920776367186, 0.29795635986328123, 0.29711566162109376, 0.29751296997070314, 0.29756314086914065, 0.2974320678710938, 0.29744332885742186, 0.29654324340820315, 0.29747711181640624, 0.2984744873046875, 0.29649203491210935, 0.2962442321777344, 0.2966384582519531, 0.2963189697265625, 0.29589913940429685, 0.2962176208496094, 0.2958837585449219, 0.29590936279296876, 0.29634048461914064, 0.6155560913085938, 0.29629541015625, 0.2960916442871094, 0.2964561767578125, 0.29645208740234374, 0.2960373840332031, 0.2963875732421875, 0.2963620300292969, 0.2962861633300781, 0.29651251220703123, 0.29747915649414064, 0.2981519470214844, 0.29662823486328127, 0.2976133117675781, 0.29637530517578126, 0.29608038330078124, 0.29605682373046877, 0.2960496520996094, 0.29625140380859377, 0.29587966918945313, 0.2968821716308594, 0.29635174560546873, 0.29616537475585936, 0.3003904113769531, 0.2970214538574219, 0.2966312866210937, 0.29685556030273436, 0.29636813354492186, 0.29714739990234373, 0.29626266479492186, 0.29595135498046876, 0.2962196350097656, 0.29621759033203127, 0.29663333129882813, 0.2961408081054687, 0.2965729370117188, 0.2961131591796875, 0.2963865661621094, 0.296479736328125, 0.296900634765625, 0.2964776611328125, 0.296848388671875, 0.29701937866210937, 0.29931622314453127, 0.29786627197265625, 0.2962636413574219, 0.29623806762695315, 0.2966210632324219, 0.2961336364746094, 0.2963711853027344, 0.29625140380859377, 0.2967357482910156, 0.29616845703125, 0.2963599853515625, 0.29687493896484374, 0.296300537109375, 
0.2967142333984375, 0.29815499877929685, 0.29702349853515625, 0.29747610473632813, 0.296627197265625, 0.29662515258789063, 0.2977822570800781, 0.6183147583007812, 0.29672344970703124, 0.296764404296875, 0.29664154052734376, 0.2967080993652344, 0.29716583251953127, 0.29694155883789064, 0.2967244873046875, 0.2972221374511719, 0.2974392395019531, 0.2972866516113281, 0.2969518127441406, 0.2982625427246094, 0.296279052734375, 0.29735015869140624, 0.2961131591796875, 0.2980966491699219, 0.29623806762695315, 0.29628826904296873, 0.2956605529785156, 0.29607012939453126, 0.2962001953125, 0.29612442016601564, 0.29620736694335936, 0.29724774169921875, 0.2960086975097656, 0.297548828125, 0.2967602844238281, 0.29649102783203124, 0.2963619384765625, 0.2962708740234375, 0.29639984130859376, 0.2968350830078125, 0.2963292236328125, 0.296099853515625, 0.29622784423828125, 0.29627093505859375, 0.296197021484375, 0.2974996337890625, 0.2962104187011719, 0.2960639953613281, 0.29599948120117187, 0.2959902648925781, 0.2965739440917969, 0.2961817626953125, 0.2968320007324219, 0.29708084106445315, 0.2968320007324219, 0.29710540771484373, 0.2960558166503906, 0.2963138427734375, 0.29637326049804685, 0.29592166137695314, 0.2962083740234375, 0.29609573364257813, 0.2962114562988281, 0.2961203308105469, 0.29699481201171873, 0.29659442138671877, 0.29610906982421875, 0.29717401123046877, 0.29671218872070315, 0.2965350341796875, 0.61785498046875, 0.29675213623046875, 0.29679409790039063, 0.29726925659179687, 0.29639578247070314, 0.29744537353515627, 0.2977556457519531, 0.29687704467773435, 0.2974320678710938, 0.297416748046875, 0.29766244506835937, 0.29807000732421873, 0.2972119140625, 0.2973388671875, 0.2967439270019531, 0.2972999572753906, 0.2976030578613281, 0.2986280822753906, 0.29623602294921875, 0.29600152587890627, 0.2964695739746094, 0.2963936767578125, 0.2961868896484375, 0.29702349853515625, 0.29616024780273437, 0.29621148681640624, 0.2964879150390625, 0.29709210205078124, 0.2963056640625, 0.2973767700195312, 0.2969722900390625, 0.29783346557617185, 0.2965718994140625, 0.2977576904296875, 0.2961418151855469, 0.2967091064453125, 0.2973644714355469, 0.29671218872070315, 0.29666201782226564, 0.296838134765625, 0.2965801086425781, 0.2962391052246094, 0.296089599609375, 0.2963804016113281, 0.29701119995117187, 0.2961469421386719, 0.29617767333984374, 0.296384521484375, 0.296369140625, 0.29728460693359376, 0.2963015747070312, 0.2967838745117187, 0.29621554565429686, 0.2963097534179687, 0.29686373901367186, 0.2963947448730469, 0.2962995300292969, 0.29706649780273436, 0.2967142333984375, 0.29672857666015623, 0.2966722412109375, 0.2982359008789062, 0.2979543151855469, 0.6189783325195313, 0.2962237548828125, 0.2969938049316406, 0.29632614135742186, 0.2961541748046875, 0.2965380554199219, 0.2963343505859375, 0.29643777465820315, 0.29634866333007814, 0.2962872314453125, 0.2963630065917969, 0.29629644775390623, 0.29665484619140625, 0.296310791015625, 0.29620120239257813, 0.29598513793945314, 0.29617767333984374, 0.2959667053222656, 0.2962749328613281, 0.2967930908203125, 0.29673779296875, 0.29692825317382815, 0.29633331298828125, 0.29688934326171873, 0.2962821044921875, 0.29612954711914063, 0.2987386779785156, 0.2962083740234375, 0.296374267578125, 0.2962135009765625, 0.29653402709960935, 0.2968299560546875, 0.2963097534179687, 0.2981970520019531, 0.2961192321777344, 0.2962135009765625, 0.2961069946289063, 0.295920654296875, 0.2961459655761719, 0.29606500244140627, 0.29621453857421876, 0.29600564575195315, 0.29583566284179685, 
0.2993377380371094, 0.29691802978515625, 0.2961561584472656, 0.2958428039550781, 0.2965329895019531, 0.29672549438476564, 0.29595547485351564, 0.2960772705078125, 0.29593496704101563, 0.2966548767089844, 0.2969466552734375, 0.2978447265625, 0.296627197265625, 0.29650433349609373, 0.2975867004394531, 0.2982461853027344, 0.29639166259765626, 0.29643161010742186, 0.29704702758789064, 0.29672344970703124]",tokens/s,3.3182134958203435,,,,,,,, -4bit-awq-gemm-eager,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,Qwen/Qwen1.5-0.5B,Qwen/Qwen1.5-0.5B,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.40.2,,0.30.1,,,,1.19.2,,,,0.11.1,,,,,,,,,,,,,,,,,,,,,,,,,,,MB,1460.260864,1709.703168,0.0,1063.256064,942.605312,s,10,0.8792122802734375,0.08792122802734374,0.0017793873831683257,0.08834756851196289,0.08902708129882812,0.09042998733520508,0.09155231216430663,"[0.09183289337158203, 0.08871532440185546, 0.08646198272705079, 0.08839430236816406, 0.08854621124267578, 0.08836022186279296, 0.08833491516113282, 0.08542150115966797, 0.08774205017089844, 0.08540287780761718]",tokens/s,2911.6972742962976,kWh,1.0152025072210082e-06,5.56282725791299e-07,2.6184398915211728e-06,4.1899251245334795e-06,tokens/kWh,61098943.869194776,MB,1460.588544,1709.703168,0.0,1063.256064,942.607872,s,10,54.0859736328125,5.40859736328125,0.07705683066534309,5.45104150390625,5.477568017578125,5.481690747070313,5.484988930664063,"[5.4858134765625, 5.457708984375, 5.47665185546875, 5.46595458984375, 5.472109375, 5.4443740234375, 5.39297412109375, 5.29275341796875, 5.29816357421875, 5.29947021484375]",tokens/s,11.648121641981426,kWh,6.466018401433034e-05,3.543631727276219e-05,0.00015379821301947998,0.0002538947143065725,tokens/kWh,248134.35038245353,,s,629,54.78888442230225,0.08710474470954252,0.010581359575427846,0.08675225830078125,0.08761917724609375,0.0884242431640625,0.17260260864257815,"[0.08388607788085937, 0.08398233795166016, 0.08383897399902343, 0.08367718505859376, 0.08453632354736328, 0.0890890884399414, 0.08862201690673828, 0.08834764862060547, 0.08846745300292969, 0.08839679718017578, 0.08905522918701173, 0.08837939453125, 0.08853196716308594, 0.08820326232910156, 0.08852684783935547, 0.08853094482421875, 0.08833126068115234, 0.0883599395751953, 0.08833229064941406, 0.08829747009277343, 0.08771071624755859, 0.08838553619384766, 0.08845721435546874, 0.08875212860107422, 0.08825856018066407, 0.08850534057617188, 0.08816230773925782, 0.08822886657714844, 0.08858009338378907, 0.0882339859008789, 0.08867430114746094, 0.08864665222167968, 0.08844902038574219, 0.08828108978271484, 0.0884295654296875, 0.08401407623291016, 0.08382259368896484, 0.0838635482788086, 0.08400077056884765, 0.08771788787841797, 0.0872069091796875, 0.08849203491210937, 0.08865280151367187, 0.08827187347412109, 0.08736563110351563, 0.08729804992675781, 0.08718950653076171, 0.08671231842041016, 0.08691506958007812, 0.08681779479980468, 0.0866529312133789, 0.086940673828125, 0.08705535888671875, 0.08424960327148437, 0.08400383758544921, 0.08397414398193359, 0.08412364959716796, 0.08525926208496094, 
0.08683622741699219, 0.08680038452148438, 0.08671949005126953, 0.08692940521240235, 0.1763768310546875, 0.08659661102294922, 0.08731238555908204, 0.0868884506225586, 0.08696832275390624, 0.08851148986816407, 0.08714854431152344, 0.08598323059082032, 0.08636518096923829, 0.08685465240478515, 0.08704512023925781, 0.08686284637451172, 0.08745779418945313, 0.08453529357910156, 0.0869775390625, 0.08696627044677735, 0.08664575958251954, 0.0876072998046875, 0.08689766693115235, 0.08705535888671875, 0.08679936218261719, 0.0866550064086914, 0.08538006591796875, 0.08647270202636718, 0.08609996795654297, 0.08713215637207031, 0.08740147399902344, 0.08702365112304687, 0.08689250946044921, 0.08694169616699218, 0.08711577606201172, 0.08716902160644531, 0.08723149108886719, 0.0867747802734375, 0.08696012878417969, 0.08699494171142579, 0.0842239990234375, 0.08377855682373046, 0.08404684448242188, 0.08385945892333985, 0.08593510437011719, 0.08742301177978516, 0.08683209228515625, 0.08660889434814453, 0.0867952651977539, 0.08660377502441406, 0.08784178924560547, 0.08544153594970703, 0.08774246215820312, 0.08707891082763672, 0.08705331420898438, 0.08723865509033203, 0.08705843353271485, 0.08711577606201172, 0.08695193481445312, 0.08490188598632813, 0.08705741119384766, 0.08451789093017578, 0.08685260772705078, 0.08724787139892579, 0.08672563171386719, 0.08705638122558594, 0.08634572601318359, 0.1737533416748047, 0.08698675537109375, 0.08697344207763671, 0.08704000091552734, 0.08702982330322266, 0.08781715393066407, 0.08694579315185547, 0.08745369720458984, 0.08681785583496093, 0.08706249237060547, 0.08565961456298828, 0.0864000015258789, 0.0841502685546875, 0.08501248168945312, 0.08692018890380859, 0.08674201965332032, 0.0869222412109375, 0.08693862152099609, 0.0867041244506836, 0.08703385925292968, 0.08675225830078125, 0.086761474609375, 0.08619315338134766, 0.08686489868164063, 0.08680652618408204, 0.08746598052978516, 0.08694783782958984, 0.08730828857421875, 0.08752537536621094, 0.08693350219726563, 0.08705023956298828, 0.08706867218017578, 0.08747212982177735, 0.0880005111694336, 0.08732262420654296, 0.08604876708984376, 0.08707180786132812, 0.08670201873779297, 0.0873164825439453, 0.08682189178466797, 0.08709529876708984, 0.08716492462158203, 0.08606719970703125, 0.08672358703613281, 0.08690790557861328, 0.0867962875366211, 0.08702361297607422, 0.08684031677246094, 0.08695507049560547, 0.08671123504638673, 0.08706867218017578, 0.08863744354248047, 0.08740863800048829, 0.08729503631591796, 0.08682080078125, 0.08690380859375, 0.08818892669677734, 0.08729702758789062, 0.08700415802001953, 0.08678809356689453, 0.0868106231689453, 0.08693657684326171, 0.08690995025634765, 0.17623654174804687, 0.08711167907714844, 0.08703897857666015, 0.08722227478027343, 0.08726322937011718, 0.08703590393066406, 0.08675942230224609, 0.08699088287353515, 0.08680445098876953, 0.08698777770996094, 0.08604876708984376, 0.08687308502197266, 0.08715878295898437, 0.08707379150390625, 0.087108642578125, 0.08752022552490234, 0.08703385925292968, 0.08690995025634765, 0.0869939193725586, 0.08686182403564453, 0.08672665405273437, 0.08708710479736329, 0.086866943359375, 0.08725094604492188, 0.08706047821044922, 0.08675945281982422, 0.08477897644042968, 0.08724479675292969, 0.08646656036376953, 0.086866943359375, 0.08677273559570313, 0.08715058898925782, 0.08593714904785156, 0.08755712127685547, 0.08769945526123046, 0.08737586975097657, 0.0873512954711914, 0.08723763275146484, 0.08681779479980468, 0.08708710479736329, 0.08671129608154297, 
0.08673894500732422, 0.08689766693115235, 0.0878828125, 0.08707987213134766, 0.08682710266113282, 0.08714435577392578, 0.08673996734619141, 0.08677581024169922, 0.08577126312255859, 0.08668160247802735, 0.08681574249267578, 0.0868136978149414, 0.08693555450439452, 0.08668364715576173, 0.0859504623413086, 0.08378470611572265, 0.08481996917724609, 0.08730931091308594, 0.08659455871582031, 0.08389631652832032, 0.08560025787353516, 0.08680038452148438, 0.17275801086425782, 0.08369971466064453, 0.08384204864501953, 0.08377139282226563, 0.08545689392089843, 0.08574976348876953, 0.08727859497070313, 0.08688025665283203, 0.0867799072265625, 0.08705741119384766, 0.08711580657958984, 0.08679933166503906, 0.0867962875366211, 0.08704307556152344, 0.08686386871337891, 0.08696320343017579, 0.08698880004882813, 0.08742403411865235, 0.0870973129272461, 0.08734207916259766, 0.08837529754638672, 0.08715980529785156, 0.08705023956298828, 0.086793212890625, 0.0869744644165039, 0.08696217346191407, 0.08680550384521485, 0.08711167907714844, 0.08749362945556641, 0.08701235198974609, 0.08706150054931641, 0.08685772705078125, 0.08689766693115235, 0.08696115112304688, 0.0865955810546875, 0.08719564819335937, 0.08694989013671875, 0.08730931091308594, 0.08871324920654297, 0.08721711730957031, 0.08766668701171874, 0.08723865509033203, 0.08547942352294922, 0.088416259765625, 0.08738614654541016, 0.08706454467773438, 0.08716083526611328, 0.08708815765380859, 0.08697238159179688, 0.08681676483154296, 0.08706559753417968, 0.0872499237060547, 0.08688639831542969, 0.08803123474121094, 0.0873512954711914, 0.08714649963378907, 0.08679936218261719, 0.08706047821044922, 0.08689254760742188, 0.08732466888427734, 0.0865843505859375, 0.08680239868164062, 0.08697548675537109, 0.1747763214111328, 0.08670310211181641, 0.08710451507568359, 0.08699903869628907, 0.08693657684326171, 0.08607334136962891, 0.0869969940185547, 0.08681574249267578, 0.08696627044677735, 0.08683827209472657, 0.08736153411865234, 0.08679219055175781, 0.08696012878417969, 0.086830078125, 0.0871352310180664, 0.08736870574951172, 0.08679424285888672, 0.08719462585449218, 0.08674508666992188, 0.08693862152099609, 0.08799231719970703, 0.08773529815673828, 0.08741171264648437, 0.08712397003173829, 0.08704512023925781, 0.08727654266357422, 0.08699903869628907, 0.08696729278564454, 0.0868853759765625, 0.08697344207763671, 0.08712499237060548, 0.08712499237060548, 0.0865771484375, 0.08749158477783203, 0.08688127899169922, 0.08696524810791016, 0.08687923431396484, 0.08711475372314453, 0.08697856140136719, 0.08686489868164063, 0.08696832275390624, 0.0843724822998047, 0.08361881256103515, 0.08333516693115234, 0.08410316467285156, 0.08653314971923828, 0.0870778579711914, 0.08714342498779297, 0.08421171569824219, 0.08394239807128906, 0.08393215942382813, 0.08404377746582031, 0.08362393951416015, 0.08348060607910156, 0.08413180541992188, 0.08405094146728516, 0.08691302490234375, 0.08714854431152344, 0.08712191772460938, 0.08708812713623047, 0.08714649963378907, 0.08697241973876953, 0.08708812713623047, 0.175857666015625, 0.08723865509033203, 0.08733695983886719, 0.08694886779785156, 0.08750080108642579, 0.08700927734375, 0.0869959716796875, 0.08687718200683593, 0.08966963195800781, 0.0872069091796875, 0.08702873229980469, 0.0869713897705078, 0.08688333129882812, 0.08693247985839844, 0.08705228424072266, 0.08724275207519532, 0.0872847671508789, 0.08704819488525391, 0.08684848022460938, 0.08682803344726563, 0.08715570831298829, 0.08688742065429687, 0.08504934692382812, 
0.08777011108398437, 0.08640819549560547, 0.08897232055664063, 0.08616751861572265, 0.0871905288696289, 0.0890224609375, 0.0864686050415039, 0.08654438018798828, 0.08633036804199219, 0.08477593231201172, 0.08412876892089843, 0.08437452697753907, 0.08408985900878906, 0.08387686157226562, 0.08403353881835937, 0.08393318176269532, 0.08396185302734376, 0.08435302734375, 0.08403558349609375, 0.08378470611572265, 0.0841338882446289, 0.08378470611572265, 0.08367820739746094, 0.08367922973632813, 0.0835563507080078, 0.08376831817626954, 0.08383078765869141, 0.08380723571777343, 0.0837734375, 0.08563814544677735, 0.08487833404541016, 0.08407552337646484, 0.0837580795288086, 0.0836485137939453, 0.08376012420654297, 0.08358809661865234, 0.08362290954589843, 0.08371814727783203, 0.08357478332519531, 0.08497869110107421, 0.1696030731201172, 0.08372121429443359, 0.08371916961669922, 0.0837918701171875, 0.08386457824707032, 0.08376319885253906, 0.08395263671875, 0.08374784088134765, 0.08342527770996094, 0.08687206268310547, 0.08412262725830078, 0.08380416107177735, 0.08415436553955079, 0.0838440933227539, 0.08380316925048828, 0.08431715393066407, 0.08392499542236329, 0.08376319885253906, 0.08381747436523437, 0.08540467071533203, 0.08532991790771484, 0.08402022552490235, 0.084421630859375, 0.0841195526123047, 0.08383897399902343, 0.08380623626708984, 0.08381231689453125, 0.08393830108642578, 0.08379705810546875, 0.08394233703613281, 0.08370175933837891, 0.08405811309814454, 0.08380210876464844, 0.0837734375, 0.08362290954589843, 0.08380006408691407, 0.08357478332519531, 0.08359935760498047, 0.08378470611572265, 0.0834549789428711, 0.08362188720703125, 0.08369152069091797, 0.08387686157226562, 0.08376627349853516, 0.08377961730957031, 0.0838430404663086, 0.08387686157226562, 0.08553778839111328, 0.0837232666015625, 0.08371916961669922, 0.08374476623535156, 0.08370073699951172, 0.08382259368896484, 0.08384921264648437, 0.08401203155517578, 0.08357990264892579, 0.08365055847167968, 0.08378575897216797, 0.08367919921875, 0.08369055938720703, 0.0851127700805664, 0.08426700592041016, 0.08618905639648437, 0.1722030029296875, 0.08384819030761718, 0.08397721862792969, 0.0837570571899414, 0.08369664001464844, 0.0834549789428711, 0.08403865814208984, 0.08388505554199219, 0.08392908477783204, 0.08388198089599609, 0.08392704010009766, 0.08387379455566406, 0.0837027816772461, 0.08390860748291015, 0.08369254302978515, 0.08369766235351563, 0.08587161254882812, 0.0840478744506836, 0.08396288299560548, 0.08394649505615234, 0.08418099212646485, 0.08524288177490234, 0.08649318695068359, 0.0840816650390625, 0.08387174224853515, 0.08375603485107422, 0.08405811309814454, 0.08390758514404296, 0.0839393310546875, 0.08409497833251953, 0.08389836883544922, 0.08448000335693359, 0.08434893035888671, 0.08451583862304687, 0.0842639389038086, 0.08398745727539063, 0.08408370971679688, 0.08378880310058594, 0.08385740661621094, 0.08418406677246094, 0.08394035339355468, 0.08402124786376954, 0.08522137451171875, 0.08547840118408204, 0.08503705596923829, 0.08401510620117188, 0.08397516632080078, 0.08380723571777343, 0.08466022491455077, 0.0840079345703125, 0.08379193878173828, 0.08372525024414063, 0.08389631652832032, 0.0839393310546875, 0.08357071685791016, 0.08390652465820313, 0.08391577911376953, 0.0837550048828125, 0.08391474914550781, 0.08372633361816406, 0.08374476623535156, 0.08390758514404296, 0.0837550048828125, 0.1732474822998047, 0.08425472259521484, 0.0840273895263672, 0.08414924621582032, 0.08400179290771484, 0.08393830108642578, 
0.08408268737792969, 0.08393727874755859, 0.08398745727539063, 0.08395468902587891, 0.08501862335205078, 0.08602214050292968, 0.08572313690185547, 0.08382463836669922, 0.08390962982177734, 0.08375296020507812, 0.08380928039550781, 0.0840263671875, 0.08383385467529297, 0.08391986846923828, 0.08388505554199219, 0.083666015625, 0.08356137847900391, 0.08396697235107421, 0.08387481689453125, 0.08371916961669922, 0.08389119720458985, 0.08397926330566406, 0.0838656005859375, 0.08394751739501953, 0.08407244873046875, 0.0837232666015625, 0.08388813018798828, 0.08374578857421874, 0.08396185302734376, 0.08385330963134766, 0.08368434906005859, 0.08367206573486329, 0.08419328308105468, 0.08365875244140625, 0.08380825805664062, 0.08445235443115234, 0.0841533432006836, 0.08358707427978515, 0.0837027816772461, 0.08361472320556641, 0.08509645080566407, 0.08371302032470704, 0.08371097564697266, 0.08373248291015625, 0.08388198089599609, 0.08400691223144531, 0.08375910186767578, 0.08609996795654297, 0.08662732696533203, 0.08376422119140625, 0.08392396545410157, 0.08380006408691407, 0.08389119720458985, 0.0837754898071289, 0.0837734375, 0.0838175048828125, 0.08397615814208985]",tokens/s,11.480430869002337,,,,,,,, -4bit-awq-gemm-eager,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,EleutherAI/pythia-1.3b,EleutherAI/pythia-1.3b,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.40.2,,0.30.1,,,,1.19.2,,,,0.11.0,,,,,,,,,,,,,,,,,,,,,,,,,,,MB,1688.977408,2250.768384,0.0,1604.32128,1463.693312,s,10,1.2614907455444335,0.12614907455444335,0.001156305474696835,0.12596041870117186,0.12672372131347656,0.12802366485595704,0.12906361968994143,"[0.1293236083984375, 0.12533795166015624, 0.12527750396728515, 0.1264024658203125, 0.12550204467773438, 0.12507218933105468, 0.1257669143676758, 0.12615392303466796, 0.12621929931640624, 0.12643484497070312]",tokens/s,2029.3450499275416,kWh,1.4795928945144019e-06,8.107423014348568e-07,6.482848936275459e-06,8.773184132224718e-06,tokens/kWh,29179827.545131337,MB,1688.977408,2250.768384,0.0,1604.32128,1560.975872,s,10,72.8006865234375,7.280068652343751,0.00707557452467299,7.2794865722656255,7.288760400390625,7.290637524414063,7.292139223632812,"[7.2925146484375, 7.2867998046875, 7.28834326171875, 7.272900390625, 7.28120458984375, 7.28177783203125, 7.2702080078125, 7.2777685546875, 7.277201171875, 7.27196826171875]",tokens/s,8.653764546535935,kWh,8.591771873335045e-05,4.70891264273655e-05,0.0003703246191761246,0.0005033314643368406,tokens/kWh,125166.02768516574,,s,629,73.8150656280517,0.1173530455135958,0.015000349665490152,0.11545600128173829,0.11595755310058595,0.11627356262207031,0.24130564147949218,"[0.1175910415649414, 0.11764224243164062, 0.11673804473876953, 0.11585126495361328, 0.115281982421875, 0.11545388793945313, 0.11590860748291015, 0.11528498840332031, 0.11539968109130859, 0.11538841247558594, 0.11548159790039063, 0.11523072052001954, 0.11521842956542969, 0.11532492828369141, 0.11577958679199218, 0.11560755157470703, 0.1152696304321289, 0.11566079711914062, 0.1159925765991211, 0.11540889739990234, 0.11546623992919922, 
0.11522150421142578, 0.11538944244384766, 0.1153617935180664, 0.11544371032714844, 0.1166714859008789, 0.11574886322021484, 0.1154672622680664, 0.11605299377441407, 0.11584102630615234, 0.1156485137939453, 0.11544882965087891, 0.11554099273681641, 0.115557373046875, 0.11551026916503906, 0.1155788803100586, 0.115725341796875, 0.11555939483642579, 0.11551129913330078, 0.11544371032714844, 0.11543244934082031, 0.11539968109130859, 0.11540684509277344, 0.11523072052001954, 0.11638988494873047, 0.11598438262939453, 0.11537510681152344, 0.11538534545898438, 0.11558604431152343, 0.11559219360351562, 0.11591474914550781, 0.11588198089599609, 0.1159362564086914, 0.11570175933837891, 0.11595673370361329, 0.11578368377685547, 0.11646463775634766, 0.11617485046386719, 0.11598336029052735, 0.11553279876708984, 0.11578470611572265, 0.11579801940917969, 0.24151040649414063, 0.11573862457275391, 0.11605094146728516, 0.11689881896972656, 0.1161523208618164, 0.1156648941040039, 0.11554099273681641, 0.11535564422607422, 0.11528498840332031, 0.11527065277099609, 0.11566387176513672, 0.11533516693115234, 0.11576012420654297, 0.11564749145507812, 0.1155051498413086, 0.11542221069335938, 0.11543961334228516, 0.11525529479980469, 0.11523993682861328, 0.11541913604736329, 0.1155225601196289, 0.1157734375, 0.11563929748535157, 0.11553587341308594, 0.11531571197509766, 0.115378173828125, 0.11524201965332032, 0.11551340484619141, 0.11532073974609375, 0.11577961730957032, 0.11592704010009766, 0.11556451416015626, 0.1154549789428711, 0.11527680206298828, 0.11526656341552734, 0.11538739013671875, 0.11530854034423828, 0.11538432312011719, 0.11546214294433593, 0.11541401672363281, 0.11528192138671875, 0.11616563415527344, 0.11571199798583984, 0.11578880310058594, 0.11551747131347656, 0.11540067291259766, 0.11685273742675781, 0.11557068634033203, 0.11726950073242187, 0.1159393310546875, 0.11543148803710937, 0.11552044677734374, 0.1161891860961914, 0.11548876953125, 0.11572940826416016, 0.1157396469116211, 0.11657421112060547, 0.11594854736328125, 0.11598851013183593, 0.11593417358398438, 0.11551846313476563, 0.11548365020751954, 0.11548569488525391, 0.2411530303955078, 0.11556454467773437, 0.11577139282226563, 0.11626496124267578, 0.11560652923583985, 0.11565161895751953, 0.11566384124755859, 0.11581747436523437, 0.1154672622680664, 0.11527474975585937, 0.11536998748779297, 0.11562598419189453, 0.11528089904785156, 0.11566902160644531, 0.11589116668701172, 0.11617894744873047, 0.1157201919555664, 0.11589734649658204, 0.11571302032470702, 0.11588607788085938, 0.11542323303222657, 0.11556966400146484, 0.1156833267211914, 0.11643392181396485, 0.11568434906005859, 0.11659468841552735, 0.11596083068847657, 0.11616665649414062, 0.11567922973632813, 0.11553794860839844, 0.11543036651611328, 0.11538636779785157, 0.11580620574951171, 0.11554918670654298, 0.11579296112060547, 0.1156412811279297, 0.11582361602783203, 0.11581132507324218, 0.11555328369140624, 0.11571507263183593, 0.11545600128173829, 0.11533926391601562, 0.1156147232055664, 0.1153966064453125, 0.1154119644165039, 0.11549286651611328, 0.11582566070556641, 0.11649638366699219, 0.1159516830444336, 0.11595462036132813, 0.11600281524658203, 0.11591474914550781, 0.11610931396484375, 0.11565261077880859, 0.1153290252685547, 0.11547238159179687, 0.1153290252685547, 0.115346435546875, 0.11534438323974609, 0.11529523468017579, 0.115378173828125, 0.11543551635742187, 0.11540377807617187, 0.24315084838867188, 0.11575193786621094, 0.11506790161132813, 0.11524915313720703, 
0.1155962905883789, 0.11531366729736328, 0.11553177642822265, 0.11558297729492187, 0.11519385528564453, 0.11537305450439453, 0.11545811462402343, 0.11599967956542968, 0.11572838592529297, 0.1153966064453125, 0.11536077117919921, 0.11558092498779297, 0.1153617935180664, 0.1152911376953125, 0.11529011535644532, 0.11535564422607422, 0.11525939178466797, 0.1152143325805664, 0.11515494537353516, 0.11549696350097656, 0.11525017547607422, 0.11560550689697266, 0.11576217651367188, 0.11544268798828125, 0.11537203216552734, 0.11532288360595704, 0.11530239868164062, 0.11551334381103516, 0.115162109375, 0.11524813079833984, 0.1151436767578125, 0.11530035400390624, 0.11520614624023437, 0.11535871887207032, 0.11520716857910156, 0.11560345458984375, 0.11539968109130859, 0.11531676483154298, 0.11529417419433594, 0.1153966064453125, 0.11623017883300782, 0.11542422485351563, 0.11526451110839844, 0.11549388885498046, 0.11539762878417968, 0.11577855682373046, 0.1153955841064453, 0.11553279876708984, 0.11524403381347656, 0.11579084777832031, 0.11539968109130859, 0.11569152069091797, 0.11539353942871093, 0.11553997039794922, 0.11545600128173829, 0.11532492828369141, 0.1152696304321289, 0.11538841247558594, 0.1153433609008789, 0.241364990234375, 0.11547853088378907, 0.11533721923828125, 0.11539049530029297, 0.11531874847412109, 0.11551129913330078, 0.1152542724609375, 0.11560758209228515, 0.11542422485351563, 0.1155389404296875, 0.11542118072509766, 0.11545804595947265, 0.1156178207397461, 0.115463134765625, 0.11520716857910156, 0.11564339447021485, 0.11541913604736329, 0.11587276458740234, 0.1153955841064453, 0.11544882965087891, 0.11543654632568359, 0.11536793518066406, 0.11598540496826172, 0.11562393951416015, 0.11578470611572265, 0.11597004699707031, 0.11561062622070313, 0.11569459533691406, 0.11571302032470702, 0.11590144348144531, 0.11557068634033203, 0.11549900817871094, 0.11544678497314453, 0.11544268798828125, 0.11540889739990234, 0.11547443389892578, 0.11561574554443359, 0.11543449401855468, 0.11550624084472656, 0.11566381072998047, 0.11555225372314454, 0.11549286651611328, 0.11550822448730469, 0.11547955322265625, 0.1153064956665039, 0.11546623992919922, 0.11551744079589844, 0.11544371032714844, 0.11622195434570312, 0.11589017486572266, 0.11552665710449218, 0.1153986587524414, 0.11532390594482422, 0.11578470611572265, 0.11576934051513672, 0.11560550689697266, 0.11554713439941407, 0.11664281463623047, 0.11574784088134765, 0.11553997039794922, 0.11533824157714843, 0.11548569488525391, 0.11580518341064452, 0.2409891815185547, 0.11542425537109376, 0.11530342102050781, 0.11544166564941406, 0.115346435546875, 0.11540684509277344, 0.11648614501953125, 0.1165475845336914, 0.11569561767578125, 0.11536281585693359, 0.11529318237304688, 0.11541401672363281, 0.11542221069335938, 0.11558809661865234, 0.11539456176757812, 0.11545702362060548, 0.11533209228515626, 0.11550822448730469, 0.11541913604736329, 0.11535667419433594, 0.11532185363769532, 0.11544985961914063, 0.11528300476074219, 0.11542829132080078, 0.11535564422607422, 0.11552665710449218, 0.11533004760742188, 0.11546419525146484, 0.11538022613525391, 0.11521539306640625, 0.1153545913696289, 0.11576934051513672, 0.11584614562988281, 0.11546214294433593, 0.11551232147216797, 0.11547647857666016, 0.11558092498779297, 0.1154796142578125, 0.11671033477783203, 0.11560243225097656, 0.11540480041503906, 0.11559120178222657, 0.11558499145507813, 0.11560243225097656, 0.11611443328857422, 0.11554815673828125, 0.11593113708496093, 0.11560857391357422, 
0.11550003051757812, 0.1153433609008789, 0.11553488159179688, 0.11636220550537109, 0.11614924621582032, 0.11589631652832032, 0.11544371032714844, 0.11543961334228516, 0.11590144348144531, 0.11554303741455078, 0.11543961334228516, 0.11548467254638672, 0.1157918701171875, 0.11545804595947265, 0.11548159790039063, 0.24169778442382814, 0.11536077117919921, 0.11515801239013672, 0.1152573471069336, 0.11527884674072265, 0.11527986907958984, 0.115346435546875, 0.11558502197265624, 0.1152471694946289, 0.11550918579101563, 0.11519078063964844, 0.11538329315185547, 0.11518669128417969, 0.11536281585693359, 0.11518163299560547, 0.11539347076416015, 0.11518678283691407, 0.11522755432128906, 0.11527577972412109, 0.11537920379638672, 0.11528710174560547, 0.11573241424560547, 0.11531775665283203, 0.11599462127685548, 0.11536895751953125, 0.11559219360351562, 0.1152174072265625, 0.11544473266601563, 0.11515187072753906, 0.11589734649658204, 0.11575091552734375, 0.11540377807617187, 0.11524508666992188, 0.11525116729736327, 0.1152573471069336, 0.11555532836914062, 0.1153259506225586, 0.1153617935180664, 0.1152542724609375, 0.11547135925292969, 0.11532083129882813, 0.11537305450439453, 0.11539968109130859, 0.11537612915039062, 0.11517235565185546, 0.11529216003417969, 0.11535257720947266, 0.11539046478271485, 0.11532390594482422, 0.11542940521240234, 0.11545801544189453, 0.115557373046875, 0.11530239868164062, 0.11545193481445312, 0.11529827117919922, 0.11565363311767578, 0.11541299438476563, 0.11608678436279297, 0.11551641845703126, 0.11544064331054688, 0.11531980895996094, 0.11546214294433593, 0.11526348876953126, 0.2421370849609375, 0.11553485107421875, 0.11565363311767578, 0.11698073577880859, 0.11526451110839844, 0.11523788452148437, 0.11507615661621094, 0.11531769561767578, 0.11525939178466797, 0.11527884674072265, 0.11663155364990234, 0.11554815673828125, 0.11534745788574219, 0.11536281585693359, 0.11533106994628907, 0.11543449401855468, 0.11581439971923828, 0.11606221008300781, 0.11575091552734375, 0.11566182708740234, 0.1154867172241211, 0.1154119644165039, 0.11538432312011719, 0.11553075408935547, 0.11544985961914063, 0.1154119644165039, 0.11534540557861328, 0.11543142700195312, 0.11537612915039062, 0.11558502197265624, 0.11556352233886719, 0.1155072021484375, 0.11545394897460938, 0.11533824157714843, 0.11553485107421875, 0.11541094207763672, 0.1153597412109375, 0.11537407684326172, 0.11529216003417969, 0.11547955322265625, 0.11540787506103516, 0.11534950256347656, 0.11540275573730469, 0.11537305450439453, 0.11569664001464844, 0.11560038757324219, 0.11557785797119141, 0.11525635528564453, 0.115210205078125, 0.11607859039306641, 0.11538022613525391, 0.11546419525146484, 0.11593727874755859, 0.11551436614990235, 0.11531263732910156, 0.11544576263427735, 0.11548569488525391, 0.11550924682617188, 0.11583999633789062, 0.11551641845703126, 0.11535871887207032, 0.11538739013671875, 0.11543142700195312, 0.24210124206542968, 0.11537920379638672, 0.11549593353271484, 0.11537612915039062, 0.11512525177001953, 0.11541709136962891, 0.1153280029296875, 0.11576729583740235, 0.115378173828125, 0.11518669128417969, 0.11519593811035156, 0.115463134765625, 0.11528498840332031, 0.11549491119384765, 0.11540172576904296, 0.11594445037841797, 0.11559526062011719, 0.116279296875, 0.11579698944091797, 0.11580210876464844, 0.11537203216552734, 0.11540991973876953, 0.11529318237304688, 0.11539456176757812, 0.1153597412109375, 0.11614412689208985, 0.11539968109130859, 0.11535155487060547, 0.11532492828369141, 
0.11529523468017579, 0.11584108734130859, 0.1154241943359375, 0.11634585571289062, 0.11548159790039063, 0.11571302032470702, 0.1153219223022461, 0.1154579849243164, 0.11549388885498046, 0.1154303970336914, 0.11558297729492187, 0.11536383819580077, 0.11582975769042969, 0.11551436614990235, 0.11530035400390624, 0.11534233856201172, 0.11510886383056641, 0.11527168273925781, 0.11539968109130859, 0.11526860809326171, 0.1152573471069336, 0.11550822448730469, 0.11615952301025391, 0.1159669418334961, 0.11542835235595703, 0.11572223663330078, 0.11554918670654298, 0.11545906829833984, 0.11568742370605468, 0.1153812484741211, 0.11539762878417968, 0.11535155487060547, 0.1154303970336914, 0.11551129913330078, 0.24231832885742188, 0.11538432312011719, 0.1153259506225586, 0.11540480041503906, 0.11546419525146484, 0.11534745788574219, 0.11531878662109375, 0.11547138977050782, 0.115283935546875, 0.11556147003173828, 0.11574169921875, 0.11635916900634766, 0.11543142700195312, 0.11534540557861328, 0.11524813079833984, 0.1152911376953125, 0.11551436614990235, 0.11536589050292968, 0.11532083129882813, 0.11543247985839844, 0.11533615875244141, 0.11537407684326172, 0.11525631713867188, 0.11534438323974609, 0.11512217712402344, 0.11535769653320313, 0.11603763580322266, 0.11543449401855468, 0.1153986587524414, 0.11541506958007812, 0.11541094207763672, 0.11551331329345703, 0.11517440032958984, 0.11537715148925781, 0.11538534545898438, 0.11531263732910156, 0.11526758575439452, 0.11542835235595703, 0.11535052490234375, 0.11555430603027343, 0.11530035400390624, 0.11539046478271485, 0.11545398712158203, 0.1154815673828125, 0.11535667419433594, 0.11539250946044922, 0.1153812484741211, 0.1153986587524414, 0.1155051498413086, 0.11556665802001953, 0.11569554901123047, 0.11560447692871094, 0.11522866821289063, 0.11546419525146484, 0.11527577972412109, 0.11578880310058594, 0.11543142700195312, 0.1154119644165039, 0.115346435546875, 0.11526553344726563, 0.11522560119628907, 0.11540480041503906, 0.11531673431396484]",tokens/s,8.5212956819612,,,,,,,, -4bit-awq-gemm-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,Qwen/Qwen1.5-72B,Qwen/Qwen1.5-72B,cuda,0,42,,,True,,,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run - report = scenario.run(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run - 
self.run_model_loading_tracking(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking - backend.load() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load - self.load_transformers_model() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model - self.load_transformers_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights - self.load_transformers_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained - self.pretrained_model = self.automodel_loader.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4034, in from_pretrained - dispatch_model(model, **device_map_kwargs) - File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 494, in dispatch_model - model.to(device) - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 2905, in to - return super().to(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1174, in to - return self._apply(convert) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply - module._apply(fn) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply - module._apply(fn) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply - module._apply(fn) - [Previous line repeated 2 more times] - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 854, in _apply - self._buffers[key] = fn(buf) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1160, in convert - return t.to( -torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 96.00 MiB. GPU 0 has a total capacity of 22.18 GiB of which 68.50 MiB is free. Process 65169 has 22.11 GiB memory in use. Of the allocated memory 21.86 GiB is allocated by PyTorch, and 10.74 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) - -",qwen2,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemm-eager,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,EleutherAI/pythia-1.4b,EleutherAI/pythia-1.4b,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.40.2,,0.30.1,,,,1.19.2,,,,0.11.0,,,,,,,,,,,,,,,,,,,,,,,,,,,MB,1591.713792,2250.768384,0.0,1604.32128,1463.693312,s,10,1.2648313903808592,0.12648313903808595,0.0013989604614154947,0.1262478713989258,0.1271006202697754,0.1287280216217041,0.13002994270324708,"[0.1303554229736328, 0.12560326385498047, 0.12551904296875, 0.12673897552490235, 0.12573398590087892, 0.12508892822265624, 0.12599820709228515, 0.1264975357055664, 0.1265692138671875, 0.12672681427001953]",tokens/s,2023.9851884361806,kWh,1.4855278241965507e-06,8.139949754399822e-07,6.45409544104969e-06,8.753618240686224e-06,tokens/kWh,29245049.64245863,MB,1591.713792,2250.768384,0.0,1604.32128,1560.975872,s,10,72.9124365234375,7.291243652343749,0.003929707108860259,7.2908349609375005,7.297245703125,7.2972966796875,7.2973374609375,"[7.297234375, 7.2856533203125, 7.29734765625, 7.29276513671875, 7.29289794921875, 7.29063818359375, 7.29089404296875, 7.29077587890625, 7.28937060546875, 7.284859375]",tokens/s,8.640501264794358,kWh,8.609067341933647e-05,4.718392125277432e-05,0.00037183251274354733,0.0005051071074156582,tokens/kWh,124726.02162011672,,s,629,73.93082885742194,0.11753708880353239,0.015022143769719637,0.11565261077880859,0.11614679260253907,0.11651727142333984,0.24159293273925783,"[0.11554611206054688, 0.11542835235595703, 0.11560959625244141, 0.11652607727050782, 0.11558604431152343, 0.1156341781616211, 0.11573760223388672, 0.1157570571899414, 0.11565670776367187, 0.11562393951416015, 0.11558502197265624, 0.11574476623535156, 0.1157580795288086, 0.11542835235595703, 0.11543961334228516, 0.11608268737792969, 0.1160478744506836, 0.11690393829345704, 0.11582669067382813, 0.11567513275146485, 0.1154303970336914, 0.11542835235595703, 0.1157232666015625, 0.1157694091796875, 0.11561158752441406, 0.11565261077880859, 0.11558297729492187, 0.11557170867919922, 0.11555225372314454, 0.11560857391357422, 0.11558604431152343, 0.11568946838378906, 0.11568537902832031, 0.11553177642822265, 0.1154703369140625, 0.11592601776123047, 0.11566284942626953, 0.11548159790039063, 0.11579289245605469, 0.11569561767578125, 0.11579801940917969, 0.11614924621582032, 0.11590553283691406, 0.1172838363647461, 0.11599565124511718, 0.11589427185058594, 0.11579392242431641, 0.11582054138183594, 0.11560550689697266, 0.11576937866210937, 0.11679228973388672, 0.11623014068603515, 0.115884033203125, 0.11555532836914062, 0.11562290954589843, 0.11581132507324218, 0.11705241394042969, 0.11600486755371094, 0.11588607788085938, 0.115736572265625, 0.11612569427490234, 0.11604176330566406, 0.24190666198730468, 0.11594547271728516, 0.11553177642822265, 0.1155819549560547, 0.1155051498413086, 0.11557990264892579, 0.11673600006103516, 
0.11557376098632813, 0.11559117126464843, 0.1155072021484375, 0.11576525115966797, 0.11566079711914062, 0.11590656280517578, 0.115525634765625, 0.11559219360351562, 0.1160847396850586, 0.11556658935546875, 0.1156147232055664, 0.11551641845703126, 0.11533209228515626, 0.11555328369140624, 0.11560147094726562, 0.11554502105712891, 0.11557376098632813, 0.1158809585571289, 0.11545088195800782, 0.11558604431152343, 0.11544985961914063, 0.11559321594238281, 0.11556761932373047, 0.11569868469238281, 0.11571097564697265, 0.11561676788330078, 0.11552051544189453, 0.11553997039794922, 0.11532393646240234, 0.11541606140136719, 0.11545494079589844, 0.11565055847167968, 0.11606527709960937, 0.11634278106689454, 0.1155440673828125, 0.11550105285644531, 0.11538432312011719, 0.11557785797119141, 0.11540582275390625, 0.11706163024902344, 0.11553382110595703, 0.11559321594238281, 0.11543244934082031, 0.11545804595947265, 0.11543142700195312, 0.11540480041503906, 0.11530547332763671, 0.11539667510986328, 0.11571295928955078, 0.11585740661621094, 0.11560447692871094, 0.11563622283935547, 0.11560550689697266, 0.11555430603027343, 0.11554303741455078, 0.1156147232055664, 0.24147865295410156, 0.11553382110595703, 0.11642678070068359, 0.11596393585205078, 0.11609900665283203, 0.11560550689697266, 0.11545906829833984, 0.11533824157714843, 0.11538944244384766, 0.11619737243652344, 0.11612876892089843, 0.11601612854003907, 0.1155758056640625, 0.11589433288574219, 0.1156719970703125, 0.11552870178222656, 0.11574681854248046, 0.11581337738037109, 0.11625574493408203, 0.11572531127929687, 0.11590656280517578, 0.11556454467773437, 0.11557075500488281, 0.1164062042236328, 0.11572736358642578, 0.11559935760498047, 0.1159710693359375, 0.11614105224609375, 0.11683942413330078, 0.11663980865478515, 0.11592697906494141, 0.11562598419189453, 0.11584307098388671, 0.11552973175048828, 0.11597516632080078, 0.11550310516357422, 0.11585126495361328, 0.11633869171142579, 0.11576121520996094, 0.11590239715576171, 0.11548057556152344, 0.11562700653076172, 0.11580518341064452, 0.11599974060058593, 0.11608370971679688, 0.11642265319824219, 0.11600179290771484, 0.11641446685791015, 0.11563212585449219, 0.11547647857666016, 0.11545088195800782, 0.11568032073974609, 0.11588703918457031, 0.11573350524902344, 0.1157201919555664, 0.11566182708740234, 0.11576627349853516, 0.11582566070556641, 0.11572736358642578, 0.11544780731201172, 0.11562393951416015, 0.11546828460693359, 0.11547853088378907, 0.2430136260986328, 0.1159004135131836, 0.11564236450195313, 0.11563827514648438, 0.11565567779541015, 0.11559526062011719, 0.11573350524902344, 0.11556658935546875, 0.1157396469116211, 0.1160284194946289, 0.11578982543945313, 0.11594035339355468, 0.11562290954589843, 0.11544985961914063, 0.11558502197265624, 0.11559117126464843, 0.11605811309814452, 0.11588301086425781, 0.1157949447631836, 0.11583795166015624, 0.11589734649658204, 0.11614617919921875, 0.11590348815917968, 0.1155440673828125, 0.1166714859008789, 0.11601510620117188, 0.11548365020751954, 0.11576012420654297, 0.11538944244384766, 0.11538944244384766, 0.11550617980957031, 0.1161164779663086, 0.11625676727294922, 0.11571814727783203, 0.11565875244140625, 0.1156485137939453, 0.11559117126464843, 0.11533106994628907, 0.11534438323974609, 0.115957763671875, 0.11574066925048829, 0.11546316528320312, 0.11652095794677735, 0.11555532836914062, 0.11549081420898437, 0.11547443389892578, 0.11542630767822265, 0.11534438323974609, 0.11527986907958984, 0.11548365020751954, 0.11580825805664062, 
0.1169459228515625, 0.11576729583740235, 0.11597516632080078, 0.11551849365234375, 0.11617174530029296, 0.1161553955078125, 0.11608678436279297, 0.11566079711914062, 0.11565261077880859, 0.11548365020751954, 0.1155758056640625, 0.11557376098632813, 0.2416373748779297, 0.11573760223388672, 0.11561881256103515, 0.11579596710205078, 0.11593830108642578, 0.115810302734375, 0.11552870178222656, 0.11581542205810547, 0.11552665710449218, 0.11582771301269532, 0.1157396469116211, 0.11553791809082031, 0.1157027816772461, 0.11587789154052734, 0.11567513275146485, 0.11574169921875, 0.11559833526611328, 0.11615334320068359, 0.11550822448730469, 0.11546316528320312, 0.11542733001708984, 0.11593523406982421, 0.11576217651367188, 0.11546521759033203, 0.11530751800537109, 0.11534848022460938, 0.11562496185302734, 0.11573248291015625, 0.11568946838378906, 0.11568025970458984, 0.1157570571899414, 0.11558809661865234, 0.11546009826660156, 0.11551436614990235, 0.11556864166259766, 0.11568851470947265, 0.11561977386474609, 0.11564441680908204, 0.11560243225097656, 0.11574374389648437, 0.11693260955810547, 0.11600179290771484, 0.11581951904296875, 0.11551337432861328, 0.11621782684326172, 0.11561574554443359, 0.11616153717041015, 0.11619328308105469, 0.11597926330566406, 0.11552153778076171, 0.11582157135009766, 0.11549798583984375, 0.11565773010253906, 0.11573868560791016, 0.11633657836914063, 0.11591986846923828, 0.11580723571777343, 0.11681075286865235, 0.11580210876464844, 0.11566284942626953, 0.11573554992675782, 0.11571405029296875, 0.11579392242431641, 0.24123802185058593, 0.11577037048339844, 0.11569152069091797, 0.11550924682617188, 0.11548262023925782, 0.11541299438476563, 0.11537100982666015, 0.11548979187011718, 0.11538540649414063, 0.11682399749755859, 0.11572531127929687, 0.11558911895751953, 0.11550003051757812, 0.11544576263427735, 0.11544678497314453, 0.11560755157470703, 0.11546828460693359, 0.11556249237060547, 0.1156485137939453, 0.11555023956298828, 0.11552047729492188, 0.11564236450195313, 0.11559014129638671, 0.11573452758789063, 0.11543859100341797, 0.11551641845703126, 0.11565465545654297, 0.11574988555908203, 0.11569971466064453, 0.11559935760498047, 0.11563724517822266, 0.11556044769287109, 0.11579084777832031, 0.11558399963378906, 0.11560345458984375, 0.11583385467529297, 0.11566182708740234, 0.11573350524902344, 0.11707392120361328, 0.11604377746582031, 0.11578166198730469, 0.11570275115966797, 0.11584614562988281, 0.11643289947509766, 0.11766886138916016, 0.11568742370605468, 0.11568131256103516, 0.1155184326171875, 0.11561779022216796, 0.11559219360351562, 0.1157734375, 0.1157580795288086, 0.11578675079345703, 0.11578163146972656, 0.11558399963378906, 0.1155758056640625, 0.11558399963378906, 0.11569766235351563, 0.11568230438232421, 0.11554412841796875, 0.11554502105712891, 0.11572531127929687, 0.11579392242431641, 0.24222003173828124, 0.11544473266601563, 0.11548876953125, 0.11571814727783203, 0.11562905883789062, 0.1165486068725586, 0.11599871826171874, 0.11563724517822266, 0.11574374389648437, 0.11579801940917969, 0.11553382110595703, 0.11554611206054688, 0.11684249877929688, 0.11557273864746094, 0.11547853088378907, 0.1154877471923828, 0.11573350524902344, 0.11547853088378907, 0.11548876953125, 0.11556556701660156, 0.1155973129272461, 0.11554815673828125, 0.1156280288696289, 0.1164400634765625, 0.115525634765625, 0.11554815673828125, 0.11559321594238281, 0.11538022613525391, 0.11541510772705078, 0.11557574462890625, 0.11551538848876954, 0.11658854675292969, 0.11569561767578125, 
0.11581132507324218, 0.11544371032714844, 0.1155594253540039, 0.11566284942626953, 0.11575603485107422, 0.1155072021484375, 0.11597618865966797, 0.11624857330322266, 0.11573554992675782, 0.11539968109130859, 0.11556352233886719, 0.11553997039794922, 0.1154119644165039, 0.11562086486816406, 0.11566899108886719, 0.11578470611572265, 0.11621580505371094, 0.11563520050048828, 0.11619840240478516, 0.11569459533691406, 0.11568230438232421, 0.11591270446777344, 0.11579801940917969, 0.11572531127929687, 0.11583692932128906, 0.11580006408691407, 0.11551436614990235, 0.11591372680664062, 0.11565055847167968, 0.11566796875, 0.2425927734375, 0.11592601776123047, 0.11652812957763672, 0.1160263671875, 0.1156280288696289, 0.1154703369140625, 0.11540684509277344, 0.11567411041259766, 0.11576422119140625, 0.11549388885498046, 0.11578163146972656, 0.11591372680664062, 0.11623423767089844, 0.11559321594238281, 0.11569459533691406, 0.11551744079589844, 0.11644620513916015, 0.1155645751953125, 0.11573654174804687, 0.11594445037841797, 0.11607039642333984, 0.1157232666015625, 0.11550310516357422, 0.11549900817871094, 0.11566694641113281, 0.11567616271972657, 0.11566902160644531, 0.1161082534790039, 0.11594137573242187, 0.11565158081054687, 0.11552153778076171, 0.11549702453613281, 0.11560646057128907, 0.11540275573730469, 0.11600281524658203, 0.11596390533447265, 0.11555123138427735, 0.11542118072509766, 0.11550617980957031, 0.11543654632568359, 0.11562700653076172, 0.11552051544189453, 0.11569356536865234, 0.11555020904541016, 0.11564236450195313, 0.11563622283935547, 0.11566694641113281, 0.11554611206054688, 0.11565567779541015, 0.11542425537109376, 0.11545600128173829, 0.11553587341308594, 0.11576729583740235, 0.11622911834716797, 0.11651174163818359, 0.11568844604492187, 0.11576831817626954, 0.11554713439941407, 0.11568025970458984, 0.11566694641113281, 0.11580620574951171, 0.11560345458984375, 0.11570893096923827, 0.24270541381835936, 0.11573554992675782, 0.1154734115600586, 0.115525634765625, 0.11553279876708984, 0.11555430603027343, 0.11576525115966797, 0.11567922973632813, 0.11552665710449218, 0.11537715148925781, 0.1155041275024414, 0.11540991973876953, 0.11538329315185547, 0.11551641845703126, 0.11542937469482421, 0.11544576263427735, 0.11548365020751954, 0.11621273803710938, 0.11560447692871094, 0.11579801940917969, 0.11566796875, 0.11572531127929687, 0.11551750183105469, 0.1155849609375, 0.11561062622070313, 0.11598028564453125, 0.11561676788330078, 0.11556147003173828, 0.11561676788330078, 0.11551436614990235, 0.11571302032470702, 0.11565875244140625, 0.11572223663330078, 0.11552973175048828, 0.11562290954589843, 0.11551747131347656, 0.11566793823242187, 0.11573760223388672, 0.11573760223388672, 0.11582463836669922, 0.1156147232055664, 0.11548880004882812, 0.1162034912109375, 0.11573350524902344, 0.11611344146728515, 0.11572425842285156, 0.11567616271972657, 0.11576217651367188, 0.11560352325439453, 0.11609388732910156, 0.11659980773925781, 0.11586048126220704, 0.11570381164550782, 0.11573248291015625, 0.11557170867919922, 0.11563827514648438, 0.11605709075927734, 0.1166714859008789, 0.1158123550415039, 0.11569971466064453, 0.11564543914794922, 0.11563629150390625, 0.11554707336425782, 0.24293376159667968, 0.1157570571899414, 0.11543551635742187, 0.11561164855957032, 0.11565161895751953, 0.11541910552978515, 0.11535257720947266, 0.11540790557861329, 0.1153371810913086, 0.11557478332519532, 0.11545394897460938, 0.11548365020751954, 0.11608576202392579, 0.11585740661621094, 0.1155758056640625, 
0.11546419525146484, 0.11544882965087891, 0.11567922973632813, 0.11579596710205078, 0.11548467254638672, 0.11567820739746094, 0.11553075408935547, 0.11560447692871094, 0.11557683563232422, 0.11538841247558594, 0.11541709136962891, 0.11647283172607421, 0.11568946838378906, 0.11550924682617188, 0.11575603485107422, 0.11594035339355468, 0.11566387176513672, 0.11554713439941407, 0.11556761932373047, 0.11553997039794922, 0.11574578857421874, 0.1154959716796875, 0.11552867126464844, 0.11542527770996094, 0.11553689575195313, 0.11548262023925782, 0.11609190368652343, 0.11563314819335938, 0.11550105285644531, 0.11568844604492187, 0.1156341781616211, 0.11574681854248046, 0.11630079650878906, 0.11589119720458985, 0.1157027816772461, 0.11557170867919922, 0.11571609497070312, 0.11571916961669922, 0.11559321594238281, 0.1154549789428711, 0.1155389404296875, 0.11564543914794922, 0.11553587341308594, 0.11558604431152343, 0.11541913604736329, 0.11550822448730469, 0.11574272155761718, 0.11551026916503906]",tokens/s,8.507952767756032,,,,,,,, -4bit-awq-gemm-eager,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,facebook/xglm-7.5B,facebook/xglm-7.5B,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.0,,0.30.1,,,,1.19.2,,,,0.11.1,,,,,,,,,,,,,,,,,,,,,,,,,,,MB,2170.855424,7227.31008,0.0,6580.862976,6226.036224,s,10,5.757403198242186,0.5757403198242188,0.0009922788511109175,0.5756337280273438,0.5767303344726562,0.5773405700683594,0.5778287585449219,"[0.5763906860351562, 0.5754805908203126, 0.5742902221679688, 0.5758182373046875, 0.5751873168945313, 0.574752685546875, 0.575786865234375, 0.5751510620117187, 0.5779508056640625, 0.5765947265625]",tokens/s,444.6449053249566,kWh,6.784125003549789e-06,3.717420948032668e-06,3.101488592300367e-05,4.151643187458613e-05,tokens/kWh,6166233.1862557735,MB,2170.855424,7227.31008,0.0,6580.862976,6478.658048,s,10,336.90312500000005,33.6903125,0.0038578817961585758,33.690537109375,33.6951453125,33.6965375,33.69765125,"[33.6891484375, 33.6979296875, 33.68431640625, 33.69015234375, 33.69138671875, 33.68830859375, 33.690921875, 33.6910078125, 33.6948359375, 33.6851171875]",tokens/s,1.8699737498724598,kWh,0.00039786197528243067,0.0002180628312197768,0.001800652746076575,0.0024165775525787823,tokens/kWh,26069.926840448938,,s,629,341.54916522216826,0.5430034423245914,0.06841490320905963,0.5347440795898437,0.5353432861328125,0.5355950317382813,1.1101828955078124,"[0.5346764526367187, 0.534593505859375, 0.53607421875, 0.5346488037109375, 0.5349376220703125, 0.5348045043945312, 0.5348710327148437, 0.534361083984375, 0.5351505737304687, 0.5344501953125, 0.5342750854492188, 0.5343682861328125, 0.5348853759765625, 0.5342218017578125, 0.5351854248046874, 0.5346846923828125, 0.5351168212890625, 0.5340159912109375, 0.5345996704101562, 0.5340569458007812, 0.5348679809570313, 0.5342392578125, 0.5346948852539063, 0.5346427001953125, 0.5350113525390625, 0.5345361938476563, 0.534276123046875, 0.5349284057617187, 0.5341531982421875, 0.5347072143554688, 0.534413330078125, 0.5346447143554688, 0.5343323974609375, 0.534677490234375, 
0.5346836547851562, 0.5348843383789063, 0.5341747436523437, 0.53469287109375, 0.5349488525390625, 0.5349376220703125, 0.5349048461914062, 0.5350922241210937, 0.5349293823242187, 0.5348905029296875, 0.5348187866210937, 0.5352499389648437, 0.5351372680664063, 0.5351874389648438, 0.5344020385742188, 0.5347307739257813, 0.534319091796875, 0.5348607788085937, 0.53452490234375, 0.5352724609375, 0.5355673828125, 0.53477685546875, 0.5353441162109375, 0.5347553100585938, 0.535225341796875, 0.5340364990234375, 0.5348362426757812, 0.53452392578125, 1.1103118896484374, 0.5353318481445313, 0.5341173706054687, 0.5345740966796875, 0.5343201293945312, 0.5346703491210938, 0.5341255493164062, 0.5354373168945312, 0.534645751953125, 0.5345771484375, 0.5340641479492187, 0.535025634765625, 0.5342791748046875, 0.5352877807617188, 0.5341102294921874, 0.5349335327148438, 0.53408154296875, 0.5348935546875, 0.5341614379882812, 0.5348187866210937, 0.5348075561523438, 0.5348945922851562, 0.5341737060546875, 0.5358602294921875, 0.5343109130859375, 0.53522021484375, 0.5344286499023437, 0.5350133666992187, 0.5340877075195313, 0.5351638793945312, 0.5342811889648438, 0.535573486328125, 0.5346611328125, 0.5351854248046874, 0.5346078491210937, 0.53532568359375, 0.5343160400390625, 0.53486181640625, 0.5339535522460938, 0.5352847290039062, 0.5343467407226562, 0.5350491943359375, 0.5341419677734375, 0.5348751220703125, 0.534150146484375, 0.5347573852539063, 0.5344307250976562, 0.5354004516601563, 0.5344286499023437, 0.5352489013671875, 0.5350963134765625, 0.5354454956054687, 0.537660400390625, 0.5361489868164062, 0.53553662109375, 0.5356922607421875, 0.5343662109375, 0.5351168212890625, 0.535394287109375, 0.5363414916992187, 0.5349365844726562, 0.5353850708007812, 0.5346826171875, 1.110350830078125, 0.5346856689453126, 0.5343385620117187, 0.5345433349609375, 0.5344020385742188, 0.5345515747070313, 0.5340989379882812, 0.5348003540039062, 0.5341388549804688, 0.53479833984375, 0.5344071655273438, 0.5343672485351563, 0.5352960205078126, 0.5340405883789062, 0.5349048461914062, 0.5344491577148438, 0.535067626953125, 0.5343518676757812, 0.5349970092773437, 0.534128662109375, 0.5347153930664063, 0.53432421875, 0.5345413208007812, 0.5351311645507812, 0.5347737426757813, 0.534719482421875, 0.5343641357421876, 0.5350553588867187, 0.5344020385742188, 0.5347225341796875, 0.5345587158203124, 0.5351331787109375, 0.5342013549804687, 0.5347911376953125, 0.5345372314453125, 0.5347225341796875, 0.5342412719726563, 0.5348792114257812, 0.5342689208984375, 0.5345525512695313, 0.5345115966796875, 0.5345474853515625, 0.53495703125, 0.53488232421875, 0.535014404296875, 0.5344368896484375, 0.53495703125, 0.534287353515625, 0.5348126831054687, 0.5351044921875, 0.535436279296875, 0.53448193359375, 0.5350891723632812, 0.5339913940429688, 0.5349908447265626, 0.5347307739257813, 0.5351055297851562, 0.5346611328125, 0.5345126342773437, 0.5350236206054687, 0.5345863647460938, 0.5352509155273437, 0.5350174560546875, 1.1105382080078126, 0.5355950317382813, 0.5344696044921875, 0.5347164306640625, 0.5345341186523438, 0.5346948852539063, 0.5343866577148437, 0.535103515625, 0.5347359008789062, 0.5351536865234375, 0.5343518676757812, 0.5350891723632812, 0.5348259887695312, 0.5349007568359375, 0.5342996215820313, 0.5354127197265625, 0.5349365844726562, 0.5348239135742188, 0.5341798095703125, 0.534992919921875, 0.534329345703125, 0.5348515625, 0.5342095336914062, 0.53524072265625, 0.5343549194335937, 0.534803466796875, 0.534345703125, 0.534719482421875, 
0.5346047973632813, 0.5349662475585938, 0.5341337890625, 0.5350051879882812, 0.5342566528320313, 0.5347942504882812, 0.5347379150390625, 0.5350942993164063, 0.5344153442382813, 0.5352182006835937, 0.534846435546875, 0.5348956298828125, 0.5342689208984375, 0.5350553588867187, 0.5349601440429688, 0.53513525390625, 0.534561767578125, 0.535309326171875, 0.5342945556640625, 0.5351065673828125, 0.5344860229492188, 0.53475634765625, 0.534240234375, 0.5348341674804687, 0.5342648315429688, 0.534993896484375, 0.5347952880859375, 0.5356380004882813, 0.534824951171875, 0.5348976440429688, 0.5344174194335938, 0.5349017333984375, 0.5350840454101562, 0.5351629028320313, 0.5344470825195312, 1.1098358154296875, 0.5345259399414063, 0.5348362426757812, 0.534181884765625, 0.5347102661132812, 0.534091796875, 0.5347164306640625, 0.534054931640625, 0.5361285400390625, 0.5355950317382813, 0.5359862060546875, 0.5356984252929687, 0.5359247436523438, 0.5353123779296876, 0.5349468383789062, 0.53486181640625, 0.5354373168945312, 0.5347164306640625, 0.5351342163085937, 0.5345413208007812, 0.5349837036132813, 0.53418701171875, 0.5347440795898437, 0.5342218017578125, 0.5345996704101562, 0.5342843017578125, 0.5351854248046874, 0.5345218505859375, 0.5348915405273438, 0.5342116088867187, 0.5350205688476563, 0.534297607421875, 0.534667236328125, 0.5343784790039062, 0.5350491943359375, 0.5344635009765625, 0.535109619140625, 0.5345679321289063, 0.5353430786132812, 0.5343150024414063, 0.5344952392578125, 0.5345228881835937, 0.5347676391601562, 0.53477783203125, 0.534740966796875, 0.5348997192382813, 0.5346795654296875, 0.53429248046875, 0.534645751953125, 0.5347973022460938, 0.5346099243164063, 0.5341378784179688, 0.5352919311523437, 0.5342832641601563, 0.5348894653320313, 0.53421875, 0.5347788696289062, 0.534635498046875, 0.5353768920898437, 0.5347850341796875, 0.5351485595703125, 0.5345423583984374, 0.5350031127929687, 1.1105545654296876, 0.5355919189453126, 0.5343006591796875, 0.5348720703125, 0.53427197265625, 0.5345126342773437, 0.5340037231445313, 0.535520263671875, 0.534739990234375, 0.535130126953125, 0.5343836059570313, 0.5350440673828125, 0.5346948852539063, 0.5351239624023437, 0.5344174194335938, 0.5351260375976562, 0.5345014038085938, 0.5347993774414063, 0.534582275390625, 0.5357168579101562, 0.5343109130859375, 0.5353820190429688, 0.5346990356445313, 0.5351629028320313, 0.5347451171875, 0.5354332275390625, 0.5348679809570313, 0.5348095703125, 0.5343733520507813, 0.5351966552734375, 0.5344102172851563, 0.5353021240234375, 0.5340569458007812, 0.534813720703125, 0.5341829223632812, 0.5347389526367188, 0.5341173706054687, 0.5348935546875, 0.5345310668945312, 0.535056396484375, 0.5342576904296875, 0.5349898071289062, 0.5346007080078125, 0.5354352416992187, 0.53414501953125, 0.5347205200195313, 0.5340692749023438, 0.5347993774414063, 0.5341737060546875, 0.53488330078125, 0.5342361450195312, 0.5347962646484375, 0.53441845703125, 0.53486181640625, 0.5344409790039063, 0.5348505859375, 0.5344778442382813, 0.5346652221679687, 0.5343754272460938, 0.5350758666992188, 0.5344163818359375, 0.5350686645507813, 0.5343856811523438, 1.1107747802734376, 0.5348556518554688, 0.5345413208007812, 0.5346017456054688, 0.5344235229492188, 0.5347625122070313, 0.534202392578125, 0.5346734008789062, 0.534513671875, 0.5345167236328126, 0.5345884399414063, 0.5348331298828125, 0.534603759765625, 0.5347123413085938, 0.535151611328125, 0.5338849487304688, 0.5345567016601562, 0.53408154296875, 0.5351454467773438, 0.5341777954101562, 
0.5348731079101563, 0.534076416015625, 0.5348925170898438, 0.5345361938476563, 0.534635498046875, 0.534403076171875, 0.5345423583984374, 0.5348433837890625, 0.5344429931640625, 0.5346826171875, 0.5351076049804687, 0.5349652709960937, 0.5341634521484375, 0.5351157836914062, 0.534329345703125, 0.5346119384765625, 0.5346826171875, 0.5350000610351563, 0.5348444213867187, 0.5346211547851563, 0.5343887329101562, 0.5350768432617188, 0.53532568359375, 0.5349631958007812, 0.5353236694335938, 0.534540283203125, 0.5354454956054687, 0.5352796020507813, 0.5357506713867187, 0.5357987670898438, 0.5363568725585938, 0.5354977416992187, 0.5355079956054688, 0.5346631469726563, 0.5348648681640625, 0.53481982421875, 0.5350850830078125, 0.5346867065429688, 0.534488037109375, 0.5345198364257813, 0.5341511840820312, 0.5348761596679688, 0.5343672485351563, 1.1098511962890625, 0.5344020385742188, 0.5343488159179688, 0.5351004028320312, 0.5345003662109375, 0.5340426025390625, 0.5349335327148438, 0.534129638671875, 0.5345115966796875, 0.534624267578125, 0.5344778442382813, 0.5346898193359375, 0.5347532958984375, 0.5341439819335938, 0.5348208618164062, 0.5342740478515625, 0.534593505859375, 0.5343160400390625, 0.5345218505859375, 0.5341788330078125, 0.5349027709960937, 0.5340282592773438, 0.5352099609375, 0.5348515625, 0.5344050903320312, 0.534667236328125, 0.534846435546875, 0.5345792236328125, 0.5350031127929687, 0.5352366333007812, 0.5349683227539063, 0.5348577270507813, 0.5350656127929687, 0.5350123291015625, 0.5349437255859375, 0.5353164672851562, 0.535109619140625, 0.5345955810546875, 0.5346232299804687, 0.5350543212890625, 0.5351209106445313, 0.5352017822265625, 0.5351526489257813, 0.534813720703125, 0.5347174682617187, 0.53507275390625, 0.5347962646484375, 0.535278564453125, 0.5350348510742188, 0.5344235229492188, 0.5348864135742187, 0.5350287475585938, 0.5347123413085938, 0.5351557006835937, 0.5351854248046874, 0.534824951171875, 0.5352898559570313, 0.5345700073242188, 0.5350369262695313, 0.5348126831054687, 0.5350420532226563, 0.5343068237304688, 0.5352109985351563, 1.1140484619140625, 0.5357752075195312, 0.5347020874023437, 0.5347215576171875, 0.5340579833984375, 0.5347225341796875, 0.5339064331054687, 0.5348648681640625, 0.5343109130859375, 0.5346764526367187, 0.5340579833984375, 0.534751220703125, 0.5342074584960937, 0.53477685546875, 0.5345331420898437, 0.5362677612304687, 0.5344603881835938, 0.535562255859375, 0.534560791015625, 0.5345679321289063, 0.53437646484375, 0.5348444213867187, 0.5341480712890625, 0.5348433837890625, 0.5341439819335938, 0.5348792114257812, 0.5344849853515625, 0.5353707275390625, 0.534392822265625, 0.5349959716796875, 0.534276123046875, 0.5353820190429688, 0.5345740966796875, 0.5356973876953125, 0.534592529296875, 0.5353021240234375, 0.5345812377929687, 0.5350471801757812, 0.5342730102539063, 0.5356431274414063, 0.5344461059570312, 0.5352222900390625, 0.534319091796875, 0.5352243041992187, 0.5342218017578125, 0.5354874877929687, 0.5344635009765625, 0.5353492431640625, 0.5346590576171875, 0.5352969970703125, 0.5345413208007812, 0.5364439086914062, 0.5349744873046876, 0.5354383544921875, 0.5349171142578125, 0.5353850708007812, 0.5352386474609375, 0.5347727661132813, 0.5347440795898437, 0.5347891235351563, 0.53429248046875, 0.5350348510742188, 0.5342208251953126, 1.1111966552734376, 0.5346058349609375, 0.5340743408203125, 0.5344491577148438, 0.5340753784179687, 0.5344573364257813, 0.5351997680664062, 0.5346652221679687, 0.5347676391601562, 0.5348843383789063, 0.5340805053710938, 
0.5348229370117188, 0.5342904052734375, 0.5346806030273438, 0.5341122436523438, 0.5346744384765625, 0.5341951904296875, 0.53463037109375, 0.5346017456054688, 0.5344931640625, 0.5348495483398438, 0.5341675415039062, 0.534192138671875, 0.5346129760742188, 0.5353421020507813, 0.5345730590820312, 0.5347010498046875, 0.5343364868164062, 0.53553564453125, 0.5343969116210937, 0.534887451171875, 0.5349539794921875, 0.5349130249023437, 0.5349970092773437, 0.5343682861328125, 0.534813720703125, 0.5341798095703125, 0.5349908447265626, 0.5340886840820313, 0.5348864135742187, 0.53427197265625, 0.5349468383789062, 0.5341091918945312, 0.5351629028320313, 0.5349376220703125, 0.5348556518554688, 0.53496728515625, 0.5342669067382813, 0.5347440795898437, 0.53432421875, 0.53492529296875, 0.5341470947265625, 0.5350174560546875, 0.5343016967773437, 0.5352642822265625, 0.5343908081054688, 0.535689208984375, 0.535041015625, 0.5345894165039062, 0.5347891235351563, 0.5347962646484375, 0.5358428344726562, 0.5355755615234375]",tokens/s,1.8416089513521527,,,,,,,, -4bit-awq-gemm-eager,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,huggyllama/llama-30b,huggyllama/llama-30b,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.0,,0.30.1,,,,1.19.2,,,,0.11.1,,,,,,,,,,,,,,,,,,,,,,,,,,,MB,6416.572416,20902.838272,0.0,20256.391168,19273.842688,s,10,27.008011230468746,2.700801123046875,0.0043850724799974665,2.700565551757813,2.7060529296875,2.7063339599609373,2.7065587841796876,"[2.703509521484375, 2.7029794921875, 2.69485400390625, 2.69747705078125, 2.696227783203125, 2.69636279296875, 2.698151611328125, 2.705843505859375, 2.706614990234375, 2.705990478515625]",tokens/s,94.7866904436106,kWh,3.1842278407679666e-05,1.7448346880028112e-05,0.00015081939843319958,0.00020011002372090735,tokens/kWh,1279296.2353402255,MB,6420.922368,20902.838272,0.0,20256.391168,19862.693376,s,10,1586.4640625,158.64640624999998,0.022306639855444715,158.64365625,158.67640156250002,158.68144296875,158.68547609375,"[158.63340625, 158.605671875, 158.67528125, 158.686484375, 158.659984375, 158.625515625, 158.642296875, 158.6395, 158.65090625, 158.645015625]",tokens/s,0.3971095311211943,kWh,0.0018735859500534004,0.0010268917923910701,0.0086674246283786,0.011567902370823072,tokens/kWh,5446.104054171531,,s,629,1608.2408713378911,2.556821735036392,0.3208393016972219,2.518036376953125,2.5194736328125003,2.520285693359375,5.21544693359375,"[2.518319091796875, 2.519310302734375, 2.51766064453125, 2.517339111328125, 2.51743017578125, 2.51803955078125, 2.517208984375, 2.517015625, 2.516982666015625, 2.5182587890625, 2.51766064453125, 2.517487548828125, 2.5175009765625, 2.51837646484375, 2.5176474609375, 2.517837890625, 2.517665771484375, 2.51789208984375, 2.517560302734375, 2.517328857421875, 2.517376953125, 2.51846044921875, 2.517990478515625, 2.517222412109375, 2.517308349609375, 2.518451171875, 2.5171455078125, 2.518161376953125, 2.51881884765625, 2.518177734375, 2.517626953125, 2.5173310546875, 2.51724072265625, 2.517538818359375, 2.51719384765625, 2.5173740234375, 2.517359619140625, 
2.518120361328125, 2.517709716796875, 2.51768310546875, 2.51764013671875, 2.51835400390625, 2.51860791015625, 2.51725927734375, 2.518401123046875, 2.5179462890625, 2.518530029296875, 2.51753466796875, 2.517834716796875, 2.518044677734375, 2.517002197265625, 2.5178828125, 2.51770361328125, 2.5193173828125, 2.519462890625, 2.51964306640625, 2.51835595703125, 2.51885986328125, 2.51799560546875, 2.51936767578125, 2.519690185546875, 2.5200732421875, 5.22134814453125, 2.517864501953125, 2.51825244140625, 2.51934716796875, 2.5177138671875, 2.5171435546875, 2.517161865234375, 2.51721826171875, 2.51709326171875, 2.517622802734375, 2.518835205078125, 2.51759912109375, 2.517168212890625, 2.517455810546875, 2.518162353515625, 2.517591064453125, 2.51749365234375, 2.517919677734375, 2.517905517578125, 2.51747216796875, 2.51761669921875, 2.517444580078125, 2.51732373046875, 2.516890625, 2.517098388671875, 2.517686279296875, 2.517358642578125, 2.516806640625, 2.516729736328125, 2.51791259765625, 2.517181396484375, 2.516504638671875, 2.5171435546875, 2.517117919921875, 2.517271484375, 2.516725830078125, 2.5167197265625, 2.5176513671875, 2.517125, 2.51666015625, 2.51749365234375, 2.518317138671875, 2.5185751953125, 2.51751025390625, 2.517790771484375, 2.517456787109375, 2.5185126953125, 2.517084228515625, 2.517465087890625, 2.51748974609375, 2.517200927734375, 2.51684033203125, 2.51730126953125, 2.518183837890625, 2.518331298828125, 2.51782861328125, 2.51742919921875, 2.51755615234375, 2.517507080078125, 2.517507080078125, 2.517161865234375, 2.517621826171875, 2.517966796875, 5.21453759765625, 2.5204541015625, 2.52109814453125, 2.519462890625, 2.518677490234375, 2.519301025390625, 2.518960205078125, 2.516992919921875, 2.517800048828125, 2.517873779296875, 2.51792578125, 2.517770263671875, 2.5189140625, 2.517824462890625, 2.517718994140625, 2.518151123046875, 2.51847998046875, 2.518257568359375, 2.518415283203125, 2.51804052734375, 2.518096923828125, 2.5183427734375, 2.518391845703125, 2.517865478515625, 2.519243896484375, 2.51814501953125, 2.518434814453125, 2.518415283203125, 2.518769775390625, 2.519129150390625, 2.518907958984375, 2.519370849609375, 2.5190185546875, 2.518751220703125, 2.52101416015625, 2.518179931640625, 2.519162841796875, 2.518454345703125, 2.519679931640625, 2.5194833984375, 2.5204951171875, 2.518433837890625, 2.51841650390625, 2.5182412109375, 2.519734375, 2.519425048828125, 2.518444091796875, 2.51785205078125, 2.518681640625, 2.518297607421875, 2.518287353515625, 2.51755517578125, 2.519033935546875, 2.518720458984375, 2.5197587890625, 2.519648193359375, 2.518752197265625, 2.51768212890625, 2.51812255859375, 2.51717626953125, 2.51780908203125, 2.517684326171875, 2.5175439453125, 5.2187802734375, 2.519068603515625, 2.518744140625, 2.51738525390625, 2.518098876953125, 2.51873681640625, 2.519017578125, 2.51888427734375, 2.519688232421875, 2.52031396484375, 2.518892578125, 2.519817138671875, 2.5199248046875, 2.51848388671875, 2.518221923828125, 2.517916748046875, 2.519314453125, 2.518415283203125, 2.517685302734375, 2.51694287109375, 2.51803857421875, 2.52027587890625, 2.520306640625, 2.518614013671875, 2.520572998046875, 2.517498779296875, 2.517350341796875, 2.5175869140625, 2.51862841796875, 2.517562255859375, 2.51831201171875, 2.51706982421875, 2.518307861328125, 2.517770263671875, 2.518127685546875, 2.517791748046875, 2.51879833984375, 2.51784912109375, 2.518008056640625, 2.5185615234375, 2.518908935546875, 2.5175224609375, 2.517482421875, 2.517506103515625, 2.5204541015625, 
2.518011962890625, 2.521114501953125, 2.5210244140625, 2.520853515625, 2.52010693359375, 2.52200244140625, 2.52052880859375, 2.52000048828125, 2.51896728515625, 2.519782470703125, 2.51968603515625, 2.51883935546875, 2.51808251953125, 2.51833349609375, 2.517741455078125, 2.51945068359375, 2.518738037109375, 2.51962060546875, 5.21556396484375, 2.51751025390625, 2.517732421875, 2.51755712890625, 2.51782666015625, 2.51806005859375, 2.518541259765625, 2.517211181640625, 2.51755322265625, 2.516893798828125, 2.517453857421875, 2.517432373046875, 2.517993408203125, 2.517221435546875, 2.517614501953125, 2.518161376953125, 2.5184521484375, 2.518592529296875, 2.517885986328125, 2.517688232421875, 2.51909326171875, 2.51841015625, 2.518773681640625, 2.518697998046875, 2.51839892578125, 2.518539306640625, 2.51853515625, 2.51841845703125, 2.51909326171875, 2.5179013671875, 2.5176484375, 2.5175888671875, 2.51825146484375, 2.518151123046875, 2.51841015625, 2.518299560546875, 2.518412353515625, 2.518192138671875, 2.5184072265625, 2.518162353515625, 2.519458740234375, 2.518036376953125, 2.517877685546875, 2.518980712890625, 2.520924072265625, 2.518964111328125, 2.520530029296875, 2.520292236328125, 2.5202412109375, 2.519448486328125, 2.519780517578125, 2.519730224609375, 2.5202646484375, 2.5176298828125, 2.51782763671875, 2.51780908203125, 2.51826171875, 2.51890185546875, 2.5179423828125, 2.518106201171875, 2.518285400390625, 2.5179658203125, 2.518643798828125, 5.21514599609375, 2.51869384765625, 2.518576171875, 2.51751318359375, 2.5187890625, 2.51831298828125, 2.51842138671875, 2.518813720703125, 2.51734619140625, 2.51774267578125, 2.51757568359375, 2.51732177734375, 2.51702783203125, 2.516744140625, 2.5167646484375, 2.517622802734375, 2.516887451171875, 2.5171630859375, 2.517233642578125, 2.517984375, 2.517812255859375, 2.517107666015625, 2.517927978515625, 2.5184912109375, 2.51740478515625, 2.517085205078125, 2.517383056640625, 2.518287353515625, 2.518308837890625, 2.51829248046875, 2.517562255859375, 2.51860888671875, 2.517875732421875, 2.517098388671875, 2.517589111328125, 2.518604736328125, 2.517823486328125, 2.517308349609375, 2.518150146484375, 2.518289306640625, 2.51751220703125, 2.51789208984375, 2.518724609375, 2.5184931640625, 2.5175673828125, 2.51732373046875, 2.517525390625, 2.5192119140625, 2.51820849609375, 2.519125, 2.518769775390625, 2.5183896484375, 2.5181767578125, 2.51797705078125, 2.51743017578125, 2.517621826171875, 2.517073974609375, 2.517191650390625, 2.51754296875, 2.517603271484375, 2.517738525390625, 2.51803857421875, 2.518088623046875, 5.21595068359375, 2.518022216796875, 2.518259765625, 2.517396484375, 2.517515380859375, 2.5171640625, 2.51678515625, 2.51755517578125, 2.51660693359375, 2.51696337890625, 2.5178798828125, 2.51924169921875, 2.518275146484375, 2.516697021484375, 2.51698486328125, 2.517755859375, 2.51724072265625, 2.518010986328125, 2.517244873046875, 2.5173779296875, 2.517291015625, 2.517590087890625, 2.518003662109375, 2.518558837890625, 2.519623779296875, 2.519237548828125, 2.520456298828125, 2.51768115234375, 2.518234130859375, 2.519458740234375, 2.52040283203125, 2.519605224609375, 2.518929443359375, 2.520151123046875, 2.51915869140625, 2.519528564453125, 2.51947119140625, 2.5201142578125, 2.519734375, 2.518724609375, 2.51780517578125, 2.51755224609375, 2.51803857421875, 2.5180732421875, 2.517747802734375, 2.517359619140625, 2.518424560546875, 2.51807421875, 2.518046630859375, 2.51700439453125, 2.51778759765625, 2.517309326171875, 2.51774365234375, 
2.51673291015625, 2.517708740234375, 2.517607421875, 2.518066162109375, 2.51755517578125, 2.51816845703125, 2.518012939453125, 2.518370361328125, 2.51797412109375, 2.51844189453125, 5.22497119140625, 2.517222412109375, 2.51845947265625, 2.51675537109375, 2.517399658203125, 2.517277587890625, 2.518365234375, 2.517622802734375, 2.5181572265625, 2.517368896484375, 2.51894580078125, 2.51837548828125, 2.51816845703125, 2.517992431640625, 2.517802001953125, 2.5172060546875, 2.5187685546875, 2.517329833984375, 2.519754638671875, 2.5192744140625, 2.51748046875, 2.516811767578125, 2.517927001953125, 2.51881884765625, 2.5180302734375, 2.516874267578125, 2.518129638671875, 2.517221435546875, 2.517310546875, 2.517367919921875, 2.517562255859375, 2.516991943359375, 2.518096923828125, 2.516927490234375, 2.5179677734375, 2.517698486328125, 2.51718359375, 2.517350341796875, 2.51825244140625, 2.517243896484375, 2.5187900390625, 2.52037109375, 2.519783447265625, 2.51812255859375, 2.51873291015625, 2.5181962890625, 2.5185771484375, 2.518066162109375, 2.518571044921875, 2.51795361328125, 2.51880029296875, 2.5177548828125, 2.518213623046875, 2.51785009765625, 2.519160888671875, 2.518510498046875, 2.51831201171875, 2.51875830078125, 2.51904931640625, 2.51761865234375, 2.5184912109375, 2.51787158203125, 2.519330810546875, 5.22598291015625, 2.518825927734375, 2.518971435546875, 2.51806103515625, 2.518127685546875, 2.518425537109375, 2.51801806640625, 2.516579345703125, 2.51692041015625, 2.517222412109375, 2.517351318359375, 2.517392333984375, 2.51871630859375, 2.518453369140625, 2.518960205078125, 2.51763720703125, 2.517210205078125, 2.517708740234375, 2.517718017578125, 2.517530517578125, 2.51915478515625, 2.518562744140625, 2.51778759765625, 2.517306396484375, 2.5181123046875, 2.518414306640625, 2.520138671875, 2.517626953125, 2.518436767578125, 2.518436767578125, 2.518345703125, 2.517001220703125, 2.51755615234375, 2.518391845703125, 2.518742919921875, 2.51820751953125, 2.517982177734375, 2.52077978515625, 2.5205986328125, 2.51875634765625, 2.518046630859375, 2.5188701171875, 2.5200302734375, 2.517751708984375, 2.51822998046875, 2.51795458984375, 2.518422607421875, 2.520731689453125, 2.5178388671875, 2.518604736328125, 2.518803466796875, 2.517551025390625, 2.517665771484375, 2.518455322265625, 2.5181337890625, 2.516989013671875, 2.517697509765625, 2.518129638671875, 2.517895263671875, 2.517632080078125, 2.51766162109375, 2.518906982421875, 2.519458740234375, 5.22549267578125, 2.51939111328125, 2.518699951171875, 2.51726025390625, 2.517559326171875, 2.51808349609375, 2.5171669921875, 2.51778857421875, 2.517769287109375, 2.5189755859375, 2.520603759765625, 2.517000244140625, 2.5173525390625, 2.517968994140625, 2.517775390625, 2.517918701171875, 2.517655517578125, 2.51848193359375, 2.517267333984375, 2.517482421875, 2.51776708984375, 2.5178828125, 2.5175009765625, 2.517536865234375, 2.518390869140625, 2.51835498046875, 2.5190810546875, 2.519309326171875, 2.5178818359375, 2.51835791015625, 2.51839697265625, 2.51793505859375, 2.517909423828125, 2.517781494140625, 2.517467041015625, 2.517106689453125, 2.51734228515625, 2.518115234375, 2.518010986328125, 2.517520263671875, 2.51814404296875, 2.517705810546875, 2.517486572265625, 2.517609375, 2.51786865234375, 2.518467529296875, 2.51749169921875, 2.517338134765625, 2.51806005859375, 2.5187646484375, 2.518148193359375, 2.51829248046875, 2.51822705078125, 2.518614990234375, 2.518023193359375, 2.517477294921875, 2.51810107421875, 2.518518798828125, 2.517971923828125, 
2.517927978515625, 2.518328369140625, 2.5179638671875, 2.51816455078125]",tokens/s,0.39111056758353424,,,,,,,, -4bit-awq-gemm-eager,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,openai-community/gpt2-large,openai-community/gpt2-large,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.1,,0.30.1,,,,1.19.2,,,,0.11.1,,,,,,,,,,,,,,,,,,,,,,,,,,,MB,1263.47264,2645.03296,0.0,1998.585856,1692.285952,s,10,0.24304076957702642,0.02430407695770264,0.0005675228251535707,0.024276448249816895,0.024567701721191406,0.025167994689941405,0.025648229064941408,"[0.025768287658691408, 0.024286592483520508, 0.02426630401611328, 0.024318464279174806, 0.024434303283691405, 0.02419811248779297, 0.02437276840209961, 0.023690656661987306, 0.024211936950683594, 0.023493343353271485]",tokens/s,10533.212203266434,kWh,2.780947089195252e-07,1.5238005259914857e-07,8.187235307952231e-07,1.2491982923138968e-06,tokens/kWh,204931436.0859474,MB,1263.767552,2645.03296,0.0,1998.585856,1740.091904,s,10,13.987102905273439,1.3987102905273439,0.014214042604732734,1.4048785400390624,1.412000280761719,1.4145778381347656,1.416639884033203,"[1.4171553955078124, 1.4012147216796875, 1.405033935546875, 1.411427490234375, 1.4068282470703124, 1.40472314453125, 1.405282958984375, 1.3729639892578125, 1.3760699462890624, 1.386403076171875]",tokens/s,45.04149317171867,kWh,1.62772132953008e-05,8.91975746257787e-06,3.22519140368052e-05,5.7448884794683886e-05,tokens/kWh,1096627.0315804249,,s,629,14.179969017028807,0.022543670933273148,0.0029405698135304874,0.02225868797302246,0.02251775932312012,0.02303631362915039,0.046451998291015664,"[0.023488512039184572, 0.023197696685791015, 0.02246143913269043, 0.02229555130004883, 0.022401023864746093, 0.02228326416015625, 0.02222489547729492, 0.02231091117858887, 0.0225218563079834, 0.023541759490966797, 0.023616512298583983, 0.02328985595703125, 0.023191551208496093, 0.023250944137573244, 0.023595008850097656, 0.023343103408813477, 0.023159807205200195, 0.02307788848876953, 0.023666688919067383, 0.023583744049072267, 0.023846912384033202, 0.022800384521484376, 0.02242355155944824, 0.022296575546264647, 0.02229043197631836, 0.022129663467407225, 0.022167552947998048, 0.022289407730102538, 0.022231039047241212, 0.022359039306640623, 0.022163455963134765, 0.022146047592163084, 0.022198272705078126, 0.022213632583618165, 0.022329343795776366, 0.021812223434448243, 0.021354496002197267, 0.02143436813354492, 0.02147225570678711, 0.021506048202514647, 0.021397504806518555, 0.02148659133911133, 0.02149478340148926, 0.021702655792236326, 0.023331840515136718, 0.022391807556152343, 0.022338560104370117, 0.022211584091186523, 0.022155263900756835, 0.022253568649291993, 0.02229145622253418, 0.022244352340698242, 0.022239231109619142, 0.02221670341491699, 0.022311935424804686, 0.022478847503662108, 0.022331392288208008, 0.02230681610107422, 0.022419456481933595, 0.02239897537231445, 0.02230169677734375, 0.0223242244720459, 0.04737433624267578, 0.022154239654541014, 0.02226483154296875, 0.02230271911621094, 0.02144972801208496, 0.02142617607116699, 
0.021544960021972655, 0.021599231719970705, 0.02222489547729492, 0.022396928787231447, 0.022296575546264647, 0.022246400833129884, 0.022206464767456056, 0.022237184524536133, 0.022380544662475587, 0.022543359756469726, 0.022381568908691408, 0.022289407730102538, 0.022167552947998048, 0.02228428840637207, 0.022130687713623046, 0.022371328353881836, 0.02229145622253418, 0.022179840087890625, 0.022157312393188477, 0.022328319549560546, 0.02233344078063965, 0.02234982490539551, 0.022245376586914063, 0.022261760711669923, 0.02227712059020996, 0.02226483154296875, 0.02225868797302246, 0.0223191032409668, 0.022218751907348632, 0.022326271057128907, 0.022303743362426756, 0.02248806381225586, 0.022181888580322266, 0.022226943969726562, 0.02222591972351074, 0.022255615234375, 0.02224844741821289, 0.022112255096435548, 0.022326271057128907, 0.022156288146972656, 0.022769664764404295, 0.022261760711669923, 0.02227507209777832, 0.0222740478515625, 0.022338560104370117, 0.02231500816345215, 0.02227609634399414, 0.022260736465454102, 0.02228326416015625, 0.02230784034729004, 0.022261760711669923, 0.022268928527832032, 0.02252390480041504, 0.02224947166442871, 0.02229555130004883, 0.0222423038482666, 0.02222591972351074, 0.04691865539550781, 0.022223871231079103, 0.022368255615234374, 0.02228121566772461, 0.02227712059020996, 0.02224844741821289, 0.022231039047241212, 0.022236160278320313, 0.02225049591064453, 0.022305791854858398, 0.022199296951293947, 0.022161407470703123, 0.02229350471496582, 0.02231603240966797, 0.022381568908691408, 0.022329343795776366, 0.022428672790527345, 0.022311935424804686, 0.022203392028808593, 0.02231705665588379, 0.022589439392089843, 0.02243891143798828, 0.022245376586914063, 0.02247372817993164, 0.022378496170043945, 0.02221670341491699, 0.022323200225830078, 0.02222489547729492, 0.022269952774047853, 0.022195199966430663, 0.022361087799072265, 0.0221942081451416, 0.022280160903930663, 0.022350847244262697, 0.022339584350585938, 0.02242355155944824, 0.022280191421508787, 0.022199296951293947, 0.022214656829833986, 0.022261760711669923, 0.022237184524536133, 0.0222423038482666, 0.022214656829833986, 0.022581247329711913, 0.022334463119506837, 0.022419456481933595, 0.022260736465454102, 0.022581247329711913, 0.0222423038482666, 0.02211020851135254, 0.022381568908691408, 0.022228992462158204, 0.02225868797302246, 0.02223411178588867, 0.022211584091186523, 0.022369279861450195, 0.02228121566772461, 0.02229452705383301, 0.022181888580322266, 0.022215679168701173, 0.022194175720214843, 0.022372352600097657, 0.022351871490478514, 0.04692172622680664, 0.02230988883972168, 0.02225868797302246, 0.02221772766113281, 0.022237184524536133, 0.02212761688232422, 0.02230169677734375, 0.022252544403076172, 0.02224127960205078, 0.02227302360534668, 0.022236160278320313, 0.022244352340698242, 0.022401023864746093, 0.022337535858154296, 0.022404096603393556, 0.02231808090209961, 0.022379520416259766, 0.022289407730102538, 0.023015424728393553, 0.022839296340942384, 0.023625728607177734, 0.02254643249511719, 0.022501375198364256, 0.022212608337402344, 0.02230886459350586, 0.022192127227783204, 0.02186751937866211, 0.02225868797302246, 0.022220800399780274, 0.022244352340698242, 0.02232729530334473, 0.022322175979614257, 0.022200319290161134, 0.02231603240966797, 0.022213632583618165, 0.022260736465454102, 0.02225971221923828, 0.022184959411621095, 0.022322175979614257, 0.022223871231079103, 0.02229350471496582, 0.022244352340698242, 0.02253824043273926, 0.02221772766113281, 
0.022656000137329102, 0.022313983917236328, 0.02267238426208496, 0.022330400466918945, 0.022226911544799804, 0.02228326416015625, 0.026193920135498046, 0.02267852783203125, 0.022219776153564453, 0.022269952774047853, 0.022245376586914063, 0.022183935165405275, 0.022200319290161134, 0.022311935424804686, 0.022221824645996095, 0.022288383483886717, 0.02233241653442383, 0.02232524871826172, 0.02229452705383301, 0.04700876617431641, 0.022387712478637696, 0.022311935424804686, 0.022374399185180666, 0.02227712059020996, 0.022320127487182616, 0.022426624298095704, 0.02230271911621094, 0.0225167350769043, 0.022564863204956053, 0.02248806381225586, 0.022361087799072265, 0.022419456481933595, 0.022410240173339844, 0.022541311264038084, 0.02245737648010254, 0.022437856674194335, 0.02248192024230957, 0.02223411178588867, 0.022358015060424806, 0.02230886459350586, 0.022378496170043945, 0.02230784034729004, 0.022552576065063477, 0.022374399185180666, 0.022311935424804686, 0.02235699272155762, 0.022362112045288086, 0.02224742317199707, 0.02226790428161621, 0.022237184524536133, 0.022477823257446287, 0.022300703048706055, 0.022344671249389648, 0.02226688003540039, 0.022345727920532226, 0.022303743362426756, 0.022238208770751954, 0.022254592895507814, 0.022183935165405275, 0.02230169677734375, 0.022226943969726562, 0.02231603240966797, 0.022246400833129884, 0.02222591972351074, 0.022192127227783204, 0.022269952774047853, 0.02223411178588867, 0.022280191421508787, 0.02234060859680176, 0.02229555130004883, 0.02225766372680664, 0.02227507209777832, 0.022221824645996095, 0.022200319290161134, 0.022208511352539064, 0.02225049591064453, 0.022527999877929687, 0.02226688003540039, 0.022324256896972657, 0.022201311111450194, 0.02225868797302246, 0.02229862403869629, 0.04681727981567383, 0.0222873592376709, 0.022252544403076172, 0.02227609634399414, 0.02228531265258789, 0.022226943969726562, 0.022236160278320313, 0.02235699272155762, 0.022329343795776366, 0.022527999877929687, 0.022365184783935548, 0.022367231369018553, 0.02240716743469238, 0.022360063552856444, 0.022312959671020507, 0.022500352859497072, 0.02227712059020996, 0.02225663948059082, 0.022190080642700196, 0.02223411178588867, 0.02221670341491699, 0.022297599792480468, 0.022380544662475587, 0.02223411178588867, 0.0222740478515625, 0.022218751907348632, 0.02250547218322754, 0.022362112045288086, 0.022189056396484375, 0.022197248458862305, 0.022175743103027345, 0.0222423038482666, 0.02221670341491699, 0.02226585578918457, 0.022161407470703123, 0.022182912826538087, 0.022428672790527345, 0.022236160278320313, 0.02267344093322754, 0.022254560470581056, 0.022212608337402344, 0.022169599533081053, 0.022261760711669923, 0.022228992462158204, 0.022177791595458983, 0.022404096603393556, 0.022181888580322266, 0.022383615493774413, 0.02244915199279785, 0.022458368301391602, 0.022296575546264647, 0.022377471923828125, 0.022180864334106445, 0.02232729530334473, 0.02223308753967285, 0.0223191032409668, 0.022232063293457033, 0.022355968475341798, 0.022238208770751954, 0.022271999359130858, 0.02221670341491699, 0.022223871231079103, 0.022175743103027345, 0.04688896179199219, 0.022303743362426756, 0.022354944229125977, 0.022297599792480468, 0.022215679168701173, 0.022192127227783204, 0.022171648025512695, 0.02224844741821289, 0.02211327934265137, 0.02228531265258789, 0.023112703323364257, 0.023995391845703123, 0.02248089599609375, 0.02227609634399414, 0.02220134353637695, 0.022296575546264647, 0.022337535858154296, 0.02222489547729492, 0.022120447158813478, 
0.022183935165405275, 0.02221670341491699, 0.022187007904052734, 0.022220800399780274, 0.022237184524536133, 0.02226790428161621, 0.022206464767456056, 0.02225971221923828, 0.022140928268432617, 0.02229452705383301, 0.02228531265258789, 0.02229350471496582, 0.022176767349243166, 0.022361087799072265, 0.02225152015686035, 0.022210559844970702, 0.022215679168701173, 0.02226380729675293, 0.022187007904052734, 0.022213632583618165, 0.02224332809448242, 0.022754304885864256, 0.02226585578918457, 0.02221772766113281, 0.022206464767456056, 0.02226483154296875, 0.021966848373413086, 0.02205183982849121, 0.022099967956542968, 0.022203392028808593, 0.022363136291503907, 0.022364160537719727, 0.022344703674316405, 0.02223308753967285, 0.022335487365722655, 0.0224399356842041, 0.022565887451171874, 0.0225218563079834, 0.022244352340698242, 0.02223411178588867, 0.02225663948059082, 0.022342655181884767, 0.02221878433227539, 0.02215318489074707, 0.04551270294189453, 0.021498880386352538, 0.021541887283325196, 0.021501951217651367, 0.021432319641113282, 0.021425151824951173, 0.02147327995300293, 0.02146406364440918, 0.021530624389648437, 0.021445632934570313, 0.021526527404785157, 0.021679103851318358, 0.02146611213684082, 0.021538816452026367, 0.0215285758972168, 0.021593088150024413, 0.021651456832885742, 0.02127257537841797, 0.021340160369873046, 0.021440511703491212, 0.021550079345703126, 0.02147737693786621, 0.021549055099487305, 0.02142207908630371, 0.02148249626159668, 0.021562368392944335, 0.021440511703491212, 0.02149171257019043, 0.021424127578735352, 0.021378047943115236, 0.021389312744140625, 0.021445632934570313, 0.02142720031738281, 0.02143948745727539, 0.021332992553710937, 0.021432319641113282, 0.021433343887329103, 0.02142617607116699, 0.02143129539489746, 0.02147327995300293, 0.02142617607116699, 0.021556224822998047, 0.021329919815063478, 0.021178367614746094, 0.02149478340148926, 0.02295091247558594, 0.024001535415649415, 0.02289151954650879, 0.022328319549560546, 0.02247270393371582, 0.022336511611938475, 0.022393856048583984, 0.022271999359130858, 0.02305023956298828, 0.0224399356842041, 0.02233241653442383, 0.022658048629760744, 0.022633472442626954, 0.022371360778808594, 0.022270944595336913, 0.02266726493835449, 0.022451200485229493, 0.02231091117858887, 0.04704051208496094, 0.022401023864746093, 0.022451200485229493, 0.022221824645996095, 0.022182912826538087, 0.023061504364013673, 0.022665216445922853, 0.022202367782592772, 0.02227302360534668, 0.022352895736694335, 0.022350847244262697, 0.022240255355834963, 0.02226585578918457, 0.022435840606689454, 0.022339584350585938, 0.023191551208496093, 0.022448160171508788, 0.022245344161987306, 0.022419456481933595, 0.02229555130004883, 0.022278144836425783, 0.022253568649291993, 0.022322175979614257, 0.02143539237976074, 0.021354496002197267, 0.021412864685058593, 0.021437440872192383, 0.021246976852416992, 0.021206016540527343, 0.021312511444091797, 0.02145996856689453, 0.021396480560302734, 0.02141900825500488, 0.021416959762573243, 0.021406719207763672, 0.02141798400878906, 0.021346303939819337, 0.021408767700195314, 0.02161664009094238, 0.02151219177246094, 0.021358591079711914, 0.021411840438842773, 0.021358591079711914, 0.021420032501220702, 0.02142416000366211, 0.021448671340942384, 0.021368831634521485, 0.02146611213684082, 0.021394432067871092, 0.02146099281311035, 0.021359615325927735, 0.021428224563598632, 0.021392383575439454, 0.021569536209106444, 0.02145996856689453, 0.02147020721435547, 0.0214835205078125, 
0.021580799102783203, 0.02149478340148926, 0.02230169677734375, 0.0222873592376709, 0.022323200225830078, 0.02229964828491211, 0.045434879302978515, 0.02152448081970215, 0.02148044776916504, 0.021498880386352538, 0.021572608947753907, 0.021533695220947266, 0.021521408081054686, 0.02148454475402832, 0.02149990463256836, 0.02147737693786621, 0.021549055099487305, 0.02145484733581543, 0.02142207908630371, 0.02142310333251953, 0.021547008514404296, 0.021576704025268553, 0.021511167526245118, 0.021346303939819337, 0.021358591079711914, 0.02142720031738281, 0.021970943450927736, 0.02230169677734375, 0.022139904022216796, 0.022558719635009765, 0.022374399185180666, 0.022157312393188477, 0.022334463119506837, 0.022297599792480468, 0.02227712059020996, 0.022139904022216796, 0.02226585578918457, 0.022194175720214843, 0.022185983657836913, 0.022076416015625, 0.022228992462158204, 0.02224844741821289, 0.02190336036682129, 0.022204416275024414, 0.022226943969726562, 0.022199296951293947, 0.022120447158813478, 0.02222591972351074, 0.02211327934265137, 0.02224745559692383, 0.022212575912475586, 0.022345727920532226, 0.022420480728149415, 0.022236160278320313, 0.022222848892211915, 0.02214297676086426, 0.02222591972351074, 0.022152191162109376, 0.02224127960205078, 0.022139904022216796, 0.022162431716918944, 0.022288383483886717, 0.022170623779296874, 0.02231705665588379, 0.022303743362426756, 0.022345727920532226, 0.0222423038482666, 0.022157312393188477, 0.02229964828491211]",tokens/s,44.35834797978967,,,,,,,, -4bit-awq-gemm-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,tiiuae/falcon-180B,tiiuae/falcon-180B,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 304, in hf_raise_for_status - response.raise_for_status() - File ""/usr/local/lib/python3.10/dist-packages/requests/models.py"", line 1024, in raise_for_status - raise HTTPError(http_error_msg, response=self) -requests.exceptions.HTTPError: 403 Client Error: Forbidden for url: https://huggingface.co/tiiuae/falcon-180B/resolve/main/config.json - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1722, in _get_metadata_or_catch_error - metadata = get_hf_file_metadata(url=url, proxies=proxies, timeout=etag_timeout, headers=headers) - File 
""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1645, in get_hf_file_metadata - r = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 372, in _request_wrapper - response = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 396, in _request_wrapper - hf_raise_for_status(response) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 367, in hf_raise_for_status - raise HfHubHTTPError(message, response=response) from e -huggingface_hub.utils._errors.HfHubHTTPError: (Request ID: Root=1-66949905-6ad6100d31e8154424405ed6;ff96113b-425d-487e-9c7c-98986d9bb0e1) - -403 Forbidden: Please enable access to public gated repositories in your fine-grained token settings to view this repository.. -Cannot access content at: https://huggingface.co/tiiuae/falcon-180B/resolve/main/config.json. -If you are trying to create or update content,make sure you have a token with the `write` role. - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1221, in hf_hub_download - return _hf_hub_download_to_cache_dir( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1325, in _hf_hub_download_to_cache_dir - _raise_on_head_call_error(head_call_error, force_download, local_files_only) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1826, in _raise_on_head_call_error - raise LocalEntryNotFoundError( -huggingface_hub.utils._errors.LocalEntryNotFoundError: An error happened while trying to locate the file on the Hub and we cannot find the requested files in the local cache. Please check your connection and try again or make sure your Internet connection is on. 
- -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 445, in cached_file - raise EnvironmentError( -OSError: We couldn't connect to 'https://huggingface.co' to load this file, couldn't find it in the cached files and it looks like tiiuae/falcon-180B is not the path to a directory containing a file named config.json. -Checkout your internet connection or see how to run the library in offline mode at 'https://huggingface.co/docs/transformers/installation#offline-mode'. 
- -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemm-eager,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,Deci/DeciLM-7B,,cuda,0,42,,,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.0,217063f5c507ed7cc255df7e1f64c4333a0b4dfe,4.40.2,,0.30.1,,,,1.19.2,,,,0.10.0,,,,,,,,,,,,,,,,,,,,,,,,,,,MB,1674.596352,5516.034048,0.0,4869.586944,4743.593472,s,10,6.137113647460938,0.6137113647460938,0.0032895717697753717,0.6129547424316406,0.6153997436523437,0.6190885131835938,0.6220395288085938,"[0.6227772827148438, 0.6144583129882812, 0.61261669921875, 0.61060546875, 0.612577392578125, 0.6105811157226563, 0.6121093139648438, 0.6145800170898438, 0.6132927856445313, 0.6135152587890625]",tokens/s,417.13420136176387,kWh,7.2172448039054885e-06,3.954746732982612e-06,3.355963469082604e-05,4.473162622771414e-05,tokens/kWh,5723020.189268045,MB,1674.596352,5516.034048,0.0,4869.586944,4769.651712,s,10,361.12618359375006,36.112618359375006,0.009999043540991457,36.112980468749996,36.125964453125,36.127720507812505,36.1291253515625,"[36.1294765625, 36.0964296875, 36.12557421875, 36.1174140625, 36.11404296875, 36.1114296875, 36.11191796875, 36.09769140625, 36.10714453125, 36.1150625]",tokens/s,1.7445425688343892,kWh,0.00042614284929302004,0.0002335616249413365,0.0019425026683799653,0.0026022071426143214,tokens/kWh,24210.217153084406,,s,629,366.0647033081055,0.5819788605852233,0.07282065333686359,0.5730969848632812,0.5741784912109374,0.5745491943359375,1.1854891552734375,"[0.5736980590820312, 0.573138916015625, 0.5735465087890625, 0.574129150390625, 0.5737092895507813, 0.57447216796875, 0.5745264892578125, 0.5731635131835937, 0.5737205810546875, 0.5735956420898437, 0.5749248046875, 0.5725880126953125, 0.5736376342773437, 0.57371337890625, 0.573749267578125, 0.5733990478515625, 0.5727344360351563, 0.57394482421875, 0.57497705078125, 0.5740851440429687, 0.5741229858398438, 0.5742008056640625, 0.5734738159179688, 0.574509033203125, 0.5738157958984375, 0.573897705078125, 0.5732402954101562, 0.5745704956054688, 0.5727620849609375, 0.5734061889648437, 0.5746319580078125, 0.5745643310546875, 0.5739735107421875, 0.5731901245117188, 0.5725511474609375, 0.5742970581054687, 0.5729157104492187, 0.572837890625, 0.57303857421875, 0.5725552368164063, 0.5740175170898437, 0.5737943115234375, 0.57314404296875, 0.5743441772460938, 0.5747333374023438, 0.5747230834960938, 0.5726239013671875, 0.5742520141601563, 0.5729638671875, 0.5725716552734375, 0.572348388671875, 0.572031005859375, 0.5730672607421875, 0.5724190673828125, 0.572590087890625, 0.5723648071289062, 0.5724487915039063, 0.572526611328125, 0.5727549438476562, 0.57301708984375, 0.5722869873046875, 0.5721016235351563, 1.18763623046875, 0.5728788452148438, 0.5735679931640625, 0.5728031005859375, 0.5727078247070313, 0.5732781982421875, 0.5730969848632812, 0.5728460693359375, 0.57259521484375, 0.5722409057617187, 0.57280615234375, 0.57242724609375, 0.57225830078125, 0.5726760864257813, 0.572142578125, 0.5723013305664062, 0.5727958984375, 0.5730846557617187, 0.572316650390625, 
0.5732341918945313, 0.57354443359375, 0.5724467163085938, 0.5723648071289062, 0.5720924072265625, 0.5729249267578125, 0.5723299560546875, 0.5724815063476563, 0.572590087890625, 0.5723525390625, 0.5726453857421875, 0.572458984375, 0.5725296630859374, 0.5720729370117188, 0.572706787109375, 0.5740257568359375, 0.572758056640625, 0.5724251708984375, 0.5737236328125, 0.57303857421875, 0.5752218017578125, 0.5730856323242187, 0.57284814453125, 0.5734194946289063, 0.5729658813476562, 0.5726095581054688, 0.5726730346679687, 0.5732310791015625, 0.5731287231445312, 0.5729985961914063, 0.572822509765625, 0.5728573608398437, 0.5731840209960938, 0.5744937133789062, 0.5729187622070312, 0.5740841064453125, 0.5739561157226563, 0.5734850463867187, 0.5729812622070313, 0.5727211303710937, 0.5733560180664062, 0.5732577514648437, 0.5737369384765625, 0.5736038208007812, 1.18655078125, 0.5729157104492187, 0.5731143798828126, 0.5728522338867188, 0.5734451293945313, 0.5739530029296875, 0.572958740234375, 0.572759033203125, 0.5748234252929687, 0.57375537109375, 0.573359130859375, 0.5733130493164063, 0.5738352661132813, 0.57318603515625, 0.5733314819335937, 0.5734747924804687, 0.5750180053710937, 0.5730693359375, 0.5736345825195313, 0.573718505859375, 0.57335498046875, 0.5729044189453125, 0.5738772583007813, 0.5738352661132813, 0.5730682983398437, 0.5731051635742187, 0.5734297485351563, 0.5730928344726562, 0.5731317749023438, 0.5730283813476562, 0.573849609375, 0.57297509765625, 0.5731461181640625, 0.5727354736328125, 0.5730130004882813, 0.5731461181640625, 0.5728123168945313, 0.5729003295898437, 0.5728778076171875, 0.5738741455078125, 0.5740830688476563, 0.572737548828125, 0.5726536254882812, 0.5742734985351563, 0.5730897827148438, 0.5729822998046875, 0.5732136840820312, 0.5756242065429688, 0.5729976196289063, 0.5738700561523438, 0.573106201171875, 0.573149169921875, 0.5729013671875, 0.573991943359375, 0.573201416015625, 0.5733877563476563, 0.5732608032226563, 0.5743902587890625, 0.5742141723632812, 0.5728829345703125, 0.5743646850585937, 0.5734010620117187, 0.573259765625, 1.1840142822265625, 0.5737728271484375, 0.5731215209960937, 0.5732106323242188, 0.5732505493164063, 0.572969970703125, 0.573033447265625, 0.57314306640625, 0.5731942138671875, 0.5728460693359375, 0.5744957275390625, 0.5742643432617187, 0.5740144653320313, 0.5731768798828125, 0.5741864624023437, 0.5734174194335937, 0.573322265625, 0.5733375854492188, 0.5739192504882813, 0.5734788818359375, 0.5737677001953125, 0.5734850463867187, 0.5734430541992187, 0.5732044677734375, 0.5726986083984374, 0.572821533203125, 0.5730499267578125, 0.5735310668945313, 0.574656494140625, 0.5733201904296875, 0.5725225219726563, 0.5739570922851562, 0.5737728271484375, 0.57303857421875, 0.5733232421875, 0.5734522705078124, 0.5738311767578125, 0.575151123046875, 0.5733201904296875, 0.5735167846679687, 0.5731133422851562, 0.5729003295898437, 0.5728235473632812, 0.573318115234375, 0.5730816040039063, 0.5742418212890625, 0.573048828125, 0.5728880615234375, 0.572990478515625, 0.5729679565429687, 0.5725675659179688, 0.5726566162109376, 0.5728235473632812, 0.5745910034179688, 0.5722357788085938, 0.5722327270507812, 0.572821533203125, 0.5726668701171875, 0.5724385375976563, 0.5728890991210938, 0.5728051147460937, 0.5724405517578125, 0.5733775634765625, 1.1864791259765626, 0.5730631713867187, 0.5741793212890625, 0.574244873046875, 0.5729197998046875, 0.5730785522460937, 0.5742643432617187, 0.5736028442382812, 0.5729208374023438, 0.5731768188476563, 0.573612060546875, 
0.5728307495117188, 0.5726996459960938, 0.57314306640625, 0.5741906127929688, 0.572669921875, 0.5738731689453125, 0.5743124389648437, 0.5728778076171875, 0.5729352416992187, 0.574119873046875, 0.5733079223632812, 0.5724866333007812, 0.572231689453125, 0.5732946166992188, 0.5724682006835937, 0.5730529174804687, 0.5733519287109375, 0.573106201171875, 0.5727999877929687, 0.57236376953125, 0.5724620971679687, 0.5725665893554688, 0.5737697143554688, 0.5733406372070312, 0.573254638671875, 0.5726945190429688, 0.5743114013671875, 0.5735679931640625, 0.5726617431640625, 0.5724610595703125, 0.57396533203125, 0.5732372436523437, 0.5741332397460938, 0.5733570556640625, 0.5733375854492188, 0.572717041015625, 0.572859375, 0.5731901245117188, 0.572558349609375, 0.5723750610351562, 0.5745213623046875, 0.573633544921875, 0.5729638671875, 0.573412353515625, 0.5735505981445312, 0.5726494750976563, 0.5753558959960937, 0.5728399658203125, 0.573432861328125, 0.5723391723632812, 0.5729290161132813, 0.572626953125, 1.1842232666015624, 0.5728092041015626, 0.572779541015625, 0.572632080078125, 0.5737963256835937, 0.5736775512695312, 0.5726033935546875, 0.572564453125, 0.5733416748046875, 0.5725654907226563, 0.5723832397460937, 0.5723812255859375, 0.5731624755859375, 0.5723832397460937, 0.5729197998046875, 0.572788818359375, 0.5723237915039062, 0.5723801879882813, 0.5726771850585938, 0.5727251586914063, 0.5726986083984374, 0.57333349609375, 0.5739939575195312, 0.5731481323242188, 0.5727344360351563, 0.5737195434570312, 0.5727918090820312, 0.5725234985351563, 0.5729740600585937, 0.5731522827148438, 0.5736734619140625, 0.5729310913085938, 0.5734471435546875, 0.573127685546875, 0.5729720458984375, 0.5730723876953125, 0.5730529174804687, 0.5730682983398437, 0.5760235595703125, 0.5742151489257813, 0.573750244140625, 0.5731143798828126, 0.573844482421875, 0.5748480224609375, 0.5746339721679687, 0.5739223022460938, 0.5732044677734375, 0.5739735107421875, 0.5730263061523437, 0.5728583984375, 0.5729924926757812, 0.5730785522460937, 0.5734502563476562, 0.5732157592773437, 0.573191162109375, 0.5728983154296875, 0.5738905639648437, 0.5733457641601563, 0.57269873046875, 0.572650390625, 0.5734747924804687, 0.5728338012695312, 0.5733191528320313, 1.1859814453125, 0.57253271484375, 0.5722654418945312, 0.5727242431640625, 0.5725839233398438, 0.5722265625, 0.572601318359375, 0.5729863891601562, 0.572706787109375, 0.5726239013671875, 0.5739888916015625, 0.5729249267578125, 0.573317138671875, 0.5733673095703125, 0.5743882446289063, 0.5732106323242188, 0.5734819946289063, 0.5727047729492187, 0.5730140380859375, 0.572675048828125, 0.5729136352539063, 0.5729924926757812, 0.5723709716796875, 0.5744793701171875, 0.57339599609375, 0.5728604125976563, 0.5722327270507812, 0.5737615356445313, 0.5753026733398438, 0.5738475341796875, 0.5734512939453125, 0.5752913818359375, 0.5733673095703125, 0.5729085693359375, 0.5732177734375, 0.573886474609375, 0.5727651977539062, 0.57282763671875, 0.57284814453125, 0.5731399536132813, 0.5727467651367187, 0.5733314819335937, 0.5736110229492187, 0.5734830322265625, 0.5729003295898437, 0.574867431640625, 0.5740697631835937, 0.572969970703125, 0.5728942260742188, 0.5735751342773437, 0.5731563720703124, 0.5731113891601562, 0.5729085083007812, 0.5745828247070313, 0.5727620849609375, 0.5729269409179687, 0.5724876708984376, 0.573665283203125, 0.5732260131835938, 0.5726268920898437, 0.57246923828125, 0.572621826171875, 0.5729013671875, 1.1888896484375, 0.57364990234375, 0.5738916015625, 0.5726515502929688, 
0.5726239013671875, 0.5722838745117188, 0.5730723876953125, 0.573032470703125, 0.5727662353515625, 0.5732413330078125, 0.5728880615234375, 0.5727396240234375, 0.5731368408203125, 0.573233154296875, 0.5731963500976562, 0.5735249633789062, 0.5744219970703125, 0.5727999877929687, 0.5726064453125, 0.5729290771484375, 0.5737103271484375, 0.572811279296875, 0.5732679443359375, 0.5726730346679687, 0.5734287109375, 0.5722675170898438, 0.5725828857421875, 0.5725849609375, 0.5723627319335938, 0.5727416381835938, 0.5728348388671874, 0.5723678588867187, 0.5728604125976563, 0.573675537109375, 0.57333349609375, 0.5725634765625, 0.5726546020507812, 0.5731399536132813, 0.5728031005859375, 0.5724630737304688, 0.57310205078125, 0.5734573974609375, 0.5732689819335938, 0.5729290161132813, 0.5729782104492187, 0.5726064453125, 0.572527587890625, 0.5728818969726562, 0.5733007202148438, 0.5730426635742187, 0.5736365966796875, 0.5730549926757813, 0.573111328125, 0.5725552368164063, 0.5726505126953125, 0.5732802734375, 0.5726494750976563, 0.5724876708984376, 0.5733058471679687, 0.5727211303710937, 0.5725931396484375, 0.5726648559570312, 0.5728655395507812, 1.1863818359375, 0.5727938842773438, 0.5733508911132813, 0.5740676879882812, 0.574234619140625, 0.5727047729492187, 0.5733447875976563, 0.5732515869140625, 0.5743012084960938, 0.5728123168945313, 0.5730969848632812, 0.5729976196289063, 0.5739694213867188, 0.5728798828125, 0.5730140380859375, 0.5724334106445312, 0.5727744140625, 0.5740390625, 0.5736099853515625, 0.5726300048828125, 0.572969970703125, 0.5736683349609375, 0.572948486328125, 0.5725542602539062, 0.5725962524414062, 0.5727139892578125, 0.573559814453125, 0.572568603515625, 0.5725419311523438, 0.5730426635742187, 0.5727252197265625, 0.5726781616210938, 0.5734676513671875, 0.573179931640625, 0.5726351318359375, 0.573233154296875, 0.5737062377929687, 0.5729423217773437, 0.5729710083007813, 0.5735885009765626, 0.5732741088867187, 0.5730344848632812, 0.5727262573242188, 0.5732567138671875, 0.5732689819335938, 0.5727232055664062, 0.5727559814453125, 0.5730703125, 0.5724334716796875, 0.5731378784179687, 0.5730191650390625, 0.572896240234375, 0.5732177734375, 0.5728727416992188, 0.5729361572265625, 0.5730979614257813, 0.574118896484375, 0.573497314453125, 0.5732976684570312, 0.5737840576171875, 0.5730130004882813, 0.5730549926757813, 0.572958740234375, 1.187092529296875, 0.5724129028320313, 0.5726473999023437, 0.57280615234375, 0.572416015625, 0.5723361206054688, 0.572821533203125, 0.5732761840820313, 0.5727733764648437, 0.573095947265625, 0.5734052124023438, 0.573053955078125, 0.572674072265625, 0.5729341430664062, 0.5734830322265625, 0.573849609375, 0.5724999389648437, 0.5727334594726563, 0.5741107177734375, 0.5736663208007813, 0.57402978515625, 0.5733611450195313, 0.572958740234375, 0.5730099487304687, 0.5733345336914063, 0.5739038696289063, 0.575910888671875, 0.574392333984375, 0.5739898681640625, 0.5732730712890625, 0.5733109741210938, 0.5730928344726562, 0.5741782836914062, 0.5731993408203125, 0.57302734375, 0.5732771606445313, 0.572506103515625, 0.5729556274414063, 0.5733088989257813, 0.572968994140625, 0.5736283569335937, 0.5738291015625, 0.5733499145507812, 0.5729085693359375, 0.573539306640625, 0.5743226928710937, 0.5736959838867187, 0.5733365478515625, 0.57354443359375, 0.5733949584960938, 0.5731727294921874, 0.5731215209960937, 0.5734061889648437, 0.5725133056640626, 0.5726607055664062, 0.5729464111328125, 0.5725101928710937, 0.57212109375, 0.5734563598632813, 0.5726597290039063, 
0.5727313842773437, 0.572416015625, 0.5735731201171875]",tokens/s,1.7182754696526694,,,,,,main,False,False -4bit-awq-gemm-eager,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,facebook/opt-2.7b,facebook/opt-2.7b,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.0,,0.30.1,,,,1.19.2,,,,0.11.1,,,,,,,,,,,,,,,,,,,,,,,,,,,MB,2119.368704,2816.999424,0.0,2170.55232,1927.744512,s,10,2.4524682617187503,0.245246826171875,0.001090999616645648,0.2451712341308594,0.24625067138671874,0.24669122314453126,0.24704366455078125,"[0.24615277099609376, 0.24713177490234375, 0.24369766235351562, 0.24442691040039063, 0.244733154296875, 0.243721435546875, 0.24478985595703126, 0.2455526123046875, 0.24614064025878907, 0.24612144470214845]",tokens/s,1043.8463322684916,kWh,2.8755634550064334e-06,1.5756288392757208e-06,1.2476319504855795e-05,1.6927511799137947e-05,tokens/kWh,15123309.499807118,MB,2119.368704,2816.999424,0.0,2170.55232,2031.983104,s,10,142.79050195312502,14.279050195312502,0.0031195370707275648,14.27847802734375,14.28149794921875,14.284002392578124,14.286005947265625,"[14.2787724609375, 14.2797041015625, 14.28094140625, 14.27818359375, 14.2772841796875, 14.2808662109375, 14.2747421875, 14.275935546875, 14.2775654296875, 14.2865068359375]",tokens/s,4.412058164812777,kWh,0.00016860610004691852,9.240989681349414e-05,0.0007205406280193562,0.000981556624879769,tokens/kWh,64183.76525930624,,s,629,144.7668543243408,0.23015398143774374,0.029166744118704197,0.22656614685058593,0.22702059936523436,0.22731673583984374,0.47147623168945313,"[0.228210693359375, 0.2264954833984375, 0.2263234558105469, 0.2265917510986328, 0.22652210998535155, 0.2266122283935547, 0.22653952026367188, 0.22631117248535157, 0.22642892456054686, 0.22647091674804687, 0.22641664123535157, 0.22648934936523438, 0.2270392303466797, 0.22645555114746094, 0.22661325073242186, 0.22698086547851562, 0.22665011596679688, 0.22659788513183593, 0.2262640686035156, 0.22631219482421874, 0.22640025329589844, 0.22692658996582032, 0.22699212646484376, 0.22696038818359374, 0.22650674438476562, 0.22637158203125, 0.2266552276611328, 0.22633882141113282, 0.22646173095703126, 0.2265814666748047, 0.22644224548339845, 0.22677912902832031, 0.22628044128417968, 0.22633573913574218, 0.2265681915283203, 0.226693115234375, 0.22634597778320312, 0.2265016326904297, 0.2263531494140625, 0.22655078125, 0.22668800354003907, 0.2267904052734375, 0.22690406799316407, 0.2266306610107422, 0.2264575958251953, 0.22657638549804687, 0.22662553405761718, 0.22665113830566405, 0.22651187133789064, 0.22652723693847657, 0.22627635192871093, 0.22631219482421874, 0.22654464721679687, 0.22648934936523438, 0.2266378173828125, 0.22687333679199218, 0.22802943420410157, 0.2267176971435547, 0.2265917510986328, 0.22732595825195312, 0.2264842224121094, 0.22659481811523438, 0.47421746826171873, 0.22658047485351562, 0.22655282592773437, 0.22653030395507812, 0.22670950317382813, 0.22652517700195313, 0.22650469970703124, 0.22647705078125, 0.22634803771972656, 0.2266787872314453, 0.22657125854492188, 0.22652621459960937, 0.22686515808105467, 
0.2263900146484375, 0.22639718627929686, 0.22662553405761718, 0.22654975891113283, 0.22652928161621094, 0.2263838653564453, 0.22636647033691407, 0.2263582763671875, 0.22654975891113283, 0.22638899230957032, 0.2263961639404297, 0.22621388244628907, 0.22636749267578124, 0.22646170043945313, 0.22642381286621094, 0.22654156494140626, 0.2262988739013672, 0.22632652282714844, 0.2261411895751953, 0.2261012420654297, 0.22648838806152344, 0.22639097595214844, 0.2262917175292969, 0.22649856567382812, 0.2264145965576172, 0.22670130920410156, 0.2264453125, 0.227240966796875, 0.22686003112792968, 0.22684364318847655, 0.22670335388183593, 0.2264524841308594, 0.2268170166015625, 0.22824140930175782, 0.2270392303466797, 0.22767205810546876, 0.22691123962402343, 0.22711602783203125, 0.22689286804199219, 0.22666029357910156, 0.2267361297607422, 0.2266439666748047, 0.22683135986328126, 0.22709965515136718, 0.22691226196289063, 0.22731365966796874, 0.22672998046875, 0.22711911010742186, 0.22663475036621095, 0.22664601135253906, 0.47127346801757813, 0.2265681915283203, 0.226840576171875, 0.22668389892578125, 0.22649650573730468, 0.22704742431640626, 0.22672998046875, 0.22683135986328126, 0.22644326782226562, 0.2265743408203125, 0.226555908203125, 0.22665933227539062, 0.22649754333496094, 0.22686720275878905, 0.22685696411132814, 0.2270627899169922, 0.22701158142089845, 0.22654360961914063, 0.227631103515625, 0.22625177001953126, 0.2269306945800781, 0.226946044921875, 0.2268231658935547, 0.2272542724609375, 0.2273054656982422, 0.22650982666015626, 0.22637158203125, 0.22674432373046874, 0.22774681091308593, 0.22656512451171876, 0.22653132629394532, 0.22656101989746094, 0.22630706787109375, 0.22627430725097655, 0.22650367736816407, 0.2263654327392578, 0.2271293487548828, 0.2264596405029297, 0.2263173065185547, 0.2265753631591797, 0.22677912902832031, 0.22655282592773437, 0.22645350646972656, 0.2264954833984375, 0.22634597778320312, 0.22660403442382812, 0.226482177734375, 0.22736895751953126, 0.22658149719238282, 0.22649037170410155, 0.22647296142578124, 0.2265016326904297, 0.22649650573730468, 0.22651187133789064, 0.22654464721679687, 0.22644224548339845, 0.22651084899902343, 0.22658662414550781, 0.22652210998535155, 0.22644940185546875, 0.2268712921142578, 0.226334716796875, 0.22724812316894533, 0.4715550842285156, 0.22658047485351562, 0.22659788513183593, 0.22659075927734376, 0.2265425567626953, 0.22664601135253906, 0.22673306274414062, 0.22659481811523438, 0.2266234893798828, 0.22643096923828124, 0.2263951416015625, 0.22653439331054687, 0.22656716918945313, 0.22648626708984376, 0.2264842224121094, 0.2265323486328125, 0.22739251708984376, 0.22662757873535155, 0.22650778198242189, 0.22635621643066406, 0.22640025329589844, 0.22646783447265625, 0.2263951416015625, 0.2265374755859375, 0.22659071350097656, 0.22637158203125, 0.22640229797363282, 0.22639820861816407, 0.2262640686035156, 0.22646578979492188, 0.22665728759765624, 0.22639411926269531, 0.2265692138671875, 0.22644940185546875, 0.22637055969238282, 0.22653132629394532, 0.22651084899902343, 0.22662042236328125, 0.226440185546875, 0.22653952026367188, 0.2264248352050781, 0.22653132629394532, 0.22682112121582032, 0.22637464904785157, 0.22807859802246094, 0.22658047485351562, 0.22653543090820313, 0.22662144470214843, 0.2265364532470703, 0.22637158203125, 0.2266941375732422, 0.2265016326904297, 0.22764134216308593, 0.2268037109375, 0.22737408447265625, 0.2267689666748047, 0.22669818115234375, 0.22641868591308595, 0.2270064697265625, 0.226808837890625, 
0.22760447692871094, 0.2266941375732422, 0.22663475036621095, 0.4716912536621094, 0.226555908203125, 0.22654669189453125, 0.22630400085449218, 0.22651904296875, 0.22646885681152343, 0.2263726043701172, 0.2264627227783203, 0.226376708984375, 0.22640956115722657, 0.22651280212402344, 0.2264944610595703, 0.22653030395507812, 0.22637055969238282, 0.22640538024902343, 0.22671359252929688, 0.22652825927734374, 0.22724607849121095, 0.2267904052734375, 0.22637464904785157, 0.2265006103515625, 0.2266173400878906, 0.2275594177246094, 0.22663168334960937, 0.22695526123046875, 0.22644326782226562, 0.2264453125, 0.22670541381835937, 0.22647193908691407, 0.2264842224121094, 0.22652928161621094, 0.22634495544433594, 0.22692658996582032, 0.2264575958251953, 0.22655078125, 0.22647705078125, 0.22659584045410155, 0.22641151428222656, 0.22638490295410157, 0.2264320068359375, 0.22686924743652342, 0.22668185424804688, 0.226555908203125, 0.22660096740722657, 0.2265364532470703, 0.22648013305664064, 0.22652006530761717, 0.22684774780273437, 0.22667674255371092, 0.2264524841308594, 0.2264145965576172, 0.22648320007324219, 0.2265518035888672, 0.22661631774902344, 0.22665216064453125, 0.22664909362792968, 0.22723379516601563, 0.22827008056640624, 0.22659890747070313, 0.22667776489257813, 0.22666752624511718, 0.22652517700195313, 0.22685696411132814, 0.47040716552734374, 0.2265364532470703, 0.22670335388183593, 0.22675152587890626, 0.22674327087402343, 0.22674124145507812, 0.22666854858398439, 0.2265016326904297, 0.22641664123535157, 0.226408447265625, 0.22644837951660157, 0.22657331848144532, 0.22669926452636718, 0.22673817443847658, 0.22666648864746095, 0.22652006530761717, 0.22712115478515624, 0.2267709503173828, 0.22682829284667969, 0.2264514617919922, 0.22717543029785156, 0.22672691345214843, 0.22699417114257814, 0.22659379577636718, 0.22670541381835937, 0.22654464721679687, 0.22634701538085938, 0.22648320007324219, 0.226376708984375, 0.22674330139160156, 0.2265518035888672, 0.22656716918945313, 0.22633882141113282, 0.22662144470214843, 0.22681292724609375, 0.22643609619140626, 0.22689791870117187, 0.22637055969238282, 0.22802841186523437, 0.22646885681152343, 0.22660301208496095, 0.22652006530761717, 0.22687744140625, 0.226482177734375, 0.22646476745605468, 0.22653132629394532, 0.2266787872314453, 0.22718771362304688, 0.22725325012207032, 0.22660914611816407, 0.22652517700195313, 0.2265364532470703, 0.2266623992919922, 0.2268590087890625, 0.2265999298095703, 0.2266439666748047, 0.22733311462402345, 0.2267484130859375, 0.22656101989746094, 0.22658047485351562, 0.22690713500976561, 0.22647296142578124, 0.2265333709716797, 0.4723138427734375, 0.22668698120117187, 0.22651699829101563, 0.22642994689941406, 0.22652006530761717, 0.22658355712890624, 0.22648832702636718, 0.22646067810058593, 0.22636134338378905, 0.2262917175292969, 0.22641766357421875, 0.22637362670898437, 0.226440185546875, 0.22636851501464844, 0.2264944610595703, 0.2264596405029297, 0.22644122314453125, 0.22647398376464845, 0.22640127563476561, 0.22637977600097656, 0.22642892456054686, 0.22646067810058593, 0.22642994689941406, 0.22729216003417968, 0.2265856018066406, 0.22656614685058593, 0.22651187133789064, 0.22686924743652342, 0.22699212646484376, 0.22667776489257813, 0.22668083190917968, 0.22647091674804687, 0.226440185546875, 0.2262794189453125, 0.22660198974609375, 0.22650778198242189, 0.227240966796875, 0.22670130920410156, 0.2265927734375, 0.22639820861816407, 0.22648524475097656, 0.22650265502929687, 0.22666444396972657, 0.2264637451171875, 
0.22673306274414062, 0.2266306610107422, 0.2264524841308594, 0.22670541381835937, 0.22680677795410156, 0.22637773132324218, 0.22640333557128905, 0.22649754333496094, 0.22653030395507812, 0.22647602844238282, 0.2265180206298828, 0.2267105255126953, 0.2264811553955078, 0.2263408660888672, 0.22684159851074218, 0.226840576171875, 0.227162109375, 0.22650572204589844, 0.22680064392089844, 0.47229953002929687, 0.22655386352539061, 0.22669209289550782, 0.22669107055664062, 0.22653849792480468, 0.226914306640625, 0.22678323364257813, 0.22689485168457033, 0.2268078155517578, 0.22660812377929687, 0.22647296142578124, 0.22665318298339843, 0.22659584045410155, 0.2267166748046875, 0.22655897521972657, 0.22654566955566408, 0.2271068115234375, 0.22682009887695312, 0.22680575561523436, 0.22656204223632812, 0.2264217529296875, 0.22652108764648438, 0.22662553405761718, 0.22673408508300782, 0.22695730590820312, 0.22662757873535155, 0.2265364532470703, 0.22667570495605469, 0.2265743408203125, 0.22657125854492188, 0.22663679504394532, 0.2263592987060547, 0.22638490295410157, 0.22639411926269531, 0.2264514617919922, 0.22640640258789063, 0.22657125854492188, 0.22636134338378905, 0.22635008239746093, 0.22646681213378905, 0.2263756866455078, 0.22643507385253905, 0.226661376953125, 0.22654054260253906, 0.2264842224121094, 0.2266480712890625, 0.22652621459960937, 0.22654054260253906, 0.22649958801269532, 0.22644326782226562, 0.22654156494140626, 0.2265886688232422, 0.22666546630859374, 0.22688870239257813, 0.2266972198486328, 0.22649856567382812, 0.22649958801269532, 0.22640847778320314, 0.22657020568847655, 0.22655078125, 0.2271825866699219, 0.22659686279296876, 0.22644224548339845, 0.4732682189941406, 0.2267166748046875, 0.22669004821777344, 0.2266112060546875, 0.22709043884277344, 0.2265323486328125, 0.2266306610107422, 0.2265927734375, 0.22674636840820311, 0.22685285949707032, 0.22652723693847657, 0.2264954833984375, 0.22642381286621094, 0.22641253662109376, 0.22645452880859376, 0.2264248352050781, 0.22657125854492188, 0.22701670837402343, 0.22656716918945313, 0.2264268798828125, 0.22636441040039063, 0.22659379577636718, 0.22651596069335939, 0.22646476745605468, 0.22653543090820313, 0.22654873657226562, 0.22635110473632813, 0.2264453125, 0.22657740783691407, 0.22701568603515626, 0.22657331848144532, 0.226808837890625, 0.22729318237304688, 0.226840576171875, 0.22669004821777344, 0.22642994689941406, 0.22665216064453125, 0.226408447265625, 0.2264637451171875, 0.22659071350097656, 0.22760140991210936, 0.2266787872314453, 0.22653952026367188, 0.2264842224121094, 0.22630911254882813, 0.22652825927734374, 0.22639820861816407, 0.22657023620605468, 0.22674330139160156, 0.2271262664794922, 0.22665728759765624, 0.2265333709716797, 0.22666444396972657, 0.22674227905273436, 0.22675149536132813, 0.2265364532470703, 0.22639820861816407, 0.22652210998535155, 0.2266112060546875, 0.22665113830566405, 0.22656614685058593, 0.2264627227783203, 0.22642073059082032, 0.4729661560058594, 0.22653132629394532, 0.22665420532226563, 0.22658566284179688, 0.22666950988769533, 0.22666035461425782, 0.2266480712890625, 0.2267525177001953, 0.22648934936523438, 0.2266399688720703, 0.2264431610107422, 0.22651187133789064, 0.22709657287597657, 0.22754713439941407, 0.22637055969238282, 0.22647091674804687, 0.22653439331054687, 0.22654975891113283, 0.2265999298095703, 0.22654360961914063, 0.22677197265625, 0.22703718566894532, 0.2268784637451172, 0.22653543090820313, 0.22652210998535155, 0.22646681213378905, 0.2276433868408203, 0.2268784637451172, 
0.2265886688232422, 0.22731878662109375, 0.2269306945800781, 0.22686822509765625, 0.22654464721679687, 0.22657945251464845, 0.2267484130859375, 0.22667263793945314, 0.22669209289550782, 0.22645350646972656, 0.22654566955566408, 0.22655282592773437, 0.22689996337890625, 0.2264698944091797, 0.22683544921875, 0.22653952026367188, 0.22666035461425782, 0.22665420532226563, 0.22662655639648438, 0.2269420166015625, 0.2271200714111328, 0.2266972198486328, 0.22657331848144532, 0.22654368591308593, 0.22663877868652343, 0.22703616333007812, 0.22663372802734374, 0.22663270568847657, 0.22767308044433593, 0.22783692932128907, 0.22670130920410156, 0.2265927734375, 0.2266378173828125, 0.22650469970703124, 0.22659584045410155]",tokens/s,4.344917232163973,,,,,,,, -4bit-awq-gemm-eager,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,huggyllama/llama-7b,huggyllama/llama-7b,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.0,,0.30.1,,,,1.19.2,,,,0.11.1,,,,,,,,,,,,,,,,,,,,,,,,,,,MB,1938.939904,5480.382464,0.0,4833.93536,4503.41376,s,10,5.748573669433593,0.5748573669433594,0.0017742246285147622,0.5745578002929688,0.5774038818359375,0.5778909057617188,0.5782805249023437,"[0.5783779296875, 0.577295654296875, 0.57393994140625, 0.573126220703125, 0.5733052978515625, 0.5724807739257812, 0.5741940307617187, 0.5749215698242187, 0.5756366577148437, 0.5752955932617188]",tokens/s,445.32785821499897,kWh,6.7656206220020495e-06,3.7063076173277895e-06,3.206590528233145e-05,4.2537833521661295e-05,tokens/kWh,6018172.0319545325,MB,1940.045824,5480.382464,0.0,4833.93536,4688.700416,s,10,334.972359375,33.49723593750001,0.0033039843001841955,33.498505859375,33.500188281250004,33.500191796875,33.500194609375,"[33.5001953125, 33.49962890625, 33.4886484375, 33.4951328125, 33.496859375, 33.49850390625, 33.49586328125, 33.5001875, 33.49883203125, 33.4985078125]",tokens/s,1.8807521945257513,kWh,0.0003954924626188515,0.00021676498843461862,0.0018363540061190774,0.0024486114571725475,tokens/kWh,25728.867605948046,,s,629,339.6100241088865,0.5399205470729518,0.06828609533356728,0.531684326171875,0.532189599609375,0.5323219116210938,1.1051400146484376,"[0.5314457397460938, 0.5318953247070313, 0.5313341674804688, 0.5320222778320313, 0.53230078125, 0.5322966918945312, 0.532052978515625, 0.5319219360351563, 0.5317222290039062, 0.5324605712890625, 0.5316915283203125, 0.5319751586914062, 0.5320540161132813, 0.532116455078125, 0.5318584594726562, 0.5316935424804687, 0.5311477661132813, 0.5319874267578125, 0.5314037475585938, 0.5320007934570312, 0.5313106079101563, 0.5315860595703125, 0.5312788696289062, 0.5318707275390625, 0.5317847290039063, 0.5322864379882812, 0.5312890625, 0.5320038452148438, 0.5311918334960938, 0.5317908325195313, 0.53142529296875, 0.5319515991210938, 0.5317119750976562, 0.5320806274414063, 0.5313812255859375, 0.53172119140625, 0.5311549682617187, 0.532105224609375, 0.5318379516601562, 0.5316167602539063, 0.5311580200195313, 0.5316771850585937, 0.5315532836914062, 0.532158447265625, 0.531968017578125, 0.5320550537109375, 0.5314191284179688, 0.5322158203125, 
0.5313853149414063, 0.5317304077148437, 0.5311661987304688, 0.5319700317382813, 0.5315399780273438, 0.5316935424804687, 0.531431396484375, 0.5315706787109375, 0.53121435546875, 0.5316372680664062, 0.5313402709960937, 0.531820556640625, 0.5316812744140625, 0.532168701171875, 1.109749755859375, 0.531346435546875, 0.5321318359375, 0.5310955810546875, 0.5315061645507813, 0.531493896484375, 0.5316331787109375, 0.5316638793945313, 0.5315604248046875, 0.531146728515625, 0.5316085815429688, 0.5309306640625, 0.5317273559570312, 0.5312214965820312, 0.5316608276367187, 0.5313013916015625, 0.5319352416992188, 0.5315829467773437, 0.5317447509765625, 0.5317273559570312, 0.53187890625, 0.5315963134765626, 0.53159423828125, 0.5313740844726562, 0.532463623046875, 0.5318870849609375, 0.5320325317382812, 0.5313720092773437, 0.531726318359375, 0.5309389038085938, 0.5318615112304688, 0.5310996704101563, 0.5315245971679687, 0.5312379150390625, 0.5315072021484375, 0.5316474609375, 0.5321410522460938, 0.5322495727539063, 0.5316792602539062, 0.5317243041992188, 0.5318717651367187, 0.531431396484375, 0.5317796020507812, 0.5316239624023438, 0.5317867431640625, 0.5317509155273438, 0.533918701171875, 0.5316608276367187, 0.5317805786132812, 0.5316536254882812, 0.5319649047851562, 0.531599365234375, 0.531768310546875, 0.5318400268554687, 0.5323099975585938, 0.5319802856445313, 0.5322926025390625, 0.5316300659179688, 0.532094970703125, 0.5318133544921875, 0.531968017578125, 0.5319454956054688, 0.5321953125, 1.106044921875, 0.5311734008789063, 0.5317723999023437, 0.5311109008789062, 0.5316505737304688, 0.5316710205078125, 0.5314662475585937, 0.531103759765625, 0.5316044921875, 0.531599365234375, 0.5319229736328125, 0.5315625, 0.5319280395507813, 0.5312767944335938, 0.5315369262695312, 0.5314559936523438, 0.531651611328125, 0.5313187866210938, 0.5317222290039062, 0.5312973022460937, 0.5317273559570312, 0.5311549682617187, 0.531431396484375, 0.531472412109375, 0.532068359375, 0.5310628051757813, 0.5315819702148438, 0.5311876831054687, 0.5319567260742187, 0.5312583618164063, 0.5321011352539062, 0.5313074951171874, 0.5321062622070313, 0.531146728515625, 0.5314539794921875, 0.5311528930664062, 0.5316218872070313, 0.531188720703125, 0.531778564453125, 0.53129931640625, 0.53172021484375, 0.53125634765625, 0.5316557006835938, 0.5312962646484375, 0.5317734375, 0.531356689453125, 0.5318450927734375, 0.5314447631835938, 0.5319024658203125, 0.531715087890625, 0.5318553466796875, 0.5314600830078124, 0.5316638793945313, 0.5312317504882812, 0.5315184936523437, 0.5314949340820313, 0.5321676635742187, 0.5318441162109375, 0.5320765380859375, 0.5313792114257813, 0.5318953247070313, 0.5314150390625, 0.53174169921875, 1.1051417236328125, 0.531535888671875, 0.5323489379882812, 0.5317929077148438, 0.53146728515625, 0.5314027709960938, 0.5317069091796875, 0.5312337646484375, 0.5317662963867188, 0.531146728515625, 0.5315819702148438, 0.531715087890625, 0.5320007934570312, 0.531135498046875, 0.5316823120117188, 0.5312440185546875, 0.531652587890625, 0.5312808837890625, 0.53197314453125, 0.53145703125, 0.5322874755859375, 0.5318850708007813, 0.5318379516601562, 0.5312071533203125, 0.531473388671875, 0.531135498046875, 0.531435546875, 0.5312982788085937, 0.5317243041992188, 0.5311580200195313, 0.5318113403320313, 0.5314652099609375, 0.5317734375, 0.53153076171875, 0.5319024658203125, 0.5314232177734375, 0.5322066040039063, 0.5310986328125, 0.5315245971679687, 0.53191064453125, 0.5318922119140626, 0.53157275390625, 0.5320130615234375, 
0.5313024291992188, 0.5326827392578125, 0.5315870971679687, 0.5319966430664063, 0.5312655639648437, 0.5317652587890624, 0.5317560424804687, 0.532094970703125, 0.5314744262695312, 0.5314744262695312, 0.5314232177734375, 0.5318430786132813, 0.531373046875, 0.5317376098632812, 0.5314959106445313, 0.53226904296875, 0.5319168090820312, 0.5326305541992188, 0.5315430297851562, 0.5318215942382812, 1.1051356201171876, 0.5312973022460937, 0.5315819702148438, 0.5314774780273438, 0.531794921875, 0.5313423461914063, 0.5317007446289063, 0.5312061157226563, 0.532337646484375, 0.5311897583007813, 0.5318450927734375, 0.5311968994140625, 0.5319700317382813, 0.5320519409179687, 0.5319915771484375, 0.531267578125, 0.5318911743164062, 0.531177490234375, 0.5319485473632812, 0.5313751220703125, 0.5316874389648437, 0.5312163696289063, 0.5318246459960938, 0.5318041381835937, 0.5321021728515625, 0.5312501831054688, 0.5314898071289063, 0.5313935546875, 0.5318809814453125, 0.5312296752929687, 0.5316013793945312, 0.5316351928710937, 0.5323223266601562, 0.5317406616210938, 0.5318369140625, 0.5311907958984375, 0.5315348510742187, 0.5314457397460938, 0.531673095703125, 0.53165771484375, 0.5315717163085938, 0.531652587890625, 0.5317069091796875, 0.5313167114257813, 0.53161474609375, 0.5311897583007813, 0.5315440673828125, 0.5312553100585937, 0.5322280883789062, 0.5312880859375, 0.5325035400390625, 0.5322506103515625, 0.5319270629882813, 0.5315594482421875, 0.53212158203125, 0.5314180908203125, 0.5320048828125, 0.5329141845703125, 0.5321277465820312, 0.5319035034179688, 0.5321103515625, 0.5317222290039062, 0.5319536743164063, 1.104712646484375, 0.531314697265625, 0.531794921875, 0.531409912109375, 0.5321390380859375, 0.5317406616210938, 0.5324042358398438, 0.5315665893554687, 0.531862548828125, 0.5314232177734375, 0.5315747680664062, 0.5311682739257813, 0.5318154296875, 0.53142529296875, 0.5318092651367188, 0.5314898071289063, 0.5318870849609375, 0.531378173828125, 0.53172119140625, 0.5311692504882812, 0.531430419921875, 0.5313966064453125, 0.531857421875, 0.5313710327148438, 0.5323397216796875, 0.5316075439453125, 0.5323120727539062, 0.5317294311523437, 0.5321543579101562, 0.5312010498046875, 0.5318461303710937, 0.5319188232421875, 0.532210693359375, 0.5314221801757812, 0.5321881713867187, 0.5314324340820312, 0.5315921630859375, 0.5312501831054688, 0.5330370483398438, 0.531240966796875, 0.5315143432617188, 0.5314150390625, 0.5320355834960937, 0.531736572265625, 0.532005859375, 0.5315584106445312, 0.5322998046875, 0.5318276977539063, 0.5317406616210938, 0.53136279296875, 0.5315768432617187, 0.531726318359375, 0.5317335205078125, 0.5318584594726562, 0.5316566772460938, 0.531314697265625, 0.5316884765625, 0.5317488403320313, 0.5318338623046875, 0.5312901000976562, 0.53187890625, 0.5315584106445312, 0.5323212890625, 1.1061114501953124, 0.5317314453125, 0.5319290771484375, 0.531072998046875, 0.5316290283203124, 0.5314488525390625, 0.5314508666992187, 0.5313126220703125, 0.5319659423828125, 0.5314447631835938, 0.5315645141601563, 0.5322833862304688, 0.5317775268554688, 0.5313228759765625, 0.5322045288085937, 0.53157373046875, 0.5320222778320313, 0.53178369140625, 0.5322998046875, 0.5316710205078125, 0.5317069091796875, 0.531219482421875, 0.5317181396484375, 0.5315245971679687, 0.531736572265625, 0.5315369262695312, 0.53231103515625, 0.5319618530273438, 0.5321339111328125, 0.5312696533203125, 0.5317406616210938, 0.53157275390625, 0.5317908325195313, 0.5312593994140625, 0.5319639282226563, 0.5313003540039063, 0.5317816162109374, 
0.5313792114257813, 0.531936279296875, 0.5312071533203125, 0.5314293823242188, 0.5310996704101563, 0.531589111328125, 0.5313955688476563, 0.53193115234375, 0.5313556518554687, 0.5319588012695312, 0.531583984375, 0.532021240234375, 0.5315205078125, 0.5315798950195313, 0.5314150390625, 0.5316608276367187, 0.53139453125, 0.5319014282226563, 0.5313863525390625, 0.5320611572265626, 0.5318748168945312, 0.5321339111328125, 0.5318461303710937, 0.5317355346679687, 0.5316792602539062, 0.5318819580078125, 1.1075174560546874, 0.531072021484375, 0.5316382446289063, 0.5312890625, 0.53142529296875, 0.531535888671875, 0.5324411010742187, 0.5314232177734375, 0.5321287841796875, 0.5315972900390625, 0.5322034912109375, 0.5314857177734374, 0.5315389404296875, 0.531357666015625, 0.5317713623046875, 0.5311488037109375, 0.5317294311523437, 0.5311682739257813, 0.531652587890625, 0.5319782104492188, 0.5317386474609375, 0.5313341674804688, 0.5316669311523438, 0.5313074951171874, 0.5316454467773437, 0.5311918334960938, 0.5317662963867188, 0.5315706787109375, 0.5321380004882813, 0.5316188354492187, 0.5322280883789062, 0.5312737426757812, 0.5317304077148437, 0.5311057739257813, 0.5317775268554688, 0.5316751098632813, 0.5317723999023437, 0.5313269653320313, 0.532220947265625, 0.531409912109375, 0.531726318359375, 0.5313054809570312, 0.5317427368164063, 0.5314866943359375, 0.5319229736328125, 0.5314406127929687, 0.532294677734375, 0.5323673706054688, 0.5322874755859375, 0.5322711181640625, 0.5320068969726562, 0.5316044921875, 0.5322977294921875, 0.531583984375, 0.5319116821289063, 0.5315798950195313, 0.532516845703125, 0.5323622436523437, 0.5323622436523437, 0.5315491943359375, 0.5319935913085938, 0.5320017700195312, 0.531962890625, 1.1080765380859374, 0.5319547119140625, 0.5317703857421875, 0.53150927734375, 0.5314652099609375, 0.5311805419921874, 0.53163623046875, 0.5313218383789062, 0.531493896484375, 0.531631103515625, 0.5318482055664062, 0.5316925659179688, 0.5316792602539062, 0.5311713256835937, 0.53180517578125, 0.5314037475585938, 0.531620849609375, 0.5318748168945312, 0.5327401123046875, 0.5320171508789062, 0.5328353271484375, 0.5319864501953125, 0.5319833374023437, 0.5312788696289062, 0.53235302734375, 0.5315451049804687, 0.5321615600585937, 0.5315635375976563, 0.5320089721679687, 0.5313218383789062, 0.5321666259765625, 0.5312501831054688, 0.5315911865234375, 0.5312686157226563, 0.5321390380859375, 0.531409912109375, 0.531768310546875, 0.5311641845703124, 0.5316935424804687, 0.5315829467773437, 0.5322066040039063, 0.5317447509765625, 0.5320120239257813, 0.5312399291992187, 0.5316751098632813, 0.5313863525390625, 0.5317621459960937, 0.531684326171875, 0.5316658935546875, 0.5314549560546875, 0.5319035034179688, 0.531262451171875, 0.5315963134765626, 0.5311375122070312, 0.5317283935546875, 0.5315194702148438, 0.5318932495117188, 0.532084716796875, 0.5324154663085937, 0.5319188232421875, 0.5318717651367187, 0.5313474731445312, 0.5320181884765625, 1.10763720703125, 0.5311539306640625, 0.5317109985351562, 0.5316484985351563, 0.5316484985351563, 0.5312767944335938, 0.5317509155273438, 0.5310279541015624, 0.5314611206054688, 0.5312849731445313, 0.53146728515625, 0.5310648193359375, 0.5322076416015625, 0.53161572265625, 0.5324257202148438, 0.5314857177734374, 0.5316536254882812, 0.5312450561523437, 0.5317069091796875, 0.5315901489257813, 0.5321072387695313, 0.5315020751953125, 0.531820556640625, 0.5321041870117188, 0.5318809814453125, 0.5312317504882812, 0.532041748046875, 0.5315451049804687, 0.5318154296875, 
0.5316792602539062, 0.5321062622070313, 0.5322495727539063, 0.5319905395507812, 0.531694580078125, 0.5317621459960937, 0.5315451049804687, 0.5317488403320313, 0.5312747802734376, 0.5321328735351563, 0.531746826171875, 0.5318154296875, 0.5317119750976562, 0.531684326171875, 0.5313290405273438, 0.5314979858398438, 0.5311682739257813, 0.5318164672851563, 0.5316884765625, 0.53195263671875, 0.5318246459960938, 0.532116455078125, 0.5315665893554687, 0.5320376586914063, 0.5314426879882812, 0.5319772338867188, 0.5315123291015625, 0.53215234375, 0.53184716796875, 0.5321748657226563, 0.5318123779296875, 0.5321236572265625, 0.5316618041992187, 0.5318829956054687]",tokens/s,1.8521243642629592,,,,,,,, -4bit-awq-gemm-eager,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,facebook/opt-13b,facebook/opt-13b,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.0,,0.30.1,,,,1.19.2,,,,0.11.1,,,,,,,,,,,,,,,,,,,,,,,,,,,MB,2935.840768,9548.857344,0.0,8902.41024,8265.321472,s,10,10.732376098632812,1.0732376098632814,0.0018882051518454717,1.0727980346679686,1.0744577026367188,1.076272918701172,1.0777250915527343,"[1.078088134765625, 1.0709527587890626, 1.071809814453125, 1.072758056640625, 1.0723739013671876, 1.0717447509765625, 1.0728380126953125, 1.07379296875, 1.0740543212890625, 1.07396337890625]",tokens/s,238.53058972897117,kWh,1.2652489013142056e-05,6.933077765861525e-06,5.8725796980607646e-05,7.831136375961123e-05,tokens/kWh,3269001.939307702,MB,2940.198912,9548.857344,0.0,8902.41024,8556.582912,s,10,636.0469335937499,63.60469335937499,0.008753951962243356,63.60478125,63.61318203125,63.615917187499996,63.6181053125,"[63.59721875, 63.61257421875, 63.61096484375, 63.6103046875, 63.61865234375, 63.5982421875, 63.6112421875, 63.58923828125, 63.5992578125, 63.59923828125]",tokens/s,0.9904929443499021,kWh,0.0007508853809701072,0.00041155162046943583,0.003498196520777186,0.00466063352221673,tokens/kWh,13517.475617785843,,s,629,644.6920160522462,1.0249475612913292,0.12743236447478887,1.0094981079101562,1.01038447265625,1.0106333129882812,2.0813571875,"[1.0103838500976563, 1.011209228515625, 1.0106275634765625, 1.010282470703125, 1.0090455322265626, 1.00902294921875, 1.0092001342773438, 1.0092001342773438, 1.0087454833984375, 1.0091202392578125, 1.0090209350585937, 1.0097459106445312, 1.009122314453125, 1.0094325561523438, 1.0090762329101564, 1.008990234375, 1.009164306640625, 1.0090936279296876, 1.0088233032226563, 1.009649658203125, 1.0089727783203124, 1.0095328979492189, 1.009112060546875, 1.0095298461914062, 1.0089553833007812, 1.0094817504882811, 1.0095001831054689, 1.0089779052734376, 1.0089512939453125, 1.0092308349609376, 1.0093475952148439, 1.0095062866210938, 1.0100172729492187, 1.0097244262695313, 1.0091735229492187, 1.0095360107421876, 1.0095134887695312, 1.0093209838867188, 1.009239013671875, 1.00967529296875, 1.0096640014648437, 1.00973974609375, 1.0092687377929688, 1.009269775390625, 1.0091714477539062, 1.0091786499023438, 1.0094950561523437, 1.0094622802734374, 1.0091345825195313, 1.009670166015625, 1.0100039672851562, 1.0096732177734375, 
1.0092994384765626, 1.0093557739257812, 1.0091253662109374, 1.00938134765625, 1.0091427612304686, 1.0090465087890625, 1.0089072875976564, 1.0098462524414062, 1.010165771484375, 1.0093465576171874, 2.08412060546875, 1.0087465209960937, 1.0093834228515626, 1.0092031860351562, 1.0096558227539063, 1.0095739135742188, 1.0104360961914063, 1.0100264892578126, 1.0097346801757812, 1.0098964233398438, 1.0101534423828125, 1.00982373046875, 1.009934326171875, 1.0099998779296875, 1.0098063354492188, 1.0099056396484376, 1.0100838623046875, 1.0100254516601563, 1.00965478515625, 1.0096107788085937, 1.0098421630859375, 1.0097664184570312, 1.0100264892578126, 1.0098104248046875, 1.0099118041992188, 1.0101432495117189, 1.010134033203125, 1.0106337280273439, 1.0090772705078126, 1.0097838134765624, 1.0102476806640626, 1.0099251098632813, 1.0096466064453125, 1.00933837890625, 1.0092185668945313, 1.0092902221679687, 1.0095257568359375, 1.00948583984375, 1.0100172729492187, 1.010408447265625, 1.0100695190429687, 1.0098995361328125, 1.0105394897460938, 1.00971826171875, 1.0095902709960938, 1.0096414794921875, 1.009565673828125, 1.00931787109375, 1.0096066284179688, 1.0097407836914063, 1.009343505859375, 1.0096947021484375, 1.0099138793945313, 1.009455078125, 1.0092472534179688, 1.0096680908203124, 1.0096250610351563, 1.0100101318359376, 1.0093414306640625, 1.0092892456054687, 1.009311767578125, 1.009322998046875, 1.0097203369140626, 2.081078369140625, 1.009328125, 1.0090977172851563, 1.0092052612304687, 1.0094520263671876, 1.0091028442382812, 1.0091796264648438, 1.0086420288085938, 1.0094274291992187, 1.0089891967773437, 1.0090137329101563, 1.0094161987304688, 1.0096568603515625, 1.0095421142578125, 1.009850341796875, 1.0090895385742187, 1.0094059448242187, 1.009059814453125, 1.0094509887695313, 1.0088161010742187, 1.008974853515625, 1.0091888427734375, 1.0095595703125, 1.009016845703125, 1.0097346801757812, 1.0089810180664063, 1.00931787109375, 1.0090147705078125, 1.0099138793945313, 1.0100213623046874, 1.0104381713867188, 1.010629638671875, 1.010713623046875, 1.0101360473632812, 1.0105651245117186, 1.0090690307617187, 1.0092144775390626, 1.009344482421875, 1.01054052734375, 1.0098954467773438, 1.0101473388671875, 1.010071533203125, 1.0109757690429688, 1.0099885864257812, 1.010386962890625, 1.0099199829101562, 1.0098933715820313, 1.0100418701171876, 1.0103040161132812, 1.0101207275390625, 1.0105702514648438, 1.0102599487304686, 1.00997119140625, 1.0098646850585937, 1.0102732543945312, 1.0096066284179688, 1.00906494140625, 1.0095984497070312, 1.0102210693359375, 1.0098585815429688, 1.0101514282226562, 1.010039794921875, 1.0104360961914063, 2.082943115234375, 1.009787841796875, 1.0099476318359375, 1.0097244262695313, 1.0104217529296875, 1.0104340209960938, 1.0096947021484375, 1.0098309326171875, 1.009544189453125, 1.0100009155273437, 1.01024560546875, 1.0097223510742188, 1.0100695190429687, 1.0102835083007813, 1.009902587890625, 1.0104699096679688, 1.0107535400390626, 1.0102671508789063, 1.010640869140625, 1.0102630615234376, 1.0101463012695313, 1.0099834594726562, 1.0103336791992188, 1.0099384155273436, 1.0103685302734375, 1.0097254638671875, 1.0096906127929688, 1.0098125, 1.0103756713867187, 1.0102271728515626, 1.010640869140625, 1.0090096435546876, 1.0095994873046874, 1.0097633056640625, 1.0101729125976562, 1.0090198974609375, 1.0095718383789063, 1.0087034912109376, 1.009523681640625, 1.00916943359375, 1.0090352783203125, 1.0087833862304687, 1.0096363525390626, 1.0092359619140625, 1.0092656860351563, 
1.0091233520507812, 1.00946533203125, 1.008932861328125, 1.009027099609375, 1.0091468505859376, 1.0092257080078124, 1.0089072875976564, 1.008837646484375, 1.0091827392578125, 1.0094110717773437, 1.0090751953125, 1.0094848022460938, 1.0091868286132812, 1.009480712890625, 1.0097039184570313, 1.0093670654296876, 1.0094059448242187, 1.0094376831054688, 2.081429443359375, 1.009227783203125, 1.0094479370117186, 1.010234375, 1.0106552124023438, 1.010882568359375, 1.0105589599609375, 1.0087034912109376, 1.0110679321289062, 1.0107822265625, 1.0108876953125, 1.0104412231445312, 1.0089584350585938, 1.0093711547851563, 1.0090792846679688, 1.0109276123046875, 1.0108334350585937, 1.0103521118164063, 1.0105712890625, 1.0102968139648438, 1.0089482421875, 1.0090025024414062, 1.0100203247070312, 1.0104258422851562, 1.009818603515625, 1.0096087036132813, 1.0098165893554687, 1.0094642944335936, 1.0101575927734374, 1.0103203735351562, 1.0114457397460936, 1.0106500854492189, 1.010361328125, 1.010103271484375, 1.0099415283203126, 1.0093311767578126, 1.0099465942382813, 1.0090475463867188, 1.0093302001953126, 1.0094981079101562, 1.0090823974609375, 1.0102753295898437, 1.0096322631835937, 1.0092564697265625, 1.0091519775390625, 1.0096640014648437, 1.009281005859375, 1.0090864868164062, 1.0091448364257813, 1.0099537963867187, 1.0093250732421875, 1.0095186157226563, 1.0094940185546875, 1.00933837890625, 1.0094080200195312, 1.0091744995117187, 1.0092451782226564, 1.0095390625, 1.0101217041015624, 1.01006640625, 1.0101780395507813, 1.009744873046875, 1.0093660278320313, 2.08117138671875, 1.0091130981445313, 1.009100830078125, 1.0086492309570312, 1.0088734741210938, 1.0091294555664063, 1.0090188598632812, 1.0089410400390626, 1.009344482421875, 1.0095523681640626, 1.0093916015625, 1.0090680541992187, 1.00940185546875, 1.009154052734375, 1.0091970825195313, 1.0093629150390624, 1.0088673095703125, 1.0088601684570313, 1.009817626953125, 1.009006591796875, 1.0100746459960936, 1.009349609375, 1.0093670654296876, 1.0091519775390625, 1.0093035278320313, 1.0091581420898437, 1.0092021484375, 1.0091058959960937, 1.01058251953125, 1.0097705078125, 1.0106183471679688, 1.0098779907226563, 1.0096998291015624, 1.0104105224609374, 1.0094213256835938, 1.009344482421875, 1.0096097412109375, 1.0097407836914063, 1.0099261474609376, 1.009217529296875, 1.0095718383789063, 1.0091837158203125, 1.0093905639648437, 1.0092267456054687, 1.0091878662109375, 1.0096301879882812, 1.0098125, 1.0094929809570312, 1.009860595703125, 1.0093219604492187, 1.0101544799804687, 1.009301513671875, 1.0094714965820313, 1.009460205078125, 1.0093588256835937, 1.0097899780273438, 1.0095984497070312, 1.0096742553710938, 1.010693115234375, 1.0096957397460937, 1.010155517578125, 1.010134033203125, 1.00997119140625, 2.082757568359375, 1.0091796264648438, 1.0095452270507812, 1.0092155151367188, 1.0096373901367188, 1.00949609375, 1.0099005737304687, 1.0090813598632813, 1.009080322265625, 1.008932861328125, 1.0091448364257813, 1.008964599609375, 1.00949609375, 1.0093455200195312, 1.0097909545898438, 1.0094417724609375, 1.00999267578125, 1.0092533569335937, 1.009428466796875, 1.0093148193359376, 1.0098770141601563, 1.0099015502929687, 1.0105159912109376, 1.0107003173828124, 1.0108948364257813, 1.009523681640625, 1.010176025390625, 1.009207275390625, 1.0096517333984374, 1.0096107788085937, 1.010208740234375, 1.0101217041015624, 1.0100725708007812, 1.0099415283203126, 1.01014013671875, 1.009311767578125, 1.0097100830078125, 1.0091714477539062, 1.0097858276367186, 
1.0092830810546876, 1.0094745483398437, 1.009259521484375, 1.0102118530273438, 1.0100234375, 1.0097469482421875, 1.010239501953125, 1.0100633544921875, 1.0095165405273439, 1.0095984497070312, 1.0094940185546875, 1.0099507446289062, 1.0094642944335936, 1.0101094360351563, 1.0097879028320313, 1.0109890747070311, 1.0111426391601563, 1.0094315795898436, 1.0090772705078126, 1.009227783203125, 1.0092001342773438, 1.009723388671875, 1.00982373046875, 1.0097694702148436, 2.08317333984375, 1.0093322143554688, 1.00935986328125, 1.0087188720703124, 1.0093055419921875, 1.008848876953125, 1.0092216186523437, 1.0090895385742187, 1.0095001831054689, 1.0094622802734374, 1.0091847534179688, 1.0095789794921874, 1.0093875122070313, 1.0095748901367188, 1.0098646850585937, 1.0092850952148438, 1.0101329956054688, 1.0093475952148439, 1.0094888916015625, 1.00940185546875, 1.0091079711914062, 1.0090505981445312, 1.0093823852539063, 1.0089502563476562, 1.009269775390625, 1.0090301513671875, 1.00952783203125, 1.0098544921875, 1.0100684814453125, 1.0095472412109374, 1.0094541015625, 1.0092247314453124, 1.0096537475585938, 1.0091100463867186, 1.0094458618164062, 1.0091038818359375, 1.0096301879882812, 1.0089943237304688, 1.0096271362304687, 1.009623046875, 1.0094694213867188, 1.009554443359375, 1.0093568115234375, 1.0092083129882812, 1.0091427612304686, 1.0093660278320313, 1.010423828125, 1.0095114135742187, 1.0090711059570312, 1.0094817504882811, 1.009312744140625, 1.0092687377929688, 1.009132568359375, 1.0089052124023437, 1.0089922485351563, 1.0089625854492188, 1.0091192016601562, 1.00922265625, 1.0094295043945312, 1.0091376342773437, 1.0093772583007812, 1.0091202392578125, 1.0095810546875, 2.08344580078125, 1.0095042724609375, 1.0109685668945312, 1.0099476928710938, 1.01062548828125, 1.0095011596679688, 1.0104494018554688, 1.008996337890625, 1.0095462646484374, 1.008911376953125, 1.00917041015625, 1.0089707641601562, 1.00910693359375, 1.0093240356445312, 1.0093403930664062, 1.0089093017578126, 1.009713134765625, 1.0089676513671875, 1.0090844116210937, 1.0088786010742188, 1.00897998046875, 1.0090782470703126, 1.0091878662109375, 1.0094356689453126, 1.009923095703125, 1.0092708129882813, 1.0097356567382811, 1.008616455078125, 1.0090946655273438, 1.0091868286132812, 1.0094305419921874, 1.0093619384765624, 1.0100254516601563, 1.0098093872070313, 1.0101801147460938, 1.0097049560546876, 1.010398193359375, 1.0095513305664063, 1.0100848388671875, 1.0106326904296874, 1.011041259765625, 1.010619384765625, 1.0100930786132813, 1.0094878540039063, 1.0097684326171874, 1.0093609008789062, 1.0095360107421876, 1.009154052734375, 1.0090618896484376, 1.009249267578125, 1.009196044921875, 1.0090895385742187, 1.0093352661132813, 1.0091714477539062, 1.0094796752929687, 1.0091049194335937, 1.0094694213867188, 1.0094202880859375, 1.009227783203125, 1.0093311767578126, 1.0093629150390624, 1.0093004760742188, 1.0094376831054688, 2.083451904296875, 1.0096097412109375, 1.0098380737304689, 1.009165283203125, 1.0100848388671875, 1.0104432373046874, 1.0090741577148437, 1.0095851440429688, 1.0092666625976563, 1.0088263549804688, 1.0094428100585937, 1.0090045166015624, 1.0094745483398437, 1.0092830810546876, 1.0091581420898437, 1.0089246826171876, 1.0092728271484375, 1.009238037109375, 1.0095165405273439, 1.0092728271484375, 1.009455078125, 1.00973876953125, 1.009924072265625, 1.0094458618164062, 1.0094541015625, 1.0086737670898438, 1.009306640625, 1.009659912109375, 1.0100695190429687, 1.0101934204101561, 1.0098739013671876, 1.0097745971679688, 
1.0097152099609374, 1.0097776489257813, 1.0097192993164064, 1.0093025512695313, 1.0096865234375, 1.009438720703125, 1.0101616821289063, 1.0098831176757812, 1.0098104248046875, 1.0097673950195312, 1.01049853515625, 1.0099834594726562, 1.0104944458007812, 1.0097633056640625, 1.0092708129882813, 1.009201171875, 1.0093056030273437, 1.0092236938476562, 1.0090127563476563, 1.0091929321289062, 1.00919091796875, 1.0090670166015625, 1.0091888427734375, 1.008996337890625, 1.0091161499023438, 1.0092789916992186, 1.0097572021484376, 1.0099056396484376, 1.0091837158203125, 1.0094663696289063, 1.0093506469726563]",tokens/s,0.9756596705690017,,,,,,,, -4bit-awq-gemm-eager,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,EleutherAI/pythia-70m,EleutherAI/pythia-70m,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.40.2,,0.30.1,,,,1.19.2,,,,0.11.1,,,,,,,,,,,,,,,,,,,,,,,,,,,MB,1298.415616,872.93952,0.0,226.492416,184.397824,s,11,0.17165715312957763,0.015605195739052512,0.0007607833862353127,0.01534540843963623,0.01575004768371582,0.016863200187683107,0.017753722190856934,"[0.01797635269165039, 0.015421407699584961, 0.015347392082214355, 0.015315903663635253, 0.015256128311157226, 0.01575004768371582, 0.01534540843963623, 0.015287008285522461, 0.015340383529663087, 0.01527171230316162, 0.01534540843963623]",tokens/s,16404.792626814135,kWh,1.8500959462776245e-07,1.0137606156642736e-07,3.4862617005003736e-07,6.350118262442272e-07,tokens/kWh,403142098.1780924,MB,1298.743296,872.93952,0.0,226.492416,197.933568,s,11,10.171151123046876,0.9246501020951705,0.010660233315434522,0.920117919921875,0.9293056640625,0.9430220336914062,0.9539951293945312,"[0.9567384033203125, 0.9293056640625, 0.9245167846679687, 0.9185679931640625, 0.920117919921875, 0.9228417358398437, 0.919580810546875, 0.9193172607421874, 0.9187877807617187, 0.923688232421875, 0.9176885375976562]",tokens/s,68.13388097535262,kWh,1.1024352834572037e-05,6.040874968975529e-06,1.993414258140665e-05,3.6999370384954216e-05,tokens/kWh,1702731.6774454878,,s,692,10.307773435592651,0.014895626352012501,0.0018980452913926306,0.014577664375305176,0.01503354845046997,0.015412480163574216,0.030279741020202637,"[0.016365568161010743, 0.016268287658691406, 0.01589350414276123, 0.01510707187652588, 0.015166463851928711, 0.015711232185363768, 0.015400959968566894, 0.015467519760131837, 0.015654911994934084, 0.015433728218078613, 0.015542304039001465, 0.015136799812316894, 0.015172543525695801, 0.015097855567932129, 0.015466496467590332, 0.016034816741943358, 0.015383551597595215, 0.015619071960449218, 0.015451135635375977, 0.015178751945495606, 0.015625215530395507, 0.015426560401916504, 0.015222847938537598, 0.015156160354614258, 0.015565823554992676, 0.016327680587768553, 0.015833087921142578, 0.01530675220489502, 0.015086591720581055, 0.01507737636566162, 0.01501088047027588, 0.01481721591949463, 0.015222784042358398, 0.015755264282226563, 0.016021503448486327, 0.015339520454406739, 0.015148032188415527, 0.01509171199798584, 0.015104000091552734, 0.015050751686096191, 0.015105024337768554, 0.014737407684326171, 
0.014621696472167968, 0.014722047805786133, 0.014584896087646484, 0.014627776145935058, 0.014577664375305176, 0.014598143577575684, 0.014589952468872071, 0.014524415969848633, 0.014585856437683106, 0.01456332778930664, 0.014573568344116212, 0.014537728309631348, 0.014552063941955566, 0.014576640129089356, 0.014576640129089356, 0.014548992156982422, 0.01457049560546875, 0.014616576194763184, 0.014621696472167968, 0.014593024253845215, 0.030418943405151368, 0.014628864288330079, 0.014632960319519044, 0.0145797119140625, 0.01460531234741211, 0.014616576194763184, 0.01458073616027832, 0.014573568344116212, 0.014584832191467285, 0.014578687667846679, 0.014615551948547363, 0.014621696472167968, 0.01458790397644043, 0.014673919677734374, 0.014622719764709472, 0.01457151985168457, 0.014611455917358398, 0.0145797119140625, 0.014619647979736328, 0.014697471618652343, 0.014582783699035644, 0.014603263854980468, 0.01460223960876465, 0.01459404754638672, 0.014568448066711426, 0.014610431671142577, 0.014583807945251465, 0.014577664375305176, 0.014595071792602539, 0.014558208465576173, 0.01457254409790039, 0.014645248413085938, 0.014598143577575684, 0.015670271873474122, 0.015376383781433106, 0.015114239692687988, 0.015088640213012695, 0.015019007682800293, 0.015113216400146484, 0.015021056175231933, 0.014995455741882324, 0.014962688446044922, 0.014998527526855468, 0.014962688446044922, 0.014985216140747071, 0.014946304321289062, 0.01500876808166504, 0.014965760231018066, 0.015024191856384277, 0.014966719627380371, 0.014658559799194336, 0.014590975761413574, 0.014565376281738282, 0.01461350440979004, 0.014582783699035644, 0.015023103713989258, 0.01509887981414795, 0.014726143836975097, 0.014541824340820313, 0.0145797119140625, 0.014606335639953612, 0.014665727615356445, 0.01459609603881836, 0.030462976455688476, 0.014574591636657714, 0.014746623992919922, 0.015026176452636719, 0.015056960105895996, 0.015804351806640624, 0.015242239952087403, 0.015034367561340332, 0.015006719589233398, 0.014931967735290527, 0.01477734375, 0.0145797119140625, 0.014584832191467285, 0.014592000007629394, 0.014552063941955566, 0.014543871879577636, 0.014574591636657714, 0.014581760406494141, 0.014575615882873535, 0.014657535552978516, 0.014608384132385254, 0.014558208465576173, 0.014607359886169433, 0.014606335639953612, 0.01459609603881836, 0.014619647979736328, 0.014576640129089356, 0.014553088188171387, 0.014573568344116212, 0.014575615882873535, 0.014550016403198243, 0.01458892822265625, 0.014589952468872071, 0.014590975761413574, 0.014647295951843262, 0.01455513572692871, 0.014576640129089356, 0.014583807945251465, 0.014652416229248047, 0.01460223960876465, 0.014617600440979005, 0.014598143577575684, 0.0145797119140625, 0.01457254409790039, 0.014576640129089356, 0.014589952468872071, 0.01458790397644043, 0.014652416229248047, 0.014550016403198243, 0.01457049560546875, 0.01458892822265625, 0.014541824340820313, 0.0145797119140625, 0.0145633602142334, 0.014960607528686524, 0.014947327613830566, 0.014850111961364746, 0.014465984344482421, 0.014386176109313965, 0.014425087928771972, 0.014609408378601075, 0.014697471618652343, 0.014664704322814942, 0.030324735641479493, 0.014525440216064453, 0.014568448066711426, 0.014598143577575684, 0.01457049560546875, 0.014592000007629394, 0.014541824340820313, 0.014660639762878418, 0.014574560165405273, 0.014515199661254884, 0.014586879730224609, 0.014577664375305176, 0.01461350440979004, 0.014574591636657714, 0.01455513572692871, 0.014520319938659668, 0.014518272399902344, 
0.01457151985168457, 0.014554112434387208, 0.01456332778930664, 0.014540800094604492, 0.01457049560546875, 0.01456332778930664, 0.014504960060119629, 0.014553088188171387, 0.014619647979736328, 0.014583807945251465, 0.014557184219360352, 0.014566399574279786, 0.014529536247253418, 0.014600192070007324, 0.014612480163574219, 0.01456332778930664, 0.014575615882873535, 0.014627840042114258, 0.01467084789276123, 0.014575615882873535, 0.014560256004333496, 0.014663680076599121, 0.014564352035522461, 0.014524415969848633, 0.01454694366455078, 0.01458073616027832, 0.014548992156982422, 0.01452444839477539, 0.014602208137512207, 0.01455513572692871, 0.01456332778930664, 0.014532608032226562, 0.01461350440979004, 0.014622719764709472, 0.014517248153686524, 0.01459404754638672, 0.014558208465576173, 0.014556159973144531, 0.014548992156982422, 0.01457049560546875, 0.014508031845092773, 0.014590975761413574, 0.014540800094604492, 0.014600192070007324, 0.01469644832611084, 0.014692352294921876, 0.030276607513427735, 0.01461350440979004, 0.014553088188171387, 0.014508031845092773, 0.014564352035522461, 0.014553088188171387, 0.014565376281738282, 0.014566399574279786, 0.014537728309631348, 0.014530559539794922, 0.01459609603881836, 0.014569472312927247, 0.014567423820495605, 0.014508031845092773, 0.01455513572692871, 0.014628864288330079, 0.014535679817199706, 0.01458790397644043, 0.014557184219360352, 0.014535712242126466, 0.014557151794433594, 0.014535679817199706, 0.014534655570983887, 0.014531583786010742, 0.014545920372009278, 0.014566399574279786, 0.014538751602172852, 0.014553088188171387, 0.01456332778930664, 0.014638079643249511, 0.014538751602172852, 0.014615551948547363, 0.014623744010925293, 0.014533663749694824, 0.014525407791137696, 0.014531583786010742, 0.0145797119140625, 0.01457049560546875, 0.014547967910766601, 0.014557184219360352, 0.014577664375305176, 0.014550016403198243, 0.014560256004333496, 0.014547967910766601, 0.01447935962677002, 0.014482432365417481, 0.014364671707153321, 0.014386176109313965, 0.014722047805786133, 0.015221759796142579, 0.01508351993560791, 0.015017984390258789, 0.014990336418151855, 0.014946304321289062, 0.014996479988098145, 0.014521344184875488, 0.014540800094604492, 0.014592000007629394, 0.014558208465576173, 0.014535679817199706, 0.01457049560546875, 0.014556159973144531, 0.014526464462280274, 0.031153152465820313, 0.014898176193237305, 0.01458790397644043, 0.014568448066711426, 0.014693375587463378, 0.014625791549682618, 0.015431679725646973, 0.014708736419677734, 0.014601216316223145, 0.01455513572692871, 0.014576640129089356, 0.014567423820495605, 0.014589952468872071, 0.014566399574279786, 0.014551039695739745, 0.01480191993713379, 0.014784511566162109, 0.015605759620666505, 0.015138815879821778, 0.014937088012695313, 0.014920703887939453, 0.014586879730224609, 0.014550047874450684, 0.014574624061584473, 0.01459712028503418, 0.014562239646911621, 0.014526464462280274, 0.014525440216064453, 0.014536704063415527, 0.014520319938659668, 0.014515199661254884, 0.014625791549682618, 0.014533632278442383, 0.014556159973144531, 0.014573568344116212, 0.014589952468872071, 0.01458073616027832, 0.014565376281738282, 0.014610431671142577, 0.01459609603881836, 0.01466982364654541, 0.014590975761413574, 0.014558208465576173, 0.014566399574279786, 0.014538751602172852, 0.014564352035522461, 0.014567423820495605, 0.014530559539794922, 0.014538751602172852, 0.014535679817199706, 0.014551039695739745, 0.01457151985168457, 0.014675968170166016, 0.014567423820495605, 
0.014585856437683106, 0.014559231758117675, 0.014527487754821777, 0.014577664375305176, 0.014561280250549317, 0.014562303543090821, 0.014578687667846679, 0.014515199661254884, 0.01454086399078369, 0.03033900833129883, 0.014519295692443847, 0.014643199920654297, 0.014578687667846679, 0.014610431671142577, 0.014585856437683106, 0.014574591636657714, 0.014586879730224609, 0.01457254409790039, 0.014628864288330079, 0.01458790397644043, 0.01529856014251709, 0.014918656349182128, 0.014584832191467285, 0.014532608032226562, 0.014585920333862305, 0.014574527740478516, 0.014573568344116212, 0.014543871879577636, 0.014592063903808595, 0.01458886432647705, 0.014574591636657714, 0.01458892822265625, 0.014633983612060546, 0.014540800094604492, 0.014567423820495605, 0.014564352035522461, 0.014526464462280274, 0.014562303543090821, 0.01458073616027832, 0.014523391723632812, 0.014614527702331542, 0.014553088188171387, 0.014541824340820313, 0.014567423820495605, 0.014635007858276367, 0.014566399574279786, 0.01459404754638672, 0.014535679817199706, 0.014558208465576173, 0.014565376281738282, 0.01458073616027832, 0.014585856437683106, 0.014607359886169433, 0.014540800094604492, 0.014544896125793457, 0.014617600440979005, 0.014566399574279786, 0.01457254409790039, 0.014559231758117675, 0.014561280250549317, 0.01458073616027832, 0.014527551651000977, 0.014563263893127442, 0.014543871879577636, 0.014548992156982422, 0.014582783699035644, 0.014598143577575684, 0.014575615882873535, 0.014552063941955566, 0.014589952468872071, 0.01457254409790039, 0.014573568344116212, 0.030249984741210937, 0.014523391723632812, 0.014586943626403809, 0.014542783737182617, 0.014595104217529296, 0.014566368103027343, 0.014517248153686524, 0.014551039695739745, 0.014527487754821777, 0.014636032104492188, 0.014616576194763184, 0.014551072120666505, 0.014565343856811524, 0.014598143577575684, 0.0146626558303833, 0.014781439781188965, 0.015035391807556153, 0.014568448066711426, 0.01457254409790039, 0.014618623733520507, 0.014560256004333496, 0.014574591636657714, 0.014547967910766601, 0.014540800094604492, 0.014562303543090821, 0.014603263854980468, 0.01456332778930664, 0.014577664375305176, 0.014547967910766601, 0.01459404754638672, 0.01458790397644043, 0.014542847633361817, 0.014595071792602539, 0.01458790397644043, 0.014665792465209961, 0.014555071830749512, 0.014537728309631348, 0.014530559539794922, 0.014581760406494141, 0.014584832191467285, 0.014564352035522461, 0.014573568344116212, 0.014548992156982422, 0.014700544357299805, 0.014585856437683106, 0.014574591636657714, 0.014560256004333496, 0.014617600440979005, 0.014556159973144531, 0.014578687667846679, 0.01455513572692871, 0.01466982364654541, 0.014557184219360352, 0.014561280250549317, 0.014525440216064453, 0.014565376281738282, 0.014586879730224609, 0.0145664644241333, 0.014582719802856445, 0.014539775848388671, 0.014560256004333496, 0.01460428810119629, 0.014592000007629394, 0.030311424255371092, 0.014531583786010742, 0.014569472312927247, 0.014636032104492188, 0.014564352035522461, 0.014538751602172852, 0.01458892822265625, 0.014531583786010742, 0.014553088188171387, 0.014525440216064453, 0.014530624389648437, 0.014523327827453613, 0.014575615882873535, 0.014558208465576173, 0.014516223907470703, 0.014637056350708008, 0.014518272399902344, 0.014577664375305176, 0.01495961570739746, 0.014617600440979005, 0.014619711875915528, 0.014569408416748047, 0.014639103889465332, 0.014576640129089356, 0.014560256004333496, 0.014568448066711426, 0.0145797119140625, 
0.014550016403198243, 0.014577664375305176, 0.014582783699035644, 0.014611488342285156, 0.014558176040649413, 0.014578687667846679, 0.014569472312927247, 0.014663680076599121, 0.014584832191467285, 0.014565376281738282, 0.01456332778930664, 0.014550016403198243, 0.014538751602172852, 0.014530559539794922, 0.014534655570983887, 0.014559231758117675, 0.01455513572692871, 0.014628864288330079, 0.014585856437683106, 0.014578687667846679, 0.014625791549682618, 0.014540800094604492, 0.014582783699035644, 0.014582783699035644, 0.014595071792602539, 0.014568448066711426, 0.014524415969848633, 0.014545920372009278, 0.014603263854980468, 0.01457049560546875, 0.014561280250549317, 0.014623744010925293, 0.014557184219360352, 0.014583807945251465, 0.014567487716674805, 0.014582719802856445, 0.03019980812072754, 0.014551039695739745, 0.01457151985168457, 0.014541824340820313, 0.014585856437683106, 0.01539891242980957, 0.015851519584655763, 0.014816255569458007, 0.014586879730224609, 0.014638079643249511, 0.014639103889465332, 0.014531583786010742, 0.014663680076599121, 0.014520319938659668, 0.014543871879577636, 0.014617600440979005, 0.014495743751525878, 0.01458073616027832, 0.014540800094604492, 0.014554112434387208, 0.014567423820495605, 0.01455513572692871, 0.014504960060119629, 0.01479475212097168, 0.015308799743652344, 0.014611455917358398, 0.014626815795898437, 0.014567423820495605, 0.014535679817199706, 0.01460223960876465, 0.01458892822265625, 0.014566399574279786, 0.014566399574279786, 0.014557184219360352, 0.014724096298217774, 0.014551039695739745, 0.014535712242126466, 0.014597087860107423, 0.014573599815368652, 0.014573535919189452, 0.014564352035522461, 0.01457049560546875, 0.014740480422973632, 0.014905344009399414, 0.014684160232543946, 0.014547967910766601, 0.014768128395080566, 0.015746047973632812, 0.014673919677734374, 0.014562303543090821, 0.014558208465576173, 0.014558208465576173, 0.01455513572692871, 0.014536704063415527, 0.01458790397644043, 0.014529536247253418, 0.014589952468872071, 0.014554112434387208, 0.014553088188171387, 0.014586879730224609, 0.014547967910766601, 0.014573599815368652, 0.014557151794433594, 0.03037593650817871, 0.014526464462280274, 0.014798848152160645, 0.014523391723632812, 0.01459404754638672, 0.014567423820495605, 0.014531583786010742, 0.014575615882873535, 0.014536704063415527, 0.014520319938659668, 0.014532608032226562, 0.014419967651367188, 0.014387200355529785, 0.01439027214050293, 0.014443519592285157, 0.014552063941955566, 0.014498815536499024, 0.014529536247253418, 0.014651391983032227, 0.014534655570983887, 0.014548992156982422, 0.014628864288330079, 0.014568448066711426, 0.01458790397644043, 0.01457151985168457, 0.014544896125793457, 0.014608384132385254, 0.014728192329406739, 0.014638079643249511, 0.014592000007629394, 0.014573568344116212, 0.014636032104492188, 0.014647295951843262, 0.014457856178283691, 0.014402560234069824, 0.014632960319519044, 0.014524415969848633, 0.01457049560546875, 0.01459712028503418, 0.014574591636657714, 0.014561280250549317, 0.01458790397644043, 0.014598143577575684, 0.014618623733520507, 0.014533632278442383, 0.014523391723632812, 0.014573568344116212, 0.01456332778930664, 0.01457151985168457, 0.01458073616027832, 0.014542847633361817, 0.014547967910766601, 0.014568448066711426, 0.014557184219360352, 0.014553088188171387, 0.014541824340820313, 0.014518272399902344, 0.014573568344116212, 0.014523455619812012, 0.014740415573120117, 0.014707776069641113, 0.014453696250915528, 
0.014403583526611329]",tokens/s,67.13379997376835,,,,,,,, -4bit-awq-gemm-eager,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,meta-llama/Llama-2-13b-hf,meta-llama/Llama-2-13b-hf,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.0,,0.30.1,,,,1.19.2,,,,0.11.1,,,,,,,,,,,,,,,,,,,,,,,,,,,MB,3001.430016,9299.296256,0.0,8652.849152,8210.185216,s,10,11.016480834960937,1.1016480834960938,0.0021733642538701277,1.1017757568359374,1.103871484375,1.1043791625976562,1.1047853051757812,"[1.1037586669921875, 1.1048868408203125, 1.09813720703125, 1.0993441162109374, 1.10018701171875, 1.0994322509765626, 1.101841796875, 1.101709716796875, 1.1036807861328124, 1.10350244140625]",tokens/s,232.3791089324831,kWh,1.2973552644252778e-05,7.108400666602392e-06,6.117329893860212e-05,8.12552522494573e-05,tokens/kWh,3150565.568537876,MB,3002.59328,9299.296256,0.0,8652.849152,8503.104,s,10,641.81039453125,64.18103945312501,0.013566927048965604,64.180421875,64.20006953125001,64.202245703125,64.20398664062499,"[64.180421875, 64.1733046875, 64.15237109375, 64.17387890625, 64.17909765625, 64.204421875, 64.1840859375, 64.180421875, 64.1828046875, 64.1995859375]",tokens/s,0.9815983121621521,kWh,0.0007575022853910923,0.00041517883106595037,0.0035625019055547965,0.004735183022011839,tokens/kWh,13304.659969242997,,s,629,650.7052465209968,1.0345075461383086,0.13087387967036276,1.0186967163085938,1.0195488891601563,1.0197336059570312,2.11880048828125,"[1.0175508422851562, 1.0181458129882812, 1.0181683349609374, 1.0183413696289063, 1.0181898193359376, 1.0181119995117187, 1.0183444213867188, 1.018092529296875, 1.0177116088867189, 1.0180689697265626, 1.0175825805664063, 1.0178242797851562, 1.0176737060546874, 1.0177935180664062, 1.0175949096679688, 1.017802734375, 1.018124267578125, 1.01833935546875, 1.0179113159179687, 1.0183004150390624, 1.0186444702148438, 1.0186260375976564, 1.0186373291015625, 1.0180352172851563, 1.0180280151367187, 1.0180382690429688, 1.019072509765625, 1.0186967163085938, 1.0191267700195312, 1.0186731567382812, 1.0187960205078126, 1.0190377197265625, 1.0188656616210938, 1.01879296875, 1.0181878051757813, 1.0191165161132814, 1.0185533447265624, 1.0191687622070313, 1.0184427490234376, 1.0186332397460938, 1.0186659545898438, 1.01909912109375, 1.0189649658203126, 1.0186096801757814, 1.0187254028320312, 1.0194135131835937, 1.01941455078125, 1.0202962036132812, 1.019704345703125, 1.0197586059570312, 1.0201170043945313, 1.0203515014648437, 1.0195978393554688, 1.020099609375, 1.0194329833984375, 1.020137451171875, 1.0197483520507813, 1.0200903930664063, 1.01933056640625, 1.01880419921875, 1.019261962890625, 1.0196900024414062, 2.1243330078125, 1.0192138061523437, 1.0183055419921876, 1.0189486083984376, 1.0191370239257813, 1.0183291015625, 1.01895166015625, 1.0190971069335937, 1.019052001953125, 1.0191104125976562, 1.018608642578125, 1.0190632934570312, 1.0185861206054687, 1.0184775390625, 1.0194800415039063, 1.0183649291992187, 1.0192107543945312, 1.0193479614257812, 1.01952001953125, 1.0189434814453124, 1.01899365234375, 1.019052001953125, 
1.0188851318359375, 1.0190868530273438, 1.0185226440429687, 1.0191022338867188, 1.0190540771484375, 1.0195435791015626, 1.0188001098632813, 1.0175877075195312, 1.01774951171875, 1.017987060546875, 1.0180618286132812, 1.018218505859375, 1.0183157958984375, 1.0183516235351562, 1.018313720703125, 1.0178468017578124, 1.0180331420898439, 1.018102783203125, 1.018229736328125, 1.0181058349609375, 1.0183147583007812, 1.018461181640625, 1.0184550170898437, 1.017839599609375, 1.0192803955078125, 1.0181570434570313, 1.01834033203125, 1.018218505859375, 1.0189649658203126, 1.0185697021484375, 1.01888818359375, 1.0182881469726563, 1.0184069213867188, 1.0182307739257812, 1.0182891235351563, 1.0182410278320313, 1.0186854248046875, 1.0180198364257813, 1.0185799560546875, 1.0184140625, 1.0185236206054689, 2.1177333984375, 1.01767578125, 1.0179533081054688, 1.01821337890625, 1.0182543334960938, 1.0181304321289062, 1.0179686279296876, 1.0175242309570312, 1.0182492065429687, 1.0181949462890625, 1.0180792236328124, 1.017970703125, 1.0186107177734376, 1.0181621704101562, 1.0177321166992188, 1.017511962890625, 1.0179542846679688, 1.0180269775390625, 1.0183741455078126, 1.0180157470703124, 1.0179850463867188, 1.0181283569335937, 1.0180433959960937, 1.0179522705078126, 1.0176942138671874, 1.0177587280273437, 1.0178385620117187, 1.0181632080078125, 1.018629150390625, 1.0183321533203125, 1.0186834106445313, 1.01846630859375, 1.018461181640625, 1.018250244140625, 1.01829638671875, 1.01803515625, 1.01867724609375, 1.0185482177734375, 1.0195558471679687, 1.0182451171875, 1.0182113037109375, 1.0181283569335937, 1.0183987426757812, 1.0187427978515624, 1.0183946533203125, 1.0181652221679687, 1.018517578125, 1.0183423461914063, 1.0193663940429687, 1.0184171752929687, 1.0185789184570313, 1.0189977416992186, 1.0188840942382813, 1.0188493041992188, 1.01865673828125, 1.0181990356445312, 1.018498046875, 1.0186240234375, 1.0186342163085937, 1.0180474853515624, 1.0181181640625, 1.0185072631835939, 1.0182072143554688, 2.11782666015625, 1.0176593627929686, 1.0179297485351562, 1.0178068237304687, 1.0177402954101562, 1.0177413330078124, 1.0181898193359376, 1.0177003784179688, 1.0179614868164062, 1.0176676025390625, 1.0177188110351563, 1.0182338256835937, 1.0195701904296874, 1.01842431640625, 1.0179614868164062, 1.0182686767578124, 1.0188646240234376, 1.0179307250976561, 1.0180372314453126, 1.0181652221679687, 1.01809765625, 1.0183987426757812, 1.0185236206054689, 1.0191226806640625, 1.0184785766601563, 1.0190673828125, 1.0196029663085937, 1.0190448608398437, 1.0190960693359374, 1.0191595458984375, 1.0192824096679687, 1.0184365844726562, 1.0186434326171876, 1.0191533813476563, 1.0193930053710938, 1.0196664428710938, 1.019341796875, 1.01850830078125, 1.0184909057617189, 1.018365966796875, 1.0184058837890626, 1.019093994140625, 1.0190244140625, 1.0187202758789062, 1.0190602416992187, 1.0189383544921875, 1.0194821166992187, 1.0181652221679687, 1.0186793212890626, 1.0183731079101563, 1.0187816772460938, 1.01879296875, 1.0193489990234375, 1.018956787109375, 1.019167724609375, 1.0183106689453125, 1.0189916381835937, 1.0191524047851563, 1.0189639892578124, 1.0186598510742189, 1.0192609252929687, 1.0185523071289062, 1.0191390991210938, 2.1198623046875, 1.0184703979492187, 1.018693603515625, 1.0184253540039063, 1.018598388671875, 1.017944091796875, 1.0177741088867187, 1.0175641479492188, 1.0179666748046876, 1.017946044921875, 1.0179491577148438, 1.0177464599609376, 1.0178068237304687, 1.0175989990234375, 1.0180106201171875, 1.0180372314453126, 
1.0184130859375, 1.0181887817382813, 1.0183567504882813, 1.0175958862304688, 1.0183536376953124, 1.0183270263671875, 1.0187335815429688, 1.0194104614257813, 1.01911962890625, 1.0193428344726563, 1.0188841552734376, 1.0194349975585937, 1.0187448120117188, 1.018265625, 1.018481689453125, 1.0187151489257813, 1.0190418090820312, 1.0186414184570312, 1.0186311645507813, 1.019009033203125, 1.0189557495117187, 1.01874072265625, 1.0184427490234376, 1.01905615234375, 1.0199142456054688, 1.0191533813476563, 1.0189649658203126, 1.019109375, 1.0186588134765624, 1.0195548095703124, 1.0190469360351562, 1.0197084350585937, 1.0191718139648438, 1.019072509765625, 1.0192210083007813, 1.0191072998046875, 1.0190694580078126, 1.0184151000976562, 1.0187469482421876, 1.0190745239257812, 1.0195794067382813, 1.0200555419921875, 1.0184437866210938, 1.0190847778320313, 1.01943505859375, 1.0188318481445313, 1.0186168212890625, 2.11917919921875, 1.0183997192382812, 1.02013232421875, 1.0196019287109375, 1.018566650390625, 1.018514404296875, 1.0180843505859376, 1.0178980102539064, 1.018302490234375, 1.0179512329101563, 1.0179932250976562, 1.017970703125, 1.0190079956054687, 1.0196439208984376, 1.0198763427734374, 1.0188820190429688, 1.0180269775390625, 1.0179645385742186, 1.0182420654296875, 1.0186547241210937, 1.019009033203125, 1.0187325439453125, 1.0195947265625, 1.01844482421875, 1.0194401245117188, 1.0188646240234376, 1.0188635864257813, 1.0188073120117187, 1.0185891723632812, 1.0188493041992188, 1.0184304809570313, 1.0185359497070312, 1.0193643798828125, 1.0190960693359374, 1.0195015869140625, 1.0195343627929687, 1.0197268676757814, 1.0195292358398438, 1.0212003784179688, 1.0190858154296876, 1.0195117797851563, 1.0193694458007811, 1.0198139038085938, 1.0197166137695313, 1.0198231201171875, 1.0193295288085937, 1.0196602783203126, 1.01909912109375, 1.0193510131835937, 1.0191390991210938, 1.019315185546875, 1.01920361328125, 1.0198876342773437, 1.019504638671875, 1.0197114868164063, 1.0195404663085939, 1.0196654052734375, 1.019725830078125, 1.0191769409179687, 1.019283447265625, 1.0189865112304688, 1.0192742309570313, 1.0197012329101562, 2.120995849609375, 1.0189475708007814, 1.0187919311523437, 1.0181539916992188, 1.0187530517578125, 1.019020263671875, 1.0189700927734375, 1.0192455444335937, 1.0192025756835938, 1.018956787109375, 1.0191503295898436, 1.0182103271484375, 1.0184130859375, 1.01810791015625, 1.0181693725585939, 1.018672119140625, 1.0189906005859375, 1.0192138061523437, 1.0188738403320312, 1.0185830688476563, 1.0189445190429687, 1.0188543701171875, 1.0183229370117188, 1.0181427001953125, 1.0185963745117188, 1.0190100708007812, 1.0193192749023436, 1.0195661010742187, 1.0191524047851563, 1.0190028686523438, 1.0189066162109375, 1.018893310546875, 1.0182830200195312, 1.01808740234375, 1.01798095703125, 1.018049560546875, 1.0186854248046875, 1.0191216430664063, 1.0186157836914063, 1.019087890625, 1.019009033203125, 1.018883056640625, 1.0186516723632812, 1.018503173828125, 1.018767333984375, 1.0181427001953125, 1.0188851318359375, 1.0190079956054687, 1.0191380615234376, 1.0195916748046876, 1.018988525390625, 1.0186444702148438, 1.0186322021484375, 1.0188236694335937, 1.0185963745117188, 1.0187489013671875, 1.0191278076171875, 1.0186588134765624, 1.0191104125976562, 1.0196490478515625, 1.019409423828125, 1.0184796142578125, 1.0184622192382813, 2.120321044921875, 1.018513427734375, 1.0190059814453125, 1.0185799560546875, 1.0189691162109376, 1.0192957153320312, 1.0182420654296875, 1.0181099243164062, 
1.018471435546875, 1.0179880981445313, 1.01793994140625, 1.0180792236328124, 1.0191585083007813, 1.0183577880859376, 1.0186967163085938, 1.019025390625, 1.018302490234375, 1.0182195434570311, 1.0180986938476562, 1.0181119995117187, 1.018119140625, 1.0185779418945313, 1.0192568359375, 1.01869775390625, 1.0190448608398437, 1.0191011962890626, 1.0185687255859375, 1.0183587646484376, 1.0186629028320313, 1.018440673828125, 1.0184990844726562, 1.0189854736328126, 1.0192864990234376, 1.0188861694335938, 1.0186168212890625, 1.0193828125, 1.019452392578125, 1.018482666015625, 1.0182092895507813, 1.0185973510742188, 1.0184847412109375, 1.0187908935546874, 1.0185379638671874, 1.0185676879882812, 1.0197432250976564, 1.0195404663085939, 1.0186229858398437, 1.0183670043945312, 1.01861376953125, 1.0180689697265626, 1.019240478515625, 1.0194298706054687, 1.0187018432617188, 1.0183311157226562, 1.0196664428710938, 1.01880322265625, 1.019241455078125, 1.01879296875, 1.0191953735351562, 1.0192783203125, 1.0192066650390625, 1.0193878784179689, 1.0188646240234376, 2.12357421875, 1.019025390625, 1.0193284912109375, 1.0185912475585936, 1.0186680297851562, 1.0185154418945312, 1.0190069580078125, 1.0191072998046875, 1.0187960205078126, 1.0190796508789062, 1.0191022338867188, 1.01871923828125, 1.0191728515625, 1.0182952880859375, 1.0185615234375, 1.0188533935546875, 1.01926806640625, 1.0197380981445312, 1.0183946533203125, 1.019536376953125, 1.0187807006835938, 1.0187202758789062, 1.0181488647460937, 1.01829736328125, 1.0191585083007813, 1.0188114013671874, 1.0189260864257812, 1.0182645874023437, 1.0184970092773438, 1.01850830078125, 1.0195537719726562, 1.0186332397460938, 1.018650634765625, 1.018513427734375, 1.0189107055664062, 1.0193633422851562, 1.0195169067382812, 1.0182993774414062, 1.0184263916015626, 1.019188232421875, 1.0184898681640624, 1.0184581298828126, 1.018377197265625, 1.0180526123046876, 1.018682373046875, 1.019114501953125, 1.01926708984375, 1.018660888671875, 1.0182830200195312, 1.0192527465820314, 1.0191104125976562, 1.0186793212890626, 1.0189076538085937, 1.0181212158203126, 1.0184489135742187, 1.0176378784179687, 1.0188114013671874, 1.01772900390625, 1.0189046020507813, 1.0190069580078125, 1.0186148071289063, 1.0183670043945312, 1.019093017578125, 2.122827880859375, 1.0188871459960938, 1.0193141479492187, 1.0191267700195312, 1.0201989135742187, 1.018756103515625, 1.0183936157226563, 1.0180618286132812, 1.0186639404296876, 1.018534912109375, 1.0186199340820312, 1.0191769409179687, 1.0182215576171876, 1.0179891357421875, 1.0194401245117188, 1.019241455078125, 1.019087890625, 1.0191400756835938, 1.0193930053710938, 1.0193981323242187, 1.0194821166992187, 1.0188114013671874, 1.0183598022460938, 1.0190151977539061, 1.0194656982421875, 1.0182840576171874, 1.0186342163085937, 1.0182359008789061, 1.0182963256835937, 1.018186767578125, 1.0188738403320312, 1.0185728149414062, 1.0182052001953126, 1.018392578125, 1.019826171875, 1.018545166015625, 1.0191267700195312, 1.0181467895507812, 1.0188943481445312, 1.0193899536132813, 1.0195814208984375, 1.0194718627929686, 1.0192506713867187, 1.0194667358398437, 1.0197903442382812, 1.0192322387695312, 1.0195476684570313, 1.0191769409179687, 1.0188554077148437, 1.0196582641601561, 1.0197176513671875, 1.019526123046875, 1.0196561889648437, 1.0197647094726563, 1.0200924072265625, 1.0190786743164062, 1.019673583984375, 1.0185328369140625, 1.0187786254882814, 1.0190172119140626, 1.019193359375, 1.0191861572265626, 
1.0196377563476562]",tokens/s,0.966643504663527,,,,,,,, -4bit-awq-gemm-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,x,x,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 304, in hf_raise_for_status - response.raise_for_status() - File ""/usr/local/lib/python3.10/dist-packages/requests/models.py"", line 1024, in raise_for_status - raise HTTPError(http_error_msg, response=self) -requests.exceptions.HTTPError: 404 Client Error: Not Found for url: https://huggingface.co/x/resolve/main/config.json - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1221, in hf_hub_download - return _hf_hub_download_to_cache_dir( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1325, in _hf_hub_download_to_cache_dir - _raise_on_head_call_error(head_call_error, force_download, local_files_only) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1823, in _raise_on_head_call_error - raise head_call_error - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1722, in _get_metadata_or_catch_error - metadata = get_hf_file_metadata(url=url, proxies=proxies, timeout=etag_timeout, headers=headers) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1645, in get_hf_file_metadata - r = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 372, in _request_wrapper - response = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 396, in _request_wrapper - hf_raise_for_status(response) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status - raise RepositoryNotFoundError(message, response) from e 
-huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-669492a4-40dc4ff350bca5051c2bcf8d;ca2fda43-1e17-410e-9f7b-07544792c4ae) - -Repository Not Found for url: https://huggingface.co/x/resolve/main/config.json. -Please make sure you specified the correct `repo_id` and `repo_type`. -If you are trying to access a private or gated repo, make sure you are authenticated. - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 425, in cached_file - raise EnvironmentError( -OSError: x is not a local folder and is not a valid model identifier listed on 'https://huggingface.co/models' -If this is a private repository, make sure to pass a token having permission to this repo either by logging in with `huggingface-cli login` or by passing `token=` - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemm-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,/,/,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File 
""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 373, in cached_file - raise EnvironmentError( -OSError: / does not appear to have a file named config.json. Checkout 'https://huggingface.co///tree/None' for available files. - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemm-eager,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,togethercomputer/RedPajama-INCITE-Base-7B-v0.1,togethercomputer/RedPajama-INCITE-Base-7B-v0.1,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.1,,0.30.1,,,,1.19.2,,,,0.11.1,,,,,,,,,,,,,,,,,,,,,,,,,,,MB,1392.545792,6237.454336,0.0,5591.007232,5346.100736,s,10,5.702087280273438,0.5702087280273438,0.0032758876463017,0.5691306762695312,0.5706701721191406,0.5753404876708984,0.5790767401123047,"[0.5800108032226563, 0.5690457763671875, 0.56963232421875, 0.5686717529296875, 0.5691049194335938, 0.5689085083007812, 0.5691564331054687, 0.5690445556640625, 0.5692649536132812, 0.5692472534179688]",tokens/s,448.9584031546493,kWh,6.730608862859232e-06,3.6874586645353704e-06,3.2089053449001976e-05,4.2507120976396576e-05,tokens/kWh,6022520.32411586,MB,1392.545792,6237.454336,0.0,5591.007232,5555.34336,s,10,329.95942187500003,32.995942187500006,0.00677698481578348,32.996611328125,33.003900390625,33.0066337890625,33.008820507812494,"[32.98758984375, 32.99876953125, 32.99800390625, 33.00329296875, 32.98576171875, 32.99270703125, 32.99606640625, 32.99070703125, 32.99715625, 
33.0093671875]",tokens/s,1.9093256874436688,kWh,0.0003894760356291577,0.00021346548665747832,0.001836931469543996,0.002439872991830632,tokens/kWh,25821.016180326347,,s,629,334.5575883178707,0.5318880577390637,0.0676032283856282,0.5236981811523438,0.5243047729492187,0.52448828125,1.092699482421875,"[0.5232855224609375, 0.5235804443359375, 0.5231001586914062, 0.5230325927734375, 0.52342578125, 0.5232117919921875, 0.5233387451171875, 0.52324658203125, 0.523146240234375, 0.5234544677734375, 0.5234892578125, 0.523303955078125, 0.5232906494140624, 0.52303564453125, 0.5235220336914063, 0.5233828125, 0.5234381103515625, 0.5232639770507812, 0.523114501953125, 0.5234656982421875, 0.5235753173828125, 0.5235343627929687, 0.5234656982421875, 0.52356298828125, 0.5236715698242187, 0.523630615234375, 0.523747314453125, 0.5236019287109375, 0.5239511108398438, 0.5234032592773438, 0.5237637329101562, 0.5235281982421875, 0.5234298706054688, 0.5238978271484375, 0.523704345703125, 0.5234800415039063, 0.523568115234375, 0.5234032592773438, 0.5236162719726563, 0.523826171875, 0.5237114868164062, 0.5236602783203125, 0.5236776733398437, 0.5234022827148438, 0.5235650024414062, 0.5239142456054687, 0.5236613159179687, 0.5238640747070312, 0.5236664428710938, 0.5236551513671875, 0.5237268676757812, 0.523779052734375, 0.5245716552734375, 0.5238753051757813, 0.5239859008789063, 0.5239797973632813, 0.52379443359375, 0.5241487426757813, 0.5243228149414062, 0.5239275512695313, 0.5239859008789063, 0.5239849243164062, 1.092547607421875, 0.5234862060546875, 0.5234585571289062, 0.523430908203125, 0.523419677734375, 0.5231114501953125, 0.52326806640625, 0.5233162231445313, 0.5237933959960938, 0.5237329711914063, 0.5236541137695313, 0.5234268188476563, 0.5234974975585938, 0.5239500732421875, 0.5236981811523438, 0.5235404663085937, 0.5233397827148437, 0.5233663940429687, 0.5233930053710938, 0.523251708984375, 0.5238825073242187, 0.5233387451171875, 0.52321484375, 0.5236510620117187, 0.5232701416015625, 0.523261962890625, 0.5234851684570313, 0.52385791015625, 0.5233141479492187, 0.5233141479492187, 0.5237933959960938, 0.5236162719726563, 0.5235394287109375, 0.5236039428710938, 0.5236265258789062, 0.523404296875, 0.524432373046875, 0.5245040893554688, 0.5246044311523438, 0.52440576171875, 0.5234370727539063, 0.523509765625, 0.5242644653320313, 0.5239736328125, 0.5239613647460938, 0.5238251342773438, 0.5239859008789063, 0.5239480590820312, 0.5240678100585937, 0.5244600219726563, 0.5235599365234375, 0.5235322875976562, 0.5239992065429687, 0.5245665283203125, 0.5246177368164062, 0.5242409057617188, 0.5239818115234375, 0.5243668212890625, 0.5246289672851563, 0.5246351318359375, 0.524642333984375, 0.5243975830078125, 0.5244672241210937, 1.093401611328125, 0.5240750122070312, 0.5239552001953125, 0.52360498046875, 0.523466796875, 0.5243002319335938, 0.5241241455078125, 0.5247160034179688, 0.5239346923828125, 0.5236541137695313, 0.5237217407226562, 0.5237616577148437, 0.5244815063476562, 0.524211181640625, 0.5238456420898437, 0.5239695434570313, 0.5239244995117187, 0.5236213989257813, 0.5235159301757812, 0.5238743286132812, 0.5235824584960938, 0.5235179443359375, 0.52333056640625, 0.5233848266601563, 0.5233059692382812, 0.5234913330078125, 0.5241917724609375, 0.5241712646484376, 0.523509765625, 0.523978759765625, 0.524326904296875, 0.5240657958984375, 0.5243740234375, 0.5234913330078125, 0.523325439453125, 0.5236459350585938, 0.5237012329101562, 0.5237268676757812, 0.5233213500976562, 0.5239797973632813, 0.52371044921875, 0.524242919921875, 
0.523598876953125, 0.5236613159179687, 0.5237073974609375, 0.523826171875, 0.5238538208007812, 0.5235712280273438, 0.523441162109375, 0.5235732421875, 0.523472900390625, 0.5238896484375, 0.5235292358398438, 0.5239623413085938, 0.5240350952148437, 0.5237391357421874, 0.5234851684570313, 0.5235486450195312, 0.5237903442382813, 0.5235435791015625, 0.5234749145507812, 0.5236244506835938, 0.5238026123046875, 1.0924083251953125, 0.5236694946289062, 0.52347802734375, 0.5234851684570313, 0.5235445556640625, 0.5234339599609374, 0.5239521484375, 0.52405859375, 0.5237380981445312, 0.5242726440429688, 0.5238128662109375, 0.5241282348632812, 0.5237278442382812, 0.524042236328125, 0.523821044921875, 0.523958251953125, 0.5236961059570312, 0.5237063598632813, 0.5236746215820313, 0.5236930541992187, 0.5239552001953125, 0.5237145385742188, 0.5233878784179687, 0.5233551635742187, 0.5233059692382812, 0.5233325805664063, 0.523447265625, 0.5242828979492188, 0.523799560546875, 0.5240350952148437, 0.5233172607421875, 0.523683837890625, 0.5236930541992187, 0.52356298828125, 0.5234247436523437, 0.5236224365234375, 0.5233048706054687, 0.5234401245117187, 0.5237933959960938, 0.5242378540039062, 0.5238743286132812, 0.5239449462890625, 0.5243023071289062, 0.5239142456054687, 0.5239797973632813, 0.5242982177734375, 0.5239521484375, 0.5240166625976562, 0.5240678100585937, 0.5244170532226563, 0.5238589477539063, 0.5242276000976562, 0.5240964965820313, 0.523994140625, 0.5239715576171875, 0.5243576049804688, 0.5245450439453125, 0.5247354736328125, 0.5245726928710938, 0.5245236206054688, 0.5238475952148437, 0.523821044921875, 0.5236725463867188, 1.0934261474609375, 0.52333056640625, 0.5229783325195313, 0.5233264770507813, 0.523093994140625, 0.5231533813476562, 0.5235353393554687, 0.5235691528320312, 0.5232783203125, 0.5233970947265625, 0.52322509765625, 0.5232906494140624, 0.5234452514648438, 0.5239552001953125, 0.5232609252929687, 0.5235496826171875, 0.5232435302734375, 0.5234022216796875, 0.5231646728515625, 0.52375244140625, 0.5232578735351563, 0.5234319458007812, 0.52356298828125, 0.5234237670898437, 0.523451416015625, 0.5234012451171876, 0.5241415405273437, 0.5235762939453125, 0.523673583984375, 0.5237974853515625, 0.52398388671875, 0.52423681640625, 0.5239766845703125, 0.5234073486328125, 0.5233756103515625, 0.5234616088867188, 0.5233121337890625, 0.5233397827148437, 0.5239388427734375, 0.5235742797851562, 0.5236398315429688, 0.52368896484375, 0.5234555053710938, 0.5237698364257812, 0.5233592529296875, 0.523894775390625, 0.5234401245117187, 0.5235117797851563, 0.523404296875, 0.5233878784179687, 0.5234124755859375, 0.5239408569335937, 0.52353125, 0.5234237670898437, 0.5235128784179688, 0.523874267578125, 0.5239234619140625, 0.5243146362304687, 0.5239275512695313, 0.5238773803710938, 0.5242818603515625, 0.523810791015625, 0.523821044921875, 1.0930257568359374, 0.5232107543945312, 0.52316162109375, 0.5232630004882812, 0.5232639770507812, 0.5235537719726563, 0.5244201049804688, 0.5233796997070312, 0.5232916259765625, 0.523272216796875, 0.52347802734375, 0.5233551635742187, 0.5237022705078125, 0.523598876953125, 0.5238466796875, 0.524136474609375, 0.5237606201171875, 0.5238057250976562, 0.523778076171875, 0.5240791015625, 0.5236336669921875, 0.5233408203125, 0.5235425415039062, 0.5236541137695313, 0.523335693359375, 0.5239746704101562, 0.5234063110351562, 0.5233428344726563, 0.523420654296875, 0.5235691528320312, 0.523378662109375, 0.5238292236328125, 0.523472900390625, 0.5234135131835937, 0.5238937377929688, 
0.52394189453125, 0.523778076171875, 0.52375244140625, 0.5239193725585938, 0.5237340087890625, 0.5233941040039063, 0.5235650024414062, 0.5234503784179687, 0.5236265258789062, 0.5240217895507813, 0.52352001953125, 0.5239121704101563, 0.5239193725585938, 0.5238906860351562, 0.5238906860351562, 0.524147705078125, 0.5241671752929687, 0.5243658447265624, 0.5237421875, 0.5238804321289062, 0.5237001953125, 0.523598876953125, 0.5240176391601562, 0.5237852172851563, 0.52413232421875, 0.5235875854492188, 0.52364697265625, 0.52398388671875, 1.0928189697265625, 0.5236561889648438, 0.5236930541992187, 0.5234370727539063, 0.5235292358398438, 0.523916259765625, 0.5238763427734375, 0.5241989135742188, 0.52381591796875, 0.5243770751953125, 0.5241866455078125, 0.5241456909179687, 0.5243525390625, 0.5239951171875, 0.5239971923828125, 0.524368896484375, 0.5239060668945312, 0.524020751953125, 0.5241405639648438, 0.5238804321289062, 0.52356201171875, 0.5234821166992187, 0.5237688598632813, 0.5234370727539063, 0.5234564819335937, 0.523536376953125, 0.5236070556640625, 0.5234503784179687, 0.5238599853515625, 0.5234083862304687, 0.5233295288085937, 0.523884521484375, 0.5232977905273437, 0.5238272094726563, 0.5236193237304687, 0.5233909912109375, 0.5233141479492187, 0.5239879760742188, 0.5235762939453125, 0.5241558837890625, 0.5236766967773437, 0.5239725952148437, 0.5236613159179687, 0.52408935546875, 0.5242327270507813, 0.5238917236328124, 0.5234862060546875, 0.5241200561523438, 0.5234708251953125, 0.523630615234375, 0.5242705688476562, 0.5235712280273438, 0.5233428344726563, 0.52341455078125, 0.5233182983398438, 0.52377294921875, 0.5237073974609375, 0.5236223754882813, 0.5232424926757813, 0.5235609741210937, 0.523462646484375, 0.523536376953125, 0.5239029541015625, 1.092758544921875, 0.5232230224609375, 0.5236244506835938, 0.5231585083007813, 0.5233715209960937, 0.5235814208984375, 0.5233612670898438, 0.523536376953125, 0.523325439453125, 0.5234390869140625, 0.52335205078125, 0.52364697265625, 0.5233899536132812, 0.5232977905273437, 0.5235599365234375, 0.5235517578125, 0.5233920288085937, 0.52366845703125, 0.5234319458007812, 0.523514892578125, 0.5232752685546875, 0.5237012329101562, 0.5237196655273437, 0.5233766479492188, 0.5236971435546875, 0.52352099609375, 0.5235537719726563, 0.523345947265625, 0.5235455932617188, 0.52406884765625, 0.5242286376953125, 0.524389404296875, 0.5235241088867187, 0.5232578735351563, 0.5233694458007813, 0.523747314453125, 0.5242389526367187, 0.523600830078125, 0.5237022705078125, 0.5236387939453125, 0.5238128662109375, 0.5234390869140625, 0.5237319946289063, 0.5238507690429688, 0.5233745727539062, 0.5234442138671875, 0.5238927612304688, 0.523767822265625, 0.523869140625, 0.523968505859375, 0.5244630737304687, 0.5236377563476563, 0.52392138671875, 0.5233858642578125, 0.5234381103515625, 0.5237340087890625, 0.5240494384765625, 0.52394189453125, 0.5239357299804688, 0.5239705810546875, 0.5242203979492187, 0.5239931030273437, 0.5242460327148437, 1.093507080078125, 0.523989013671875, 0.5240360717773438, 0.5239337158203125, 0.5234032592773438, 0.5238927612304688, 0.5234606323242188, 0.5235845336914062, 0.5233848266601563, 0.5233233642578125, 0.5231104125976562, 0.5241026611328125, 0.5236070556640625, 0.523736083984375, 0.523272216796875, 0.5247611083984375, 0.5243330688476563, 0.5240186767578126, 0.524157958984375, 0.5240443115234374, 0.523978759765625, 0.5238374633789062, 0.5240657958984375, 0.5238599853515625, 0.5237462768554687, 0.5236694946289062, 0.523599853515625, 0.52335205078125, 
0.5244129028320312, 0.5246812133789063, 0.524706787109375, 0.524564453125, 0.5243494262695313, 0.524263427734375, 0.5238323364257812, 0.523388916015625, 0.5234790649414063, 0.5235701904296876, 0.5234329833984375, 0.52348828125, 0.5235302124023438, 0.5233008422851563, 0.5235025634765625, 0.52366748046875, 0.523378662109375, 0.5235241088867187, 0.523799560546875, 0.5239378051757813, 0.5237391357421874, 0.5240770263671874, 0.5232752685546875, 0.5233377075195312, 0.5234656982421875, 0.5238057250976562, 0.5232496337890625, 0.5235005493164062, 0.52366845703125, 0.52352099609375, 0.5235978393554688, 0.5234442138671875, 0.5232630004882812, 0.523420654296875, 0.5245173950195312, 1.092947998046875, 0.52453271484375, 0.5243463745117187, 0.5242276000976562, 0.5244682006835938, 0.524368896484375, 0.5243944702148438, 0.5240617065429688, 0.5243750610351563, 0.5239592895507813, 0.5239234619140625, 0.52438525390625, 0.5242869873046875, 0.5240463256835938, 0.5240596313476562, 0.524732421875, 0.5245419311523437, 0.5243187255859375, 0.5240545043945313, 0.524000244140625, 0.5237412109375, 0.5240115356445313, 0.5239132080078125, 0.5242838745117188, 0.5240617065429688, 0.524099609375, 0.5238660888671876, 0.5240678100585937, 0.5237862548828125, 0.5238876342773438, 0.5237872924804687, 0.5241005859375, 0.5238323364257812, 0.523931640625, 0.5241927490234375, 0.5244938354492188, 0.5244067993164062, 0.5234135131835937, 0.523315185546875, 0.52359375, 0.5232455444335937, 0.5234421997070312, 0.5235281982421875, 0.5233162231445313, 0.523388916015625, 0.523452392578125, 0.5235916748046875, 0.5241804809570313, 0.5237412109375, 0.52387939453125, 0.5238538208007812, 0.5236848754882812, 0.523737060546875, 0.5239080810546874, 0.5241026611328125, 0.5244927978515626, 0.5238046875, 0.5238814697265625, 0.5236787109375, 0.5235701904296876, 0.5235640258789063, 0.524210205078125, 0.5235947265625]",tokens/s,1.8800948535125506,,,,,,,, -4bit-awq-gemm-eager,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,facebook/xglm-564M,facebook/xglm-564M,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.0,,0.30.1,,,,1.19.2,,,,0.11.1,,,,,,,,,,,,,,,,,,,,,,,,,,,MB,1510.83008,1619.525632,0.0,973.078528,855.737856,s,10,0.7501975097656249,0.07501975097656251,0.003494001966293861,0.0744185905456543,0.07687975234985352,0.0807021327972412,0.08376003715515137,"[0.08452451324462891, 0.0724151382446289, 0.07226691436767578, 0.07602333068847657, 0.07370025634765626, 0.072499267578125, 0.07526127624511719, 0.07513692474365234, 0.07233955383300782, 0.07603033447265625]",tokens/s,3412.4346811012333,kWh,8.781473283414488e-07,4.811843592162606e-07,2.329853715734e-06,3.6891854032917087e-06,tokens/kWh,69392012.60299407,MB,1510.83008,1619.525632,0.0,973.078528,915.411456,s,10,44.923669921875,4.492366992187501,0.05186865648995985,4.49665234375,4.532765380859375,4.566419799804687,4.593343334960937,"[4.4801484375, 4.46079150390625, 4.396806640625, 4.52528662109375, 4.43945751953125, 4.50497265625, 4.49965625, 4.4936484375, 4.52282763671875, 
4.60007421875]",tokens/s,14.023787484317472,kWh,5.2254403288717625e-05,2.8638463246623156e-05,0.00013152905892685864,0.00021242192546219938,tokens/kWh,296579.5544076776,,s,629,45.51549240112303,0.07236167313374095,0.008872551278845286,0.07199948883056641,0.07308881988525391,0.07345274963378906,0.1417435186767578,"[0.07422156524658204, 0.07438028717041016, 0.07465984344482422, 0.07427993774414063, 0.070793212890625, 0.07357952117919922, 0.07368294525146485, 0.07249817657470703, 0.07192473602294921, 0.0720373764038086, 0.07181107330322266, 0.07195545959472656, 0.07189401245117187, 0.07177318572998047, 0.07202201843261719, 0.07179264068603515, 0.07144448089599609, 0.07195033264160157, 0.07206502532958985, 0.0718704605102539, 0.07257292938232422, 0.07203839874267579, 0.07174246215820312, 0.07193804931640625, 0.0720373764038086, 0.07203123474121094, 0.07179058837890626, 0.07161753845214844, 0.07207730865478515, 0.06936883544921875, 0.06907904052734375, 0.06904524993896484, 0.06900326538085938, 0.0691599349975586, 0.0692305908203125, 0.0704000015258789, 0.07194111633300782, 0.07188582611083984, 0.07171788787841797, 0.07179673767089843, 0.0721244125366211, 0.07180902099609375, 0.07232717132568359, 0.06903193664550782, 0.06900838470458984, 0.0690483169555664, 0.07060889434814453, 0.07242137908935548, 0.06946304321289062, 0.06903091430664063, 0.06926335906982421, 0.06909337615966797, 0.06909645080566407, 0.06918144226074219, 0.06912204742431641, 0.0692152328491211, 0.06930022430419921, 0.06917120361328125, 0.06931763458251954, 0.06909951782226563, 0.06903091430664063, 0.06910975646972656, 0.14174925231933594, 0.07115980529785157, 0.07259852600097656, 0.07286988830566406, 0.07273677062988282, 0.07269068908691406, 0.06918656158447266, 0.07026585388183594, 0.07330303955078125, 0.06960230255126953, 0.06916505432128907, 0.0694824981689453, 0.07036313629150391, 0.0726292495727539, 0.07242649841308593, 0.07292723083496094, 0.07256371307373047, 0.0725749740600586, 0.07288422393798828, 0.0727930908203125, 0.06923571014404296, 0.07285657501220703, 0.07292825317382813, 0.07260671997070313, 0.06921318054199219, 0.06922649383544922, 0.06944051361083985, 0.06901862335205078, 0.06910873413085937, 0.0694814682006836, 0.06917938995361328, 0.0691435546875, 0.06913843536376953, 0.06960230255126953, 0.07092428588867188, 0.07321907043457031, 0.07265586853027343, 0.07284429168701172, 0.07265996551513672, 0.07249919891357422, 0.0727388153076172, 0.06851993560791016, 0.06896537780761719, 0.06924390411376953, 0.06928793334960938, 0.06909951782226563, 0.06925926208496094, 0.06904115295410156, 0.06911590576171875, 0.07245005035400391, 0.07256063842773437, 0.07257087707519531, 0.0727162857055664, 0.0725555191040039, 0.07245414733886718, 0.06913638305664062, 0.06932685089111328, 0.06927565002441406, 0.06910566711425781, 0.06935040283203125, 0.06927974700927735, 0.06926643371582031, 0.06896947479248047, 0.1415485382080078, 0.06951423645019532, 0.06943334197998047, 0.06925619506835938, 0.06917324829101562, 0.06899097442626953, 0.06922752380371094, 0.06912000274658203, 0.06899199676513672, 0.06969446563720703, 0.07301529693603516, 0.06934323120117188, 0.06960537719726563, 0.07306034851074218, 0.07241011047363281, 0.07290675354003906, 0.07274700927734375, 0.0726476821899414, 0.07265996551513672, 0.0726824951171875, 0.07269068908691406, 0.06931763458251954, 0.06954188537597657, 0.06927257537841797, 0.06938623809814454, 0.06953062438964844, 0.06952960205078125, 0.06926233673095702, 0.06965862274169922, 0.06954803466796874, 
0.06898995208740234, 0.06898892974853515, 0.0695767059326172, 0.06904729461669921, 0.06909030151367188, 0.0691230697631836, 0.06926131439208984, 0.06881587219238282, 0.06911795043945312, 0.0690145263671875, 0.069106689453125, 0.06901760101318359, 0.06876467132568359, 0.06921625518798828, 0.06918246459960938, 0.06903091430664063, 0.0689797134399414, 0.06893158721923828, 0.0690544662475586, 0.06931865692138672, 0.06913433837890624, 0.0691619873046875, 0.06918758392333985, 0.06922956848144532, 0.06901248168945312, 0.06939955139160156, 0.0690708465576172, 0.06908415985107422, 0.06909747314453125, 0.06877798461914063, 0.0688721923828125, 0.06958284759521484, 0.07407615661621093, 0.14876364135742187, 0.0725524444580078, 0.06927769470214844, 0.07208243560791015, 0.07297433471679687, 0.0731176986694336, 0.07251251220703125, 0.0712273941040039, 0.07263846588134766, 0.07023104095458985, 0.07284735870361328, 0.07289651489257812, 0.07278694152832031, 0.07275827026367188, 0.0733675537109375, 0.07290982055664062, 0.07269888305664063, 0.06938419342041016, 0.07068057250976563, 0.07282994842529297, 0.07284838104248047, 0.07352217864990235, 0.07396147155761719, 0.07278694152832031, 0.0727224349975586, 0.07267123413085938, 0.07291801452636719, 0.07321497344970704, 0.07286579132080079, 0.0730408935546875, 0.06961766052246093, 0.07272755432128906, 0.07313715362548828, 0.07269990539550782, 0.07287091064453124, 0.07342694091796875, 0.07279821014404297, 0.07299993896484375, 0.07290777587890625, 0.07300505828857422, 0.07294976043701172, 0.07384166717529297, 0.07301222229003906, 0.07304806518554688, 0.07300096130371093, 0.06921318054199219, 0.06943846130371094, 0.07083724975585938, 0.07355596923828125, 0.07319039916992187, 0.06939443206787109, 0.06938111877441407, 0.06927155303955078, 0.0692520980834961, 0.06911283111572265, 0.06944153594970703, 0.06942515563964843, 0.06953266906738281, 0.06910975646972656, 0.06917017364501953, 0.06945382690429687, 0.0691568603515625, 0.0722903060913086, 0.1428500518798828, 0.07174861145019532, 0.07272755432128906, 0.07288422393798828, 0.07220531463623046, 0.07277977752685547, 0.07296819305419922, 0.07285759735107422, 0.0730439682006836, 0.06917120361328125, 0.06948044586181641, 0.06915174102783203, 0.06912921905517579, 0.06931148529052734, 0.06940672302246094, 0.06928179168701172, 0.07211110687255859, 0.07298047637939453, 0.07278079986572265, 0.07271218872070312, 0.07268966674804687, 0.06976306915283204, 0.0731504669189453, 0.07382220458984375, 0.07313919830322266, 0.07456563568115235, 0.0726456298828125, 0.07287091064453124, 0.07290777587890625, 0.072595458984375, 0.06927974700927735, 0.06974668884277344, 0.06926131439208984, 0.06974668884277344, 0.0696094741821289, 0.0693565444946289, 0.06932173156738282, 0.06926541137695312, 0.06931763458251954, 0.06929203033447266, 0.06896025848388672, 0.0688875503540039, 0.0692838363647461, 0.06925107574462891, 0.06969344329833985, 0.06929510498046874, 0.06936064147949218, 0.06916607666015626, 0.06940364837646484, 0.06927667236328125, 0.0688721923828125, 0.06915071868896484, 0.06949273681640625, 0.06910361480712891, 0.06893260955810547, 0.06888857269287109, 0.06916710662841796, 0.06919065856933594, 0.06913433837890624, 0.06897663879394532, 0.06965760040283203, 0.06907596588134765, 0.06904524993896484, 0.14172877502441406, 0.06887628936767579, 0.06923980712890625, 0.06865408325195313, 0.06912102508544922, 0.06938317108154297, 0.06938521575927735, 0.06910771179199218, 0.06913843536376953, 0.06909951782226563, 0.07147007751464844, 
0.07286374664306641, 0.07196057891845703, 0.07204454040527344, 0.07199948883056641, 0.07233126068115234, 0.07187763214111328, 0.07217971038818359, 0.07222169494628906, 0.07219097900390625, 0.07204966735839843, 0.07197491455078125, 0.07207833862304687, 0.07198822021484375, 0.0723394546508789, 0.07189299011230468, 0.0720547866821289, 0.07195750427246093, 0.0719288330078125, 0.07224626922607422, 0.0721981430053711, 0.0721295394897461, 0.07224729919433594, 0.07194624328613282, 0.07236812591552734, 0.07330508422851563, 0.07221247863769531, 0.07189094543457031, 0.07187763214111328, 0.0722503662109375, 0.07257292938232422, 0.07226982116699218, 0.07204761505126953, 0.07212134552001953, 0.07196876525878906, 0.07214694213867187, 0.07219404602050782, 0.07190940856933593, 0.07228310394287109, 0.0723609619140625, 0.07222476959228516, 0.07254732513427735, 0.07211724853515625, 0.07198617553710937, 0.07232102203369141, 0.0694302749633789, 0.06944563293457032, 0.06930226898193359, 0.06912000274658203, 0.07197286224365235, 0.07240499114990234, 0.07262515258789062, 0.07231488037109375, 0.1474877471923828, 0.07206195068359375, 0.07203635406494141, 0.0718704605102539, 0.07204147338867188, 0.07205785369873047, 0.07204249572753907, 0.07156735992431641, 0.0718551025390625, 0.07209369659423828, 0.07219404602050782, 0.07199334716796875, 0.07205683135986328, 0.0719974365234375, 0.07223910522460937, 0.07229440307617188, 0.0720373764038086, 0.07192985534667969, 0.07207218933105469, 0.07226060485839844, 0.07216844940185548, 0.07213158416748047, 0.07198822021484375, 0.07197695922851563, 0.07198515319824218, 0.07197901153564454, 0.07212748718261719, 0.07194111633300782, 0.07195136260986328, 0.07207936096191406, 0.07236505889892578, 0.07226982116699218, 0.06943231964111328, 0.06955929565429687, 0.069607421875, 0.06953164672851563, 0.07322624206542969, 0.07243981170654297, 0.07254937744140624, 0.07196979522705078, 0.07131033325195313, 0.07205785369873047, 0.07247872161865235, 0.07209471893310547, 0.07210291290283204, 0.06919782257080079, 0.06972108459472656, 0.06934835052490235, 0.06924082946777343, 0.06914765167236328, 0.06955519866943359, 0.06937907409667969, 0.06931148529052734, 0.06938829040527343, 0.06970674896240234, 0.06969548797607422, 0.06920089721679687, 0.07203942108154297, 0.07206809234619141, 0.07200972747802735, 0.07209779357910157, 0.07210291290283204, 0.07222681427001953, 0.14453248596191406, 0.06921011352539062, 0.0692326431274414, 0.06925312042236328, 0.06934220886230469, 0.07179571533203125, 0.07198617553710937, 0.07202098846435546, 0.07208243560791015, 0.07199436950683594, 0.07232921600341796, 0.07199027252197265, 0.07135231781005859, 0.07213772583007813, 0.07198207855224609, 0.07201996612548828, 0.07227597045898437, 0.0715315170288086, 0.07204863739013671, 0.0719319076538086, 0.07195442962646484, 0.0720148468017578, 0.07350169372558593, 0.07210291290283204, 0.07235584259033204, 0.07218790435791016, 0.07257804870605469, 0.07216844940185548, 0.07202713775634766, 0.07208550262451172, 0.07201689910888671, 0.0721786880493164, 0.07204557037353515, 0.07207218933105469, 0.0721070098876953, 0.0719482879638672, 0.07234662628173828, 0.07192985534667969, 0.07208345794677734, 0.07192371368408203, 0.07228108978271484, 0.07198925018310547, 0.07188787078857421, 0.07216639709472657, 0.07209062194824219, 0.07224217224121093, 0.07236914825439453, 0.07234559631347656, 0.07200768280029297, 0.07227903747558594, 0.07189913940429687, 0.06916403198242188, 0.06938009643554688, 0.0693903350830078, 0.06937190246582031, 
0.06929714965820312, 0.06908108520507812, 0.06912102508544922, 0.06940876770019531, 0.06922752380371094, 0.06929100799560547, 0.06920191955566406, 0.06866534423828125, 0.14497279357910156, 0.07190630340576172, 0.07215411376953125, 0.07193702697753906, 0.07205785369873047, 0.07186431884765625, 0.07196672058105469, 0.07226060485839844, 0.07261901092529296, 0.07256473541259766, 0.0723609619140625, 0.07218585968017578, 0.07166259002685547, 0.07175373077392579, 0.0723773422241211, 0.0719626235961914, 0.07179878234863281, 0.072015869140625, 0.0722001953125, 0.07199641418457031, 0.07210495758056641, 0.07236300659179687, 0.07228006744384766, 0.07196672058105469, 0.07183257293701172, 0.07211519622802734, 0.07188480377197265, 0.07188070678710938, 0.07184076690673828, 0.07201689910888671, 0.07219404602050782, 0.07205068969726562, 0.07195442962646484, 0.07184076690673828, 0.07215513610839844, 0.07230873870849609, 0.07222067260742188, 0.07214284515380859, 0.07292928314208984, 0.07242649841308593, 0.07191654205322266, 0.07287398529052734, 0.07126732635498047, 0.07227391815185547, 0.07001599884033204, 0.07193292999267578, 0.07222681427001953, 0.07207218933105469, 0.07356108856201173, 0.07293440246582031, 0.07190835571289063, 0.07186943817138672, 0.06945689392089843, 0.06953164672851563, 0.06899814605712891, 0.06878720092773437, 0.06898278045654296, 0.06849638366699219, 0.06950809478759766, 0.07029452514648438, 0.07316889953613281, 0.07317708587646485, 0.07281254577636719, 0.14895103454589845, 0.07272550201416016, 0.07273779296875, 0.07310028839111328, 0.07281459045410156, 0.07278899383544922, 0.0730245132446289, 0.07308595275878907, 0.07223705291748046, 0.072880126953125, 0.07315660858154296, 0.07311974334716796, 0.07316172790527344, 0.07296717071533203, 0.07294464111328125, 0.07245823669433593, 0.07288217926025391, 0.07276441955566407, 0.07286784362792968, 0.07301734161376953, 0.07289548492431641, 0.07284735870361328, 0.07271321868896484, 0.07318630218505859, 0.07307878112792969, 0.07253504180908203, 0.07293644714355468, 0.07297843170166016, 0.07267430114746094, 0.07276748657226563, 0.07329894256591797, 0.07337881469726562, 0.0731668472290039, 0.07399219512939453, 0.07357746887207031, 0.07269580841064453, 0.0729169921875, 0.07252992248535156, 0.07297126770019531, 0.07306240081787109, 0.07285964965820313, 0.07280947113037109, 0.07383859252929688, 0.07331942749023437, 0.07286271667480469, 0.07300812530517578, 0.07352012634277344, 0.07395225524902344, 0.073385986328125, 0.07318630218505859, 0.07355289459228516, 0.07336959838867188, 0.07346995544433593, 0.0732938232421875, 0.07325389099121093, 0.07335321807861328, 0.07319347381591797, 0.0742266845703125, 0.0733337631225586, 0.0729722900390625, 0.0729200668334961, 0.07297740936279297, 0.0695920639038086]",tokens/s,13.819470400466983,,,,,,,, -4bit-awq-gemm-eager,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,EleutherAI/gpt-neo-125m,,cuda,0,42,,,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA 
A10G'],1,24146608128,0.2.0,217063f5c507ed7cc255df7e1f64c4333a0b4dfe,4.40.2,,0.30.1,,,,1.19.2,,,,0.10.0,,,,,,,,,,,,,,,,,,,,,,,,,,,MB,1310.69952,921.174016,0.0,274.726912,220.646912,s,10,0.36048070526123044,0.03604807052612304,0.0010977874344766398,0.03588955307006836,0.036477619552612306,0.03779876155853271,0.03885567516326904,"[0.03911990356445313, 0.03553478240966797, 0.03469347381591797, 0.035817951202392576, 0.036184032440185546, 0.035873409271240234, 0.03543529510498047, 0.03591993713378906, 0.03590569686889648, 0.03599622344970703]",tokens/s,7101.628360787961,kWh,4.1762897090252873e-07,2.2884077887720709e-07,9.141624902616015e-07,1.5606322400413373e-06,tokens/kWh,164036083.2179266,MB,1311.0272,921.174016,0.0,274.726912,250.723328,s,10,21.866080322265628,2.186608032226563,0.01854961703991556,2.191078857421875,2.2041080810546876,2.2073608032226564,2.2099629809570316,"[2.1803974609375, 2.17111474609375, 2.154228759765625, 2.200171142578125, 2.210613525390625, 2.181986572265625, 2.161680419921875, 2.20071923828125, 2.20338525390625, 2.201783203125]",tokens/s,28.81174818325753,kWh,2.578141039891902e-05,1.4128932798644376e-05,5.28597971955373e-05,9.27701403931007e-05,tokens/kWh,679097.8188999841,,s,629,22.151173107147223,0.03521649142630718,0.004356343174673542,0.03486105728149414,0.035241778564453125,0.03557929000854492,0.07092830078125002,"[0.03474124908447265, 0.03529420852661133, 0.03632339096069336, 0.03650860977172852, 0.036083713531494144, 0.037152767181396484, 0.03645132827758789, 0.036311038970947264, 0.03472588729858399, 0.0354252815246582, 0.03531980895996094, 0.03479657745361328, 0.03465929412841797, 0.03616358566284179, 0.035585025787353515, 0.03515903854370117, 0.03486105728149414, 0.034651134490966795, 0.034900993347167966, 0.03449958419799805, 0.03328102493286133, 0.03349708938598633, 0.033560577392578124, 0.03324620819091797, 0.033328128814697267, 0.033255424499511715, 0.033754112243652344, 0.033223743438720706, 0.03327481460571289, 0.033438720703125, 0.0335175666809082, 0.03376332855224609, 0.03339468765258789, 0.03345305633544922, 0.03355033493041992, 0.034203647613525394, 0.03342233657836914, 0.03335372924804687, 0.0336732177734375, 0.03367116928100586, 0.03360870361328125, 0.03362508773803711, 0.03436441421508789, 0.03604377746582031, 0.03504127883911133, 0.03473920059204102, 0.0348037109375, 0.03472895812988281, 0.03472895812988281, 0.03479865646362305, 0.03479955291748047, 0.03479449462890625, 0.03474739074707031, 0.03478220748901367, 0.0347064323425293, 0.034907135009765625, 0.03474431991577148, 0.03473715209960938, 0.03485388946533203, 0.034683902740478514, 0.035019775390625, 0.0350750732421875, 0.0711178207397461, 0.0347955207824707, 0.034948097229003904, 0.03439616012573242, 0.03447808074951172, 0.03496755218505859, 0.034920448303222655, 0.0348590087890625, 0.034699264526367186, 0.0346163215637207, 0.03491430282592774, 0.03490304183959961, 0.03508019256591797, 0.034423809051513675, 0.03472793579101562, 0.03449446487426758, 0.03476377487182617, 0.03476582336425781, 0.03468902587890625, 0.03387494277954101, 0.0342210578918457, 0.034008094787597656, 0.03374998474121094, 0.03369062423706055, 0.033459201812744144, 0.03354937744140625, 0.03469715118408203, 0.03489484786987305, 0.03487027359008789, 0.03510067367553711, 0.03503513717651367, 0.03503923034667969, 0.03469619369506836, 0.03455590438842773, 0.03517852783203125, 0.03441353607177734, 0.03464704132080078, 0.03492659378051758, 0.034506752014160154, 0.03476070404052734, 0.03487846374511719, 0.03493478393554687, 
0.03499212646484375, 0.034945056915283206, 0.035192798614501954, 0.03450265502929688, 0.03448524856567383, 0.03455487823486328, 0.034563072204589845, 0.03503104019165039, 0.03474431991577148, 0.03377872085571289, 0.03339875030517578, 0.03340800094604492, 0.03347558212280274, 0.03381350326538086, 0.033650688171386715, 0.033535999298095705, 0.033334270477294925, 0.03363123321533203, 0.03390771102905273, 0.03361996841430664, 0.03386880111694336, 0.06886093139648437, 0.03341823959350586, 0.03395993423461914, 0.03393228912353516, 0.03374387359619141, 0.03333529663085937, 0.0334919662475586, 0.03423027038574219, 0.03382476806640625, 0.03374387359619141, 0.0336824951171875, 0.033724353790283206, 0.03363532638549805, 0.033941505432128906, 0.03381760025024414, 0.033503231048583985, 0.03344998550415039, 0.033301502227783206, 0.033445888519287106, 0.033957889556884766, 0.03387801742553711, 0.03366092681884766, 0.03360153579711914, 0.03381043243408203, 0.033825790405273434, 0.03360870361328125, 0.03370703887939453, 0.03348271942138672, 0.03366502380371094, 0.03378995132446289, 0.03319193649291992, 0.03369267272949219, 0.03357900619506836, 0.033358848571777344, 0.0341104621887207, 0.03495731353759766, 0.03543961715698242, 0.03482316970825195, 0.034435073852539064, 0.034557952880859374, 0.034900993347167966, 0.034678783416748044, 0.03517542266845703, 0.03521023941040039, 0.035046398162841795, 0.03481702423095703, 0.03453235244750977, 0.03470131301879883, 0.03486412811279297, 0.03474739074707031, 0.03477196884155274, 0.03457843017578125, 0.034781185150146485, 0.03486310577392578, 0.035125247955322264, 0.03486617660522461, 0.03496243286132812, 0.03503513717651367, 0.03523891067504883, 0.03469823837280273, 0.0339128303527832, 0.03414220809936523, 0.035117088317871095, 0.0711659164428711, 0.03465318298339844, 0.03465727996826172, 0.03458355331420898, 0.034283519744873044, 0.034304000854492187, 0.03577958297729492, 0.03595161437988281, 0.03544473648071289, 0.03500543975830078, 0.03521843338012695, 0.03495935821533203, 0.0349194221496582, 0.0354252815246582, 0.03515903854370117, 0.0349306869506836, 0.03492454528808594, 0.036211711883544925, 0.03564441680908203, 0.03498086547851562, 0.034841598510742186, 0.034941951751708986, 0.03537612915039062, 0.034994174957275394, 0.0350382080078125, 0.03486105728149414, 0.03484672164916992, 0.03494297790527344, 0.03524095916748047, 0.03526041412353516, 0.03520614242553711, 0.03488153457641602, 0.03502796936035156, 0.03500543975830078, 0.03404185485839844, 0.033903617858886716, 0.03378073501586914, 0.03472281646728516, 0.03411251068115234, 0.03379097747802735, 0.03348787307739258, 0.0343633918762207, 0.03492659378051758, 0.03500339126586914, 0.03493788909912109, 0.03485590362548828, 0.034895870208740236, 0.034948097229003904, 0.03518771362304687, 0.034479103088378905, 0.03459174346923828, 0.035253280639648436, 0.034963424682617185, 0.034976768493652347, 0.03491843032836914, 0.034911201477050784, 0.035046398162841795, 0.03507199859619141, 0.03536383819580078, 0.03499929428100586, 0.03523379135131836, 0.03531161499023437, 0.03527679824829102, 0.07160934448242187, 0.0352624626159668, 0.03535769653320313, 0.03481087875366211, 0.03554921722412109, 0.034968544006347656, 0.03489491271972656, 0.034890689849853516, 0.03483238220214844, 0.034993152618408206, 0.035932159423828124, 0.03508838272094727, 0.03496755218505859, 0.03476377487182617, 0.03488358306884766, 0.03527884674072266, 0.03521228790283203, 0.035040256500244144, 0.03476684951782227, 0.03499622344970703, 
0.03482316970825195, 0.03518259048461914, 0.03503411102294922, 0.036178943634033206, 0.039564289093017575, 0.035097599029541016, 0.03457843017578125, 0.03505561447143555, 0.034756607055664065, 0.0350013427734375, 0.03477811050415039, 0.035004417419433595, 0.03501772689819336, 0.0348671989440918, 0.03473100662231445, 0.0349224967956543, 0.03482931137084961, 0.03578777694702148, 0.0350115852355957, 0.03483443069458008, 0.03486412811279297, 0.03486207962036133, 0.034938880920410156, 0.03505561447143555, 0.035004417419433595, 0.034887680053710936, 0.034531326293945314, 0.03493580627441406, 0.03510067367553711, 0.03492454528808594, 0.034887680053710936, 0.03578879928588867, 0.034974720001220705, 0.03519180679321289, 0.034850879669189455, 0.03495315170288086, 0.035060768127441404, 0.034809825897216796, 0.03377766418457031, 0.03550620651245117, 0.034968544006347656, 0.034915328979492184, 0.03489177703857422, 0.07134719848632813, 0.03493273544311523, 0.03501465606689453, 0.03539148712158203, 0.03512319946289062, 0.034813953399658204, 0.03508019256591797, 0.03486515045166016, 0.035079166412353514, 0.03493580627441406, 0.034939903259277344, 0.034925567626953126, 0.03743027114868164, 0.034576385498046876, 0.03397836685180664, 0.03377356719970703, 0.03403878402709961, 0.03371724700927734, 0.03380223846435547, 0.03486310577392578, 0.03486412811279297, 0.03496243286132812, 0.03475046539306641, 0.03492659378051758, 0.03499622344970703, 0.034783233642578126, 0.033949695587158206, 0.033876991271972655, 0.03427123260498047, 0.03477811050415039, 0.03472076797485352, 0.034127872467041014, 0.03329740905761719, 0.0333834228515625, 0.03379814529418945, 0.03392409515380859, 0.03363532638549805, 0.03386982345581055, 0.03498086547851562, 0.034928638458251955, 0.03496345520019531, 0.03495423889160156, 0.03523891067504883, 0.03508838272094727, 0.034769920349121096, 0.03489279937744141, 0.03482624053955078, 0.034955265045166016, 0.03508838272094727, 0.03486412811279297, 0.03486515045166016, 0.03471769714355469, 0.034560001373291016, 0.034579456329345705, 0.03481292724609375, 0.03479244613647461, 0.034802688598632815, 0.03475254440307617, 0.03517948913574219, 0.03433884811401367, 0.033702880859375, 0.03359539031982422, 0.034111488342285154, 0.07044096374511719, 0.03437670516967774, 0.03483238220214844, 0.0348671989440918, 0.03470438385009766, 0.035230720520019534, 0.034816001892089846, 0.03467366409301758, 0.0347770881652832, 0.03501055908203125, 0.03429580688476563, 0.03443609619140625, 0.03509964752197266, 0.03524505615234375, 0.0347248649597168, 0.034753536224365236, 0.03496345520019531, 0.03496448135375976, 0.034770942687988284, 0.034799617767333986, 0.03484467315673828, 0.03481190490722656, 0.034353153228759765, 0.034062335968017575, 0.03349094390869141, 0.033584129333496096, 0.033587200164794925, 0.034141185760498044, 0.0347740478515625, 0.0345906867980957, 0.0344268798828125, 0.03489484786987305, 0.03420979309082031, 0.03364352035522461, 0.03377356719970703, 0.033686527252197264, 0.033290241241455076, 0.0337520637512207, 0.03401932907104492, 0.03371417617797851, 0.03375513458251953, 0.033691646575927735, 0.03370809555053711, 0.03350316619873047, 0.03325439834594727, 0.03360255813598633, 0.033860607147216795, 0.03376230239868164, 0.033691646575927735, 0.033783809661865234, 0.033797119140625, 0.03425177764892578, 0.03381452941894531, 0.033791999816894534, 0.033675262451171875, 0.03391385650634766, 0.034165760040283204, 0.03502899169921875, 0.03486822509765625, 0.034702335357666016, 0.035046398162841795, 
0.03521535873413086, 0.034825214385986326, 0.07150080108642579, 0.03484364700317383, 0.034351104736328124, 0.03453747177124023, 0.035064830780029296, 0.03482931137084961, 0.03483238220214844, 0.03492659378051758, 0.034974720001220705, 0.03501465606689453, 0.034869247436523435, 0.034830337524414064, 0.034277374267578126, 0.034797569274902344, 0.03501363372802734, 0.03530342483520508, 0.0351016960144043, 0.03491635131835937, 0.03483238220214844, 0.03484672164916992, 0.03495116806030273, 0.03500646209716797, 0.03499827194213867, 0.03480678558349609, 0.03486207962036133, 0.03505152130126953, 0.034784255981445314, 0.034783233642578126, 0.034835456848144535, 0.03493580627441406, 0.034770942687988284, 0.035037185668945314, 0.035019775390625, 0.0342476806640625, 0.03505049514770508, 0.034315265655517575, 0.03477913665771484, 0.034791454315185544, 0.0353023681640625, 0.03562700653076172, 0.03510374450683594, 0.03493580627441406, 0.035156993865966796, 0.03526144027709961, 0.03488256072998047, 0.034933761596679686, 0.03517337417602539, 0.0348590087890625, 0.0352911376953125, 0.035888126373291016, 0.03522048187255859, 0.03506073760986328, 0.03483955383300781, 0.03486105728149414, 0.03498400115966797, 0.03498387145996094, 0.03485494232177734, 0.03438998413085938, 0.03479244613647461, 0.03507712173461914, 0.03500339126586914, 0.03471257781982422, 0.03476684951782227, 0.07122022247314454, 0.03482316970825195, 0.03523276901245117, 0.034941951751708986, 0.03477196884155274, 0.034991104125976565, 0.03495731353759766, 0.034939903259277344, 0.03488153457641602, 0.034900993347167966, 0.0347955207824707, 0.0350300178527832, 0.03487948989868164, 0.03520614242553711, 0.035297279357910154, 0.03513651275634765, 0.03494297790527344, 0.0349378547668457, 0.034948097229003904, 0.03488051223754883, 0.03481497573852539, 0.03454873657226563, 0.03495935821533203, 0.035062782287597655, 0.035043327331542966, 0.0349306869506836, 0.03497881698608398, 0.034909183502197266, 0.03493791961669922, 0.03483539199829101, 0.034869247436523435, 0.035031105041503904, 0.03493983840942383, 0.03483340835571289, 0.03487744140625, 0.03489484786987305, 0.03516108703613281, 0.03484364700317383, 0.034969600677490234, 0.035146751403808595, 0.035194881439208986, 0.03479244613647461, 0.03557068634033203, 0.03522969436645508, 0.035337215423583986, 0.03480473709106445, 0.03436236953735351, 0.033745918273925785, 0.03496755218505859, 0.03541401672363281, 0.03512934494018555, 0.035163135528564454, 0.0348487663269043, 0.03500339126586914, 0.03484467315673828, 0.03498495864868164, 0.03576115036010742, 0.03517030334472656, 0.03490816116333008, 0.034825214385986326, 0.0347586555480957, 0.03516723251342774, 0.035053569793701174, 0.07151315307617187, 0.03519071960449219, 0.03483852767944336, 0.03482419204711914, 0.03502592086791992, 0.03529216003417969, 0.035125247955322264, 0.034872318267822264, 0.034939903259277344, 0.0347658576965332, 0.03481699371337891, 0.03497267150878906, 0.03488665771484375, 0.034958335876464845, 0.035007488250732424, 0.03504537582397461, 0.03489382553100586, 0.03520716857910156, 0.034988033294677735, 0.034874366760253905, 0.03459481430053711, 0.03430297470092773, 0.0346879997253418, 0.03478732681274414, 0.03508736038208008, 0.0354856948852539, 0.034981952667236325, 0.0348732795715332, 0.03489382553100586, 0.03475558471679688, 0.03478732681274414, 0.03488358306884766, 0.03473920059204102, 0.03476172637939453, 0.034685951232910156, 0.035111934661865234, 0.03508736038208008, 0.03526553726196289, 0.03492147064208984, 0.03475763320922851, 
0.034767871856689454, 0.03482316970825195, 0.03498092651367188, 0.03539142227172851, 0.03607961654663086, 0.03499008178710938, 0.03491123199462891, 0.034990142822265625, 0.03488249588012695, 0.03484467315673828, 0.03486207962036133, 0.0349378547668457, 0.034953216552734374, 0.03488051223754883, 0.03490816116333008, 0.0349378547668457, 0.03494604873657227, 0.03508838272094727, 0.03482726287841797, 0.03486003112792969, 0.03481702423095703, 0.034729984283447264, 0.0349224967956543]",tokens/s,28.395787300179112,,,,,,main,False,False -4bit-awq-gemm-eager,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,EleutherAI/gpt-j-6b,,cuda,0,42,,,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.0,217063f5c507ed7cc255df7e1f64c4333a0b4dfe,4.40.2,,0.30.1,,,,1.19.2,,,,0.10.0,,,,,,,,,,,,,,,,,,,,,,,,,,,MB,2082.578432,5566.365696,0.0,4919.918592,4635.53792,s,10,5.0974686889648435,0.5097468688964844,0.002523113409812242,0.5091204681396484,0.5118887878417968,0.5140431793212891,0.5157666925048828,"[0.5161975708007812, 0.508827880859375, 0.5094269104003907, 0.508350830078125, 0.5081865539550782, 0.5073575439453125, 0.5094130554199219, 0.5110750427246094, 0.5072232666015625, 0.5114100341796874]",tokens/s,502.2100489880333,kWh,5.994651640454928e-06,3.28480666631549e-06,2.7987258500894452e-05,3.726671680766487e-05,tokens/kWh,6869400.417569034,MB,2082.578432,5566.365696,0.0,4919.918592,4794.464768,s,10,295.3985,29.53985,0.004666201106872227,29.538681640625,29.5469361328125,29.54772978515625,29.548364707031247,"[29.534837890625, 29.546759765625, 29.542654296875, 29.540826171875, 29.5363359375, 29.535224609375, 29.54083984375, 29.5359609375, 29.5485234375, 29.536537109375]",tokens/s,2.132712251416307,kWh,0.0003485870901164082,0.0001910528620806053,0.0016025825737317144,0.002142222525928728,tokens/kWh,29408.709523623045,,s,629,299.5096441345215,0.47616795569876236,0.06052059742525611,0.46878207397460936,0.46948126220703124,0.4697759826660156,0.977483603515625,"[0.4688783264160156, 0.46847384643554685, 0.46878515625, 0.4694343566894531, 0.4695541687011719, 0.46879034423828125, 0.4688465270996094, 0.46848306274414064, 0.46828851318359377, 0.4686806945800781, 0.46836428833007815, 0.4685475769042969, 0.4686540832519531, 0.46948455810546874, 0.4689162292480469, 0.46845745849609377, 0.4684011535644531, 0.4683714599609375, 0.4684134521484375, 0.46870220947265623, 0.468790283203125, 0.4682076110839844, 0.4684062805175781, 0.46837042236328125, 0.469607421875, 0.46908721923828123, 0.4688885803222656, 0.4687656860351562, 0.4693012390136719, 0.4686929931640625, 0.46864382934570314, 0.468430908203125, 0.4682413330078125, 0.4685291442871094, 0.46904525756835935, 0.4706498413085938, 0.46837042236328125, 0.4685875244140625, 0.4687718505859375, 0.4685537414550781, 0.46882406616210937, 0.46856805419921876, 0.4686806945800781, 0.46866943359375, 0.4686581726074219, 0.4682403869628906, 0.4695777893066406, 0.4685884704589844, 0.4686663818359375, 0.4685537414550781, 0.46855987548828126, 0.4685066223144531, 0.4687831115722656, 0.46864794921875, 0.4684666748046875, 0.4691046447753906, 0.46919476318359377, 
0.4684984436035156, 0.4691128234863281, 0.4691221008300781, 0.46927557373046874, 0.46959002685546875, 0.977623046875, 0.46814208984375, 0.4683407287597656, 0.46863873291015623, 0.46877490234375, 0.46880459594726565, 0.4687513732910156, 0.46857113647460935, 0.468421630859375, 0.46894284057617186, 0.4688281555175781, 0.46850765991210935, 0.4687431640625, 0.4691404724121094, 0.4699504699707031, 0.46978561401367186, 0.46937701416015626, 0.4690975341796875, 0.4691311950683594, 0.47030169677734374, 0.4690933837890625, 0.4689039306640625, 0.4690298767089844, 0.46860595703125, 0.4687359924316406, 0.4692275085449219, 0.46936166381835936, 0.46901248168945314, 0.46843902587890623, 0.46878106689453125, 0.46938113403320314, 0.46973849487304686, 0.46935861206054685, 0.4687390441894531, 0.4685845031738281, 0.46890591430664064, 0.46899813842773436, 0.4688281555175781, 0.4687083435058594, 0.4687564697265625, 0.4687308654785156, 0.4687615966796875, 0.4689858703613281, 0.4688762817382813, 0.46893264770507814, 0.46929302978515625, 0.46915994262695315, 0.4696995849609375, 0.4688701477050781, 0.4687912902832031, 0.4691885986328125, 0.46969558715820314, 0.4696114196777344, 0.4692490234375, 0.46899301147460937, 0.46896435546875, 0.4694558715820312, 0.46925927734375, 0.4685393981933594, 0.4686530456542969, 0.46877902221679685, 0.4687155151367188, 0.46934527587890623, 0.9787330322265625, 0.468917236328125, 0.46886502075195313, 0.46889984130859375, 0.4691875915527344, 0.4684267578125, 0.46861004638671877, 0.46856500244140625, 0.46833560180664063, 0.4684933166503906, 0.4683735046386719, 0.4683263854980469, 0.4683735046386719, 0.468236328125, 0.46872265625, 0.4684236755371094, 0.46842572021484374, 0.46854452514648437, 0.46859982299804687, 0.4687155151367188, 0.46844732666015626, 0.4685188293457031, 0.4686991577148438, 0.46980194091796873, 0.46855987548828126, 0.4690032653808594, 0.46884454345703125, 0.4687575073242187, 0.4688609313964844, 0.4688558044433594, 0.4689459228515625, 0.46866329956054686, 0.4685557861328125, 0.46875238037109374, 0.4686827392578125, 0.46909747314453126, 0.46889984130859375, 0.4689756164550781, 0.46924798583984373, 0.46901144409179685, 0.4689469299316406, 0.46897048950195314, 0.46984909057617186, 0.4700190734863281, 0.4696781005859375, 0.469550048828125, 0.4697763977050781, 0.47159500122070314, 0.46917837524414063, 0.4684482421875, 0.4692193298339844, 0.46906060791015625, 0.4689141845703125, 0.46894284057617186, 0.468885498046875, 0.469064697265625, 0.46948043823242186, 0.4687861633300781, 0.4690513916015625, 0.46873907470703124, 0.46875341796875, 0.4690708618164062, 0.46917938232421874, 0.9775615844726563, 0.46874624633789064, 0.46858853149414065, 0.4686796875, 0.46987161254882814, 0.46889370727539065, 0.4687656860351562, 0.468790283203125, 0.4687278137207031, 0.46867864990234376, 0.4685322265625, 0.46837454223632813, 0.4686530456542969, 0.46875955200195313, 0.46893466186523436, 0.468706298828125, 0.4689858703613281, 0.46853018188476564, 0.46881585693359373, 0.46894796752929685, 0.4691353454589844, 0.46871038818359373, 0.46851071166992186, 0.468632568359375, 0.46843902587890623, 0.46874725341796875, 0.468890625, 0.4688670654296875, 0.46844723510742187, 0.468490234375, 0.4689090576171875, 0.46941900634765626, 0.46909030151367187, 0.4689100952148437, 0.4687175598144531, 0.46938113403320314, 0.4690616455078125, 0.4687718505859375, 0.46904730224609376, 0.4689336242675781, 0.4687933349609375, 0.46920501708984375, 0.46855471801757814, 0.4687718505859375, 0.47131646728515625, 0.4688424987792969, 
0.46912005615234376, 0.4692090148925781, 0.4688189392089844, 0.4685926513671875, 0.4688619384765625, 0.46930743408203124, 0.4693247680664063, 0.4690636901855469, 0.46892135620117187, 0.4687145690917969, 0.4691373291015625, 0.4687145080566406, 0.4686673889160156, 0.46867352294921877, 0.46872677612304686, 0.4689254455566406, 0.46927871704101565, 0.9772830810546875, 0.46864077758789063, 0.4684922790527344, 0.46857421875, 0.46906878662109375, 0.4690033264160156, 0.46841543579101563, 0.4685895690917969, 0.46849639892578127, 0.468279296875, 0.46869912719726564, 0.4685619201660156, 0.4687575073242187, 0.46905242919921875, 0.46849432373046873, 0.46846875, 0.46872576904296875, 0.46877490234375, 0.4686592102050781, 0.4684892272949219, 0.4687974548339844, 0.46863565063476564, 0.4685844421386719, 0.46846157836914065, 0.46825778198242185, 0.4684994506835938, 0.4686448669433594, 0.46830081176757815, 0.46856805419921876, 0.4685209655761719, 0.46874929809570315, 0.4689264526367187, 0.46845541381835937, 0.4686520385742188, 0.46867770385742186, 0.46937490844726565, 0.46933709716796873, 0.4687554626464844, 0.4690616455078125, 0.46915789794921875, 0.46891213989257813, 0.46888754272460936, 0.4687278137207031, 0.46915277099609376, 0.4688332824707031, 0.4693790588378906, 0.46926437377929686, 0.46878515625, 0.4690831298828125, 0.46895718383789065, 0.4689776611328125, 0.4690616455078125, 0.46907391357421874, 0.46851788330078126, 0.46869195556640625, 0.4686376953125, 0.46900634765625, 0.4722012023925781, 0.4687718505859375, 0.4688670654296875, 0.46880459594726565, 0.468642822265625, 0.4693155822753906, 0.9763594360351563, 0.46846771240234375, 0.46886502075195313, 0.4687083740234375, 0.4689837646484375, 0.4684646301269531, 0.4687247314453125, 0.46836224365234375, 0.4683929748535156, 0.4685619201660156, 0.46836737060546874, 0.4687503356933594, 0.46857830810546874, 0.46878411865234376, 0.4683345947265625, 0.468358154296875, 0.46845849609375, 0.4687032470703125, 0.4686520385742188, 0.4686458740234375, 0.46885784912109374, 0.46872677612304686, 0.4685823974609375, 0.46873095703125, 0.4685475158691406, 0.468790283203125, 0.46927053833007815, 0.46861822509765627, 0.46847589111328125, 0.46836224365234375, 0.470181884765625, 0.4690575256347656, 0.4688353271484375, 0.4692449340820313, 0.4685137939453125, 0.46857217407226565, 0.46875238037109374, 0.46873190307617185, 0.4689664001464844, 0.4691271667480469, 0.4686090087890625, 0.4692101135253906, 0.4687974548339844, 0.469317626953125, 0.4686315612792969, 0.46899917602539065, 0.4692490234375, 0.4690309143066406, 0.4687646789550781, 0.46850253295898436, 0.4687083435058594, 0.46883941650390626, 0.46927871704101565, 0.4686152038574219, 0.4694292297363281, 0.4691435546875, 0.46974566650390626, 0.4686940307617187, 0.468969482421875, 0.46890188598632815, 0.46895001220703125, 0.4687564697265625, 0.46904730224609376, 0.9786060791015625, 0.46841650390625, 0.4683786315917969, 0.46915994262695315, 0.46917938232421874, 0.4692777099609375, 0.46883636474609375, 0.468716552734375, 0.46857318115234375, 0.4691128234863281, 0.468600830078125, 0.4694783935546875, 0.46952960205078126, 0.4696739807128906, 0.4687196044921875, 0.46852505493164065, 0.46841854858398435, 0.4683591613769531, 0.4683100280761719, 0.4684431457519531, 0.4690411376953125, 0.4687923278808594, 0.4687216491699219, 0.46836224365234375, 0.4686090087890625, 0.4691363830566406, 0.46857113647460935, 0.46862335205078126, 0.46869094848632814, 0.4686581726074219, 0.4694435729980469, 0.4694640502929687, 0.46918450927734373, 
0.46924288940429687, 0.4694343566894531, 0.4690462646484375, 0.469317626953125, 0.46890188598632815, 0.46938323974609375, 0.4693267822265625, 0.46866329956054686, 0.4690370483398438, 0.468864013671875, 0.4687575073242187, 0.4686438598632813, 0.46876776123046876, 0.46984698486328125, 0.4690380859375, 0.46935552978515627, 0.46853836059570314, 0.46839910888671876, 0.46874008178710935, 0.46875955200195313, 0.4687667236328125, 0.4688209838867187, 0.4691302490234375, 0.46921624755859376, 0.4689674377441406, 0.46906777954101564, 0.46867864990234376, 0.46859161376953123, 0.46864794921875, 0.4688332824707031, 0.980326416015625, 0.468701171875, 0.46863360595703124, 0.468738037109375, 0.46885989379882814, 0.4684267578125, 0.468389892578125, 0.46853839111328127, 0.46828131103515624, 0.4683601989746094, 0.46820352172851565, 0.467962890625, 0.4681553955078125, 0.4686499938964844, 0.4686612548828125, 0.4687503356933594, 0.46850253295898436, 0.4680570983886719, 0.4683458557128906, 0.46834994506835936, 0.46889471435546876, 0.46862130737304686, 0.4687196044921875, 0.46831512451171875, 0.4685035400390625, 0.4694077453613281, 0.4687421569824219, 0.4688087158203125, 0.4701829528808594, 0.4684830322265625, 0.4688619384765625, 0.4692244567871094, 0.46869094848632814, 0.4689715270996094, 0.46905548095703126, 0.4686315612792969, 0.468864013671875, 0.4689029235839844, 0.46895001220703125, 0.46892340087890627, 0.46898687744140627, 0.469212158203125, 0.46885272216796875, 0.4688230285644531, 0.46921624755859376, 0.46866943359375, 0.46926849365234374, 0.46906777954101564, 0.469248046875, 0.4691763000488281, 0.46952960205078126, 0.4694057006835938, 0.46893157958984377, 0.4688035888671875, 0.4690667419433594, 0.4687615966796875, 0.4690288696289063, 0.46915890502929686, 0.46848818969726563, 0.4687585144042969, 0.4686315612792969, 0.4686049194335937, 0.46867352294921877, 0.9793843383789063, 0.4697733154296875, 0.46955825805664064, 0.46915890502929686, 0.46897869873046877, 0.4683345947265625, 0.46859982299804687, 0.468316162109375, 0.46864694213867186, 0.4686110534667969, 0.46845745849609377, 0.46816357421875, 0.4684892272949219, 0.46949169921875, 0.46866842651367185, 0.46861312866210936, 0.4686090087890625, 0.4685424499511719, 0.46886605834960937, 0.4688752746582031, 0.46871353149414063, 0.4691127624511719, 0.46852615356445315, 0.46842361450195313, 0.46897665405273437, 0.4689141845703125, 0.4685926513671875, 0.46885989379882814, 0.468421630859375, 0.4683888549804687, 0.46856600952148436, 0.4689182739257812, 0.46858547973632814, 0.46870220947265623, 0.4691896362304688, 0.46975283813476565, 0.46981631469726565, 0.4697753601074219, 0.470096923828125, 0.469834716796875, 0.46959820556640625, 0.4698542175292969, 0.46952243041992187, 0.4697272338867188, 0.4697907104492188, 0.4687656860351562, 0.4691937255859375, 0.46966680908203123, 0.46944461059570314, 0.46914764404296877, 0.469760009765625, 0.4693544921875, 0.46949993896484377, 0.4696063537597656, 0.46877694702148437, 0.4688281555175781, 0.4692244567871094, 0.468706298828125, 0.4684646301269531, 0.46868582153320315, 0.4686725158691406, 0.46858547973632814, 0.4686612548828125, 0.9803509521484375, 0.46829159545898436, 0.4681697387695313, 0.4690083923339844, 0.46880972290039064, 0.46866329956054686, 0.4685547485351563, 0.46837042236328125, 0.468105224609375, 0.4683243713378906, 0.46842364501953127, 0.4681779174804688, 0.4683816833496094, 0.4685599365234375, 0.4682454528808594, 0.46819635009765626, 0.46841036987304685, 0.46834994506835936, 0.468537353515625, 0.46845849609375, 
0.46887115478515623, 0.46867352294921877, 0.46852197265625, 0.46868377685546875, 0.46900018310546876, 0.46878207397460936, 0.4686499938964844, 0.4685537414550781, 0.46879437255859374, 0.4684031982421875, 0.4687779846191406, 0.4690032653808594, 0.4684912719726563, 0.46856298828125, 0.46899917602539065, 0.4696033020019531, 0.4696708984375, 0.469855224609375, 0.4690411376953125, 0.468674560546875, 0.46875442504882814, 0.469359619140625, 0.4701552734375, 0.469064697265625, 0.46861312866210936, 0.4686315612792969, 0.4697047119140625, 0.4690083923339844, 0.46888754272460936, 0.46868582153320315, 0.469073974609375, 0.4693411254882813, 0.4695132141113281, 0.46920089721679686, 0.46941696166992186, 0.46919271850585936, 0.46985626220703125, 0.46913946533203127, 0.46915789794921875, 0.4686253967285156, 0.46875955200195313, 0.468642822265625, 0.4691517333984375]",tokens/s,2.1000993200656053,,,,,,main,False,False -4bit-awq-gemm-eager,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,stabilityai/stablelm-2-12b,stabilityai/stablelm-2-12b,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.1,,0.30.1,,,,1.19.2,,,,0.11.1,,,,,,,,,,,,,,,,,,,,,,,,,,,MB,2280.779776,9584.508928,0.0,8938.061824,8629.0688,s,10,10.176551086425778,1.0176551086425778,0.0015328797671352301,1.0172308959960938,1.018193273925781,1.0201789428710937,1.0217674780273438,"[1.0221646118164063, 1.0172261352539063, 1.0167738647460938, 1.0172377319335937, 1.0172356567382812, 1.0169893188476562, 1.0167650146484375, 1.017517822265625, 1.016888916015625, 1.0177520141601561]",tokens/s,251.55870375521562,kWh,1.2027085920174918e-05,6.590293178160209e-06,5.696557335020391e-05,7.558295244853905e-05,tokens/kWh,3387007.145219666,MB,2281.074688,9584.508928,0.0,8938.061824,8715.66592,s,10,595.0380507812499,59.50380507812499,0.008158332777429248,59.504548828124996,59.5143921875,59.51552421875,59.51642984375,"[59.500625, 59.49740234375, 59.49325390625, 59.50875, 59.50894140625, 59.514140625, 59.4951328125, 59.50847265625, 59.49467578125, 59.51665625]",tokens/s,1.0587558210316923,kWh,0.0007025720743172699,0.00038506887292403917,0.0033221314077029936,0.004409772354944303,tokens/kWh,14286.451755125965,,s,629,603.2482678222655,0.9590592493199772,0.12088183404247937,0.9444280395507813,0.945362939453125,0.9457379272460937,1.96203095703125,"[0.9439191284179688, 0.9438074951171875, 0.9435494384765625, 0.9451939697265626, 0.9448151245117188, 0.9447935791015625, 0.94410546875, 0.9439744262695312, 0.9437614135742187, 0.9440634765625, 0.94390576171875, 0.9446031494140625, 0.9444812622070312, 0.945565673828125, 0.9449103393554688, 0.9443286743164062, 0.9433487548828124, 0.9437614135742187, 0.943837158203125, 0.9445468139648437, 0.9442355346679687, 0.9440911254882812, 0.9451141357421875, 0.9447239379882812, 0.9452031860351563, 0.9445714111328125, 0.9450618896484375, 0.9444659423828125, 0.9439969482421875, 0.9436231689453125, 0.944216064453125, 0.9437163696289063, 0.9447649536132813, 0.9448642578125, 0.9447239379882812, 0.9447444458007812, 0.944990234375, 0.9443727416992187, 0.9441873779296875, 0.9447096557617187, 
0.9451028442382813, 0.9457059936523438, 0.9445437622070313, 0.9448652954101563, 0.9439764404296875, 0.945954833984375, 0.9447557373046875, 0.9458032836914062, 0.9443123168945312, 0.9440501708984375, 0.9439887084960937, 0.9450444946289063, 0.9437235107421875, 0.944195556640625, 0.9443512573242188, 0.9439027099609375, 0.9452513427734375, 0.9451724853515625, 0.94409521484375, 0.9442816162109375, 0.9435177001953124, 0.943952880859375, 1.961964599609375, 0.9452953491210937, 0.9439201049804687, 0.9446041870117188, 0.9441761474609375, 0.9450936279296875, 0.9438955688476562, 0.944521240234375, 0.9442867431640625, 0.944669677734375, 0.9439436645507813, 0.9444669189453125, 0.9447761840820312, 0.9443717041015625, 0.9457489624023437, 0.9448304443359375, 0.9437224731445313, 0.943720458984375, 0.943825927734375, 0.9439569702148437, 0.9447649536132813, 0.9438760986328125, 0.94512744140625, 0.9446553344726563, 0.9442754516601563, 0.9448980712890624, 0.944395263671875, 0.9436589965820312, 0.9441065063476562, 0.9436221313476563, 0.94472705078125, 0.9441556396484375, 0.9437644653320313, 0.94514892578125, 0.9441781616210938, 0.9451366577148438, 0.943805419921875, 0.9449574584960938, 0.94401025390625, 0.9444935913085938, 0.9444280395507813, 0.9449482421875, 0.9444423828125, 0.944490478515625, 0.9455534057617188, 0.9445355224609375, 0.944510986328125, 0.9453905639648438, 0.944805908203125, 0.9447341918945312, 0.9445673217773437, 0.9435391845703125, 0.94436865234375, 0.9436989135742188, 0.9443194580078125, 0.9437255859375, 0.943572998046875, 0.9442672729492188, 0.9445857543945313, 0.9441392822265625, 0.9437747192382813, 0.9438228759765624, 0.9449410400390625, 1.9620567626953125, 0.94470556640625, 0.9442181396484375, 0.945459228515625, 0.944817138671875, 0.944289794921875, 0.9441679077148437, 0.943963134765625, 0.9437173461914062, 0.9446461181640625, 0.9440194702148438, 0.9441679077148437, 0.9439436645507813, 0.94363134765625, 0.9440819091796875, 0.9446000366210937, 0.9443461303710937, 0.9433681640625, 0.9446809692382813, 0.945306640625, 0.9444782104492188, 0.9442139892578125, 0.9447669677734375, 0.9443020629882812, 0.9446246337890625, 0.9445652465820312, 0.9445877685546875, 0.94449560546875, 0.9440122680664063, 0.9438228759765624, 0.9451950073242188, 0.9438597412109375, 0.9438197631835937, 0.9448263549804687, 0.9439232177734375, 0.9453045654296875, 0.9447987060546875, 0.944822265625, 0.944021484375, 0.9442969360351563, 0.9446051635742188, 0.945090576171875, 0.9438689575195313, 0.9441822509765625, 0.9437224731445313, 0.9438904418945312, 0.9443860473632812, 0.9447403564453125, 0.944142333984375, 0.9441279907226563, 0.94382080078125, 0.9441802368164063, 0.9439436645507813, 0.9441648559570313, 0.9440030517578125, 0.9440491333007812, 0.944716796875, 0.9447926025390625, 0.9442672729492188, 0.9440327758789062, 0.9443020629882812, 0.944078857421875, 0.9440512084960937, 1.962303466796875, 0.9448621826171875, 0.9440481567382812, 0.945122314453125, 0.9454274291992187, 0.9450147705078125, 0.9446143798828125, 0.9449840698242188, 0.9448417358398438, 0.94497998046875, 0.9441136474609375, 0.9453189086914062, 0.9442518920898437, 0.9458114624023437, 0.9444659423828125, 0.9440430297851562, 0.943193115234375, 0.9432238159179688, 0.9438320922851563, 0.9455195922851563, 0.9445857543945313, 0.9451468505859375, 0.9443491821289063, 0.9451049194335938, 0.9449103393554688, 0.945438720703125, 0.9439078369140625, 0.9441853637695312, 0.9438812255859375, 0.9447454833984374, 0.9449072875976563, 0.9441033935546875, 0.9460459594726562, 
0.9442908325195313, 0.9438238525390625, 0.9456240844726562, 0.9443204956054687, 0.9439620971679688, 0.9440112915039063, 0.944047119140625, 0.9453885498046875, 0.94415771484375, 0.9452503051757812, 0.9443839721679688, 0.9442600708007812, 0.9446881103515625, 0.9450864868164063, 0.944437255859375, 0.9439979248046875, 0.94411572265625, 0.9444382934570312, 0.9443983154296876, 0.9445570678710937, 0.9448038330078125, 0.944584716796875, 0.9450680541992188, 0.9447341918945312, 0.944616455078125, 0.9441351928710937, 0.9442734375, 0.944194580078125, 0.9450393676757812, 0.9440450439453125, 1.96240380859375, 0.944021484375, 0.9450997924804687, 0.9445232543945312, 0.944395263671875, 0.9442621459960937, 0.9440450439453125, 0.9442600708007812, 0.9443963012695312, 0.944542724609375, 0.9447127075195313, 0.944289794921875, 0.9443440551757812, 0.9460879516601562, 0.9445314331054687, 0.944732177734375, 0.9441167602539062, 0.9434654541015625, 0.9446461181640625, 0.9446021118164063, 0.9455134887695312, 0.94460107421875, 0.944163818359375, 0.9440798950195313, 0.9460429077148438, 0.9448857421875, 0.9443890991210937, 0.944552978515625, 0.9442201538085937, 0.945523681640625, 0.9453793334960937, 0.946145263671875, 0.9447127075195313, 0.9450925903320313, 0.9457950439453126, 0.9460930786132813, 0.9446625366210938, 0.9440460815429688, 0.9442805786132813, 0.9451847534179687, 0.9439201049804687, 0.9439293212890625, 0.9453352661132812, 0.9441095581054687, 0.9442600708007812, 0.9441249389648437, 0.9455513305664063, 0.9446041870117188, 0.9449758911132813, 0.944733154296875, 0.9457428588867187, 0.9441658935546875, 0.9438597412109375, 0.9436641235351563, 0.94344189453125, 0.945201171875, 0.9446563720703125, 0.9440634765625, 0.9435924682617187, 0.9437214965820313, 0.9441249389648437, 0.9439600830078125, 0.94362109375, 1.9618406982421874, 0.9449922485351563, 0.9445714111328125, 0.946429931640625, 0.9437757568359375, 0.9441884155273438, 0.943705078125, 0.94535986328125, 0.9445396728515625, 0.9449246826171875, 0.943847412109375, 0.9441607666015625, 0.9455728759765625, 0.9447864379882812, 0.9441095581054687, 0.9441699829101563, 0.9446051635742188, 0.9435985717773437, 0.9450465087890625, 0.9443768310546875, 0.9445549926757812, 0.944733154296875, 0.9450895385742187, 0.9451468505859375, 0.945554443359375, 0.9442498779296875, 0.9442662353515625, 0.9444259643554688, 0.945138671875, 0.9445048217773437, 0.9450762329101563, 0.944775146484375, 0.9441412963867187, 0.9443184814453125, 0.945238037109375, 0.9445673217773437, 0.9456680908203124, 0.9446410522460937, 0.9445181274414063, 0.9456373901367188, 0.9438515014648438, 0.9446522827148438, 0.9439600830078125, 0.944447509765625, 0.9449236450195313, 0.9451837158203125, 0.944015380859375, 0.944595947265625, 0.9445867309570313, 0.944869384765625, 0.9447772216796875, 0.9439764404296875, 0.9448857421875, 0.9445509033203126, 0.9448018188476562, 0.9448990478515625, 0.94510693359375, 0.944690185546875, 0.9443369140625, 0.9444546508789062, 0.9454674072265625, 0.9442078857421875, 0.9450659790039062, 1.9620792236328124, 0.9442550048828126, 0.9450751953125, 0.9438463745117187, 0.9439702758789063, 0.944837646484375, 0.9442098999023437, 0.9442938842773437, 0.9439365234375, 0.94470556640625, 0.9441126098632813, 0.944058349609375, 0.9444188232421875, 0.9445037841796875, 0.943837158203125, 0.9439406127929687, 0.9434613647460938, 0.9443983154296876, 0.944353271484375, 0.9448919067382813, 0.9447465209960938, 0.9449727783203125, 0.9444126586914062, 0.9454131469726562, 0.945016845703125, 0.9439498291015626, 
0.9441412963867187, 0.9441597290039062, 0.9443522338867187, 0.9438381958007812, 0.9442887573242188, 0.944189453125, 0.9438013305664062, 0.9440122680664063, 0.9451192016601563, 0.9441802368164063, 0.944279541015625, 0.9441546020507813, 0.944953369140625, 0.9451735229492187, 0.9439866943359375, 0.9443461303710937, 0.9438648071289063, 0.9442754516601563, 0.9446963500976563, 0.944337890625, 0.9436928100585937, 0.9441341552734375, 0.9442744140625, 0.9443624877929687, 0.9439539184570312, 0.9444546508789062, 0.9440614624023438, 0.9443717041015625, 0.9440122680664063, 0.9457305297851563, 0.9445120239257813, 0.9440767822265625, 0.9440235595703125, 0.944637939453125, 0.9447669677734375, 0.9439303588867187, 0.9450260620117188, 1.96250830078125, 0.945138671875, 0.94508544921875, 0.9441658935546875, 0.944089111328125, 0.9439273071289063, 0.944532470703125, 0.9449574584960938, 0.9450444946289063, 0.9448878173828125, 0.9445734252929687, 0.9452164916992187, 0.9463602905273437, 0.9454663696289063, 0.9445283813476563, 0.944996337890625, 0.9449359130859375, 0.9434193725585938, 0.9439549560546875, 0.9449943237304688, 0.9439518432617188, 0.9447526245117187, 0.94417919921875, 0.9459722290039062, 0.943705078125, 0.9437808837890626, 0.9436303100585938, 0.945122314453125, 0.9443286743164062, 0.9444597778320313, 0.9451847534179687, 0.944753662109375, 0.9440460815429688, 0.9454346313476563, 0.9441884155273438, 0.9441730346679688, 0.9445990600585937, 0.9442191162109375, 0.9447506103515625, 0.94445361328125, 0.9448314819335938, 0.9444966430664062, 0.9454448852539062, 0.9437501220703125, 0.9454315795898437, 0.943909912109375, 0.9438177490234375, 0.9439487915039062, 0.944637939453125, 0.944753662109375, 0.9441044311523438, 0.9440993041992187, 0.9442928466796875, 0.9440347900390625, 0.9455585327148438, 0.9453936767578125, 0.9444710693359375, 0.9441751098632812, 0.944163818359375, 0.9446604614257812, 0.9442406616210938, 0.9449768676757813, 0.9444280395507813, 1.9621160888671876, 0.9457469482421875, 0.94449560546875, 0.9444403076171874, 0.9441771240234375, 0.9445068969726562, 0.9441464233398438, 0.9440808715820312, 0.9447423706054687, 0.9440921630859375, 0.9448734741210938, 0.944405517578125, 0.945375244140625, 0.944026611328125, 0.9439928588867188, 0.9442150268554688, 0.9443993530273438, 0.9442857055664062, 0.9451458740234375, 0.9446522827148438, 0.944701416015625, 0.9448027954101562, 0.9459158935546875, 0.9446707153320313, 0.9441566772460938, 0.9440133056640625, 0.9440880737304688, 0.944232421875, 0.9440133056640625, 0.9441699829101563, 0.9440634765625, 0.9437409057617188, 0.943952880859375, 0.944626708984375, 0.944152587890625, 0.9444495239257813, 0.9439201049804687, 0.945048583984375, 0.9437726440429688, 0.9444515991210938, 0.94368359375, 0.9439324340820312, 0.9438914794921875, 0.9448427734375, 0.9442590942382812, 0.9434368286132813, 0.9441812744140625, 0.9437726440429688, 0.9446533203125, 0.9439723510742187, 0.9444556884765625, 0.9440726928710937, 0.9439303588867187, 0.94407373046875, 0.9452779541015625, 0.9441228637695313, 0.9435852661132812, 0.9436907348632813, 0.943984619140625, 0.944078857421875, 0.9446338500976562, 0.9449257202148438, 0.9452236938476563, 1.963953125, 0.9454633178710937, 0.9447341918945312, 0.9460571899414062, 0.94517041015625, 0.9456434936523438, 0.9455032348632812, 0.9461248168945312, 0.9460029296875, 0.9438279418945312, 0.9447096557617187, 0.94533837890625, 0.94419970703125, 0.943636474609375, 0.943752197265625, 0.9444884643554687, 0.9433948364257813, 0.9433794555664062, 0.9446205444335938, 
0.9441167602539062, 0.9440307006835937, 0.9440122680664063, 0.9449257202148438, 0.9440286865234375, 0.943515625, 0.9437122802734375, 0.9445457763671875, 0.9440726928710937, 0.9442130126953125, 0.9443328247070313, 0.9440071411132812, 0.9451980590820312, 0.9459169311523438, 0.944890869140625, 0.9443102416992187, 0.9445673217773437, 0.9445437622070313, 0.9452554321289063, 0.9446389770507813, 0.9451735229492187, 0.9450977172851562, 0.944343017578125, 0.9447926025390625, 0.9471918334960937, 0.9450096435546875, 0.9446543579101563, 0.9449779052734375, 0.9458626708984375, 0.9440061645507812, 0.9450895385742187, 0.945090576171875, 0.9444638671875, 0.9445816040039062, 0.944679931640625, 0.9448161010742188, 0.944236572265625, 0.9442642211914063, 0.9439815673828125, 0.9447567138671875, 0.944205810546875, 0.9459619750976562, 0.9442539672851562, 0.9440655517578125]",tokens/s,1.0426884477773943,,,,,,,, -4bit-awq-gemm-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,google/gemma-2b,google/gemma-2b,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 304, in hf_raise_for_status - response.raise_for_status() - File ""/usr/local/lib/python3.10/dist-packages/requests/models.py"", line 1024, in raise_for_status - raise HTTPError(http_error_msg, response=self) -requests.exceptions.HTTPError: 403 Client Error: Forbidden for url: https://huggingface.co/google/gemma-2b/resolve/main/config.json - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1722, in _get_metadata_or_catch_error - metadata = get_hf_file_metadata(url=url, proxies=proxies, timeout=etag_timeout, headers=headers) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1645, in get_hf_file_metadata - r = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 372, in _request_wrapper - response = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 396, in _request_wrapper - hf_raise_for_status(response) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 367, in hf_raise_for_status - 
raise HfHubHTTPError(message, response=response) from e -huggingface_hub.utils._errors.HfHubHTTPError: (Request ID: Root=1-669480ef-69208ced482777fa740d5535;1009ffa5-3e2a-4d1a-b243-a0fc9b7c959d) - -403 Forbidden: Please enable access to public gated repositories in your fine-grained token settings to view this repository.. -Cannot access content at: https://huggingface.co/google/gemma-2b/resolve/main/config.json. -If you are trying to create or update content,make sure you have a token with the `write` role. - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1221, in hf_hub_download - return _hf_hub_download_to_cache_dir( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1325, in _hf_hub_download_to_cache_dir - _raise_on_head_call_error(head_call_error, force_download, local_files_only) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1826, in _raise_on_head_call_error - raise LocalEntryNotFoundError( -huggingface_hub.utils._errors.LocalEntryNotFoundError: An error happened while trying to locate the file on the Hub and we cannot find the requested files in the local cache. Please check your connection and try again or make sure your Internet connection is on. - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 445, in cached_file - raise EnvironmentError( -OSError: We couldn't connect to 'https://huggingface.co' to load this file, couldn't find it in the cached files and it looks like google/gemma-2b is not the path to a directory containing a file named config.json. 
-Checkout your internet connection or see how to run the library in offline mode at 'https://huggingface.co/docs/transformers/installation#offline-mode'. - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemm-eager,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,EleutherAI/polyglot-ko-12.8b,EleutherAI/polyglot-ko-12.8b,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.40.2,,0.30.1,,,,1.19.2,,,,0.11.0,,,,,,,,,,,,,,,,,,,,,,,,,,,MB,1740.263424,9941.024768,0.0,9294.577664,8910.102528,s,10,10.642888305664064,1.0642888305664062,0.0008435144191809837,1.06450927734375,1.0651936889648437,1.065327557373047,1.0654346520996094,"[1.065136474609375, 1.06546142578125, 1.063631591796875, 1.063218505859375, 1.06368701171875, 1.0628612060546876, 1.0644658203125, 1.064552734375, 1.0647095947265626, 1.0651639404296875]",tokens/s,240.5362084498799,kWh,1.2557922783825134e-05,6.880571250694628e-06,6.097363211219897e-05,8.041212614671872e-05,tokens/kWh,3183599.4428614704,MB,1740.86144,9941.024768,0.0,9294.577664,9220.867072,s,10,630.7354296875,63.07354296875,0.00490218127220892,63.07426171875,63.081011718750005,63.081064453125,63.081106640625,"[63.0750703125, 63.07307421875, 63.081, 63.075046875, 63.07416015625, 63.0811171875, 63.07436328125, 63.0665078125, 63.06765234375, 63.0674375]",tokens/s,0.9988340124037992,kWh,0.0007445980064074199,0.0004081038876869388,0.003603297882636,0.004755999776730358,tokens/kWh,13246.426189555263,,s,629,639.312124511719,1.016394474581429,0.12643667238363906,1.0011002807617186,1.0017415283203124,1.002017578125,2.065029130859375,"[1.0008678588867188, 1.0004398193359374, 1.001291748046875, 1.0011023559570313, 1.0010675048828126, 1.0010203857421875, 1.0008780517578124, 1.000616943359375, 1.0006917114257812, 1.0008555297851562, 1.0007982177734376, 1.000685546875, 1.0008023071289063, 1.000985595703125, 1.0008780517578124, 1.0010081787109375, 1.0006251220703124, 1.0007255249023437, 1.000806396484375, 1.0009149169921876, 1.0006405029296874, 1.0007285766601564, 1.0012835693359374, 1.0011812133789062, 1.0010265502929687, 1.0012303466796875, 1.0011586303710938, 1.0011637573242187, 1.0010664672851564, 1.0014730224609374, 1.0011238403320313, 1.0014033813476562, 1.0011033325195313, 1.0013900756835938, 1.0010501098632814, 1.0017617797851563, 1.0014638061523438, 1.0016256103515624, 1.0009415893554687, 1.0013726806640626, 1.000985595703125, 1.0008145751953126, 1.001359375, 1.0016143188476563, 1.0011217651367188, 1.0013255615234375, 1.0009302978515624, 1.001133056640625, 1.0009508056640626, 1.001533447265625, 1.0019143676757813, 1.0015897827148437, 1.0020361938476563, 1.0020464477539062, 1.00206591796875, 1.0012640991210937, 1.0014044189453124, 1.001734130859375, 1.0015846557617187, 1.00234033203125, 1.0014207763671874, 1.0013255615234375, 2.067092529296875, 1.0008361206054688, 1.0011576538085938, 1.0006384887695312, 1.001079833984375, 1.0007879638671875, 1.0012293090820312, 1.0010501098632814, 1.001322509765625, 1.0013265991210938, 1.0013900756835938, 1.000838134765625, 1.001486328125, 
1.0010675048828126, 1.0011484375, 1.0016091918945313, 1.001449462890625, 1.0011023559570313, 1.0005330200195313, 1.0008074340820312, 1.0010623779296874, 1.0006220703125, 1.0009927978515625, 1.0007716064453125, 1.001026611328125, 1.0008770141601562, 1.0014136352539063, 1.0009508056640626, 1.0009508056640626, 1.00097021484375, 1.001290771484375, 1.00075830078125, 1.0016583862304687, 1.0007131958007813, 1.0006619873046876, 1.0005339965820312, 1.0010706176757813, 1.0007859497070313, 1.0006456298828126, 1.0009036865234375, 1.00139111328125, 1.0010776977539062, 1.0015672607421875, 1.0008135375976563, 1.0011463623046875, 1.000784912109375, 1.0035189819335937, 1.00113818359375, 1.0015252685546876, 1.0012507934570312, 1.0016573486328124, 1.0014893798828124, 1.001533447265625, 1.0010839233398436, 1.0011525268554688, 1.0011566162109375, 1.0015396118164062, 1.0014955444335938, 1.0011074829101563, 1.0009037475585938, 1.0016132202148438, 1.0009343872070313, 1.002029052734375, 2.06504150390625, 1.0005913696289062, 1.0010398559570313, 1.00097021484375, 1.0005196533203125, 1.0015057983398437, 1.0011084594726563, 1.0007337036132813, 1.0012190551757814, 1.0008473510742188, 1.0012252197265625, 1.0008402099609375, 1.0011617431640625, 1.0010194091796876, 1.0008719482421875, 1.0009927978515625, 1.0008237915039062, 1.00075927734375, 1.0011688842773439, 1.000806396484375, 1.0008606567382812, 1.0010132446289062, 1.0011740112304688, 1.00221337890625, 1.0005995483398438, 1.000721435546875, 1.00090673828125, 1.0007039794921875, 1.001343994140625, 1.001470947265625, 1.0014852905273437, 1.00143408203125, 1.0017413330078124, 1.002018798828125, 1.0018693237304688, 1.0017587280273437, 1.0011954956054687, 1.0007203979492187, 1.0011064453125, 1.00105419921875, 1.0013839111328124, 1.0012108764648437, 1.001881591796875, 1.0012119140625, 1.0016245727539062, 1.0019154052734376, 1.0013501586914062, 1.0026547241210937, 1.0016737060546874, 1.0011678466796874, 1.00153857421875, 1.0014085083007813, 1.0018191528320313, 1.001275390625, 1.0014157104492187, 1.0018908081054687, 1.0011791381835937, 1.0009927978515625, 1.0019522705078125, 1.0013368530273437, 1.0016307373046875, 1.0016552734375, 1.001829345703125, 2.064997314453125, 1.001175048828125, 1.0011033935546876, 1.0011688232421876, 1.001164794921875, 1.0007736206054687, 1.001064453125, 1.0007973022460936, 1.00076025390625, 1.0006077270507812, 1.0007183227539063, 1.0011361083984376, 1.0011351318359376, 1.0006005859375, 1.0008688354492188, 1.000642578125, 1.00075830078125, 1.0010562744140625, 1.001421875, 1.0009476318359376, 1.000995849609375, 1.0008811645507814, 1.001438232421875, 1.000816650390625, 1.0011361083984376, 1.0006569213867187, 1.0008370971679688, 1.0008350830078125, 1.00099072265625, 1.0007787475585936, 1.0014464111328125, 1.0013634643554687, 1.0017720336914062, 1.001069580078125, 1.001523193359375, 1.0014351196289062, 1.002029052734375, 1.001865234375, 1.0020095825195312, 1.0007572631835937, 1.0010081176757812, 1.0010040283203125, 1.000943603515625, 1.001080810546875, 1.0031155395507811, 1.000857666015625, 1.00139208984375, 1.001006103515625, 1.0015139770507813, 1.0011719970703126, 1.0016010131835937, 1.0014443359375, 1.0011443481445312, 1.0013952026367188, 1.0011340942382811, 1.0010603637695312, 1.0010203857421875, 1.0011033935546876, 1.0015077514648438, 1.00125390625, 1.002071044921875, 1.0015109252929688, 1.0014412841796876, 2.065207275390625, 1.0004561767578124, 1.0008197021484375, 1.0008104858398437, 1.0015027465820312, 1.0008872680664063, 1.0009497680664063, 
1.0020311279296874, 1.0013726806640626, 1.0015405883789064, 1.00103271484375, 1.0013511962890624, 1.0011668701171874, 1.0011986083984374, 1.00210791015625, 1.0007920532226562, 1.0011351318359376, 1.0023803100585937, 1.0009579467773437, 1.0010460205078124, 1.0012252197265625, 1.0007500610351563, 1.001091064453125, 1.0007879638671875, 1.0017576904296874, 1.0010828857421874, 1.0014474487304688, 1.0011094970703125, 1.0007890014648437, 1.0009169921875, 1.0007787475585936, 1.0004838256835937, 1.0008545532226563, 1.0006609497070313, 1.0009682006835938, 1.0007777099609374, 1.0011300048828125, 1.00124365234375, 1.000796142578125, 1.0009343872070313, 1.0009251708984375, 1.000722412109375, 1.0007971801757813, 1.0006896362304687, 1.0014924926757813, 1.0007080688476562, 1.0007705688476562, 1.0013204345703124, 1.001218017578125, 1.0010511474609376, 1.0009517822265626, 1.0008607177734374, 1.0009199829101563, 1.00119140625, 1.0017146606445313, 1.0010194091796876, 1.0013358154296874, 1.003936767578125, 1.001802734375, 1.0014443359375, 1.0012477416992187, 1.0015396118164062, 1.0018938598632812, 2.064691162109375, 1.0003916625976563, 1.0009784545898437, 1.0010337524414064, 1.0008944702148437, 1.00107470703125, 1.0008616943359374, 1.001354248046875, 1.00143408203125, 1.0012088623046875, 1.0020802612304687, 1.0009651489257811, 1.001312255859375, 1.0014505004882812, 1.0007481079101563, 1.0010009155273438, 1.0012723388671876, 1.0009712524414063, 1.0011033325195313, 1.0011371459960938, 1.0015723266601562, 1.0016163940429688, 1.0008381958007813, 1.0017801513671876, 1.0015897827148437, 1.0014259033203126, 1.0019799194335937, 1.0012518310546874, 1.0014515380859375, 1.0014484252929687, 1.001759765625, 1.00107568359375, 1.0014505004882812, 1.0014423217773438, 1.001112548828125, 1.0011678466796874, 1.0010286254882812, 1.0009210815429688, 1.0033643798828125, 1.0010767211914062, 1.001469970703125, 1.0009139404296874, 1.001143310546875, 1.00132763671875, 1.0011791381835937, 1.001169921875, 1.0012620849609375, 1.0009456787109374, 1.0010347290039063, 1.00092724609375, 1.0015047607421874, 1.00096923828125, 1.0009251708984375, 1.0013870239257812, 1.0013132934570312, 1.0014105834960938, 1.0011658325195312, 1.0012948608398438, 1.0014935302734376, 1.0009978637695311, 1.0015344848632812, 1.001290771484375, 1.0020157470703126, 2.0664228515625, 1.001302001953125, 1.000827880859375, 1.0009938354492187, 1.0006937255859376, 1.0006784057617188, 1.000911865234375, 1.0006487426757813, 1.0011852416992189, 1.0008043823242188, 1.0012477416992187, 1.0013726806640626, 1.0013101806640625, 1.0008023071289063, 1.0008514404296875, 1.0008248291015625, 1.0011658325195312, 1.001006103515625, 1.0012498168945312, 1.0011207885742188, 1.0012610473632813, 1.0012415771484375, 1.0010951538085937, 1.0023833618164062, 1.0010685424804688, 1.0005288696289063, 1.000853515625, 1.000543212890625, 1.0008811645507814, 1.0009784545898437, 1.0013153076171875, 1.0010859375, 1.0009425659179687, 1.0009343872070313, 1.000748046875, 1.000453125, 1.0013388671875, 1.00075927734375, 1.000826904296875, 1.00143310546875, 1.0011207885742188, 1.0015027465820312, 1.001175048828125, 1.0009395141601563, 1.0011279296875, 1.0010224609375, 1.0013798217773437, 1.0013460693359375, 1.00193896484375, 1.00166552734375, 1.0014197998046874, 1.0018252563476562, 1.0011893920898438, 1.0011268920898437, 1.0015764770507813, 1.0016574096679687, 1.0018969116210938, 1.0011760864257813, 1.0016829223632813, 1.0013952026367188, 1.0015078125, 1.0017791748046876, 1.001365478515625, 2.065933349609375, 
1.00049609375, 1.0008944702148437, 1.00080126953125, 1.0008135375976563, 1.0008340454101563, 1.0008822021484376, 1.001302001953125, 1.0008237915039062, 1.0006292724609376, 1.0011105346679687, 1.0010654907226562, 1.0013255615234375, 1.0006692504882813, 1.0007797241210938, 1.0010091552734375, 1.0015211791992187, 1.0007367553710937, 1.0007234497070312, 1.0005473022460938, 1.0006712036132812, 1.00077978515625, 1.0011351318359376, 1.001279541015625, 1.0010480346679687, 1.0010921020507813, 1.000892333984375, 1.001080810546875, 1.00067431640625, 1.0010296020507812, 1.0010767822265625, 1.0005595703125, 1.0013173828125, 1.0010501098632814, 1.0013409423828126, 1.0012006225585937, 1.0011443481445312, 1.0012139282226562, 1.0008053588867187, 1.0007408447265624, 1.0009384765625, 1.0005883178710937, 1.0010715942382813, 1.0009569091796875, 1.0010900268554688, 1.0009027099609376, 1.000849365234375, 1.0009651489257811, 1.0008524780273438, 1.001027587890625, 1.0014033813476562, 1.0012498168945312, 1.00135009765625, 1.001059326171875, 1.0014095458984376, 1.0020269775390624, 1.0020638427734374, 1.0020095825195312, 1.0013235473632813, 1.0012037353515626, 1.0014484252929687, 1.0009476928710936, 1.0012733154296876, 2.066282470703125, 1.0010859375, 1.0004767456054688, 1.0010264892578125, 1.0008135375976563, 1.0007572631835937, 1.0011146240234374, 1.0009620361328124, 1.0013931274414063, 1.0009630737304687, 1.0009375, 1.0007900390625, 1.0008340454101563, 1.0009886474609375, 1.0007900390625, 1.0007756958007812, 1.0007982177734376, 1.0019952392578124, 1.0009794311523437, 1.0006138916015626, 1.0008309936523438, 1.0008893432617187, 1.0010368041992188, 1.0011248779296875, 1.0005238037109374, 1.0006343383789063, 1.0008145751953126, 1.0007039794921875, 1.0007521362304688, 1.0009896850585938, 1.0010880126953126, 1.0012303466796875, 1.00091796875, 1.0013450317382813, 1.000784912109375, 1.0007367553710937, 1.0009825439453126, 1.0009108276367187, 1.001238525390625, 1.0008944702148437, 1.0009989013671876, 1.0009886474609375, 1.0009651489257811, 1.0011422729492188, 1.0012374877929688, 1.0012569580078126, 1.00099072265625, 1.0008780517578124, 1.0009682006835938, 1.0007080688476562, 1.001248779296875, 1.0015518798828125, 1.0010582885742187, 1.0014033813476562, 1.001016357421875, 1.0009476318359376, 1.0012354736328124, 1.0020833129882813, 1.0019143676757813, 1.0018539428710938, 1.0019850463867188, 1.0015999755859375, 1.0015641479492188, 2.0665712890625, 1.0007060546875, 1.0006599731445311, 1.00056884765625, 1.000685546875, 1.0005678100585937, 1.0009722900390625, 1.000806396484375, 1.0007203979492187, 1.0008207397460938, 1.0005872802734375, 1.0005545043945312, 1.00069384765625, 1.0008534545898438, 1.0013716430664064, 1.0008811645507814, 1.0013511962890624, 1.0014893798828124, 1.0010726318359375, 1.0017157592773438, 1.0011299438476562, 1.00086376953125, 1.00071728515625, 1.0005729370117187, 1.0008995971679688, 1.0005995483398438, 1.0024898681640626, 1.0008678588867188, 1.000722412109375, 1.0012149658203124, 1.0008237915039062, 1.00071728515625, 1.00101220703125, 1.0010562744140625, 1.0010572509765625, 1.00170751953125, 1.00140234375, 1.000974365234375, 1.0007828369140626, 1.0009139404296874, 1.0011300048828125, 1.0010675048828126, 1.0013562622070313, 1.000953857421875, 1.0009722900390625, 1.0011146240234374, 1.0015631103515625, 1.000975341796875, 1.0017495727539063, 1.0013224487304688, 1.0012518310546874, 1.001439208984375, 1.0012415771484375, 1.0010685424804688, 1.0010286254882812, 1.00105419921875, 1.0016696166992187, 
1.0011002807617186, 1.0011443481445312, 1.0017423095703124, 1.0011033325195313, 1.0011924438476563, 1.0012406005859376]",tokens/s,0.9838699688049951,,,,,,,, -4bit-awq-gemm-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,Qwen/Qwen-72B,Qwen/Qwen-72B,cuda,0,42,,,True,,,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run - report = scenario.run(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run - self.run_model_loading_tracking(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking - backend.load() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load - self.load_transformers_model() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model - self.load_transformers_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights - self.load_transformers_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained - self.pretrained_model = self.automodel_loader.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 551, in from_pretrained - model_class = get_class_from_dynamic_module( - File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 502, in get_class_from_dynamic_module - final_module = get_cached_module_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 327, in get_cached_module_file - modules_needed = check_imports(resolved_module_file) - File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 182, in check_imports - raise ImportError( -ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. 
Run `pip install transformers_stream_generator` - -",qwen,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemm-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,i,i,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 304, in hf_raise_for_status - response.raise_for_status() - File ""/usr/local/lib/python3.10/dist-packages/requests/models.py"", line 1024, in raise_for_status - raise HTTPError(http_error_msg, response=self) -requests.exceptions.HTTPError: 404 Client Error: Not Found for url: https://huggingface.co/i/resolve/main/config.json - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1221, in hf_hub_download - return _hf_hub_download_to_cache_dir( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1325, in _hf_hub_download_to_cache_dir - _raise_on_head_call_error(head_call_error, force_download, local_files_only) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1823, in _raise_on_head_call_error - raise head_call_error - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1722, in _get_metadata_or_catch_error - metadata = get_hf_file_metadata(url=url, proxies=proxies, timeout=etag_timeout, headers=headers) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1645, in get_hf_file_metadata - r = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 372, in _request_wrapper - response = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 396, in _request_wrapper - hf_raise_for_status(response) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status - 
raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-66948ff4-490b5da27371db416ba8b2b0;c8eb74d6-1499-4ace-8392-7542c3b2b752) - -Repository Not Found for url: https://huggingface.co/i/resolve/main/config.json. -Please make sure you specified the correct `repo_id` and `repo_type`. -If you are trying to access a private or gated repo, make sure you are authenticated. - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 425, in cached_file - raise EnvironmentError( -OSError: i is not a local folder and is not a valid model identifier listed on 'https://huggingface.co/models' -If this is a private repository, make sure to pass a token having permission to this repo either by logging in with `huggingface-cli login` or by passing `token=` - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemm-eager,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,stabilityai/stablelm-3b-4e1t,stabilityai/stablelm-3b-4e1t,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.1,,0.30.1,,,,1.19.2,,,,0.11.1,,,,,,,,,,,,,,,,,,,,,,,,,,,MB,2110.81216,2911.371264,0.0,2264.92416,2140.859392,s,10,2.5174565429687497,0.25174565429687495,0.002151641767107536,0.250954475402832,0.2536562973022461,0.2552590721130371,0.2565412919616699,"[0.25686184692382813, 0.2507518005371094, 0.24988601684570313, 0.2505283203125, 0.2511571502685547, 0.24977813720703124, 0.24948822021484374, 0.2533001251220703, 0.25277372741699217, 
0.25293119812011716]",tokens/s,1016.8993809049352,kWh,2.9486055983114377e-06,1.615695148140686e-06,1.3450261437707742e-05,1.801456218415987e-05,tokens/kWh,14210725.599820558,MB,2113.232896,2911.371264,0.0,2264.92416,2246.908928,s,10,147.68545214843752,14.768545214843751,0.008287027024456562,14.7677890625,14.7791720703125,14.7827984375,14.785699531250001,"[14.7716796875, 14.761337890625, 14.760306640625, 14.757224609375, 14.7783662109375, 14.769251953125, 14.7864248046875, 14.7672763671875, 14.7652822265625, 14.7683017578125]",tokens/s,4.265823009884493,kWh,0.0001741533532096603,9.545023489099372e-05,0.0007804473459020919,0.0010500509340027457,tokens/kWh,59997.08962673545,,s,629,149.7085112609864,0.23801035176627394,0.029876551877239144,0.2342451171875,0.23526665954589845,0.2357182434082031,0.48483614379882817,"[0.23634022521972656, 0.23580979919433595, 0.23378636169433595, 0.2359173126220703, 0.23436288452148438, 0.2340474853515625, 0.23413555908203126, 0.23381607055664064, 0.23380070495605468, 0.23397683715820314, 0.23489945983886718, 0.23450828552246095, 0.23392665100097657, 0.2347868194580078, 0.23529164123535157, 0.2351278076171875, 0.23502951049804688, 0.23457279968261718, 0.23408128356933594, 0.2355589141845703, 0.2354534454345703, 0.23438336181640626, 0.23522406005859375, 0.23521279907226564, 0.23508070373535156, 0.23463629150390625, 0.23408230590820311, 0.2338928680419922, 0.23402598571777344, 0.23431475830078125, 0.2341826629638672, 0.2337505340576172, 0.23392562866210936, 0.23407513427734375, 0.23401370239257813, 0.23463833618164062, 0.2346096649169922, 0.234144775390625, 0.23394610595703125, 0.2363443145751953, 0.2340720672607422, 0.23385702514648438, 0.23410585021972657, 0.233997314453125, 0.23401370239257813, 0.23508172607421876, 0.23504486083984374, 0.23416934204101564, 0.23398399353027344, 0.2338693084716797, 0.23474688720703124, 0.23387954711914063, 0.23373619079589844, 0.23434751892089845, 0.23412428283691405, 0.23393894958496095, 0.2339563446044922, 0.23393997192382812, 0.23378125, 0.23382528686523438, 0.23372799682617187, 0.23389593505859374, 0.4848691101074219, 0.23401983642578125, 0.23492710876464845, 0.23426150512695312, 0.2343546905517578, 0.2339911651611328, 0.2339113006591797, 0.23409356689453126, 0.23416627502441406, 0.23411199951171874, 0.2339788818359375, 0.23402496337890624, 0.23383450317382812, 0.23386317443847657, 0.234029052734375, 0.2345779266357422, 0.23393177795410156, 0.23390617370605468, 0.234102783203125, 0.234682373046875, 0.2339594268798828, 0.23381196594238282, 0.2337935333251953, 0.23399935913085937, 0.23403213500976563, 0.23468646240234375, 0.23564390563964843, 0.2349363250732422, 0.23442739868164061, 0.2346414031982422, 0.23534591674804686, 0.23548109436035156, 0.23447039794921876, 0.23574732971191406, 0.23480320739746094, 0.2339983367919922, 0.23404850769042967, 0.23442739868164061, 0.23391641235351562, 0.2341724090576172, 0.23490765380859374, 0.2338805694580078, 0.23490252685546875, 0.23385600280761717, 0.23409561157226563, 0.2344857635498047, 0.2364610595703125, 0.23387852478027343, 0.23375360107421875, 0.23386521911621094, 0.2337822723388672, 0.2338170928955078, 0.23385804748535155, 0.23377407836914063, 0.23408230590820311, 0.2339430389404297, 0.234102783203125, 0.2338928680419922, 0.2352168884277344, 0.2338314208984375, 0.23406898498535156, 0.23446630859375, 0.23422157287597656, 0.4860119018554688, 0.2341160888671875, 0.2342451171875, 0.2343505859375, 0.2357442626953125, 0.23456153869628907, 0.23385906982421875, 0.2339041290283203, 
0.2340095977783203, 0.23387545776367188, 0.23382733154296875, 0.2345359344482422, 0.23387750244140626, 0.23376690673828124, 0.23402598571777344, 0.23404544067382813, 0.2340095977783203, 0.23381503295898437, 0.23389695739746094, 0.23463833618164062, 0.23435673522949219, 0.23512678527832032, 0.2348257293701172, 0.2361292724609375, 0.23491583251953124, 0.23533772277832032, 0.23424716186523437, 0.23495065307617188, 0.233818115234375, 0.2338734130859375, 0.23402496337890624, 0.23422872924804689, 0.2353070068359375, 0.23398912048339843, 0.23424000549316407, 0.23436390686035155, 0.23477760314941407, 0.23421133422851562, 0.23425331115722656, 0.23424409484863282, 0.23404544067382813, 0.2338621368408203, 0.23387135314941407, 0.2337925109863281, 0.23464857482910156, 0.23441407775878906, 0.2338365478515625, 0.2360289306640625, 0.23435264587402344, 0.23396249389648438, 0.23382424926757814, 0.23385600280761717, 0.23380787658691407, 0.2338191375732422, 0.23494041442871094, 0.23401164245605469, 0.2339215393066406, 0.2339051513671875, 0.23380685424804687, 0.23376588439941406, 0.23408639526367186, 0.23465472412109376, 0.23369830322265625, 0.48475137329101564, 0.23387852478027343, 0.23382937622070313, 0.23382220458984376, 0.23377714538574218, 0.23383244323730468, 0.23383244323730468, 0.23393792724609375, 0.23368499755859376, 0.233818115234375, 0.23384474182128906, 0.23382118225097656, 0.2341908416748047, 0.23382220458984376, 0.2341754913330078, 0.2348564453125, 0.23380685424804687, 0.23420314025878905, 0.23381094360351562, 0.23416831970214844, 0.23417855834960938, 0.2338488311767578, 0.2338140106201172, 0.23392256164550781, 0.23402803039550782, 0.2338140106201172, 0.234967041015625, 0.23461068725585937, 0.2341754913330078, 0.23411097717285156, 0.23385906982421875, 0.23410995483398436, 0.23426661682128908, 0.23414886474609375, 0.23437004089355468, 0.2345041961669922, 0.23398809814453125, 0.23385292053222656, 0.23385906982421875, 0.23410995483398436, 0.2341386260986328, 0.23388978576660155, 0.2344837188720703, 0.23398809814453125, 0.23559475708007813, 0.2338191375732422, 0.2342256622314453, 0.23473458862304689, 0.23459942626953126, 0.23494554138183593, 0.23424000549316407, 0.23441714477539063, 0.23474380493164063, 0.23442842102050782, 0.23444992065429687, 0.2339368896484375, 0.23582514953613282, 0.2339532775878906, 0.23572274780273436, 0.2348922882080078, 0.23533465576171875, 0.23438438415527343, 0.23461785888671874, 0.4860794982910156, 0.23520664978027345, 0.23465676879882813, 0.23474688720703124, 0.23444070434570313, 0.23441510009765626, 0.23433421325683593, 0.23486770629882814, 0.23470387268066406, 0.23463424682617187, 0.23388978576660155, 0.23490663146972657, 0.23415705871582032, 0.23375666809082032, 0.23418675231933594, 0.23416114807128907, 0.23495986938476562, 0.23515545654296874, 0.23488922119140626, 0.2340095977783203, 0.23481138610839844, 0.23467724609375, 0.23522201538085938, 0.23416831970214844, 0.23409767150878907, 0.23391949462890624, 0.2341826629638672, 0.2353428497314453, 0.23429426574707032, 0.23422361755371093, 0.23447756958007812, 0.23421542358398437, 0.2339983367919922, 0.23531520080566407, 0.2346967010498047, 0.2343055419921875, 0.23412838745117187, 0.23422976684570312, 0.23411712646484376, 0.2343055419921875, 0.2340843505859375, 0.234566650390625, 0.23416934204101564, 0.2340843505859375, 0.23419596862792968, 0.23391232299804687, 0.23488613891601562, 0.23576576232910157, 0.23572377014160156, 0.23397683715820314, 0.23519743347167968, 0.23507557678222657, 0.2344837188720703, 
0.23419187927246093, 0.23482879638671875, 0.234134521484375, 0.23423487854003905, 0.23815577697753906, 0.2349475860595703, 0.234608642578125, 0.23501414489746095, 0.2343126983642578, 0.23413145446777345, 0.484052978515625, 0.23418675231933594, 0.23383552551269532, 0.233818115234375, 0.2337884216308594, 0.23386009216308593, 0.23376998901367188, 0.23425535583496093, 0.23407923889160157, 0.23388365173339845, 0.23385702514648438, 0.23382528686523438, 0.2338170928955078, 0.23406182861328126, 0.2349547576904297, 0.2348021697998047, 0.2346516418457031, 0.23388160705566408, 0.23406387329101563, 0.23462911987304688, 0.23546675109863283, 0.2349168701171875, 0.23516160583496093, 0.23408741760253907, 0.23404135131835938, 0.23388876342773438, 0.2348124084472656, 0.23441407775878906, 0.23469465637207032, 0.23460762023925782, 0.23387443542480468, 0.23417446899414063, 0.23448883056640624, 0.23504998779296876, 0.23461068725585937, 0.2344058837890625, 0.2338805694580078, 0.23380274963378905, 0.23606375122070314, 0.23502951049804688, 0.2342696990966797, 0.2343864288330078, 0.23570431518554688, 0.23431884765625, 0.23458610534667967, 0.23478271484375, 0.23399320983886718, 0.23410176086425782, 0.23421235656738282, 0.2348636169433594, 0.23454310607910156, 0.23408026123046874, 0.2343055419921875, 0.23477145385742187, 0.23488204956054687, 0.23403724670410156, 0.23455743408203125, 0.2344806365966797, 0.23533567810058595, 0.23407411193847658, 0.23458099365234375, 0.2344069061279297, 0.2347008056640625, 0.4853893127441406, 0.23457997131347658, 0.23390719604492188, 0.23476838684082033, 0.23429324340820312, 0.23598899841308593, 0.2341908416748047, 0.23444070434570313, 0.23391641235351562, 0.23489126586914064, 0.23460453796386718, 0.2361405487060547, 0.23541043090820313, 0.2344110107421875, 0.23417958068847655, 0.23468031311035156, 0.23411302185058594, 0.23393075561523438, 0.23403724670410156, 0.23467213439941406, 0.23507046508789062, 0.2341273651123047, 0.23511961364746095, 0.23591935729980468, 0.23496397399902344, 0.23582514953613282, 0.23585279846191406, 0.234498046875, 0.23445606994628906, 0.2352015380859375, 0.23556710815429688, 0.23550361633300781, 0.23390719604492188, 0.2345175018310547, 0.23411814880371093, 0.23459738159179688, 0.2349291534423828, 0.23403826904296876, 0.2339665985107422, 0.2354534454345703, 0.23551487731933593, 0.2355968017578125, 0.2346639404296875, 0.23512371826171874, 0.23418060302734375, 0.23528550720214844, 0.2347694091796875, 0.2348185577392578, 0.23459327697753907, 0.2342686767578125, 0.2342451171875, 0.23486463928222656, 0.23477247619628908, 0.23453797912597657, 0.23439974975585937, 0.2347448272705078, 0.2346782684326172, 0.23409767150878907, 0.2341160888671875, 0.2345482177734375, 0.2349998016357422, 0.23420620727539063, 0.23429632568359374, 0.487478271484375, 0.23422157287597656, 0.2341580810546875, 0.2345779266357422, 0.23406285095214843, 0.23428608703613282, 0.23372492980957033, 0.23382118225097656, 0.23385498046875, 0.23446015930175781, 0.23385804748535155, 0.2345359344482422, 0.2342328338623047, 0.23421644592285157, 0.23417446899414063, 0.23571148681640625, 0.23593370056152344, 0.23512165832519533, 0.23528550720214844, 0.23476223754882813, 0.23465983581542968, 0.2345216064453125, 0.2348072967529297, 0.23432908630371094, 0.23471615600585938, 0.23378125, 0.2351810607910156, 0.2338682861328125, 0.23424716186523437, 0.23442431640625, 0.23430758666992188, 0.23453388977050782, 0.2349547576904297, 0.2341376037597656, 0.2346414031982422, 0.2344806365966797, 0.23443865966796876, 
0.23413555908203126, 0.23448268127441407, 0.23451443481445314, 0.23403929138183593, 0.23566233825683594, 0.23403622436523439, 0.23466085815429688, 0.23410687255859375, 0.23414579772949218, 0.2337955780029297, 0.23434034729003905, 0.23408741760253907, 0.23395840454101563, 0.23467520141601564, 0.23385292053222656, 0.234287109375, 0.23367372131347655, 0.23419801330566406, 0.23389695739746094, 0.23498240661621095, 0.23478067016601561, 0.23465061950683594, 0.2337198028564453, 0.2345113525390625, 0.23452978515625, 0.23437619018554687, 0.48710348510742185, 0.23419290161132814, 0.23404953002929688, 0.23400653076171876, 0.23467213439941406, 0.23747993469238282, 0.23400857543945314, 0.2344908752441406, 0.23426559448242187, 0.23419903564453126, 0.23388978576660155, 0.23458099365234375, 0.23397273254394532, 0.23450009155273438, 0.23396044921875, 0.23411712646484376, 0.23380787658691407, 0.23464755249023436, 0.23406387329101563, 0.23385292053222656, 0.23458201599121092, 0.23409971618652345, 0.2341406707763672, 0.23461273193359375, 0.23395840454101563, 0.23406080627441406, 0.2345912322998047, 0.2337884216308594, 0.23409356689453126, 0.23391743469238283, 0.23382424926757814, 0.23414988708496093, 0.2346895294189453, 0.2340044860839844, 0.23400344848632812, 0.23432704162597656, 0.23545549011230468, 0.234819580078125, 0.23408332824707032, 0.23495884704589845, 0.234134521484375, 0.23427583312988282, 0.23382528686523438, 0.23380992126464845, 0.23423078918457033, 0.23484005737304686, 0.23429119873046875, 0.23448678588867186, 0.23548927307128906, 0.23426559448242187, 0.23432908630371094, 0.23404953002929688, 0.23414579772949218, 0.23459942626953126, 0.23440896606445313, 0.23396351623535155, 0.23510426330566406, 0.23443251037597657, 0.23493734741210937, 0.23475814819335938, 0.23529983520507813, 0.23406387329101563, 0.23430758666992188, 0.48767181396484377, 0.23440896606445313, 0.23380992126464845, 0.23463116455078126, 0.23454617309570314, 0.23481651306152343, 0.23389797973632812, 0.23491993713378906, 0.2345707550048828, 0.23445504760742186, 0.23386317443847657, 0.234925048828125, 0.23413349914550782, 0.23387443542480468, 0.23547187805175782, 0.2346229705810547, 0.23446015930175781, 0.2339246063232422, 0.23416831970214844, 0.23383244323730468, 0.23388365173339845, 0.23380685424804687, 0.23409152221679688, 0.23424000549316407, 0.23371160888671874, 0.23394508361816407, 0.23730380249023436, 0.23463526916503907, 0.2349864959716797, 0.2340843505859375, 0.23448678588867186, 0.234819580078125, 0.23532952880859376, 0.23434547424316407, 0.23549746704101562, 0.2339911651611328, 0.2340966339111328, 0.2338170928955078, 0.23386624145507812, 0.2344622039794922, 0.2341201934814453, 0.234745849609375, 0.2340843505859375, 0.23513189697265624, 0.23424307250976562, 0.23459225463867187, 0.23526194763183594, 0.23420109558105467, 0.23379148864746094, 0.23442124938964845, 0.2343987274169922, 0.2351595458984375, 0.23452774047851563, 0.23387852478027343, 0.233818115234375, 0.23384780883789064, 0.23484825134277343, 0.234134521484375, 0.2350243835449219, 0.2341396484375, 0.23421029663085938, 0.23383450317382812, 0.23446015930175781]",tokens/s,4.2014979288884025,,,,,,,, 
-4bit-awq-gemm-eager,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,tiiuae/falcon-rw-1b,tiiuae/falcon-rw-1b,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.1,,0.30.1,,,,1.19.2,,,,0.11.1,,,,,,,,,,,,,,,,,,,,,,,,,,,MB,1513.762816,1825.046528,0.0,1178.599424,1091.247104,s,10,1.2350289306640625,0.12350289306640624,0.0006278590763107522,0.12335015869140625,0.12446991119384765,0.12452100448608398,0.12456187911987306,"[0.12457209777832032, 0.12258493041992187, 0.123223388671875, 0.12296672058105469, 0.12311740875244141, 0.12295513916015625, 0.1234769287109375, 0.12388646697998047, 0.12445855712890624, 0.12378729248046876]",tokens/s,2072.8259366551956,kWh,1.4517080513280905e-06,7.954704248798562e-07,6.551292503901485e-06,8.798470980109433e-06,tokens/kWh,29095964.580520324,MB,1513.762816,1825.046528,0.0,1178.599424,1159.734784,s,10,69.32218505859375,6.932218505859375,0.003149777960520474,6.933029541015625,6.935241162109374,6.935911108398438,6.936447065429688,"[6.93331884765625, 6.93092578125, 6.9333740234375, 6.9281669921875, 6.932740234375, 6.934138671875, 6.93509228515625, 6.93250244140625, 6.9253447265625, 6.9365810546875]",tokens/s,9.087999743047627,kWh,8.183205577456532e-05,4.4849814286347e-05,0.00036005222300090266,0.000486734093061815,tokens/kWh,129434.12203508627,,s,629,70.322675743103,0.11180075634833549,0.014707892425374562,0.10997555541992188,0.11031490783691406,0.11055718231201171,0.23335628784179688,"[0.10998989105224609, 0.11027967834472656, 0.11003801727294922, 0.11055718231201171, 0.11002572631835937, 0.10997555541992188, 0.10988441467285157, 0.11009740447998047, 0.10994892883300782, 0.1100421142578125, 0.10996940612792969, 0.11002470397949218, 0.11002470397949218, 0.11015475463867187, 0.10990592193603516, 0.10996428680419922, 0.10994073486328125, 0.10987417602539062, 0.10995712280273437, 0.10995814514160156, 0.1099161605834961, 0.10992947387695312, 0.11009024047851562, 0.1098045425415039, 0.1099530258178711, 0.10990898895263672, 0.10999091339111328, 0.1099161605834961, 0.10990386962890625, 0.10983321380615234, 0.10983936309814453, 0.10992025756835938, 0.10994380950927735, 0.11009740447998047, 0.11012198638916015, 0.11004415893554688, 0.11021311950683593, 0.11015270233154296, 0.10997555541992188, 0.10997042846679687, 0.1100769271850586, 0.10993254089355468, 0.11002880096435547, 0.10997248077392578, 0.1100738525390625, 0.10994585418701172, 0.10987519836425781, 0.11011788940429687, 0.11002674865722656, 0.11031449890136719, 0.11004313659667969, 0.10998271942138672, 0.1100400619506836, 0.11014860534667968, 0.11063295745849609, 0.10996736145019531, 0.10992845153808593, 0.10991410827636719, 0.11014860534667968, 0.1098608627319336, 0.11007590484619141, 0.10992332458496094, 0.23466700744628907, 0.10985779571533204, 0.10981171417236328, 0.1099315185546875, 0.10995507049560548, 0.10985472106933594, 0.10984754943847656, 0.10977689361572265, 0.10997862243652344, 0.1100738525390625, 0.11007488250732422, 0.10998169708251954, 0.11045171356201172, 0.11078348541259765, 0.11011686706542968, 0.10994380950927735, 0.10991513824462891, 
0.10980556488037109, 0.10991718292236329, 0.10992332458496094, 0.10974310302734375, 0.10984857940673828, 0.10985164642333985, 0.10992947387695312, 0.11002572631835937, 0.11035648345947266, 0.11005951690673828, 0.10994073486328125, 0.10990694427490234, 0.1097676773071289, 0.10987827301025391, 0.10998374176025391, 0.10983628845214843, 0.10985164642333985, 0.10997555541992188, 0.11007590484619141, 0.10993049621582031, 0.10982195281982422, 0.10988543701171875, 0.11021414184570312, 0.11008614349365234, 0.11017420959472657, 0.10999193572998046, 0.10999603271484375, 0.1100400619506836, 0.10992127990722657, 0.11122073364257813, 0.10994790649414063, 0.1098946533203125, 0.10998067474365235, 0.11016397094726563, 0.11005030059814454, 0.10995097351074219, 0.10986803436279297, 0.10994483184814453, 0.11000115203857422, 0.10993663787841797, 0.1098629150390625, 0.10973286437988282, 0.11027046203613282, 0.10998169708251954, 0.11012403106689453, 0.11069951629638672, 0.2334771270751953, 0.109949951171875, 0.10998374176025391, 0.109949951171875, 0.11041484832763672, 0.11017215728759766, 0.11003084564208984, 0.1100421142578125, 0.11011174774169921, 0.11004927825927735, 0.10999603271484375, 0.11010457611083985, 0.10996121978759765, 0.1101322250366211, 0.10993869018554688, 0.11010150146484375, 0.11002470397949218, 0.1104005126953125, 0.11045171356201172, 0.11004927825927735, 0.11002982330322265, 0.11002470397949218, 0.10999193572998046, 0.10997042846679687, 0.10986803436279297, 0.1099315185546875, 0.10996018981933593, 0.11003699493408203, 0.10990592193603516, 0.11009228515625, 0.10992537689208984, 0.11011686706542968, 0.11002572631835937, 0.10996121978759765, 0.1100738525390625, 0.11009945678710938, 0.1099683837890625, 0.10994278717041016, 0.11000012969970703, 0.10996940612792969, 0.11009638214111328, 0.11007590484619141, 0.10986803436279297, 0.11006259155273437, 0.10995507049560548, 0.10999501037597656, 0.10975743865966797, 0.11042304229736329, 0.11009535980224609, 0.1101465606689453, 0.11001651000976563, 0.11085004425048828, 0.11017932891845703, 0.11005542755126953, 0.11006463623046875, 0.10995097351074219, 0.10983526611328125, 0.11001036834716797, 0.10994278717041016, 0.11001856231689452, 0.11001139068603516, 0.11006976318359375, 0.1099315185546875, 0.23315866088867188, 0.10991718292236329, 0.10996428680419922, 0.10990898895263672, 0.10994687652587891, 0.10980659484863281, 0.11001344299316407, 0.10984243011474609, 0.1099192352294922, 0.10988441467285157, 0.10989568328857421, 0.10988543701171875, 0.11012300872802734, 0.10975027465820313, 0.10980352020263671, 0.10992742156982421, 0.10989055633544922, 0.10984754943847656, 0.10997657775878907, 0.10991001892089844, 0.10981273651123047, 0.109844482421875, 0.10982809448242188, 0.1099530258178711, 0.11010150146484375, 0.110129150390625, 0.10988031768798828, 0.10981785583496094, 0.10985779571533204, 0.10993561553955078, 0.10999910736083984, 0.10990694427490234, 0.10990796661376953, 0.10995097351074219, 0.11003801727294922, 0.10986803436279297, 0.10986393737792968, 0.10978406524658203, 0.10994175720214844, 0.10994585418701172, 0.11002674865722656, 0.1098967056274414, 0.10996121978759765, 0.10992947387695312, 0.11166207885742188, 0.11004415893554688, 0.11000012969970703, 0.10999807739257812, 0.10993459320068359, 0.11005235290527343, 0.10998169708251954, 0.11006668853759766, 0.11001651000976563, 0.11010150146484375, 0.11001548767089844, 0.10998579406738282, 0.10998271942138672, 0.1099653091430664, 0.10990386962890625, 0.11008306884765626, 0.10987110137939453, 
0.10999807739257812, 0.10995916748046874, 0.23322726440429686, 0.10992332458496094, 0.11020185852050782, 0.11035545349121094, 0.10995814514160156, 0.10994175720214844, 0.10995507049560548, 0.10988441467285157, 0.10981273651123047, 0.10998374176025391, 0.10985574340820313, 0.10997452545166016, 0.10989772796630859, 0.10986803436279297, 0.11038003540039062, 0.11002880096435547, 0.11003494262695312, 0.11040563201904297, 0.1099653091430664, 0.11018342590332031, 0.11008819580078125, 0.11007488250732422, 0.10995916748046874, 0.10976461029052734, 0.11010047912597656, 0.1102008285522461, 0.11007488250732422, 0.10994483184814453, 0.11033497619628906, 0.11017727661132813, 0.1098629150390625, 0.10993561553955078, 0.11003699493408203, 0.11001241302490235, 0.11000627136230469, 0.10999295806884765, 0.11002162933349609, 0.11002265930175781, 0.10992230224609376, 0.10989158630371093, 0.1097359390258789, 0.10989977264404296, 0.11002674865722656, 0.10999910736083984, 0.10985779571533204, 0.1099683837890625, 0.10974720001220703, 0.109876220703125, 0.10991001892089844, 0.10994790649414063, 0.10997452545166016, 0.11035135650634766, 0.11020697784423829, 0.11008409881591796, 0.110060546875, 0.10996326446533203, 0.11019058990478516, 0.11144703674316406, 0.10995097351074219, 0.10988646697998047, 0.11015679931640625, 0.11033395385742187, 0.11005542755126953, 0.23340646362304687, 0.11011686706542968, 0.11028275299072265, 0.11017420959472657, 0.11006259155273437, 0.1100738525390625, 0.11010867309570313, 0.11038105773925781, 0.10999091339111328, 0.10990284729003906, 0.109949951171875, 0.11034009552001953, 0.10992845153808593, 0.10986495971679687, 0.1099192352294922, 0.1098946533203125, 0.10980556488037109, 0.1097553939819336, 0.11004518127441407, 0.11012300872802734, 0.10977996826171875, 0.10986393737792968, 0.10984038543701172, 0.10991718292236329, 0.1098936309814453, 0.10987519836425781, 0.10983116912841796, 0.1099130859375, 0.11000934600830078, 0.1101690902709961, 0.11019468688964844, 0.10981478118896484, 0.1098608627319336, 0.10990796661376953, 0.11005542755126953, 0.10980352020263671, 0.10990284729003906, 0.10992845153808593, 0.11151667022705078, 0.10988748931884766, 0.11004108428955078, 0.11066572570800781, 0.11073741149902344, 0.11011788940429687, 0.1100206069946289, 0.10988748931884766, 0.11012198638916015, 0.11024691009521484, 0.1098967056274414, 0.1100052490234375, 0.11016089630126953, 0.11015782165527344, 0.11018956756591797, 0.11015065765380859, 0.110060546875, 0.11035648345947266, 0.1099335708618164, 0.1101629409790039, 0.10992639923095703, 0.11004108428955078, 0.10989158630371093, 0.11015372467041015, 0.11010047912597656, 0.23385498046875, 0.10988851165771485, 0.11034111785888671, 0.10995097351074219, 0.10985574340820313, 0.11010662078857422, 0.11007078552246094, 0.10991820526123047, 0.10985984039306641, 0.10997248077392578, 0.10986393737792968, 0.11004723358154297, 0.11019468688964844, 0.1100052490234375, 0.11016397094726563, 0.1102387237548828, 0.11004518127441407, 0.11020902252197265, 0.1100943374633789, 0.10998989105224609, 0.11055513763427735, 0.11010253143310547, 0.10998169708251954, 0.11044454193115234, 0.11019570922851563, 0.1099653091430664, 0.10991513824462891, 0.11001753234863282, 0.10987007904052734, 0.1103984603881836, 0.10986188507080077, 0.10991104125976563, 0.11026534271240235, 0.11026534271240235, 0.11053056335449218, 0.11001753234863282, 0.11013529968261719, 0.11008716583251953, 0.11022541046142578, 0.10997657775878907, 0.10988236999511719, 0.11003494262695312, 0.11012198638916015, 
0.11003187561035156, 0.1098260498046875, 0.11009945678710938, 0.10990898895263672, 0.11006771087646484, 0.1099161605834961, 0.10987827301025391, 0.1099130859375, 0.11001036834716797, 0.11009843444824219, 0.1100021743774414, 0.11010355377197266, 0.11002572631835937, 0.11081318664550781, 0.11031654357910156, 0.10989875030517578, 0.11011993408203125, 0.10990898895263672, 0.11030118560791016, 0.10994483184814453, 0.23407717895507812, 0.11024076843261718, 0.10993766021728515, 0.11036876678466796, 0.11012403106689453, 0.10995200347900391, 0.11002674865722656, 0.1097349090576172, 0.10983219146728515, 0.10998681640625, 0.10994380950927735, 0.10989158630371093, 0.1099192352294922, 0.10996736145019531, 0.10994892883300782, 0.10987725067138672, 0.1102376937866211, 0.10967346954345703, 0.11009945678710938, 0.11035238647460938, 0.10991820526123047, 0.10983628845214843, 0.10993561553955078, 0.10991513824462891, 0.10991718292236329, 0.11002982330322265, 0.10980352020263671, 0.10987315368652344, 0.10994380950927735, 0.10986803436279297, 0.10989158630371093, 0.10974515533447265, 0.10991513824462891, 0.10988851165771485, 0.11000627136230469, 0.10975743865966797, 0.10976255798339844, 0.10985472106933594, 0.10994483184814453, 0.10986905670166015, 0.11017011260986329, 0.11067801666259766, 0.11012812805175781, 0.1102387237548828, 0.11023974609375, 0.11021209716796875, 0.11020697784423829, 0.11080191802978516, 0.11018956756591797, 0.11028173065185547, 0.11045887756347657, 0.11006873321533203, 0.11001548767089844, 0.11007180786132813, 0.10984960174560547, 0.1098045425415039, 0.10983936309814453, 0.10996018981933593, 0.1098270721435547, 0.1103984603881836, 0.11062681579589843, 0.11046092987060546, 0.1100206069946289, 0.23471104431152343, 0.10992947387695312, 0.10997350311279297, 0.10981990051269531, 0.11024486541748046, 0.11018342590332031, 0.10990796661376953, 0.10981171417236328, 0.11014348602294922, 0.11026739501953126, 0.10997760009765625, 0.10999705505371093, 0.1097492446899414, 0.10981785583496094, 0.10969292449951172, 0.10985574340820313, 0.10983116912841796, 0.11017215728759766, 0.10983116912841796, 0.11038310241699219, 0.11015577697753906, 0.10982911682128907, 0.10989977264404296, 0.10992845153808593, 0.10990694427490234, 0.10996940612792969, 0.109949951171875, 0.10994892883300782, 0.10998681640625, 0.10987007904052734, 0.11038105773925781, 0.11008409881591796, 0.11003289794921875, 0.10989568328857421, 0.1100021743774414, 0.10995814514160156, 0.1100400619506836, 0.11009945678710938, 0.10990284729003906, 0.10982297515869141, 0.10980863952636719, 0.10979840087890624, 0.10979328155517579, 0.10987725067138672, 0.10977484893798828, 0.10974412536621093, 0.10975641632080078, 0.10971238708496094, 0.10985062408447266, 0.10984243011474609, 0.10988339233398438, 0.10988031768798828, 0.10987519836425781, 0.10985062408447266, 0.10984550476074219, 0.10994175720214844, 0.10979737854003906, 0.10983219146728515, 0.10980659484863281, 0.10979942321777343, 0.10972467041015625, 0.10989977264404296, 0.10978406524658203, 0.23415501403808595, 0.11078553771972656, 0.11036672210693359, 0.11091149139404297, 0.11072921752929688, 0.11045990753173827, 0.11007794952392579, 0.10978508758544922, 0.10992537689208984, 0.1098792953491211, 0.10985574340820313, 0.10979122924804688, 0.10985164642333985, 0.1100738525390625, 0.1099653091430664, 0.10994483184814453, 0.11005235290527343, 0.10987315368652344, 0.11015270233154296, 0.10990080261230468, 0.10979532623291016, 0.10991001892089844, 0.10990489959716797, 0.11083776092529297, 
0.11055718231201171, 0.10991104125976563, 0.1105827865600586, 0.11010765075683594, 0.11010969543457032, 0.10981683349609375, 0.11006771087646484, 0.11014144134521485, 0.11008102416992188, 0.10999501037597656, 0.10991104125976563, 0.10976051330566407, 0.10998579406738282, 0.11004518127441407, 0.10994278717041016, 0.10981478118896484, 0.11005644989013671, 0.10996736145019531, 0.10994278717041016, 0.10976051330566407, 0.10995404815673829, 0.10989260864257812, 0.11064320373535157, 0.1107957763671875, 0.11001856231689452, 0.11012710571289062, 0.11023359680175782, 0.11013632202148438, 0.10996428680419922, 0.1100021743774414, 0.11024384307861328, 0.10992435455322265, 0.10974002838134765, 0.11081215667724609, 0.11024076843261718, 0.11022643280029297, 0.1098967056274414, 0.11006361389160156, 0.10993561553955078]",tokens/s,8.944483317128187,,,,,,,, -4bit-awq-gemm-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,Qwen/Qwen-14B,Qwen/Qwen-14B,cuda,0,42,,,True,,,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run - report = scenario.run(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run - self.run_model_loading_tracking(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking - backend.load() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load - self.load_transformers_model() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model - self.load_transformers_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights - self.load_transformers_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained - self.pretrained_model = self.automodel_loader.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 551, in from_pretrained - model_class = get_class_from_dynamic_module( - File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 502, in get_class_from_dynamic_module - final_module = get_cached_module_file( - File 
""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 327, in get_cached_module_file - modules_needed = check_imports(resolved_module_file) - File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 182, in check_imports - raise ImportError( -ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. Run `pip install transformers_stream_generator` - -",qwen,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemm-eager,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,stabilityai/stablelm-2-1_6b,stabilityai/stablelm-2-1_6b,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.1,,0.30.1,,,,1.19.2,,,,0.11.1,,,,,,,,,,,,,,,,,,,,,,,,,,,MB,1459.101696,2483.552256,0.0,1837.105152,1664.652288,s,10,1.3924120025634767,0.13924120025634767,0.0012999906015036696,0.139004638671875,0.1399875732421875,0.14133694152832033,0.14241643615722657,"[0.14268630981445313, 0.13829638671875, 0.13825672912597656, 0.13859184265136718, 0.13820169067382812, 0.13813597106933595, 0.13968771362304688, 0.13956375122070314, 0.1395741729736328, 0.13941743469238282]",tokens/s,1838.5362919071044,kWh,1.625099111365401e-06,8.904696633330225e-07,6.785176661013463e-06,9.300745435711887e-06,tokens/kWh,27524675.497196376,MB,1459.101696,2483.552256,0.0,1837.105152,1763.594752,s,10,82.5780146484375,8.25780146484375,0.007490083700840557,8.258056640625,8.26394970703125,8.268834228515626,8.272741845703125,"[8.261927734375, 8.246669921875, 8.2540546875, 8.2559892578125, 8.2628642578125, 8.261189453125, 8.27371875, 8.2601240234375, 8.2488525390625, 8.2526240234375]",tokens/s,7.629149267902877,kWh,9.755284506867285e-05,5.34662001699176e-05,0.00039530770056858775,0.0005463267458071782,tokens/kWh,115315.60642691902,,s,629,83.69452130126959,0.13305965230726477,0.01654754583003512,0.13084364318847655,0.13196041564941405,0.1323978759765625,0.2692352648925781,"[0.13379481506347657, 0.13254144287109376, 0.13190042114257813, 0.1307463684082031, 0.13086207580566406, 0.13051699829101562, 0.13088563537597656, 0.13059686279296875, 0.1305753631591797, 0.1306071014404297, 0.13055078125, 0.13059071350097656, 0.13054771423339845, 0.13065216064453125, 0.13135154724121093, 0.13138841247558594, 0.13123583984375, 0.13066546630859374, 0.13075045776367186, 0.13078834533691405, 0.130555908203125, 0.13037977600097655, 0.13052517700195312, 0.1304883270263672, 0.1306112060546875, 0.13059686279296875, 0.13050572204589844, 0.13065011596679688, 0.13144883728027343, 0.13071359252929687, 0.13058047485351562, 0.13251072692871094, 0.1310064697265625, 0.13063475036621094, 0.1306859588623047, 0.1307494354248047, 0.13061734008789064, 0.130661376953125, 0.13066035461425782, 0.13148672485351562, 0.13165568542480469, 0.13089280700683595, 0.13063372802734374, 0.13172633361816405, 0.13140377807617187, 0.13122969055175782, 0.13104946899414063, 0.13110272216796875, 0.13083135986328126, 0.13193215942382813, 0.13141708374023436, 
0.13122560119628907, 0.1308784637451172, 0.13365965270996094, 0.13181951904296876, 0.13121638488769533, 0.13081907653808594, 0.13171200561523438, 0.1312593994140625, 0.13223219299316405, 0.13111807250976562, 0.13094400024414063, 0.2691379699707031, 0.13103199768066406, 0.1306234893798828, 0.13054873657226562, 0.13060812377929687, 0.1306552276611328, 0.13165773010253906, 0.13123992919921876, 0.13063168334960937, 0.13058560180664064, 0.13068389892578125, 0.13071565246582031, 0.1307463684082031, 0.13084466552734375, 0.1307125701904297, 0.13099623107910155, 0.13122764587402344, 0.1306798095703125, 0.1305128936767578, 0.1305200653076172, 0.13082931518554688, 0.13104742431640626, 0.13062757873535155, 0.13068800354003907, 0.13034291076660157, 0.1306429443359375, 0.1306183624267578, 0.13080677795410156, 0.13188607788085938, 0.13072589111328126, 0.13059686279296875, 0.13140786743164062, 0.13105459594726562, 0.13074227905273436, 0.13063987731933593, 0.13232537841796874, 0.1305538635253906, 0.13083340454101564, 0.1308590087890625, 0.13096754455566406, 0.13071974182128906, 0.1309388732910156, 0.13309132385253905, 0.13125836181640624, 0.13162495422363282, 0.1309450225830078, 0.13123890686035156, 0.13122969055175782, 0.13076173400878907, 0.13033779907226561, 0.13071974182128906, 0.1306808319091797, 0.13068389892578125, 0.13058355712890626, 0.1307535400390625, 0.13082521057128907, 0.13138330078125, 0.13090815734863281, 0.1307678680419922, 0.13069209289550782, 0.13084774780273437, 0.13068287658691408, 0.13077197265625, 0.26904986572265627, 0.13090304565429686, 0.13155123901367188, 0.1310064697265625, 0.13056512451171876, 0.1314580535888672, 0.13161677551269532, 0.13072178649902344, 0.13050572204589844, 0.1319393310546875, 0.1306808319091797, 0.13063168334960937, 0.13043609619140625, 0.13061427307128906, 0.13054566955566407, 0.13078732299804688, 0.130440185546875, 0.1307043914794922, 0.13066957092285156, 0.13071052551269532, 0.1307473907470703, 0.1307606964111328, 0.130587646484375, 0.132389892578125, 0.13151539611816407, 0.13108224487304687, 0.1304524841308594, 0.13092556762695312, 0.1307709503173828, 0.13067878723144533, 0.13066444396972657, 0.13064601135253906, 0.13057638549804687, 0.13061222839355469, 0.13061734008789064, 0.13069517517089843, 0.1307484130859375, 0.13075045776367186, 0.13066752624511718, 0.13076991271972657, 0.13184307861328126, 0.13099417114257814, 0.13157273864746094, 0.1309870147705078, 0.13192396545410157, 0.13073715209960937, 0.1307791290283203, 0.13263565063476562, 0.1323888702392578, 0.13131365966796876, 0.13071359252929687, 0.13122764587402344, 0.13066854858398438, 0.13092250061035157, 0.13190963745117187, 0.1317898254394531, 0.13199052429199218, 0.1315246124267578, 0.13117645263671876, 0.13117439270019532, 0.13063679504394532, 0.1306071014404297, 0.13064601135253906, 0.27082650756835935, 0.1319393310546875, 0.13072076416015624, 0.13070233154296876, 0.13060914611816407, 0.1306982421875, 0.1306941375732422, 0.1307361297607422, 0.13072384643554688, 0.13180825805664062, 0.13077708435058594, 0.13068902587890624, 0.1307166748046875, 0.1306112060546875, 0.13075456237792968, 0.13076480102539062, 0.1316259765625, 0.13118873596191405, 0.13114265441894532, 0.13099110412597656, 0.13076377868652345, 0.13220045471191405, 0.1319833526611328, 0.131051513671875, 0.1304217529296875, 0.1321359405517578, 0.13102284240722656, 0.13118975830078125, 0.13145599365234376, 0.13123890686035156, 0.13081805419921874, 0.13088050842285157, 0.13064396667480468, 0.13075967407226563, 0.1305620422363281, 
0.13079756164550782, 0.1306480712890625, 0.13070130920410156, 0.13066546630859374, 0.13078323364257813, 0.13063168334960937, 0.13063372802734374, 0.1307361297607422, 0.13077810668945314, 0.1324031982421875, 0.1307852783203125, 0.13074227905273436, 0.13091635131835938, 0.1307842559814453, 0.13073408508300782, 0.13077606201171876, 0.13099725341796875, 0.1314017333984375, 0.13113139343261718, 0.13084774780273437, 0.1308579864501953, 0.1312112579345703, 0.1308897247314453, 0.13232537841796874, 0.13256089782714844, 0.1308651580810547, 0.13114060974121095, 0.13100236511230468, 0.2692731018066406, 0.13084364318847655, 0.1314017333984375, 0.13194137573242187, 0.1307535400390625, 0.13124607849121095, 0.1310320587158203, 0.13100338745117188, 0.13084979248046874, 0.130735107421875, 0.13073408508300782, 0.13077810668945314, 0.13074021911621095, 0.13047500610351562, 0.13073100280761718, 0.1308590087890625, 0.1306746826171875, 0.1320499267578125, 0.13072793579101563, 0.13061222839355469, 0.13069004821777344, 0.13063475036621094, 0.13072486877441405, 0.1314580535888672, 0.13120716857910156, 0.13121434020996095, 0.13137100219726563, 0.1323335723876953, 0.13257215881347656, 0.13070130920410156, 0.13072280883789061, 0.13117543029785156, 0.1306480712890625, 0.1327298583984375, 0.13261415100097657, 0.13112832641601563, 0.1316864013671875, 0.1313607635498047, 0.13099008178710939, 0.13083135986328126, 0.1309696044921875, 0.13129830932617187, 0.13074227905273436, 0.13066444396972657, 0.13106892395019532, 0.13083340454101564, 0.13119078063964842, 0.13266432189941407, 0.13248716735839844, 0.13077503967285156, 0.1307914276123047, 0.13080677795410156, 0.13104946899414063, 0.13092556762695312, 0.13065216064453125, 0.1306480712890625, 0.13058969116210936, 0.13318450927734374, 0.1311262664794922, 0.13230592346191405, 0.13067059326171876, 0.13073408508300782, 0.13068185424804687, 0.27066061401367186, 0.1313095703125, 0.13158502197265626, 0.13086822509765625, 0.13064703369140626, 0.1305917510986328, 0.13066957092285156, 0.13082009887695312, 0.13070950317382812, 0.13068800354003907, 0.1308579864501953, 0.13097062683105468, 0.13119488525390624, 0.13108735656738282, 0.1311057891845703, 0.13334323120117186, 0.13187992858886718, 0.13169049072265626, 0.13072793579101563, 0.13088359069824218, 0.13073919677734375, 0.13076582336425782, 0.1306378173828125, 0.13069926452636718, 0.13055999755859374, 0.13076991271972657, 0.1307361297607422, 0.1307115478515625, 0.13066444396972657, 0.1318144073486328, 0.13064601135253906, 0.1307914276123047, 0.13064396667480468, 0.13078221130371093, 0.13085696411132813, 0.13073408508300782, 0.13069004821777344, 0.13040435791015625, 0.13216152954101562, 0.13084774780273437, 0.13076275634765624, 0.1308784637451172, 0.13086822509765625, 0.1307484130859375, 0.13084364318847655, 0.13232025146484375, 0.13216461181640626, 0.1316546630859375, 0.13190963745117187, 0.13114572143554687, 0.13153176879882814, 0.131056640625, 0.13069107055664062, 0.13102284240722656, 0.13223219299316405, 0.13078630065917968, 0.13118258666992189, 0.1313228759765625, 0.13151641845703124, 0.13187481689453126, 0.13146418762207032, 0.13131263732910156, 0.13112115478515626, 0.2720235595703125, 0.13148159790039063, 0.1317724151611328, 0.13106072998046875, 0.13132492065429688, 0.13174169921875, 0.13085285949707032, 0.13085183715820312, 0.1307484130859375, 0.13077503967285156, 0.13059071350097656, 0.13101158142089844, 0.13111500549316407, 0.13174578857421876, 0.1308159942626953, 0.13084466552734375, 0.13157171630859374, 0.13134848022460938, 
0.13074432373046874, 0.13075045776367186, 0.13068800354003907, 0.13058969116210936, 0.13147442626953126, 0.13185433959960938, 0.130872314453125, 0.13104537963867188, 0.13076991271972657, 0.13386341857910156, 0.132885498046875, 0.13136895751953126, 0.1308590087890625, 0.13111091613769532, 0.13062757873535155, 0.1306480712890625, 0.13030911254882813, 0.13067263793945313, 0.13060096740722657, 0.13076582336425782, 0.13124710083007812, 0.13097164916992188, 0.13212261962890626, 0.13122047424316408, 0.1317918701171875, 0.13056410217285155, 0.13102079772949218, 0.13193011474609376, 0.13195468139648436, 0.13295513916015625, 0.13061427307128906, 0.13063270568847657, 0.13238067626953126, 0.13169151306152344, 0.13129830932617187, 0.13212364196777343, 0.13201408386230468, 0.1310709686279297, 0.13222093200683593, 0.1328711700439453, 0.13222195434570314, 0.13068287658691408, 0.1308057556152344, 0.13097779846191407, 0.13187481689453126, 0.271072265625, 0.13202432250976562, 0.13072998046875, 0.13133721923828126, 0.1315010528564453, 0.13107609558105468, 0.13116006469726563, 0.1324267578125, 0.1318338623046875, 0.13117543029785156, 0.13102694702148437, 0.13115699768066405, 0.1313720245361328, 0.13114572143554687, 0.13089791870117187, 0.13081497192382813, 0.1320273895263672, 0.13255679321289063, 0.13251686096191406, 0.13236122131347655, 0.13119488525390624, 0.13082009887695312, 0.130629638671875, 0.1312522277832031, 0.13159219360351562, 0.131557373046875, 0.13065113830566405, 0.13070233154296876, 0.13060403442382812, 0.13083750915527342, 0.13064396667480468, 0.13056614685058593, 0.13110272216796875, 0.13064909362792967, 0.13075967407226563, 0.13072486877441405, 0.13138330078125, 0.13096754455566406, 0.13072076416015624, 0.13083241271972657, 0.13185635375976562, 0.13205708312988282, 0.1308344268798828, 0.1318041534423828, 0.1312798767089844, 0.13098086547851562, 0.13061529541015626, 0.1309071350097656, 0.13069107055664062, 0.13145703125, 0.13063372802734374, 0.13061016845703124, 0.13082623291015624, 0.13071871948242186, 0.13061529541015626, 0.1307125701904297, 0.13055897521972656, 0.13065728759765624, 0.13056410217285155, 0.13088255310058594, 0.13070335388183593, 0.130735107421875, 0.1306234893798828, 0.27049368286132813, 0.1308221435546875, 0.13063679504394532, 0.13080166625976564, 0.1306071014404297, 0.13121945190429687, 0.1331988525390625, 0.1310433349609375, 0.13162701416015626, 0.1306234893798828, 0.13115391540527344, 0.13173452758789062, 0.13072793579101563, 0.1314897918701172, 0.13150413513183593, 0.1313218536376953, 0.1311191101074219, 0.13160960388183593, 0.13073817443847657, 0.1321881561279297, 0.13074227905273436, 0.1307494354248047, 0.13070233154296876, 0.13128807067871093, 0.1311068115234375, 0.1307729949951172, 0.1307146301269531, 0.1308078155517578, 0.13059788513183593, 0.13057638549804687, 0.13050982666015626, 0.13075762939453126, 0.13053439331054686, 0.13056614685058593, 0.13058355712890626, 0.13062144470214843, 0.1315635223388672, 0.13227008056640624, 0.13068698120117186, 0.13062451171875, 0.130693115234375, 0.1305917510986328, 0.13058457946777344, 0.13053439331054686, 0.13104229736328124, 0.13112832641601563, 0.13049139404296875, 0.13087026977539062, 0.13059686279296875, 0.13072793579101563, 0.13054771423339845, 0.13068185424804687, 0.1312030792236328, 0.13103411865234374, 0.13126451110839843, 0.13072998046875, 0.13075456237792968, 0.13058969116210936, 0.13065933227539062, 0.13062144470214843, 0.1305753631591797, 0.13052517700195312, 0.13057331848144532, 0.2703288269042969, 
0.13065420532226563, 0.13157785034179686, 0.13085285949707032, 0.130619384765625, 0.13085594177246093, 0.130735107421875, 0.13151129150390625, 0.13070335388183593, 0.13061734008789064, 0.1304698944091797, 0.1307709503173828, 0.1307484130859375, 0.13094297790527343, 0.1308395538330078, 0.130735107421875, 0.13068185424804687, 0.13075660705566405, 0.13218304443359374, 0.1309634552001953, 0.13148159790039063, 0.13058253479003906, 0.1310627899169922, 0.1309020233154297, 0.13047193908691407, 0.13053030395507811, 0.13151846313476562, 0.1308170166015625, 0.13100338745117188, 0.1308968963623047, 0.13076889038085937, 0.1309644775390625, 0.13093785095214844, 0.1307740173339844, 0.13066648864746094, 0.13088665771484376, 0.1311293487548828, 0.13094706726074218, 0.13092250061035157, 0.13099314880371093, 0.13093785095214844, 0.13083135986328126, 0.1308078155517578, 0.13092250061035157, 0.13095321655273437, 0.13083544921875, 0.13072691345214843, 0.13097471618652343, 0.1313228759765625, 0.13215846252441407, 0.13237759399414062, 0.1312788543701172, 0.13164134216308593, 0.1313638458251953, 0.1314334716796875, 0.13113446044921875, 0.1308159942626953, 0.13188607788085938, 0.1307361297607422, 0.1308733367919922, 0.13075558471679688, 0.13074534606933594, 0.1307606964111328]",tokens/s,7.515426221697727,,,,,,,, -4bit-awq-gemm-eager,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,EleutherAI/pythia-6.7b,EleutherAI/pythia-6.7b,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.40.2,,0.30.1,,,,1.19.2,,,,0.11.1,,,,,,,,,,,,,,,,,,,,,,,,,,,MB,1368.223744,6256.328704,0.0,5609.8816,5292.617728,s,10,5.657606628417969,0.5657606628417968,0.0009498936514595362,0.5654808349609375,0.5660143920898437,0.5672829833984374,0.5682978564453125,"[0.5685515747070312, 0.5654931030273438, 0.565177734375, 0.5654685668945313, 0.5652743530273437, 0.5651702880859375, 0.5656128540039063, 0.5657227172851562, 0.5657324829101562, 0.5654029541015625]",tokens/s,452.48815764977473,kWh,6.682694372203615e-06,3.660675940348786e-06,3.1253065125888854e-05,4.159643543844126e-05,tokens/kWh,6154373.50103846,MB,1368.551424,6256.328704,0.0,5609.8816,5503.949312,s,10,330.81899609375,33.081899609375,0.006904428548072782,33.079876953125,33.091131640625,33.0930072265625,33.0945076953125,"[33.07798046875, 33.078234375, 33.07119921875, 33.0948828125, 33.09071484375, 33.08729296875, 33.08378515625, 33.07924609375, 33.0805078125, 33.07515234375]",tokens/s,1.9043646448327465,kWh,0.0003903993130889204,0.00021397087349180462,0.001808246989806311,0.0024126171763870358,tokens/kWh,26112.721328770578,,s,629,335.3824949340819,0.5331995149985406,0.06720644230645373,0.5250816040039062,0.52554873046875,0.52574228515625,1.0904419140625001,"[0.5248307495117187, 0.5246986083984375, 0.5245040893554688, 0.524674072265625, 0.52523828125, 0.5248563232421875, 0.5253027954101562, 0.5246597290039062, 0.5248173828125, 0.5247999877929688, 0.5255188598632813, 0.5246453247070313, 0.5245347900390624, 0.5246586303710937, 0.525032470703125, 0.5246986083984375, 0.5246525268554687, 0.5253273315429687, 0.5247651977539063, 0.5249915161132812, 
0.5252413330078125, 0.5252423706054687, 0.5247785034179687, 0.5247273559570312, 0.5247057495117188, 0.5248440551757813, 0.5247232055664063, 0.5247467651367187, 0.5255465087890625, 0.5256212768554688, 0.5258025512695312, 0.5255167846679687, 0.5248409423828125, 0.5254369506835938, 0.5253928833007813, 0.5247928466796875, 0.5255188598632813, 0.5249791870117188, 0.5249310913085937, 0.5250969848632813, 0.5247672119140625, 0.5248972778320312, 0.5248655395507813, 0.5252689819335937, 0.5248040771484375, 0.525211669921875, 0.5255167846679687, 0.525169677734375, 0.5251573486328125, 0.524832763671875, 0.5248081665039063, 0.5248256225585938, 0.5252925415039063, 0.5251204833984375, 0.525022216796875, 0.5254482421875, 0.525276123046875, 0.5254348754882813, 0.5250242309570312, 0.5254471435546875, 0.5251328125, 0.5251553344726563, 1.0913822021484374, 0.5247088623046875, 0.52463818359375, 0.5248921508789063, 0.5250447387695313, 0.52459423828125, 0.5247610473632812, 0.524621826171875, 0.5250303955078125, 0.5247098999023437, 0.5248706665039062, 0.52496484375, 0.524769287109375, 0.525318115234375, 0.5248880615234375, 0.5248450317382812, 0.5251092529296875, 0.5249443969726563, 0.5248491821289063, 0.5246842651367187, 0.524643310546875, 0.5247713012695312, 0.5255167846679687, 0.5258618774414062, 0.5249320678710937, 0.5256673583984375, 0.5253673095703125, 0.525570068359375, 0.5251849975585937, 0.5251604614257812, 0.5255874633789063, 0.5250549926757813, 0.5249617309570312, 0.5247528686523437, 0.5250518798828125, 0.525127685546875, 0.5249607543945313, 0.5249474487304687, 0.525033447265625, 0.5248983154296875, 0.5250068359375, 0.52510205078125, 0.5252761840820312, 0.5249771728515625, 0.5250181274414063, 0.5249228515625, 0.5253406982421875, 0.5251481323242188, 0.5250140380859375, 0.5252628784179687, 0.525149169921875, 0.5251195068359376, 0.5251184692382812, 0.5249095458984375, 0.5248972778320312, 0.5250303955078125, 0.525391845703125, 0.524969970703125, 0.5247293701171875, 0.5249556274414062, 0.5251163940429687, 0.5255157470703125, 0.5251348266601562, 1.090335693359375, 0.524874755859375, 0.5245787963867188, 0.524506103515625, 0.5248809204101562, 0.5247160034179688, 0.5246280517578125, 0.5246934204101562, 0.5246658325195312, 0.5245573120117187, 0.5246976928710938, 0.5247764282226562, 0.524717041015625, 0.5248132934570312, 0.52461669921875, 0.52514306640625, 0.525487060546875, 0.5247119140625, 0.524663818359375, 0.524600341796875, 0.524516357421875, 0.524632080078125, 0.5244651489257812, 0.5252577514648438, 0.5246760864257812, 0.524959716796875, 0.5246556396484375, 0.5247354736328125, 0.5250734252929687, 0.5248143310546876, 0.5247682495117187, 0.5251287231445313, 0.5246996459960938, 0.5248102416992187, 0.5250416870117187, 0.5254154052734376, 0.5251204833984375, 0.524943359375, 0.5247672119140625, 0.5249054565429687, 0.5247723388671875, 0.5249924926757813, 0.5250969848632813, 0.5248839721679688, 0.524780517578125, 0.5248870239257812, 0.5249146728515625, 0.525117431640625, 0.5246310424804688, 0.5249474487304687, 0.5247897338867188, 0.52493310546875, 0.5250109252929688, 0.5253570556640625, 0.5257471923828125, 0.5256693725585937, 0.52505908203125, 0.5255249633789062, 0.5257697143554687, 0.5254031372070312, 0.5251993408203125, 0.5257103271484375, 0.5251307373046875, 1.090680908203125, 0.525180908203125, 0.524788818359375, 0.5252505493164062, 0.5254584350585938, 0.5254379272460937, 0.52552294921875, 0.5255587768554687, 0.525601806640625, 0.5258516235351562, 0.5258157958984375, 0.5254614868164063, 0.5251604614257812, 
0.5255485229492187, 0.5253007202148438, 0.5253621826171875, 0.5256417236328125, 0.525749267578125, 0.5251840209960937, 0.5252259521484375, 0.5253324584960938, 0.5253058471679688, 0.52491162109375, 0.5250846557617187, 0.525254638671875, 0.525391845703125, 0.52520654296875, 0.5254430541992188, 0.5258055419921875, 0.525138916015625, 0.5251840209960937, 0.5249403076171875, 0.525707275390625, 0.5251296997070313, 0.52486962890625, 0.5257564086914063, 0.5252894897460938, 0.5254993896484375, 0.5254983520507812, 0.5253345336914063, 0.5253857421875, 0.5251768188476562, 0.52502734375, 0.5253447875976562, 0.5251942138671875, 0.52514306640625, 0.5253836669921875, 0.5265366821289063, 0.5247119140625, 0.5249392700195312, 0.5253396606445313, 0.5246771240234375, 0.5253150634765625, 0.5247682495117187, 0.5253621826171875, 0.5255966796875, 0.5253765258789063, 0.5253641967773437, 0.5249669189453126, 0.5252526245117187, 0.5254635009765625, 0.5250498657226562, 0.5251553344726563, 1.090404296875, 0.524969970703125, 0.5259376831054687, 0.5252003784179687, 0.5258106689453125, 0.5252955932617187, 0.5250447387695313, 0.525154296875, 0.5248737182617188, 0.5253058471679688, 0.5248440551757813, 0.524874755859375, 0.5246546020507813, 0.5248102416992187, 0.5245665283203125, 0.5248348388671875, 0.5247498168945313, 0.5249761352539063, 0.5246576538085937, 0.5247181396484375, 0.5246954956054688, 0.5256632080078125, 0.5248604125976563, 0.524747802734375, 0.5248604125976563, 0.5250211791992188, 0.5254573974609374, 0.5250242309570312, 0.52548095703125, 0.5251154174804687, 0.5251461181640625, 0.5252608032226562, 0.52522607421875, 0.5251276245117188, 0.5253990478515626, 0.5256478881835938, 0.525464599609375, 0.5254779052734375, 0.5252710571289062, 0.5255720825195312, 0.5254932250976563, 0.52564892578125, 0.5251912231445313, 0.5251798095703125, 0.5252771606445312, 0.5259796752929687, 0.5252730712890625, 0.5259386596679687, 0.5251870727539063, 0.525286376953125, 0.52527001953125, 0.5250816040039062, 0.525275146484375, 0.52520654296875, 0.5252454833984375, 0.5256109619140625, 0.5253765258789063, 0.52531201171875, 0.5252474975585938, 0.5265930786132812, 0.525812744140625, 0.5261884765625, 0.5251481323242188, 1.09045654296875, 0.5245020141601563, 0.5246781616210937, 0.5249658813476562, 0.5249915161132812, 0.5245040893554688, 0.52481640625, 0.5248297119140625, 0.5247836303710938, 0.524885986328125, 0.5248921508789063, 0.52484814453125, 0.5248286743164062, 0.524632080078125, 0.5257512817382812, 0.524990478515625, 0.5250908203125, 0.5249392700195312, 0.5247129516601563, 0.5252608032226562, 0.5249863891601563, 0.5254144287109375, 0.524600341796875, 0.5249567260742187, 0.5247467041015625, 0.5249075317382812, 0.5252260131835937, 0.5248870239257812, 0.5250662231445312, 0.5250048828125, 0.5253692626953125, 0.5249249267578125, 0.524853271484375, 0.5260421142578126, 0.5258383178710937, 0.5250867309570313, 0.5253775634765625, 0.524843017578125, 0.525390869140625, 0.5253294067382812, 0.5256530151367188, 0.5254686889648438, 0.525365234375, 0.525201416015625, 0.5253119506835937, 0.5253365478515625, 0.5253560180664063, 0.5255802612304687, 0.5252782592773437, 0.5253252563476563, 0.5253990478515626, 0.52508056640625, 0.5262468872070313, 0.5258137817382813, 0.525549560546875, 0.5252843627929688, 0.5254717407226562, 0.525453369140625, 0.5256119384765625, 0.5255628662109375, 0.5259458618164062, 0.5253795776367187, 0.52560693359375, 1.0914058837890626, 0.5248624877929687, 0.5247764282226562, 0.5250928344726562, 0.5249915161132812, 0.5249924926757813, 
0.524921875, 0.5255986938476562, 0.525233154296875, 0.5249976196289062, 0.525106201171875, 0.525254638671875, 0.5254993896484375, 0.52495361328125, 0.5254133911132812, 0.5256365966796875, 0.5252393188476563, 0.5251287231445313, 0.5250734252929687, 0.5250263061523438, 0.5252188110351562, 0.5251512451171875, 0.5254318237304687, 0.5252034301757813, 0.5248993530273437, 0.5250560302734375, 0.5257349243164062, 0.5247313842773438, 0.5247518920898437, 0.5246392211914063, 0.5248256225585938, 0.5250816040039062, 0.5247979736328126, 0.5251195068359376, 0.5250089111328125, 0.525053955078125, 0.5249034423828125, 0.5249392700195312, 0.525000732421875, 0.5251604614257812, 0.5249515380859375, 0.5250344848632813, 0.5250201416015625, 0.524843017578125, 0.5250570068359375, 0.525391845703125, 0.5252781982421875, 0.5252321166992188, 0.5249515380859375, 0.5251287231445313, 0.5253140258789063, 0.524959716796875, 0.5254113159179687, 0.5252976684570313, 0.5251635131835938, 0.525433837890625, 0.5251604614257812, 0.5252894897460938, 0.5252474975585938, 0.5253939208984375, 0.5253867797851562, 0.5253079223632813, 0.525212646484375, 1.0918123779296875, 0.5249392700195312, 0.5248511962890625, 0.5250714111328125, 0.525445068359375, 0.5252464599609376, 0.5249423217773438, 0.5251881103515625, 0.5252290649414062, 0.5253109741210937, 0.5252495727539063, 0.524802001953125, 0.52481640625, 0.5247365112304687, 0.5250303955078125, 0.5251235961914062, 0.5250078735351562, 0.5248880615234375, 0.5249197998046875, 0.5250938720703126, 0.525497314453125, 0.5250078735351562, 0.5247528686523437, 0.5247733764648438, 0.524821533203125, 0.524938232421875, 0.5251942138671875, 0.5249832763671874, 0.5250949096679688, 0.5249238891601562, 0.524906494140625, 0.5249658813476562, 0.5247979736328126, 0.5248522338867188, 0.52481640625, 0.52491162109375, 0.5247928466796875, 0.5249320678710937, 0.5248573608398438, 0.5247938842773437, 0.5249791870117188, 0.52478466796875, 0.5247897338867188, 0.5247600708007812, 0.52487890625, 0.5249884033203125, 0.5252925415039063, 0.5250416870117187, 0.5249464111328125, 0.5251000366210937, 0.5250816040039062, 0.5249525756835938, 0.5252413940429688, 0.5255003662109375, 0.5252474975585938, 0.5254031372070312, 0.5251502075195312, 0.5256693725585937, 0.5256007690429687, 0.5253478393554688, 0.5253816528320312, 0.525201416015625, 0.5253990478515626, 1.0916168212890625, 0.52502734375, 0.5249310913085937, 0.524822509765625, 0.5248081665039063, 0.5246361694335937, 0.5247744140625, 0.5250693359375, 0.5248952026367187, 0.5249024047851563, 0.5249915161132812, 0.5248173828125, 0.525412353515625, 0.5252208862304687, 0.5250160522460937, 0.524969970703125, 0.525085693359375, 0.5248706665039062, 0.5251307373046875, 0.5251522827148437, 0.5256038208007813, 0.5248409423828125, 0.5249832763671874, 0.5250211791992188, 0.524906494140625, 0.524864501953125, 0.52510107421875, 0.5247754516601563, 0.5251942138671875, 0.5250908203125, 0.5250857543945312, 0.5254880981445312, 0.5257154541015625, 0.5253683471679688, 0.5251204833984375, 0.5250826416015625, 0.525117431640625, 0.525169677734375, 0.5254266967773438, 0.525581298828125, 0.5249771728515625, 0.5250396118164062, 0.5251522827148437, 0.5250263061523438, 0.5251163940429687, 0.5250775146484375, 0.5251696166992188, 0.5251348266601562, 0.5248726806640625, 0.5248040771484375, 0.525322265625, 0.5251829833984375, 0.5251051635742188, 0.5248706665039062, 0.5250303955078125, 0.5250109252929688, 0.5251963500976562, 0.5250313720703125, 0.5255403442382812, 0.5248737182617188, 0.5250242309570312, 
0.5249843139648438, 0.5250836181640625, 1.0909224853515624, 0.5246965942382813, 0.5248829345703125, 0.5249362182617188, 0.5247160034179688, 0.5250303955078125, 0.5250089111328125, 0.5251287231445313, 0.5249238891601562, 0.5249658813476562, 0.5249515380859375, 0.5246607055664062, 0.52463720703125, 0.525317138671875, 0.5247047729492188, 0.5247109375, 0.5245911254882812, 0.5245593872070312, 0.5245286254882813, 0.5245368041992188, 0.5244630737304687, 0.5245419311523437, 0.5247344360351562, 0.5246781616210937, 0.5250242309570312, 0.525285400390625, 0.5248081665039063, 0.5248297119140625, 0.5248153686523438, 0.5249310913085937, 0.5255239868164062, 0.5253621826171875, 0.5255106811523438, 0.5247969360351562, 0.52493115234375, 0.5251900634765625, 0.525365234375, 0.52534375, 0.52518603515625, 0.5249095458984375, 0.524705810546875, 0.5250477905273437, 0.5248071899414063, 0.5250426635742188, 0.5253519287109375, 0.52510205078125, 0.5249894409179687, 0.5251450805664063, 0.52478466796875, 0.5255065307617187, 0.5247191162109375, 0.5256325073242187, 0.5251266479492187, 0.5251030883789063, 0.5249208374023437, 0.5255485229492187, 0.525053955078125, 0.5254256591796875, 0.525159423828125, 0.525106201171875, 0.5251512451171875, 0.5250416870117187, 0.5253683471679688]",tokens/s,1.8754705731544732,,,,,,,, -4bit-awq-gemm-eager,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,01-ai/Yi-34B,01-ai/Yi-34B,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.40.2,,0.30.1,,,,1.19.2,,,,0.11.0,,,,,,,,,,,,,,,,,,,,,,,,,,,MB,1767.067648,22129.672192,0.0,21483.225088,20799.168,s,10,28.05056616210938,2.805056616210938,0.0027095714336187707,2.804907958984375,2.8082408447265625,2.808313903808594,2.808372351074219,"[2.805282470703125, 2.808224609375, 2.803196533203125, 2.80234619140625, 2.80277294921875, 2.800419677734375, 2.804533447265625, 2.80722705078125, 2.80817626953125, 2.808386962890625]",tokens/s,91.26375507735885,kWh,3.3081144508388305e-05,1.8129723654910774e-05,0.00015725384802519894,0.000208464716188498,tokens/kWh,1228025.5607789264,MB,1772.261376,22129.672192,0.0,21483.225088,20902.144,s,10,1667.162484375,166.71624843749996,0.015085835868376713,166.71732031250002,166.73279531249997,166.73349140624998,166.73404828124998,"[166.730125, 166.715734375, 166.68440625, 166.724703125, 166.7341875, 166.71721875, 166.708265625, 166.717421875, 166.69778125, 166.732640625]",tokens/s,0.377887581987055,kWh,0.0019678901955319777,0.0010785770549249355,0.009270038554913802,0.012316505805370715,tokens/kWh,5115.087103074991,,s,629,1689.7617126464822,2.686425616290118,0.33327430639158717,2.64620947265625,2.6474256835937497,2.647894970703125,5.4507228125000005,"[2.646453369140625, 2.646373291015625, 2.64631591796875, 2.6454580078125, 2.646531982421875, 2.647617431640625, 2.64826171875, 2.647658447265625, 2.647793701171875, 2.64637646484375, 2.64711376953125, 2.64620947265625, 2.646602783203125, 2.646426513671875, 2.64635498046875, 2.64557568359375, 2.644958251953125, 2.647402587890625, 2.64601904296875, 2.646287353515625, 2.647202880859375, 2.64688525390625, 2.646246337890625, 
2.646867919921875, 2.64715478515625, 2.647743408203125, 2.646212646484375, 2.647287841796875, 2.648404052734375, 2.64638671875, 2.64559912109375, 2.647835693359375, 2.648161376953125, 2.6474189453125, 2.647456787109375, 2.6468076171875, 2.646687744140625, 2.64502880859375, 2.646285400390625, 2.646274169921875, 2.64652490234375, 2.64616455078125, 2.64710546875, 2.6460908203125, 2.646740966796875, 2.645139404296875, 2.646036376953125, 2.646411376953125, 2.64627099609375, 2.64452197265625, 2.64525830078125, 2.647901123046875, 2.64601806640625, 2.645980224609375, 2.64719677734375, 2.64639990234375, 2.64538720703125, 2.645042236328125, 2.64540576171875, 2.646120361328125, 2.646118408203125, 2.64612353515625, 5.45506494140625, 2.64526025390625, 2.6462626953125, 2.64690576171875, 2.646042724609375, 2.64546826171875, 2.646393798828125, 2.647192626953125, 2.644919189453125, 2.644935791015625, 2.645579833984375, 2.647086181640625, 2.646035400390625, 2.644955078125, 2.645644287109375, 2.645537841796875, 2.644991943359375, 2.645560302734375, 2.646414306640625, 2.645199951171875, 2.646506591796875, 2.646548583984375, 2.64702880859375, 2.64570166015625, 2.644788330078125, 2.64700732421875, 2.646330322265625, 2.64543017578125, 2.645243896484375, 2.646043701171875, 2.645831787109375, 2.6459453125, 2.64614697265625, 2.64631494140625, 2.64669091796875, 2.646116455078125, 2.647741455078125, 2.6478857421875, 2.645671875, 2.647362548828125, 2.646822998046875, 2.648393798828125, 2.647374755859375, 2.64627294921875, 2.646519775390625, 2.645937255859375, 2.6523720703125, 2.647458740234375, 2.646916015625, 2.6467646484375, 2.644612060546875, 2.645245849609375, 2.64698779296875, 2.646232177734375, 2.645567626953125, 2.645909423828125, 2.64679541015625, 2.64620947265625, 2.645465087890625, 2.646067138671875, 2.6467060546875, 2.6458798828125, 2.6457109375, 5.4513837890625, 2.647235595703125, 2.645803955078125, 2.64654443359375, 2.64496533203125, 2.64464892578125, 2.645158935546875, 2.646266845703125, 2.64560546875, 2.646096923828125, 2.64475537109375, 2.64629052734375, 2.644760498046875, 2.64480859375, 2.6463427734375, 2.64760009765625, 2.645761962890625, 2.645760986328125, 2.646381591796875, 2.646036376953125, 2.64513134765625, 2.646679443359375, 2.6458369140625, 2.6495498046875, 2.646917236328125, 2.644770751953125, 2.645843017578125, 2.644592529296875, 2.6452919921875, 2.6457333984375, 2.64617578125, 2.64542822265625, 2.645474365234375, 2.644977783203125, 2.6475703125, 2.645327880859375, 2.64601611328125, 2.64604052734375, 2.644948974609375, 2.645347412109375, 2.64539453125, 2.64572314453125, 2.645024658203125, 2.64540771484375, 2.64523681640625, 2.64541796875, 2.644529052734375, 2.649079833984375, 2.645666748046875, 2.646816650390625, 2.645528564453125, 2.645729248046875, 2.6472724609375, 2.646464599609375, 2.644356201171875, 2.644589599609375, 2.64574267578125, 2.645088134765625, 2.6443017578125, 2.644094970703125, 2.64595556640625, 2.645572509765625, 2.644812744140625, 5.4503955078125, 2.645255126953125, 2.645544921875, 2.64656884765625, 2.648037353515625, 2.645263427734375, 2.64638671875, 2.6460732421875, 2.6460478515625, 2.645971923828125, 2.645147705078125, 2.646581298828125, 2.6468291015625, 2.64502880859375, 2.645927978515625, 2.64683935546875, 2.64574365234375, 2.645297119140625, 2.64578662109375, 2.645689453125, 2.64529296875, 2.645930908203125, 2.64618798828125, 2.64557373046875, 2.645583984375, 2.64784375, 2.6465341796875, 2.645923828125, 2.648280029296875, 2.647232421875, 2.64726220703125, 
2.646688720703125, 2.646456298828125, 2.647128173828125, 2.64785009765625, 2.6451201171875, 2.645867431640625, 2.645412841796875, 2.647185302734375, 2.644790283203125, 2.645506103515625, 2.64781005859375, 2.6459228515625, 2.64651171875, 2.6510693359375, 2.647773193359375, 2.6475908203125, 2.648330322265625, 2.646329345703125, 2.64608349609375, 2.64578759765625, 2.64604150390625, 2.646233154296875, 2.647012451171875, 2.644991943359375, 2.6461533203125, 2.646445068359375, 2.6459423828125, 2.646255615234375, 2.647103515625, 2.64721826171875, 2.6459013671875, 2.646496337890625, 5.45085009765625, 2.646131591796875, 2.647123046875, 2.64625244140625, 2.6469755859375, 2.645900146484375, 2.64694384765625, 2.6482001953125, 2.647193603515625, 2.646340576171875, 2.646576171875, 2.647742431640625, 2.645885986328125, 2.64563916015625, 2.645662841796875, 2.646833251953125, 2.64673291015625, 2.648217529296875, 2.645130126953125, 2.648642578125, 2.64700732421875, 2.6464482421875, 2.645592041015625, 2.646584228515625, 2.64501953125, 2.64555224609375, 2.64648388671875, 2.6452490234375, 2.646088623046875, 2.64539453125, 2.646591552734375, 2.645818359375, 2.6455, 2.645107666015625, 2.647025634765625, 2.64500830078125, 2.647201904296875, 2.6471865234375, 2.647762939453125, 2.6468916015625, 2.646411376953125, 2.645792724609375, 2.647160888671875, 2.64576611328125, 2.645645263671875, 2.646044677734375, 2.647015380859375, 2.64671435546875, 2.645159912109375, 2.646703125, 2.646371337890625, 2.646978515625, 2.6460908203125, 2.648290283203125, 2.646096923828125, 2.646834228515625, 2.6468515625, 2.654116943359375, 2.646571044921875, 2.646265869140625, 2.647033935546875, 2.645507080078125, 2.64506884765625, 5.44818896484375, 2.646950927734375, 2.64728466796875, 2.64570068359375, 2.647045166015625, 2.645780517578125, 2.64650341796875, 2.647185302734375, 2.647396240234375, 2.64726416015625, 2.645887939453125, 2.646220947265625, 2.646308837890625, 2.6452724609375, 2.645916748046875, 2.6456513671875, 2.64530029296875, 2.645833740234375, 2.64532275390625, 2.646921142578125, 2.645572509765625, 2.646182861328125, 2.646026123046875, 2.646593505859375, 2.646531005859375, 2.646508544921875, 2.646131591796875, 2.647275634765625, 2.646612060546875, 2.646274169921875, 2.645583984375, 2.646960205078125, 2.647244873046875, 2.6451845703125, 2.6467060546875, 2.646052001953125, 2.64616650390625, 2.645667724609375, 2.652001220703125, 2.64635693359375, 2.645919677734375, 2.646162353515625, 2.64641845703125, 2.645307373046875, 2.644989013671875, 2.646035400390625, 2.645531494140625, 2.645157958984375, 2.645906494140625, 2.645951416015625, 2.646036376953125, 2.645796875, 2.64467041015625, 2.645960693359375, 2.64549267578125, 2.64652294921875, 2.64587255859375, 2.647626708984375, 2.646477783203125, 2.646447021484375, 2.646128662109375, 2.64745263671875, 2.646246337890625, 5.45296484375, 2.646928466796875, 2.647333984375, 2.646259765625, 2.64629443359375, 2.64618701171875, 2.6448466796875, 2.646443115234375, 2.646686767578125, 2.6447626953125, 2.64540576171875, 2.64494287109375, 2.645835693359375, 2.645675048828125, 2.64589404296875, 2.64648095703125, 2.64646044921875, 2.646432861328125, 2.646467529296875, 2.645669921875, 2.64584814453125, 2.6454580078125, 2.64523486328125, 2.650271728515625, 2.647341064453125, 2.64589306640625, 2.64591064453125, 2.64768505859375, 2.64700830078125, 2.64631201171875, 2.645865478515625, 2.647047119140625, 2.646507568359375, 2.64665087890625, 2.647560302734375, 2.645760986328125, 2.645769287109375, 
2.645876708984375, 2.645303466796875, 2.646839111328125, 2.646023193359375, 2.645792724609375, 2.64610400390625, 2.646148193359375, 2.646067138671875, 2.64551416015625, 2.645665771484375, 2.64494482421875, 2.645905517578125, 2.645917724609375, 2.645688232421875, 2.64422705078125, 2.64477587890625, 2.64602001953125, 2.646118408203125, 2.645832763671875, 2.64635888671875, 2.646981689453125, 2.645694580078125, 2.6462197265625, 2.6456484375, 2.647057373046875, 2.646053955078125, 5.45440771484375, 2.64606103515625, 2.64624853515625, 2.645525390625, 2.6462197265625, 2.646288330078125, 2.64601806640625, 2.6458369140625, 2.6465341796875, 2.64682080078125, 2.6460498046875, 2.645517333984375, 2.646379638671875, 2.645887939453125, 2.645572509765625, 2.645886962890625, 2.6497626953125, 2.645772216796875, 2.64593603515625, 2.64641748046875, 2.64673681640625, 2.645414794921875, 2.644895751953125, 2.6456689453125, 2.646624267578125, 2.64646142578125, 2.645294921875, 2.646984619140625, 2.646635498046875, 2.64559619140625, 2.644748291015625, 2.64690576171875, 2.64658642578125, 2.645821533203125, 2.6462392578125, 2.647307373046875, 2.64610205078125, 2.647371826171875, 2.64780078125, 2.64745263671875, 2.646867919921875, 2.64559814453125, 2.646322265625, 2.6461328125, 2.6461328125, 2.6467666015625, 2.64734619140625, 2.647150634765625, 2.6465341796875, 2.646138916015625, 2.645937255859375, 2.64439697265625, 2.645222412109375, 2.646958984375, 2.646992919921875, 2.647083984375, 2.64567822265625, 2.646795166015625, 2.645927978515625, 2.646295654296875, 2.64530224609375, 2.646948974609375, 2.646352783203125, 5.45413623046875, 2.646625244140625, 2.6464501953125, 2.647415771484375, 2.64749462890625, 2.64658837890625, 2.645314453125, 2.647509033203125, 2.647132080078125, 2.64675537109375, 2.646547607421875, 2.646182861328125, 2.64610302734375, 2.6463896484375, 2.646077392578125, 2.64599853515625, 2.646762451171875, 2.64707080078125, 2.646077392578125, 2.64540576171875, 2.646277099609375, 2.64477392578125, 2.645159912109375, 2.6459638671875, 2.64671240234375, 2.646478759765625, 2.645445556640625, 2.646115234375, 2.646445068359375, 2.645536865234375, 2.64454443359375, 2.646118408203125, 2.645505126953125, 2.644704345703125, 2.6460517578125, 2.64701953125, 2.645275634765625, 2.64492431640625, 2.645380126953125, 2.645751708984375, 2.64485888671875, 2.64445654296875, 2.645286865234375, 2.646550537109375, 2.646221923828125, 2.645315673828125, 2.6454169921875, 2.64634375, 2.645382080078125, 2.6473984375, 2.646042724609375, 2.64547119140625, 2.644828125, 2.64538720703125, 2.646898681640625, 2.646388671875, 2.64596484375, 2.6477158203125, 2.64626171875, 2.6452685546875, 2.645350341796875, 2.6460498046875, 2.644905029296875, 5.4557060546875, 2.64730419921875, 2.64740966796875, 2.646350830078125, 2.645677978515625, 2.6465546875, 2.64831689453125, 2.64722119140625, 2.646279052734375, 2.645445556640625, 2.646408203125, 2.646245361328125, 2.646921142578125, 2.64698974609375, 2.646667236328125, 2.646288330078125, 2.64876953125, 2.647458740234375, 2.645729248046875, 2.645494873046875, 2.646150146484375, 2.6453955078125, 2.6452890625, 2.6467861328125, 2.647785400390625, 2.646699951171875, 2.65012744140625, 2.64613671875, 2.646697998046875, 2.6466845703125, 2.646642578125, 2.6476943359375, 2.64681787109375, 2.645788818359375, 2.646158203125, 2.647160888671875, 2.645675048828125, 2.64652490234375, 2.64439306640625, 2.64669189453125, 2.646427734375, 2.64625146484375, 2.646435791015625, 2.645409912109375, 2.646538330078125, 
2.64618505859375, 2.6468310546875, 2.647814208984375, 2.645505126953125, 2.645622802734375, 2.6466826171875, 2.646696044921875, 2.6457548828125, 2.6469130859375, 2.6469990234375, 2.64591259765625, 2.64690185546875, 2.64622705078125, 2.64720703125, 2.646657958984375, 2.645370849609375, 2.645916748046875, 2.646266845703125]",tokens/s,0.3722418346281902,,,,,,,, -4bit-awq-gemm-eager,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,EleutherAI/gpt-neo-1.3B,,cuda,0,42,,,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.0,217063f5c507ed7cc255df7e1f64c4333a0b4dfe,4.40.2,,0.30.1,,,,1.19.2,,,,0.10.0,,,,,,,,,,,,,,,,,,,,,,,,,,,MB,1562.181632,1957.167104,0.0,1310.72,1163.955712,s,10,1.3245970916748049,0.13245970916748048,0.001266998397567483,0.13200260162353517,0.1341699493408203,0.13443305816650392,0.1346435452270508,"[0.1346961669921875, 0.1341114807128906, 0.1312022705078125, 0.13115029907226564, 0.13112367248535156, 0.1315875244140625, 0.13163551330566406, 0.13236968994140624, 0.13365353393554688, 0.13306694030761718]",tokens/s,1932.6631593031557,kWh,1.5487117942793545e-06,8.486256414471426e-07,6.508118482104666e-06,8.905455917831163e-06,tokens/kWh,28746422.683134936,MB,1562.181632,1959.264256,0.0,1312.817152,1232.77568,s,10,76.92137255859376,7.692137255859374,0.017487914910414803,7.693824951171875,7.702772509765625,7.711682641601563,7.718810747070313,"[7.69504345703125, 7.69082470703125, 7.6926064453125, 7.6866689453125, 7.7205927734375, 7.69639892578125, 7.6467568359375, 7.70079248046875, 7.69208056640625, 7.699607421875]",tokens/s,8.190181467707257,kWh,9.084797721340027e-05,4.979003748630931e-05,0.0003722497123017,0.0005128877270014096,tokens/kWh,122833.9004489902,,s,629,77.98684770965575,0.12398544945891218,0.01575529620495015,0.121744384765625,0.12330782623291016,0.12368689422607422,0.2534359008789062,"[0.12507750701904297, 0.12424604797363281, 0.1234441909790039, 0.12341862487792969, 0.12180786895751954, 0.1224120330810547, 0.12155903625488282, 0.12088832092285157, 0.12079001617431641, 0.12080332946777343, 0.1209518051147461, 0.12134912109375, 0.12099378967285156, 0.12220416259765625, 0.12147408294677735, 0.12154876708984375, 0.12117708587646485, 0.12111360168457032, 0.12086067199707032, 0.12105318450927735, 0.12099174499511718, 0.12216934204101562, 0.12297727966308594, 0.12166143798828125, 0.12153446197509765, 0.12155801391601563, 0.12161023712158203, 0.12131839752197265, 0.12115455627441406, 0.12147609710693359, 0.12320972442626953, 0.12328857421875, 0.12294041442871094, 0.1213460464477539, 0.12115360260009765, 0.12151289367675781, 0.12153343963623046, 0.12244172668457032, 0.12324454498291015, 0.12321791839599609, 0.1221396484375, 0.123504638671875, 0.12190930938720704, 0.12093536376953125, 0.12353228759765625, 0.1231247329711914, 0.12405248260498047, 0.12286566162109375, 0.12216831970214843, 0.1221580810546875, 0.12220928192138672, 0.12256153869628907, 0.12320460510253907, 0.12468940734863282, 0.12308889770507812, 0.1220997085571289, 0.12128972625732422, 0.12146380615234376, 0.12163587188720704, 0.12112380981445313, 0.12155289459228516, 0.12149759674072266, 
0.2558975982666016, 0.12277452850341797, 0.12251545715332031, 0.12256665802001954, 0.12305101013183593, 0.12229837036132812, 0.1212968978881836, 0.1239582748413086, 0.12347596740722656, 0.12176076507568359, 0.12155289459228516, 0.12141977691650391, 0.1213685760498047, 0.1208463363647461, 0.12073062133789063, 0.1210040283203125, 0.12351385498046875, 0.12159487915039062, 0.12144640350341797, 0.12170649719238281, 0.12249292755126953, 0.12232396697998046, 0.12123442840576172, 0.12141567993164062, 0.12308889770507812, 0.12184268951416016, 0.1215098876953125, 0.12254208374023437, 0.12142387390136719, 0.12173619079589844, 0.12151910400390625, 0.12153446197509765, 0.12306739044189453, 0.12236492919921875, 0.12201881408691406, 0.12173824310302735, 0.12155699157714844, 0.12209356689453126, 0.12139315032958985, 0.12139520263671875, 0.12346572875976562, 0.12301107025146485, 0.12293836975097656, 0.12192870330810547, 0.12156313323974609, 0.12141056060791015, 0.12382415771484374, 0.12120060729980468, 0.12172902679443359, 0.12317593383789062, 0.12274995422363282, 0.12246323394775391, 0.12149657440185548, 0.12139110565185547, 0.12291788482666016, 0.12123340606689453, 0.12196150207519531, 0.12387631988525391, 0.12276838684082031, 0.12148223876953125, 0.12265267181396484, 0.1214044189453125, 0.12125389099121094, 0.25443635559082034, 0.12313497924804688, 0.12153446197509765, 0.12169830322265625, 0.12147097778320312, 0.12127334594726563, 0.12249088287353516, 0.1221396484375, 0.12196044921875, 0.12192460632324219, 0.12144742584228516, 0.1228226547241211, 0.12275917053222657, 0.12112697601318359, 0.12176787567138672, 0.1217228775024414, 0.12139424133300782, 0.12269356536865235, 0.1223720932006836, 0.12153343963623046, 0.1215478057861328, 0.12162351989746094, 0.12261682891845703, 0.12271308898925781, 0.1215098876953125, 0.12233932495117188, 0.12170342254638672, 0.121238525390625, 0.12113817596435547, 0.12111769866943359, 0.12136653137207032, 0.12165529632568359, 0.1218897933959961, 0.121744384765625, 0.12428594970703125, 0.12338790130615235, 0.12270489501953125, 0.1215129623413086, 0.12190310668945313, 0.12194918060302734, 0.12152114868164063, 0.12212838745117187, 0.12310630035400391, 0.12293427276611328, 0.12179046630859375, 0.12117298889160157, 0.12185906982421875, 0.12301107025146485, 0.1216522216796875, 0.120953857421875, 0.12247654724121093, 0.12268236541748047, 0.12308684539794922, 0.12277657318115234, 0.12169420623779297, 0.12158668518066407, 0.12113510131835938, 0.1211361312866211, 0.1227540512084961, 0.12328550720214844, 0.12320972442626953, 0.12324147033691406, 0.12298242950439453, 0.25302217102050784, 0.1215467529296875, 0.12238642883300781, 0.12331622314453125, 0.12284416198730469, 0.12259225463867188, 0.1225902099609375, 0.12293222045898437, 0.12212531280517579, 0.12088832092285157, 0.12144127655029296, 0.12321894073486328, 0.12279398345947265, 0.12161740875244141, 0.12199628448486328, 0.12130611419677734, 0.12155699157714844, 0.12218367767333985, 0.12152627563476562, 0.12282061004638672, 0.12332236480712891, 0.12172697448730468, 0.1214730224609375, 0.12131743621826171, 0.12206380462646485, 0.12272434997558594, 0.12205158233642578, 0.12277043151855468, 0.12318822479248047, 0.12324352264404297, 0.12150886535644531, 0.12119551849365234, 0.12129280090332031, 0.12111567687988281, 0.1214658203125, 0.12191129302978515, 0.12111974334716796, 0.1216358413696289, 0.12116377258300781, 0.12117708587646485, 0.12161539459228515, 0.1231072998046875, 0.12339097595214844, 0.12407295989990234, 0.12407295989990234, 
0.12135116577148437, 0.12155903625488282, 0.12125183868408203, 0.12111154937744141, 0.1215272979736328, 0.12139315032958985, 0.12157440185546875, 0.12285030364990235, 0.12157644653320313, 0.1217976303100586, 0.12137881469726562, 0.12175360107421875, 0.12123750305175782, 0.12148838043212891, 0.12119859313964844, 0.12210585784912109, 0.12211199951171875, 0.12212940979003906, 0.2530303955078125, 0.12125593566894531, 0.12254003143310546, 0.1212252197265625, 0.12286156463623046, 0.12248985290527344, 0.12266598510742187, 0.12236185455322265, 0.12159385681152343, 0.1212590103149414, 0.12165631866455077, 0.12115660858154297, 0.12235878753662109, 0.12386713409423829, 0.1217802276611328, 0.12304691314697265, 0.12239974212646484, 0.12245503997802734, 0.12413849639892578, 0.12299366760253906, 0.12299263763427734, 0.12301107025146485, 0.12142489624023438, 0.12230963134765625, 0.12332236480712891, 0.12175052642822265, 0.12143718719482421, 0.12149247741699219, 0.1212938232421875, 0.12226150512695312, 0.12151193237304687, 0.12162457275390624, 0.12326092529296875, 0.12302438354492187, 0.12452352142333985, 0.12339405059814453, 0.12212735748291016, 0.12276131439208984, 0.12299766540527343, 0.1216911392211914, 0.12096409606933593, 0.12123033905029297, 0.12140953826904297, 0.12123238372802735, 0.12380467224121093, 0.12290969848632813, 0.12300806427001953, 0.12272940826416015, 0.12320665740966796, 0.12299878692626953, 0.12275917053222657, 0.12293119812011719, 0.12306432342529297, 0.12317798614501953, 0.12322406768798828, 0.12217958068847656, 0.1228267822265625, 0.12593353271484375, 0.1234872283935547, 0.12296601867675781, 0.12340940856933594, 0.12347187042236328, 0.12346470642089843, 0.25439436340332033, 0.12301414489746093, 0.1228062744140625, 0.1231431655883789, 0.12312268829345703, 0.12464742279052735, 0.12372991943359375, 0.1234708480834961, 0.12335411071777344, 0.12233523559570313, 0.12217958068847656, 0.12264959716796875, 0.12329881286621094, 0.12281549072265625, 0.12341248321533203, 0.123109375, 0.12312268829345703, 0.12331110382080078, 0.12178943634033203, 0.12153343963623046, 0.12128153228759765, 0.12127027130126954, 0.12139008331298828, 0.12159487915039062, 0.12141875457763672, 0.12109516906738281, 0.12095590209960938, 0.12107469177246094, 0.12224205017089844, 0.1224263687133789, 0.12177005004882813, 0.12150163269042968, 0.12146585845947265, 0.12140953826904297, 0.12113005065917969, 0.12179347229003906, 0.12241817474365234, 0.12146585845947265, 0.1236316146850586, 0.12164096069335938, 0.12152934265136718, 0.12139622497558594, 0.12146688079833984, 0.12121600341796875, 0.12141567993164062, 0.12133478546142579, 0.12168498992919922, 0.1214167709350586, 0.12150675201416015, 0.12333977508544922, 0.12334899139404297, 0.12139826965332032, 0.12266291046142579, 0.12162457275390624, 0.1213675537109375, 0.12148838043212891, 0.123219970703125, 0.12320358276367188, 0.12155494689941407, 0.12110848236083985, 0.12138294219970704, 0.12153238677978516, 0.12358963012695312, 0.25359359741210935, 0.12145458984375, 0.12112588500976562, 0.12119039916992187, 0.12124671936035156, 0.12149964904785156, 0.12119961547851563, 0.12119245147705078, 0.12146688079833984, 0.12120269012451172, 0.12097740936279297, 0.1208616943359375, 0.12124671936035156, 0.12146176147460938, 0.12102349090576171, 0.1214505615234375, 0.12145657348632813, 0.12126924896240235, 0.12115869140625, 0.12115657806396485, 0.12137574768066406, 0.12156928253173828, 0.12129894256591797, 0.1242234878540039, 0.12195740509033202, 0.12128765106201173, 0.12127436828613282, 
0.12119142150878906, 0.12085968017578125, 0.12119548797607421, 0.12113203430175781, 0.12153139495849609, 0.12129587554931641, 0.12157234954833984, 0.12127641296386718, 0.12110438537597656, 0.12128870391845703, 0.12145970916748047, 0.12127846527099609, 0.12160921478271484, 0.12154879760742188, 0.1213306884765625, 0.12123238372802735, 0.12116070556640625, 0.12110643005371094, 0.12110031890869141, 0.12115042877197266, 0.12125593566894531, 0.12130406188964844, 0.12108185577392579, 0.12092108917236329, 0.12128562927246093, 0.12140748596191406, 0.12148838043212891, 0.12150784301757812, 0.12131737518310547, 0.12162969970703125, 0.12153958129882812, 0.122281982421875, 0.12196249389648438, 0.12154879760742188, 0.1211883544921875, 0.12114022064208985, 0.25422848510742185, 0.12144435119628906, 0.12121600341796875, 0.12331622314453125, 0.12173926544189453, 0.12114329528808594, 0.1231800308227539, 0.1233438720703125, 0.1232721939086914, 0.12304691314697265, 0.12367155456542969, 0.12309913635253907, 0.12304998779296875, 0.12310630035400391, 0.12295680236816406, 0.12294348907470704, 0.12386918640136718, 0.12152320098876954, 0.12160205078125, 0.121059326171875, 0.12094258880615234, 0.12096717071533203, 0.12145868682861329, 0.12323331451416016, 0.12289020538330078, 0.12314832305908203, 0.1236971206665039, 0.12315955352783203, 0.12311039733886718, 0.12137471771240234, 0.12235366058349609, 0.1222451171875, 0.1222973403930664, 0.12167884826660157, 0.12279296112060546, 0.12148735809326172, 0.12146995544433593, 0.12277760314941406, 0.12156723022460937, 0.1216358413696289, 0.12169728088378906, 0.12312166595458984, 0.12145664215087891, 0.12275507354736329, 0.1232353286743164, 0.12311347198486328, 0.123072509765625, 0.1227171859741211, 0.12111154937744141, 0.12276019287109376, 0.12255232238769531, 0.12235059356689452, 0.12264141082763672, 0.1211514892578125, 0.12123545837402344, 0.12166246032714843, 0.12119347381591797, 0.12130201721191407, 0.12149964904785156, 0.12166246032714843, 0.12141875457763672, 0.12106034851074218, 0.12132147216796875, 0.25745306396484374, 0.12312064361572266, 0.12307865905761718, 0.12312166595458984, 0.12121913909912109, 0.12116063690185547, 0.1212774429321289, 0.12236185455322265, 0.1215498275756836, 0.12132454681396485, 0.12093132781982421, 0.12260454559326171, 0.12201676940917969, 0.12116480255126953, 0.12119449615478516, 0.121206787109375, 0.12122112274169922, 0.12207103729248046, 0.12143001556396485, 0.12134400177001953, 0.12148531341552735, 0.12378009796142578, 0.12321279907226562, 0.12303257751464844, 0.12391117095947266, 0.12308889770507812, 0.12307762908935548, 0.12230758666992188, 0.12212636566162109, 0.12145769500732422, 0.12122207641601562, 0.12131123352050781, 0.12108799743652343, 0.1211156463623047, 0.12251033782958984, 0.12306432342529297, 0.12318310546875, 0.12159795379638672, 0.12140850830078125, 0.12139520263671875, 0.12131839752197265, 0.12117196655273438, 0.12120371246337891, 0.12156723022460937, 0.12148941040039063, 0.12145664215087891, 0.12146892547607421, 0.1215447006225586, 0.12250316619873047, 0.12295782470703125, 0.12217036437988281, 0.12270387268066406, 0.12305919647216797, 0.12331520080566406, 0.12305203247070312, 0.12250521850585938, 0.12303667449951172, 0.12276838684082031, 0.12293427276611328, 0.12293427276611328, 0.12159283447265624, 0.12137062072753907, 0.12141670227050781, 0.2548654022216797, 0.1212416000366211, 0.12151193237304687, 0.12308480072021484, 0.12140338897705077, 0.12150377655029297, 0.12124156951904297, 0.121275390625, 0.12150374603271484, 
0.12148838043212891, 0.12146482849121094, 0.12227788543701172, 0.12289740753173828, 0.12292819213867187, 0.12136339569091797, 0.1211822052001953, 0.12195027160644531, 0.12174432373046876, 0.12153241729736328, 0.12099174499511718, 0.12128463745117188, 0.12138390350341798, 0.121385986328125, 0.12258918762207031, 0.1230161895751953, 0.12225638580322265, 0.12275917053222657, 0.12151602935791016, 0.12140850830078125, 0.12134809875488281, 0.12176076507568359, 0.1217791976928711, 0.12272946929931641, 0.12169420623779297, 0.12219187164306641, 0.12145561981201172, 0.12164096069335938, 0.12145254516601563, 0.12175360107421875, 0.12436787414550782, 0.1233602523803711, 0.12332339477539063, 0.1232701416015625, 0.12218675231933594, 0.1233039321899414, 0.12280217742919922, 0.12283596801757812, 0.1221048355102539, 0.12304793548583984, 0.1227509765625, 0.12293023681640625, 0.12202182769775391, 0.12274483489990234, 0.1233070068359375, 0.12290560150146485, 0.12302130889892578, 0.1229496307373047, 0.12324864196777344, 0.12319129943847656, 0.12199935913085938, 0.12175363159179688, 0.12325577545166015, 0.12313600158691407]",tokens/s,8.06546255519598,,,,,,main,False,False -4bit-awq-gemm-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,tiiuae/falcon-7b,tiiuae/falcon-7b,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - self.load_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 559, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3724, in from_pretrained - hf_quantizer.preprocess_model( - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/base.py"", line 182, in preprocess_model - return self._process_model_before_weight_loading(model, **kwargs) - File 
""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/quantizer_awq.py"", line 85, in _process_model_before_weight_loading - model, has_been_replaced = replace_with_awq_linear( - File ""/usr/local/lib/python3.10/dist-packages/transformers/integrations/awq.py"", line 179, in replace_with_awq_linear - _, has_been_replaced = replace_with_awq_linear( - File ""/usr/local/lib/python3.10/dist-packages/transformers/integrations/awq.py"", line 179, in replace_with_awq_linear - _, has_been_replaced = replace_with_awq_linear( - File ""/usr/local/lib/python3.10/dist-packages/transformers/integrations/awq.py"", line 179, in replace_with_awq_linear - _, has_been_replaced = replace_with_awq_linear( - [Previous line repeated 1 more time] - File ""/usr/local/lib/python3.10/dist-packages/transformers/integrations/awq.py"", line 166, in replace_with_awq_linear - model._modules[name] = target_cls( - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/gemm.py"", line 101, in __init__ - assert self.in_features % self.group_size == 0 -AssertionError - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemm-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,huggyllama/llama-65b,huggyllama/llama-65b,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - self.load_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3904, in from_pretrained - dispatch_model(model, **device_map_kwargs) - File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 489, in dispatch_model - model.to(device) - File 
""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 2796, in to - return super().to(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1173, in to - return self._apply(convert) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 779, in _apply - module._apply(fn) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 779, in _apply - module._apply(fn) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 779, in _apply - module._apply(fn) - [Previous line repeated 2 more times] - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 853, in _apply - self._buffers[key] = fn(buf) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1159, in convert - return t.to( -torch.cuda.OutOfMemoryError: CUDA out of memory. Tried to allocate 86.00 MiB. GPU - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemm-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,1,1,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 304, in hf_raise_for_status - response.raise_for_status() - File ""/usr/local/lib/python3.10/dist-packages/requests/models.py"", line 1024, in raise_for_status - raise HTTPError(http_error_msg, response=self) -requests.exceptions.HTTPError: 404 Client Error: Not Found for url: https://huggingface.co/1/resolve/main/config.json - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1221, in hf_hub_download - return _hf_hub_download_to_cache_dir( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1325, in _hf_hub_download_to_cache_dir - _raise_on_head_call_error(head_call_error, force_download, local_files_only) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1823, in _raise_on_head_call_error - raise 
head_call_error - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1722, in _get_metadata_or_catch_error - metadata = get_hf_file_metadata(url=url, proxies=proxies, timeout=etag_timeout, headers=headers) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1645, in get_hf_file_metadata - r = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 372, in _request_wrapper - response = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 396, in _request_wrapper - hf_raise_for_status(response) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status - raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-6694958f-529911556505267b75f3ce9b;baf86dc0-c37b-49ce-8e47-31cfeb96b820) - -Repository Not Found for url: https://huggingface.co/1/resolve/main/config.json. -Please make sure you specified the correct `repo_id` and `repo_type`. -If you are trying to access a private or gated repo, make sure you are authenticated. - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 425, in cached_file - raise EnvironmentError( -OSError: 1 is not a local folder and is not a valid model identifier listed on 'https://huggingface.co/models' -If this is a private repository, make sure to pass a token having permission to this repo either by logging in with `huggingface-cli login` or by passing `token=` - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 
-4bit-awq-gemm-eager,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,internlm/internlm-20b,internlm/internlm-20b,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.0,,0.30.1,,,,1.19.2,,,,0.11.1,,,,,,,,,,,,,,,,,,,,,,,,,,,MB,4294.987776,14621.868032,0.0,13975.420928,13366.068224,s,10,16.495098876953126,1.6495098876953125,0.0005626635207808348,1.6493680419921875,1.6498059448242186,1.6504339782714843,1.650936405029297,"[1.65106201171875, 1.6493150634765625, 1.6490108642578125, 1.64951513671875, 1.6496663818359374, 1.6489195556640626, 1.649384521484375, 1.6492862548828124, 1.6495875244140625, 1.6493515625]",tokens/s,155.19761470340868,kWh,1.9482200029823513e-05,1.067632958434842e-05,9.151657321320173e-05,0.00012167510282737365,tokens/kWh,2103963.7037595077,MB,4294.987776,14621.868032,0.0,13975.420928,13814.414848,s,10,977.7317578124998,97.77317578125,0.006501026483324052,97.774078125,97.7795859375,97.78208203125,97.78407890625,"[97.7740859375, 97.76746875, 97.7754296875, 97.784578125, 97.7782421875, 97.7740703125, 97.77903125, 97.76209375, 97.764875, 97.7718828125]",tokens/s,0.6443485086436308,kWh,0.0011542207027475041,0.0006326151578875942,0.005356944729996799,0.007143780590631898,tokens/kWh,8818.859874086276,,s,629,991.0235808105483,1.5755541825286914,0.19600214423351972,1.55186181640625,1.5529105224609374,1.5533291259765625,3.2017358984375,"[1.5526031494140624, 1.5511746826171875, 1.5512442626953125, 1.5510794677734374, 1.55169482421875, 1.5518966064453126, 1.5526973876953125, 1.5532349853515626, 1.55080810546875, 1.55123095703125, 1.5509954833984374, 1.5513375244140626, 1.551034423828125, 1.552995361328125, 1.5513057861328126, 1.55249462890625, 1.5510445556640624, 1.551331298828125, 1.5506575927734374, 1.55245263671875, 1.5513251953125, 1.5512381591796875, 1.551388671875, 1.5524095458984375, 1.5524761962890625, 1.5533260498046875, 1.553095703125, 1.5521700439453125, 1.551240234375, 1.551688720703125, 1.5509698486328125, 1.55129443359375, 1.5527669677734375, 1.5515802001953125, 1.551595458984375, 1.5514736328125, 1.552204833984375, 1.5519498291015625, 1.55194677734375, 1.55318994140625, 1.55307421875, 1.5523338623046874, 1.5521546630859375, 1.55177978515625, 1.5524197998046876, 1.5533404541015625, 1.5529237060546874, 1.5524351806640626, 1.55150439453125, 1.55194775390625, 1.552548828125, 1.5523931884765625, 1.55225390625, 1.5514603271484375, 1.5512484130859374, 1.5517244873046876, 1.5518822021484375, 1.552468017578125, 1.55346630859375, 1.5518555908203124, 1.5518289794921876, 1.5518668212890625, 3.202376708984375, 1.55222216796875, 1.5525550537109376, 1.551751220703125, 1.5510302734375, 1.55141943359375, 1.5515596923828125, 1.5518863525390625, 1.5530260009765624, 1.5516702880859374, 1.5516968994140625, 1.55188330078125, 1.55222119140625, 1.5524822998046874, 1.5535472412109375, 1.552794677734375, 1.550878662109375, 1.551520751953125, 1.55161083984375, 1.5515064697265626, 1.5530260009765624, 1.5537049560546874, 1.5521280517578124, 1.5518392333984374, 1.5516507568359375, 1.5517174072265625, 1.5512647705078124, 1.5524556884765626, 
1.5519549560546875, 1.552384033203125, 1.5521689453125, 1.5524669189453124, 1.5522928466796875, 1.5529041748046875, 1.552691162109375, 1.55226318359375, 1.552175048828125, 1.5517655029296875, 1.5517972412109375, 1.55106201171875, 1.5525765380859375, 1.551730712890625, 1.5510968017578124, 1.550551025390625, 1.5507015380859375, 1.5512001953125, 1.5515802001953125, 1.5522396240234375, 1.5507763671875, 1.5507752685546874, 1.551515625, 1.551489990234375, 1.5510538330078125, 1.552689208984375, 1.5505909423828126, 1.55152587890625, 1.551647705078125, 1.5511357421875, 1.5508541259765625, 1.5514869384765626, 1.5528365478515624, 1.5521771240234374, 1.5512657470703124, 3.201815673828125, 1.5507783203125, 1.5535196533203126, 1.55132421875, 1.551177734375, 1.55173681640625, 1.5521044921875, 1.5513375244140626, 1.5526204833984374, 1.5516661376953125, 1.5514337158203124, 1.551310791015625, 1.5512391357421875, 1.5521761474609375, 1.5526307373046875, 1.553349609375, 1.551321044921875, 1.5509493408203125, 1.5514971923828125, 1.5512432861328125, 1.551467529296875, 1.552627685546875, 1.552321533203125, 1.5515975341796875, 1.5512330322265624, 1.55135595703125, 1.5512811279296874, 1.55175732421875, 1.5532247314453125, 1.5510487060546876, 1.55163232421875, 1.551236083984375, 1.55205224609375, 1.551705078125, 1.5527781982421875, 1.55230517578125, 1.5521064453125, 1.5526748046875, 1.551935546875, 1.5518392333984374, 1.553038330078125, 1.552990234375, 1.5518115234375, 1.551340576171875, 1.5512269287109375, 1.5518863525390625, 1.551740966796875, 1.553649658203125, 1.551879150390625, 1.5528212890625, 1.552669677734375, 1.5528980712890625, 1.5524495849609374, 1.5538206787109374, 1.55340185546875, 1.5514490966796874, 1.55186181640625, 1.5515064697265626, 1.5515709228515624, 1.5511982421875, 1.5528519287109375, 1.5516334228515625, 1.5513446044921875, 3.2030166015625, 1.55180029296875, 1.552606201171875, 1.5531837158203126, 1.551551513671875, 1.55184130859375, 1.5515115966796875, 1.551283203125, 1.551794189453125, 1.5518525390625, 1.5510743408203125, 1.551730712890625, 1.551551513671875, 1.5510927734375, 1.550951416015625, 1.5540325927734375, 1.55168359375, 1.5516273193359376, 1.5512708740234376, 1.5510076904296874, 1.55110302734375, 1.552822265625, 1.5531990966796876, 1.551572021484375, 1.552153564453125, 1.5514736328125, 1.5518802490234376, 1.551657958984375, 1.5528406982421874, 1.5522344970703126, 1.5521812744140624, 1.5515064697265626, 1.55281103515625, 1.55241064453125, 1.552889892578125, 1.5524730224609375, 1.5517911376953124, 1.5517276611328126, 1.5515074462890626, 1.550802978515625, 1.552126953125, 1.55321240234375, 1.5522139892578124, 1.5518084716796876, 1.5521444091796874, 1.5523614501953125, 1.55158837890625, 1.5536046142578126, 1.5520296630859376, 1.5530731201171875, 1.5527608642578126, 1.552880615234375, 1.5533311767578124, 1.5541422119140624, 1.55335888671875, 1.551869873046875, 1.55161083984375, 1.5517900390625, 1.5514500732421874, 1.551742919921875, 1.553448974609375, 1.551916015625, 1.552142333984375, 3.20272802734375, 1.5516005859375, 1.55409814453125, 1.552426025390625, 1.5515802001953125, 1.5510958251953124, 1.5515125732421875, 1.55104052734375, 1.5517767333984376, 1.5526820068359375, 1.5530455322265626, 1.5519539794921875, 1.5517962646484376, 1.552195556640625, 1.5515853271484374, 1.5515821533203125, 1.5523768310546875, 1.5513426513671875, 1.551593505859375, 1.5516016845703124, 1.5524136962890625, 1.551958984375, 1.5531632080078126, 1.5515289306640625, 1.55102099609375, 1.5516641845703125, 
1.5517286376953126, 1.5518515625, 1.5519652099609376, 1.5515596923828125, 1.55163134765625, 1.5519027099609375, 1.5522764892578125, 1.5519969482421876, 1.5524884033203126, 1.5527935791015626, 1.5509124755859376, 1.55156884765625, 1.551499267578125, 1.5518935546875, 1.551709228515625, 1.552568359375, 1.552501708984375, 1.5518760986328124, 1.5515699462890624, 1.551488037109375, 1.5521546630859375, 1.5521934814453124, 1.5524454345703125, 1.553112060546875, 1.5525970458984375, 1.551457275390625, 1.551424560546875, 1.5510076904296874, 1.5529554443359375, 1.5528099365234376, 1.552541748046875, 1.5528201904296874, 1.5534459228515625, 1.5517440185546876, 1.5520296630859376, 1.5527730712890624, 1.55129345703125, 3.201416259765625, 1.5519610595703126, 1.551810546875, 1.5517440185546876, 1.551605712890625, 1.5520235595703125, 1.5515657958984375, 1.552027587890625, 1.551488037109375, 1.5524935302734375, 1.5531314697265626, 1.5514276123046875, 1.5520655517578126, 1.5519825439453125, 1.551853515625, 1.5516712646484374, 1.5525252685546875, 1.551730712890625, 1.5519703369140625, 1.55103125, 1.55213720703125, 1.552764892578125, 1.553755126953125, 1.5528775634765626, 1.5516845703125, 1.5517542724609374, 1.5515330810546875, 1.5523287353515625, 1.552110595703125, 1.5518494873046875, 1.5517244873046876, 1.55184228515625, 1.551310791015625, 1.5519027099609375, 1.5507896728515624, 1.552606201171875, 1.5512637939453124, 1.5521474609375, 1.5516856689453125, 1.551458251953125, 1.5516610107421875, 1.552607177734375, 1.5517041015625, 1.5525919189453126, 1.55161083984375, 1.552396240234375, 1.550496826171875, 1.551563720703125, 1.5521248779296875, 1.5515289306640625, 1.5516282958984375, 1.5522191162109376, 1.5513795166015625, 1.551456298828125, 1.5522652587890624, 1.5525396728515626, 1.5528642578125, 1.5518760986328124, 1.5516190185546874, 1.5520552978515625, 1.5518668212890625, 1.5532369384765625, 1.552636962890625, 3.202765869140625, 1.5513466796875, 1.553580078125, 1.5520194091796875, 1.5519456787109376, 1.5510753173828125, 1.5518084716796876, 1.55133544921875, 1.5519498291015625, 1.5519241943359374, 1.5514061279296876, 1.551531005859375, 1.5519334716796875, 1.5514398193359376, 1.5528131103515626, 1.55154638671875, 1.55287451171875, 1.552290771484375, 1.5517265625, 1.550793701171875, 1.5515279541015625, 1.5525499267578124, 1.5527659912109375, 1.5533721923828125, 1.5519078369140624, 1.55173583984375, 1.5521668701171876, 1.5522508544921876, 1.5519713134765625, 1.5531519775390625, 1.5524608154296875, 1.551698974609375, 1.552278564453125, 1.55198974609375, 1.5515361328125, 1.55272705078125, 1.551920166015625, 1.552759765625, 1.5514500732421874, 1.5516590576171876, 1.551510498046875, 1.5519405517578124, 1.552752685546875, 1.55184130859375, 1.55072509765625, 1.5517808837890625, 1.5512965087890624, 1.552079833984375, 1.552720947265625, 1.5514542236328126, 1.553217529296875, 1.5522928466796875, 1.5522979736328124, 1.551899658203125, 1.551836181640625, 1.5530526123046875, 1.55264306640625, 1.552206787109375, 1.55278955078125, 1.5506513671875, 1.5514705810546876, 1.552828369140625, 1.5512606201171875, 3.20153076171875, 1.5501854248046876, 1.5515657958984375, 1.5514951171875, 1.552109619140625, 1.552099365234375, 1.552288818359375, 1.5523583984375, 1.5526461181640625, 1.5514920654296875, 1.5523031005859376, 1.5516446533203125, 1.55148291015625, 1.5517808837890625, 1.5515330810546875, 1.5520235595703125, 1.5527147216796875, 1.5511695556640626, 1.552215087890625, 1.5506851806640625, 1.5511387939453125, 1.5506053466796874, 
1.55132421875, 1.5518238525390624, 1.551515625, 1.5510947265625, 1.5512073974609375, 1.5515657958984375, 1.5510599365234374, 1.5514132080078125, 1.551810546875, 1.55152490234375, 1.55114697265625, 1.5516077880859376, 1.5506268310546876, 1.5511910400390625, 1.5526318359375, 1.552217041015625, 1.551873046875, 1.5515443115234375, 1.551283203125, 1.551500244140625, 1.55306494140625, 1.5515596923828125, 1.5516978759765625, 1.5517235107421874, 1.551678466796875, 1.553006591796875, 1.5524617919921875, 1.5526624755859375, 1.5517911376953124, 1.5515576171875, 1.5515587158203126, 1.551556640625, 1.5509442138671874, 1.5528365478515624, 1.5521904296875, 1.55300244140625, 1.5519825439453125, 1.5525980224609375, 1.551983642578125, 1.5516201171875, 1.5528509521484375, 3.202553955078125, 1.551009765625, 1.5523553466796876, 1.5506207275390624, 1.5519232177734374, 1.551784912109375, 1.551542236328125, 1.551203369140625, 1.5513343505859376, 1.55032373046875, 1.5522550048828125, 1.5514869384765626, 1.551784912109375, 1.5508438720703126, 1.5520716552734375, 1.5515863037109374, 1.55217919921875, 1.552343017578125, 1.5512073974609375, 1.5511490478515626, 1.5520225830078125, 1.551941650390625, 1.5513641357421875, 1.5520562744140625, 1.5527935791015626, 1.553005615234375, 1.5519703369140625, 1.5519405517578124, 1.55236962890625, 1.5524075927734375, 1.552400390625, 1.5521392822265625, 1.5526983642578125, 1.5521812744140624, 1.5518033447265625, 1.5511234130859375, 1.5530147705078126, 1.552041015625, 1.5515872802734374, 1.5513333740234374, 1.5513599853515625, 1.5520286865234374, 1.5513743896484375, 1.55210546875, 1.5510108642578124, 1.551393798828125, 1.55116845703125, 1.551873046875, 1.5514449462890625, 1.5518966064453126, 1.55174609375, 1.5518658447265625, 1.5518095703125, 1.5519150390625, 1.5517449951171876, 1.5523460693359374, 1.552300048828125, 1.552015380859375, 1.551193115234375, 1.5512535400390626, 1.5524495849609374, 1.55131494140625, 1.553142822265625, 3.20293701171875, 1.5507025146484374, 1.5517491455078125, 1.5505745849609376, 1.551394775390625, 1.5516416015625, 1.5522529296875, 1.5509012451171875, 1.551172607421875, 1.5507313232421875, 1.551066162109375, 1.5524403076171875, 1.552574462890625, 1.5519908447265625, 1.5519549560546875, 1.5521126708984374, 1.5522078857421875, 1.553606689453125, 1.552162841796875, 1.5526041259765626, 1.552289794921875, 1.5522764892578125, 1.55186376953125, 1.5506749267578126, 1.551815673828125, 1.5521044921875, 1.550876708984375, 1.5515504150390624, 1.550856201171875, 1.5514798583984375, 1.552759765625, 1.551753173828125, 1.5519580078125, 1.5519119873046876, 1.551141845703125, 1.551688720703125, 1.5519180908203125, 1.5516416015625, 1.5518433837890624, 1.5516488037109375, 1.55226416015625, 1.5521085205078125, 1.5519918212890624, 1.5524515380859376, 1.55057666015625, 1.5518023681640625, 1.5514920654296875, 1.5522559814453125, 1.551362060546875, 1.5525518798828124, 1.5522672119140626, 1.551783935546875, 1.551810546875, 1.5519447021484376, 1.5518751220703124, 1.5525919189453126, 1.5539844970703125, 1.5537889404296874, 1.5529072265625, 1.55158935546875, 1.55196826171875, 1.5523502197265624, 1.552722900390625]",tokens/s,0.6346973091049438,,,,,,,, 
-4bit-awq-gemm-eager,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,internlm/internlm2-20b,internlm/internlm2-20b,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.0,,0.30.1,,,,1.19.2,,,,0.11.1,,,,,,,,,,,,,,,,,,,,,,,,,,,MB,4144.504832,15760.621568,0.0,15114.174464,14045.205504,s,10,15.963228881835938,1.5963228881835938,0.0014829914244840832,1.5958744506835938,1.5984191040039064,1.5985000183105469,1.5985647497558595,"[1.595896728515625, 1.598401123046875, 1.5945810546875, 1.5952257080078125, 1.5950601806640625, 1.5945120849609375, 1.5958521728515624, 1.5975928955078125, 1.5975260009765626, 1.5985809326171876]",tokens/s,160.36855820021125,kWh,1.8825581471125285e-05,1.0316417645935872e-05,8.981001629239093e-05,0.00011895201540945208,tokens/kWh,2152128.310889114,MB,4144.504832,15760.621568,0.0,15114.174464,14169.857024,s,10,928.8058671874999,92.88058671875,0.004527086516632578,92.879125,92.88628203124999,92.886953515625,92.887490703125,"[92.8758671875, 92.88065625, 92.887625, 92.8848203125, 92.8846328125, 92.875515625, 92.87759375, 92.8768203125, 92.876203125, 92.8861328125]",tokens/s,0.6782902889144009,kWh,0.0010964920105205644,0.0006009745227833628,0.005288325397323612,0.006985791930627541,tokens/kWh,9018.304671198623,,s,629,941.6992608642577,1.4971371396888042,0.1896008283998211,1.4742733154296874,1.4748571533203125,1.4750364013671875,3.06951080078125,"[1.473944580078125, 1.47329638671875, 1.4738431396484375, 1.47358203125, 1.47386572265625, 1.4748426513671875, 1.47376123046875, 1.4739620361328125, 1.47388720703125, 1.47437158203125, 1.474060302734375, 1.4738883056640626, 1.4746326904296876, 1.473596435546875, 1.4739219970703126, 1.474044921875, 1.474198486328125, 1.4741043701171874, 1.4745589599609374, 1.4744586181640624, 1.4741165771484375, 1.4748856201171876, 1.474040771484375, 1.473933349609375, 1.474250732421875, 1.4747626953125, 1.4741483154296875, 1.4741063232421876, 1.47391796875, 1.47371826171875, 1.4743121337890626, 1.4736240234375, 1.474345947265625, 1.4741422119140626, 1.47420263671875, 1.473943603515625, 1.474134033203125, 1.474208740234375, 1.473671142578125, 1.474809814453125, 1.473850341796875, 1.47422314453125, 1.4740899658203126, 1.4746153564453126, 1.474150390625, 1.474164794921875, 1.474740234375, 1.4742220458984374, 1.474397216796875, 1.47470947265625, 1.4741094970703126, 1.474333740234375, 1.4749337158203124, 1.4750013427734374, 1.4743634033203126, 1.47471875, 1.4743265380859376, 1.4740859375, 1.4744422607421874, 1.4750106201171875, 1.4745364990234375, 1.4744473876953126, 3.072501708984375, 1.4742149658203125, 1.47382275390625, 1.4743377685546875, 1.4739732666015626, 1.4742138671875, 1.473607666015625, 1.473517578125, 1.4735635986328126, 1.4740213623046876, 1.47407568359375, 1.474303955078125, 1.474218017578125, 1.4742906494140624, 1.4739681396484374, 1.4741094970703126, 1.47432958984375, 1.47487744140625, 1.474449462890625, 1.474229248046875, 1.474135009765625, 1.474888671875, 1.4737879638671876, 1.4744893798828125, 1.4746142578125, 1.474745361328125, 1.4743009033203125, 1.4740203857421874, 1.4740869140625, 
1.4739189453125, 1.474797607421875, 1.4739200439453124, 1.474556884765625, 1.4741739501953126, 1.4747054443359375, 1.4741309814453125, 1.4740633544921875, 1.4746910400390625, 1.4741361083984375, 1.4743233642578124, 1.474419677734375, 1.4740213623046876, 1.47390673828125, 1.4743306884765626, 1.4746715087890625, 1.474198486328125, 1.474620361328125, 1.47437255859375, 1.4746910400390625, 1.474514892578125, 1.4745426025390624, 1.47401220703125, 1.4740531005859374, 1.47437158203125, 1.475441650390625, 1.4746439208984374, 1.474841552734375, 1.47451904296875, 1.474598876953125, 1.474634765625, 1.4747381591796875, 1.4739384765625, 1.473933349609375, 3.0693232421875, 1.473892333984375, 1.474171875, 1.4738431396484375, 1.474007080078125, 1.4742384033203124, 1.47473095703125, 1.4749234619140625, 1.473754150390625, 1.4738544921875, 1.474039794921875, 1.47367529296875, 1.47413916015625, 1.47485693359375, 1.4745743408203125, 1.474783203125, 1.475303466796875, 1.474924560546875, 1.4741279296875, 1.4751273193359375, 1.4750792236328125, 1.474249755859375, 1.4747965087890624, 1.474628662109375, 1.4746798095703124, 1.47464599609375, 1.475072021484375, 1.4749490966796874, 1.474650146484375, 1.474839599609375, 1.4745999755859376, 1.47433984375, 1.474587646484375, 1.4739671630859374, 1.47449853515625, 1.47369580078125, 1.4738216552734376, 1.4743746337890624, 1.474156494140625, 1.47430712890625, 1.4742518310546875, 1.474651123046875, 1.4738739013671875, 1.474051025390625, 1.4744114990234376, 1.474198486328125, 1.474423828125, 1.4742803955078125, 1.4740162353515625, 1.473574951171875, 1.4743848876953125, 1.4738145751953124, 1.47405419921875, 1.4739793701171875, 1.475199951171875, 1.4745067138671875, 1.4744473876953126, 1.4743388671875, 1.474714599609375, 1.4745006103515625, 1.4744422607421874, 1.4752808837890625, 1.474572265625, 3.06950341796875, 1.4739578857421876, 1.4735145263671876, 1.4739886474609376, 1.4746470947265624, 1.4735728759765625, 1.4737674560546874, 1.4742681884765625, 1.4737213134765625, 1.47318994140625, 1.47401318359375, 1.47386474609375, 1.47394970703125, 1.4739599609375, 1.4740623779296875, 1.474145263671875, 1.4742425537109376, 1.4741483154296875, 1.4742333984375, 1.4738831787109374, 1.474155517578125, 1.4743111572265626, 1.474144287109375, 1.4746112060546874, 1.4750372314453124, 1.4743541259765625, 1.4747115478515624, 1.4738729248046876, 1.474272216796875, 1.47419140625, 1.4745753173828124, 1.4750064697265626, 1.47466650390625, 1.4748436279296875, 1.474193359375, 1.4741329345703125, 1.4744525146484375, 1.4744913330078124, 1.4748323974609374, 1.4744207763671875, 1.4753935546875, 1.4751805419921875, 1.474482177734375, 1.4740582275390626, 1.4748548583984376, 1.474802734375, 1.4744371337890625, 1.4744453125, 1.4744443359375, 1.4745057373046875, 1.4743941650390624, 1.4749481201171875, 1.47416162109375, 1.47437255859375, 1.4748753662109375, 1.474218994140625, 1.47468798828125, 1.4740306396484375, 1.4754232177734374, 1.474871337890625, 1.4750535888671874, 1.474193359375, 1.47433984375, 3.0700166015625, 1.474471923828125, 1.4742957763671876, 1.474418701171875, 1.4738094482421875, 1.473839111328125, 1.4741832275390625, 1.47508935546875, 1.474249755859375, 1.474166748046875, 1.4742117919921875, 1.47376953125, 1.474102294921875, 1.4737879638671876, 1.4748671875, 1.4747493896484376, 1.474883544921875, 1.4746419677734375, 1.4745159912109376, 1.4746552734375, 1.474293701171875, 1.4748323974609374, 1.4744105224609374, 1.474787353515625, 1.47443408203125, 1.4740316162109375, 1.4738883056640626, 
1.47376953125, 1.47512939453125, 1.47419140625, 1.4745528564453125, 1.4741678466796875, 1.47439208984375, 1.4739793701171875, 1.4745281982421874, 1.47403466796875, 1.4748272705078125, 1.474185302734375, 1.4739229736328125, 1.4739056396484376, 1.473829833984375, 1.474093017578125, 1.4740869140625, 1.474112548828125, 1.4749757080078125, 1.4745753173828124, 1.4741094970703126, 1.4738267822265625, 1.4748529052734376, 1.4741513671875, 1.473798095703125, 1.47447607421875, 1.474387939453125, 1.4741156005859375, 1.4746163330078126, 1.474620361328125, 1.4744791259765626, 1.4744678955078125, 1.4747166748046876, 1.4750802001953125, 1.4745999755859376, 1.4739814453125, 1.4747626953125, 3.069513671875, 1.47412890625, 1.4743746337890624, 1.4740838623046875, 1.4746378173828125, 1.4743603515625, 1.4743265380859376, 1.4740521240234374, 1.474298828125, 1.474333740234375, 1.47361279296875, 1.4742005615234375, 1.4735267333984374, 1.473796142578125, 1.4742364501953125, 1.47390869140625, 1.473765380859375, 1.4739835205078125, 1.4745712890625, 1.4740101318359375, 1.4739835205078125, 1.4743961181640626, 1.474408447265625, 1.4742752685546876, 1.4744114990234376, 1.4744586181640624, 1.4740623779296875, 1.4737520751953126, 1.47413916015625, 1.4744801025390626, 1.4740203857421874, 1.474050048828125, 1.47420361328125, 1.4737581787109375, 1.4745753173828124, 1.474017333984375, 1.474734130859375, 1.47439208984375, 1.4745015869140625, 1.4748160400390624, 1.474017333984375, 1.473838134765625, 1.4741207275390624, 1.473882080078125, 1.4738841552734374, 1.47435107421875, 1.4739732666015626, 1.4739046630859376, 1.4738043212890626, 1.4740582275390626, 1.4742333984375, 1.4741370849609374, 1.4745313720703126, 1.47392822265625, 1.4746859130859375, 1.4744012451171875, 1.4736475830078124, 1.4745435791015624, 1.4739937744140625, 1.4749501953125, 1.4744842529296875, 1.474662353515625, 1.474905029296875, 3.070246826171875, 1.4750576171875, 1.4738739013671875, 1.4734581298828124, 1.4737919921875, 1.474293701171875, 1.4743428955078124, 1.47382275390625, 1.47458154296875, 1.47357080078125, 1.474060302734375, 1.4740726318359374, 1.4753177490234375, 1.473629150390625, 1.4743223876953124, 1.4743489990234375, 1.473955810546875, 1.4739876708984374, 1.4747186279296876, 1.4738134765625, 1.4743214111328125, 1.4742047119140624, 1.4742978515625, 1.4737418212890625, 1.4734571533203125, 1.473523681640625, 1.473606689453125, 1.473850341796875, 1.4743746337890624, 1.4742579345703124, 1.4741832275390625, 1.474040771484375, 1.4740684814453124, 1.4739732666015626, 1.4740592041015625, 1.474809814453125, 1.4747279052734374, 1.4743153076171875, 1.4744268798828124, 1.474154541015625, 1.4741319580078125, 1.473996826171875, 1.4746644287109374, 1.4744012451171875, 1.4747647705078124, 1.474788330078125, 1.4744166259765625, 1.474029541015625, 1.4741083984375, 1.4744698486328125, 1.47449853515625, 1.473933349609375, 1.475099609375, 1.4742476806640625, 1.474344970703125, 1.47441455078125, 1.47498388671875, 1.4740675048828125, 1.4742537841796874, 1.4744873046875, 1.474292724609375, 1.47422509765625, 1.4751826171875, 3.072003173828125, 1.47430908203125, 1.4742056884765624, 1.474298828125, 1.4740029296875, 1.47441357421875, 1.4743634033203126, 1.473666015625, 1.4739056396484376, 1.4738585205078125, 1.4737633056640624, 1.47390771484375, 1.474503662109375, 1.4742467041015626, 1.47406640625, 1.4739844970703124, 1.474124755859375, 1.4739844970703124, 1.47361279296875, 1.474107421875, 1.473881103515625, 1.47378076171875, 1.474008056640625, 1.4745169677734375, 
1.474460693359375, 1.474466796875, 1.4740469970703125, 1.4737305908203124, 1.47407666015625, 1.4743223876953124, 1.4743223876953124, 1.4740029296875, 1.4749521484375, 1.474461669921875, 1.4740694580078124, 1.4741329345703125, 1.47427734375, 1.474609130859375, 1.4740633544921875, 1.474460693359375, 1.474523193359375, 1.4746634521484374, 1.4743521728515625, 1.4738759765625, 1.4742733154296874, 1.4747269287109375, 1.474681884765625, 1.474302001953125, 1.4744627685546876, 1.4744678955078125, 1.4744381103515625, 1.474345947265625, 1.4741944580078126, 1.4742548828125, 1.4745078125, 1.4744801025390626, 1.47441455078125, 1.47418115234375, 1.4743223876953124, 1.4743634033203126, 1.4744627685546876, 1.4740469970703125, 1.4741370849609374, 3.0712216796875, 1.4734315185546876, 1.4745855712890625, 1.47361279296875, 1.473840087890625, 1.4741380615234374, 1.4739364013671874, 1.4736414794921875, 1.473987548828125, 1.4741053466796874, 1.4739149169921875, 1.47364453125, 1.4739620361328125, 1.47382373046875, 1.473976318359375, 1.474334716796875, 1.4739261474609375, 1.473628173828125, 1.4737264404296875, 1.4739302978515625, 1.4734212646484375, 1.4736036376953126, 1.4741319580078125, 1.47426513671875, 1.474302978515625, 1.4739517822265624, 1.4746705322265625, 1.47380126953125, 1.4740101318359375, 1.4745528564453125, 1.4744248046875, 1.47426513671875, 1.4744791259765626, 1.4744033203125, 1.4740244140625, 1.474103271484375, 1.4746705322265625, 1.4742518310546875, 1.474093017578125, 1.474193359375, 1.4737838134765624, 1.473976318359375, 1.474071533203125, 1.47503515625, 1.4746429443359375, 1.4743326416015625, 1.4748948974609375, 1.473871826171875, 1.4741412353515626, 1.474756591796875, 1.474630615234375, 1.474577392578125, 1.474472900390625, 1.474883544921875, 1.4748580322265625, 1.47447705078125, 1.4747811279296874, 1.47460400390625, 1.4745589599609374, 1.4746265869140625, 1.4749808349609375, 1.4749000244140624, 1.474957275390625, 3.07293701171875, 1.4741534423828124, 1.473850341796875, 1.474186279296875, 1.4741903076171874, 1.47452001953125, 1.4740736083984376, 1.474038818359375, 1.474935791015625, 1.4743348388671875, 1.473924072265625, 1.4737950439453125, 1.474008056640625, 1.4743111572265626, 1.474093017578125, 1.4743223876953124, 1.4741851806640625, 1.4741422119140626, 1.4739793701171875, 1.474102294921875, 1.4738380126953126, 1.4738585205078125, 1.4744945068359374, 1.4743634033203126, 1.474292724609375, 1.4741781005859376, 1.4750125732421875, 1.4740316162109375, 1.4740899658203126, 1.473870849609375, 1.47460302734375, 1.4747423095703125, 1.4751129150390625, 1.4748599853515625, 1.474618408203125, 1.475168212890625, 1.4755379638671875, 1.4745087890625, 1.474472900390625, 1.474514892578125, 1.474840576171875, 1.47443603515625, 1.4743101806640626, 1.4746552734375, 1.4748037109375, 1.4745220947265625, 1.4747740478515625, 1.4746317138671876, 1.4742313232421875, 1.4737684326171876, 1.4743951416015626, 1.474081787109375, 1.4740859375, 1.47426611328125, 1.474807861328125, 1.47405615234375, 1.4741063232421876, 1.474302978515625, 1.4747801513671874, 1.4749112548828125, 1.4753310546875, 1.4741790771484375, 1.4742220458984374]",tokens/s,0.6679414821062154,,,,,,,, 
-4bit-awq-gemm-eager,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,facebook/opt-30b,facebook/opt-30b,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.0,,0.30.1,,,,1.19.2,,,,0.11.1,,,,,,,,,,,,,,,,,,,,,,,,,,,MB,5984.595968,19933.954048,0.0,19287.506944,18376.399872,s,10,24.56320629882812,2.456320629882813,0.001691664801696018,2.4561865234375,2.4583423828125,2.4586625244140623,2.4589186376953127,"[2.456571533203125, 2.458982666015625, 2.4545791015625, 2.455032470703125, 2.454942626953125, 2.453741943359375, 2.455801513671875, 2.458271240234375, 2.45821728515625, 2.45706591796875]",tokens/s,104.22092168489151,kWh,2.8975297907988232e-05,1.5879395596475657e-05,0.00013865674981416998,0.00018351144331863386,tokens/kWh,1395008.3731590684,MB,5988.143104,19933.954048,0.0,19287.506944,18871.04,s,10,1458.421484375,145.84214843750001,0.012211079453744932,145.84282812499998,145.852815625,145.8578921875,145.8619534375,"[145.82440625, 145.837078125, 145.840359375, 145.8516875, 145.845296875, 145.8391875, 145.86296875, 145.8513125, 145.82034375, 145.84884375]",tokens/s,0.43197388872119064,kWh,0.001721854172928466,0.000943728472916573,0.008154870135002046,0.010820452780847084,tokens/kWh,5822.307187691273,,s,629,1478.2121892089838,2.350098869966589,0.29170661243792756,2.3148583984375,2.3159804199218748,2.3165888183593752,4.76929474609375,"[2.31657470703125, 2.316729248046875, 2.31575048828125, 2.31400439453125, 2.31361328125, 2.314355712890625, 2.31430859375, 2.313990234375, 2.314439697265625, 2.31400439453125, 2.314337158203125, 2.31450830078125, 2.3143525390625, 2.313754638671875, 2.3145595703125, 2.314746826171875, 2.313860107421875, 2.314616943359375, 2.31397998046875, 2.314455078125, 2.314322021484375, 2.315072509765625, 2.314198974609375, 2.314560546875, 2.31431689453125, 2.314648681640625, 2.314249267578125, 2.3142431640625, 2.31486767578125, 2.314310546875, 2.31423681640625, 2.314349609375, 2.31522509765625, 2.314227783203125, 2.314047607421875, 2.314390380859375, 2.31465380859375, 2.31459130859375, 2.3147294921875, 2.31495263671875, 2.31514306640625, 2.314708984375, 2.314705810546875, 2.314702880859375, 2.31510009765625, 2.314967041015625, 2.314390625, 2.3144326171875, 2.31436279296875, 2.3152353515625, 2.31499169921875, 2.31524560546875, 2.314324951171875, 2.3151728515625, 2.31494970703125, 2.314418212890625, 2.315260009765625, 2.3155927734375, 2.314987548828125, 2.3148759765625, 2.314437744140625, 2.314975341796875, 4.77530908203125, 2.315552734375, 2.313754638671875, 2.31431787109375, 2.314470458984375, 2.31491064453125, 2.313483154296875, 2.3140751953125, 2.314690673828125, 2.313935791015625, 2.31429541015625, 2.313943115234375, 2.3145830078125, 2.314388427734375, 2.3144130859375, 2.314663818359375, 2.314819580078125, 2.31438134765625, 2.31452978515625, 2.313989013671875, 2.314265625, 2.31451025390625, 2.31416015625, 2.313996337890625, 2.314390625, 2.314997802734375, 2.31469970703125, 2.31465380859375, 2.314850341796875, 2.315021240234375, 2.31556396484375, 2.314933349609375, 2.315074462890625, 2.314526611328125, 2.31516162109375, 
2.314416015625, 2.31450634765625, 2.3146474609375, 2.31455322265625, 2.314646484375, 2.315080810546875, 2.314736572265625, 2.315146240234375, 2.316416015625, 2.316883056640625, 2.317212646484375, 2.317498291015625, 2.316708984375, 2.314590087890625, 2.31486669921875, 2.314609619140625, 2.314794921875, 2.3152783203125, 2.314463134765625, 2.31488720703125, 2.3152783203125, 2.3158486328125, 2.315106201171875, 2.31514208984375, 2.315134033203125, 2.314939453125, 2.31448974609375, 2.31490966796875, 4.76900048828125, 2.314322998046875, 2.31459423828125, 2.314704833984375, 2.3145400390625, 2.314397705078125, 2.315212890625, 2.315070556640625, 2.3151708984375, 2.314818603515625, 2.31474072265625, 2.314556396484375, 2.314756103515625, 2.314300537109375, 2.314501220703125, 2.314758056640625, 2.31469580078125, 2.314715087890625, 2.314291259765625, 2.313954345703125, 2.3144990234375, 2.3141162109375, 2.314789794921875, 2.3138662109375, 2.314201171875, 2.31396044921875, 2.314891357421875, 2.313994140625, 2.3142041015625, 2.31410595703125, 2.31444384765625, 2.314310546875, 2.3143935546875, 2.3148564453125, 2.314733642578125, 2.314330078125, 2.31438134765625, 2.3142685546875, 2.3148154296875, 2.315052978515625, 2.31476318359375, 2.31545751953125, 2.315313232421875, 2.315279296875, 2.315707275390625, 2.314758056640625, 2.3152158203125, 2.31581689453125, 2.315241455078125, 2.3146220703125, 2.315763671875, 2.3165615234375, 2.31669873046875, 2.3161025390625, 2.31678759765625, 2.31653173828125, 2.316707763671875, 2.3152138671875, 2.31585693359375, 2.314685546875, 2.315296875, 2.3153837890625, 2.315890625, 4.7694091796875, 2.314258544921875, 2.3142666015625, 2.31421435546875, 2.314051513671875, 2.314406982421875, 2.314616943359375, 2.314188720703125, 2.31503662109375, 2.31619287109375, 2.31446533203125, 2.313934814453125, 2.315040771484375, 2.314817626953125, 2.315828125, 2.3156357421875, 2.316775390625, 2.314610595703125, 2.31668017578125, 2.31729052734375, 2.31545556640625, 2.315461669921875, 2.315554931640625, 2.31465576171875, 2.314517578125, 2.3154697265625, 2.31606689453125, 2.314260498046875, 2.314863525390625, 2.314272705078125, 2.314649658203125, 2.3157421875, 2.315529296875, 2.3159091796875, 2.314768310546875, 2.3160771484375, 2.315504638671875, 2.314666015625, 2.314771484375, 2.315051025390625, 2.3154248046875, 2.314206298828125, 2.314478515625, 2.31497021484375, 2.315460693359375, 2.314807373046875, 2.31501708984375, 2.315440185546875, 2.315284423828125, 2.31450634765625, 2.314502197265625, 2.314387451171875, 2.314957763671875, 2.3149384765625, 2.314912841796875, 2.314670166015625, 2.3156572265625, 2.316516357421875, 2.3157412109375, 2.315861083984375, 2.315828125, 2.315040771484375, 2.3151728515625, 4.7702314453125, 2.31434033203125, 2.314682373046875, 2.313740234375, 2.314062744140625, 2.31486767578125, 2.314884033203125, 2.314498046875, 2.314965087890625, 2.314472412109375, 2.314133544921875, 2.31417041015625, 2.313879638671875, 2.313923583984375, 2.314146728515625, 2.314799072265625, 2.3140986328125, 2.314390625, 2.315798583984375, 2.31642822265625, 2.315537353515625, 2.316564453125, 2.314660888671875, 2.313965576171875, 2.314789794921875, 2.31402294921875, 2.314662841796875, 2.314347412109375, 2.315102294921875, 2.3144150390625, 2.315137939453125, 2.31604833984375, 2.315811767578125, 2.3149404296875, 2.31493115234375, 2.315124755859375, 2.3152158203125, 2.314859619140625, 2.315197509765625, 2.314984375, 2.315462646484375, 2.31446533203125, 2.3143095703125, 2.31438037109375, 2.31520556640625, 
2.31512158203125, 2.314987548828125, 2.314483642578125, 2.31514208984375, 2.314712158203125, 2.31526513671875, 2.314966064453125, 2.314924072265625, 2.314987548828125, 2.31532958984375, 2.31476318359375, 2.3151298828125, 2.315618408203125, 2.316012451171875, 2.317046875, 2.316642333984375, 2.317076416015625, 2.317365234375, 4.7678056640625, 2.314080322265625, 2.31505908203125, 2.315978759765625, 2.315252685546875, 2.31602392578125, 2.3155966796875, 2.31449609375, 2.31440087890625, 2.31406396484375, 2.31448583984375, 2.314968017578125, 2.314263671875, 2.31400146484375, 2.314314697265625, 2.314521484375, 2.3153408203125, 2.31357861328125, 2.3147294921875, 2.31446728515625, 2.315052978515625, 2.314033203125, 2.314588134765625, 2.314469482421875, 2.31505517578125, 2.3144765625, 2.3147109375, 2.31465478515625, 2.315134033203125, 2.31459521484375, 2.31478466796875, 2.314600341796875, 2.314883056640625, 2.315136962890625, 2.315252685546875, 2.31488818359375, 2.315322265625, 2.31524755859375, 2.31511767578125, 2.3150263671875, 2.3153603515625, 2.315388916015625, 2.31535009765625, 2.31446826171875, 2.31495068359375, 2.31469775390625, 2.315263916015625, 2.314312744140625, 2.3146865234375, 2.314904541015625, 2.31448974609375, 2.314483642578125, 2.314174560546875, 2.314915771484375, 2.314462158203125, 2.3165869140625, 2.317093994140625, 2.31678759765625, 2.315781005859375, 2.315177978515625, 2.315336669921875, 2.31464453125, 2.31521484375, 4.770552734375, 2.314354736328125, 2.31482568359375, 2.313882568359375, 2.31376171875, 2.313788330078125, 2.315693115234375, 2.31585986328125, 2.31478271484375, 2.315926513671875, 2.3154453125, 2.315216796875, 2.314701904296875, 2.315554931640625, 2.31552197265625, 2.315801513671875, 2.316396484375, 2.314292236328125, 2.314648681640625, 2.31490771484375, 2.315061279296875, 2.3164384765625, 2.316370849609375, 2.316866455078125, 2.31659521484375, 2.31632080078125, 2.315591796875, 2.31587646484375, 2.3159716796875, 2.315903076171875, 2.315663330078125, 2.31596240234375, 2.316310546875, 2.314661865234375, 2.315462646484375, 2.31440380859375, 2.31526611328125, 2.315337646484375, 2.315576416015625, 2.31480419921875, 2.314577880859375, 2.316158935546875, 2.316590087890625, 2.31474072265625, 2.314672119140625, 2.315419677734375, 2.315107421875, 2.315031494140625, 2.314947509765625, 2.31512158203125, 2.31507861328125, 2.315275146484375, 2.31488916015625, 2.314337158203125, 2.315274169921875, 2.315357177734375, 2.314990478515625, 2.314895263671875, 2.315843505859375, 2.315281494140625, 2.315041748046875, 2.314649658203125, 2.3151708984375, 4.7742177734375, 2.314451904296875, 2.314017822265625, 2.316307373046875, 2.31636376953125, 2.315851806640625, 2.3162041015625, 2.315658203125, 2.315419677734375, 2.315569091796875, 2.31410693359375, 2.3144345703125, 2.3141181640625, 2.3143701171875, 2.314511474609375, 2.314544189453125, 2.31514208984375, 2.314577880859375, 2.314501220703125, 2.314337158203125, 2.315443115234375, 2.314441650390625, 2.314269775390625, 2.3141171875, 2.31440380859375, 2.315146240234375, 2.31492919921875, 2.314220458984375, 2.31446533203125, 2.3145595703125, 2.314896484375, 2.3150439453125, 2.314417236328125, 2.31532861328125, 2.315260009765625, 2.315807861328125, 2.31515234375, 2.315829345703125, 2.315790283203125, 2.3159296875, 2.31573193359375, 2.315969482421875, 2.316851318359375, 2.3155537109375, 2.314724365234375, 2.3145625, 2.315716552734375, 2.31531005859375, 2.315375732421875, 2.315345947265625, 2.31587744140625, 2.31564599609375, 2.315336669921875, 
2.3145, 2.31510107421875, 2.31535400390625, 2.314984375, 2.31453076171875, 2.3153857421875, 2.314976318359375, 2.315102294921875, 2.31464453125, 2.31491064453125, 4.77313525390625, 2.31535205078125, 2.314526611328125, 2.3150869140625, 2.315890625, 2.314620849609375, 2.3140322265625, 2.313732177734375, 2.31478466796875, 2.31486669921875, 2.3156357421875, 2.31585986328125, 2.314743896484375, 2.314322021484375, 2.3139072265625, 2.31398291015625, 2.314142822265625, 2.31423388671875, 2.31518408203125, 2.31429833984375, 2.314175537109375, 2.3156572265625, 2.3145810546875, 2.313807861328125, 2.314330078125, 2.31499169921875, 2.314958740234375, 2.3144365234375, 2.3145185546875, 2.31440185546875, 2.315052978515625, 2.314460205078125, 2.3145419921875, 2.3140576171875, 2.314851318359375, 2.314546142578125, 2.314408935546875, 2.31389892578125, 2.31480419921875, 2.31436279296875, 2.3144580078125, 2.314177490234375, 2.314638427734375, 2.3145419921875, 2.31480322265625, 2.314745849609375, 2.314291259765625, 2.31537255859375, 2.31572265625, 2.31389599609375, 2.313974853515625, 2.314586181640625, 2.314895263671875, 2.314090576171875, 2.314375244140625, 2.314859619140625, 2.3148583984375, 2.3144326171875, 2.314412109375, 2.314270751953125, 2.31518115234375, 2.314324951171875, 2.314473388671875, 4.7714609375, 2.31414794921875, 2.313611328125, 2.314270751953125, 2.314892333984375, 2.31461572265625, 2.315198486328125, 2.3143720703125, 2.314142822265625, 2.31568701171875, 2.315873291015625, 2.31440478515625, 2.314210205078125, 2.31602587890625, 2.316198974609375, 2.31553857421875, 2.315966552734375, 2.31545947265625, 2.31545849609375, 2.31590185546875, 2.31619287109375, 2.31564599609375, 2.316030029296875, 2.315187255859375, 2.314642333984375, 2.313966552734375, 2.3152138671875, 2.31450830078125, 2.315548583984375, 2.314388427734375, 2.315707275390625, 2.3161650390625, 2.31663623046875, 2.316718994140625, 2.315875244140625, 2.315216796875, 2.31624609375, 2.315987060546875, 2.315421630859375, 2.315747314453125, 2.314558349609375, 2.313786376953125, 2.313956298828125, 2.3144541015625, 2.3148984375, 2.314367919921875, 2.31458203125, 2.315187255859375, 2.31473046875, 2.314349609375, 2.314651611328125, 2.31442626953125, 2.315454345703125, 2.31490771484375, 2.314850341796875, 2.315431884765625, 2.31543798828125, 2.314609619140625, 2.31511767578125, 2.314015625, 2.31457177734375, 2.314642333984375, 2.31451953125]",tokens/s,0.4255140125292758,,,,,,,, -4bit-awq-gemm-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,Qwen/Qwen1.5-MoE-A2.7B,Qwen/Qwen1.5-MoE-A2.7B,cuda,0,42,,,True,,,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch - 
raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run - report = scenario.run(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run - self.run_model_loading_tracking(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking - backend.load() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load - self.load_transformers_model() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model - self.load_transformers_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights - self.load_transformers_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained - self.pretrained_model = self.automodel_loader.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3846, in from_pretrained - hf_quantizer.preprocess_model( - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/base.py"", line 182, in preprocess_model - return self._process_model_before_weight_loading(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/quantizer_awq.py"", line 85, in _process_model_before_weight_loading - model, has_been_replaced = replace_with_awq_linear( - File ""/usr/local/lib/python3.10/dist-packages/transformers/integrations/awq.py"", line 179, in replace_with_awq_linear - _, has_been_replaced = replace_with_awq_linear( - File ""/usr/local/lib/python3.10/dist-packages/transformers/integrations/awq.py"", line 179, in replace_with_awq_linear - _, has_been_replaced = replace_with_awq_linear( - File ""/usr/local/lib/python3.10/dist-packages/transformers/integrations/awq.py"", line 179, in replace_with_awq_linear - _, has_been_replaced = replace_with_awq_linear( - [Previous line repeated 1 more time] - File ""/usr/local/lib/python3.10/dist-packages/transformers/integrations/awq.py"", line 166, in replace_with_awq_linear - model._modules[name] = target_cls( - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/gemm.py"", line 103, in __init__ - assert out_features % (32 // self.w_bit) == 0 -AssertionError - -",qwen2_moe,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemm-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,mistralai/Mixtral-8x7B-v0.1,mistralai/Mixtral-8x7B-v0.1,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA 
A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - self.load_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3904, in from_pretrained - dispatch_model(model, **device_map_kwargs) - File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 489, in dispatch_model - model.to(device) - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 2796, in to - return super().to(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1173, in to - return self._apply(convert) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 779, in _apply - module._apply(fn) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 779, in _apply - module._apply(fn) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 779, in _apply - module._apply(fn) - [Previous line repeated 2 more times] - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 853, in _apply - self._buffers[key] = fn(buf) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1159, in convert - return t.to( -torch.cuda.OutOfMemoryError: CUDA out of memory. Tried to allocate 20.00 MiB. 
GPU - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemm-eager,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,EleutherAI/pythia-410m,EleutherAI/pythia-410m,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.40.2,,0.30.1,,,,1.19.2,,,,0.11.1,,,,,,,,,,,,,,,,,,,,,,,,,,,MB,1390.702592,1340.604416,0.0,694.157312,598.499328,s,10,0.6226632919311523,0.062266329193115234,0.002317832326909016,0.06196879959106445,0.06357897109985351,0.06612861976623535,0.06816833869934083,"[0.06867826843261719, 0.06029948806762695, 0.06301238250732422, 0.06221964645385742, 0.06201440048217773, 0.06192319869995117, 0.06178224182128906, 0.06027980804443359, 0.06031568145751953, 0.06213817596435547]",tokens/s,4111.3713192571795,kWh,7.181983443623039e-07,3.9353970671072603e-07,2.1138383914060147e-06,3.2255764424790445e-06,tokens/kWh,79365658.99621,MB,1391.030272,1340.604416,0.0,694.157312,659.032576,s,10,37.91430688476563,3.791430688476563,0.033463632163375424,3.808232421875,3.8238944580078122,3.8254893432617187,3.8267652514648436,"[3.8235400390625, 3.807294921875, 3.809169921875, 3.815871337890625, 3.827084228515625, 3.813744140625, 3.745643798828125, 3.73081494140625, 3.753345703125, 3.7877978515625]",tokens/s,16.616418754924958,kWh,4.459946840148583e-05,2.4442959093412353e-05,0.00012145201046119378,0.00019049443795609197,tokens/kWh,330718.3174267859,,s,629,38.408798217773445,0.06106327220631707,0.007411049818688859,0.06058700942993164,0.06105620422363281,0.061356236267089845,0.1198044711303711,"[0.05935715103149414, 0.059200511932373044, 0.06025625610351563, 0.06658048248291015, 0.06115430450439453, 0.06075187301635742, 0.06081740951538086, 0.06116556930541992, 0.06187519836425781, 0.060846080780029295, 0.0618015022277832, 0.061259742736816405, 0.06081024169921875, 0.06082559967041016, 0.06123110580444336, 0.06096486282348633, 0.06082559967041016, 0.06090752029418945, 0.06103142547607422, 0.05951590347290039, 0.05919027328491211, 0.060930049896240235, 0.06138163375854492, 0.06130278396606445, 0.0615464973449707, 0.061396991729736325, 0.06103859329223633, 0.060919807434082034, 0.06081228637695312, 0.06087680053710937, 0.06095257568359375, 0.06107648086547852, 0.06047641754150391, 0.05975551986694336, 0.060652542114257815, 0.060800033569335936, 0.06089315032958984, 0.060319744110107425, 0.06068940734863281, 0.06150451278686524, 0.06105190277099609, 0.061037567138671874, 0.059990016937255856, 0.060765182495117184, 0.06077849578857422, 0.06057881546020508, 0.060082176208496096, 0.0589219856262207, 0.059184127807617185, 0.05976166534423828, 0.060783649444580076, 0.06094025421142578, 0.06090854263305664, 0.060641281127929686, 0.05964799880981445, 0.05914112091064453, 0.05936537551879883, 0.06067507171630859, 0.06081740951538086, 0.060158977508544924, 0.06099660873413086, 0.05982624053955078, 0.11980588531494141, 0.06112768173217773, 0.06088294219970703, 0.060576766967773435, 0.06073548889160156, 0.06021529769897461, 0.06116966247558594, 0.06180044937133789, 0.06070483016967773, 0.05943494415283203, 
0.059440128326416014, 0.060862464904785155, 0.060211200714111325, 0.05941452789306641, 0.06035456085205078, 0.060521472930908204, 0.06083174514770508, 0.06059929656982422, 0.060902400970458986, 0.06084096145629883, 0.06026649475097656, 0.05905920028686523, 0.059622398376464845, 0.06106521606445312, 0.06076313781738281, 0.05981081771850586, 0.05929369735717773, 0.059216896057128904, 0.061050880432128904, 0.06076620864868164, 0.059224063873291016, 0.06019583892822265, 0.06055321502685547, 0.060796928405761716, 0.061064193725585934, 0.06083071899414062, 0.060793857574462894, 0.05939507293701172, 0.058877952575683595, 0.05913702392578125, 0.05904793548583984, 0.05907865524291992, 0.06065049743652344, 0.06088294219970703, 0.06102732849121094, 0.06089215850830078, 0.06101504135131836, 0.060728321075439455, 0.06069760131835938, 0.060909599304199216, 0.06107542419433594, 0.060761089324951174, 0.06076416015625, 0.060911617279052734, 0.06071091079711914, 0.06042009735107422, 0.06087372970581055, 0.06067302322387695, 0.06060851287841797, 0.060984321594238285, 0.06090956878662109, 0.06082867050170898, 0.060943359375, 0.12434432220458984, 0.059063297271728515, 0.05924863815307617, 0.059273216247558595, 0.059202560424804686, 0.0593807373046875, 0.05934284973144531, 0.06072115325927734, 0.060693504333496094, 0.06077439880371094, 0.0608798713684082, 0.05987430572509766, 0.05933158493041992, 0.05966438293457031, 0.05923430252075195, 0.06037811279296875, 0.060916736602783204, 0.06084505462646484, 0.060793857574462894, 0.06016204833984375, 0.06031257629394531, 0.060813312530517576, 0.06093008041381836, 0.06087472152709961, 0.06037811279296875, 0.06069964981079102, 0.06112972640991211, 0.06072524642944336, 0.060851200103759766, 0.06091468811035156, 0.0610334701538086, 0.0608901138305664, 0.06094847869873047, 0.06083071899414062, 0.060846080780029295, 0.06088806533813477, 0.06078566360473633, 0.06029625701904297, 0.06034220886230469, 0.06101094436645508, 0.06097919845581055, 0.061350910186767575, 0.05945548629760742, 0.05929779052734375, 0.059101184844970706, 0.059858943939208986, 0.05947903823852539, 0.05919948959350586, 0.059469825744628904, 0.06209024047851563, 0.06109183883666992, 0.06083071899414062, 0.06089318466186523, 0.06092083358764649, 0.06089113616943359, 0.06088601684570313, 0.06026956939697266, 0.06099763107299805, 0.06088499069213867, 0.06075494384765625, 0.06154547119140625, 0.0613570556640625, 0.06097919845581055, 0.12348928070068359, 0.060916736602783204, 0.060777473449707034, 0.0607303695678711, 0.06054297637939453, 0.06096694564819336, 0.06082352066040039, 0.060835838317871094, 0.06073855972290039, 0.060864513397216796, 0.05947903823852539, 0.059015167236328124, 0.059270145416259766, 0.05909404754638672, 0.05935817718505859, 0.05992550277709961, 0.06107955169677735, 0.061110271453857425, 0.06092083358764649, 0.06084505462646484, 0.06058803176879883, 0.06083891296386719, 0.06079487991333008, 0.060827648162841794, 0.060832767486572265, 0.06084403228759765, 0.06105702209472656, 0.06056345748901367, 0.06058598327636719, 0.06076006317138672, 0.06095462417602539, 0.0609617919921875, 0.06127206420898437, 0.05955788803100586, 0.06099967956542969, 0.06095667266845703, 0.06068633651733398, 0.060887039184570314, 0.06139187240600586, 0.061454334259033204, 0.06054912185668945, 0.06096281433105469, 0.06029414367675781, 0.06135500717163086, 0.062328830718994144, 0.06116147232055664, 0.0603770866394043, 0.05916262435913086, 0.059469825744628904, 0.0593438720703125, 0.05933055877685547, 0.05895884704589844, 
0.05933670425415039, 0.06046003341674805, 0.06081024169921875, 0.06076620864868164, 0.061451263427734375, 0.06086348724365234, 0.06087168121337891, 0.06075699234008789, 0.060631038665771485, 0.0605296630859375, 0.06076006317138672, 0.12325580596923828, 0.060826625823974606, 0.06067612838745117, 0.06101910400390625, 0.06079283142089844, 0.06072115325927734, 0.06085529708862305, 0.06092902374267578, 0.0608983039855957, 0.060955646514892575, 0.06091059112548828, 0.060889087677001956, 0.06080716705322266, 0.059873279571533204, 0.05963673782348633, 0.06089932632446289, 0.061224960327148435, 0.06137139129638672, 0.06110105514526367, 0.06112870407104492, 0.06075699234008789, 0.0609249267578125, 0.0607457275390625, 0.06121267318725586, 0.060843006134033206, 0.06098124694824219, 0.061001728057861325, 0.06096588897705078, 0.06096895980834961, 0.06099558258056641, 0.06101708984375, 0.06083174514770508, 0.06094643020629883, 0.06118297576904297, 0.06099148941040039, 0.06102937698364258, 0.06078771209716797, 0.06082457733154297, 0.059865089416503904, 0.060216320037841796, 0.059087871551513675, 0.05941452789306641, 0.06077439880371094, 0.06028083038330078, 0.06068121719360352, 0.06069760131835938, 0.06096691131591797, 0.06012416076660156, 0.06075494384765625, 0.06013849639892578, 0.06086963272094727, 0.060609535217285154, 0.06098739242553711, 0.06088294219970703, 0.06077337646484375, 0.06087782287597656, 0.060805118560791016, 0.06075801467895508, 0.060829696655273435, 0.060837886810302735, 0.060200958251953124, 0.060911617279052734, 0.06101094436645508, 0.1231247329711914, 0.060744705200195315, 0.06079897689819336, 0.0600002555847168, 0.059115520477294924, 0.0603351058959961, 0.06076620864868164, 0.060727294921875, 0.060744705200195315, 0.060527614593505856, 0.06128844833374023, 0.06094438552856445, 0.060677120208740234, 0.06176768112182617, 0.062110721588134764, 0.0609249267578125, 0.060832767486572265, 0.060432384490966794, 0.06098636627197265, 0.06112972640991211, 0.060895233154296874, 0.062491649627685546, 0.060980224609375, 0.06099558258056641, 0.06110310363769531, 0.06090956878662109, 0.060767230987548826, 0.06076211166381836, 0.06077542495727539, 0.06094847869873047, 0.06072115325927734, 0.06075392150878906, 0.060911617279052734, 0.06179123306274414, 0.06146355056762695, 0.06151174545288086, 0.05984864044189453, 0.0592097282409668, 0.059241470336914064, 0.0594442253112793, 0.05915238571166992, 0.05912063980102539, 0.059140094757080076, 0.05916159820556641, 0.05927219009399414, 0.059033599853515625, 0.059902976989746094, 0.06051942443847656, 0.060739585876464844, 0.06076313781738281, 0.060609535217285154, 0.059551742553710936, 0.05947699356079102, 0.06035257720947266, 0.060338111877441404, 0.06081126403808594, 0.06072524642944336, 0.060757022857666015, 0.06074979019165039, 0.060472320556640625, 0.06065459060668945, 0.06086656188964844, 0.06003609466552735, 0.12312576293945313, 0.06074060821533203, 0.06058700942993164, 0.060955646514892575, 0.060897281646728516, 0.060668926239013675, 0.05904793548583984, 0.059138046264648435, 0.0591984977722168, 0.059074527740478514, 0.05915545654296875, 0.05922918319702149, 0.05921279907226563, 0.05931417465209961, 0.05925068664550781, 0.05924665451049805, 0.05927315139770508, 0.062034942626953124, 0.06089625549316406, 0.05932646560668945, 0.05910630416870117, 0.05935615921020508, 0.059224063873291016, 0.0590909423828125, 0.05909299087524414, 0.05916159820556641, 0.05918105697631836, 0.05901004791259765, 0.05914828872680664, 0.05935718536376953, 
0.059216896057128904, 0.05914214324951172, 0.05914214324951172, 0.05903564834594727, 0.05917388916015625, 0.059701248168945314, 0.05926399993896484, 0.05921177673339844, 0.059167774200439456, 0.05920764923095703, 0.05906227111816406, 0.05908992004394531, 0.059063297271728515, 0.05934694290161133, 0.05926707077026367, 0.05919027328491211, 0.05903462219238281, 0.0592988166809082, 0.05978316879272461, 0.06010367965698242, 0.05926502227783203, 0.059202560424804686, 0.05930092620849609, 0.05915334320068359, 0.059085823059082034, 0.059358207702636716, 0.05939199829101562, 0.05905929565429688, 0.05903247833251953, 0.05921382522583008, 0.059084800720214846, 0.05944319915771484, 0.059240447998046876, 0.11974553680419922, 0.059086910247802736, 0.05899875259399414, 0.05906224060058594, 0.059066368103027345, 0.059218944549560545, 0.05904281616210937, 0.05910220718383789, 0.05922304153442383, 0.059017215728759766, 0.05900185775756836, 0.05915750503540039, 0.059101184844970706, 0.05916262435913086, 0.05922099304199219, 0.059033599853515625, 0.05897625732421875, 0.05926604843139648, 0.059210750579833986, 0.059121662139892575, 0.05920358276367187, 0.05924556732177735, 0.059066368103027345, 0.05899776077270508, 0.05931008148193359, 0.06076620864868164, 0.059312126159667966, 0.059284481048583984, 0.059046913146972656, 0.05910425567626953, 0.059017215728759766, 0.05894041442871094, 0.05906227111816406, 0.05915955352783203, 0.059063297271728515, 0.05913600158691406, 0.05902233505249024, 0.059099136352539064, 0.059030529022216796, 0.05913497543334961, 0.0589035530090332, 0.05907353591918945, 0.059066368103027345, 0.058962944030761716, 0.059486209869384764, 0.059453441619873044, 0.058998783111572264, 0.058987518310546876, 0.05903564834594727, 0.05900185775756836, 0.05933977508544922, 0.05908070373535156, 0.059009025573730466, 0.06041702270507812, 0.05994598388671875, 0.058993663787841794, 0.05916262435913086, 0.059085823059082034, 0.05953638458251953, 0.05912473678588867, 0.05950054550170898, 0.060255264282226564, 0.05989884948730469, 0.11980083465576172, 0.059218944549560545, 0.05915238571166992, 0.059079681396484375, 0.05905715179443359, 0.059115520477294924, 0.05936025619506836, 0.0590643196105957, 0.05912575912475586, 0.05918207931518555, 0.06097919845581055, 0.059486209869384764, 0.05931622314453125, 0.059044864654541014, 0.05903363037109375, 0.059218910217285155, 0.059025409698486325, 0.05917491149902344, 0.05916364669799805, 0.05909401702880859, 0.0590489616394043, 0.05904076766967774, 0.059096065521240235, 0.059121662139892575, 0.05902438354492188, 0.05902131271362305, 0.05898854446411133, 0.05910015869140625, 0.059300865173339844, 0.05925785446166992, 0.06097612762451172, 0.060662784576416016, 0.060878849029541014, 0.06074060821533203, 0.06073446273803711, 0.06080412673950195, 0.06049481582641601, 0.06058700942993164, 0.060581886291503906, 0.0608092155456543, 0.059243518829345705, 0.059063297271728515, 0.05907251358032226, 0.05892403030395508, 0.05899776077270508, 0.05919744110107422, 0.05904793548583984, 0.059481086730957033, 0.05923942565917969, 0.05914419174194336, 0.05912985610961914, 0.059145217895507814, 0.05928550338745117, 0.059261951446533206, 0.05939199829101562, 0.05910323333740235, 0.05878988647460937, 0.059832321166992185, 0.061055999755859375, 0.060690433502197265, 0.06087174224853516, 0.06086547088623047, 0.060862464904785155, 0.12235059356689452, 0.05892607879638672, 0.05911859130859375, 0.05902950286865234, 0.05892403030395508, 0.058929153442382816, 0.05973811340332031, 
0.059853824615478515, 0.058992641448974606, 0.059038719177246096, 0.05915545654296875, 0.05913190460205078, 0.058912769317626956, 0.05937152099609375, 0.060902400970458986, 0.06070272064208984, 0.06099353790283203, 0.060695552825927736, 0.059014144897460936, 0.05908172988891602, 0.05928140640258789, 0.059154430389404294, 0.06019583892822265, 0.06073241424560547, 0.0606668815612793, 0.060614654541015625, 0.06086252975463867, 0.06079072189331055, 0.06075801467895508, 0.06062899017333984, 0.06069247817993164, 0.060826625823974606, 0.061156352996826174, 0.06073241424560547, 0.06075392150878906, 0.060655616760253904, 0.06082252883911133, 0.06071705627441406, 0.060818431854248046, 0.060819454193115234, 0.06071091079711914, 0.06072012710571289, 0.0607723503112793, 0.061059070587158204, 0.06127308654785156, 0.060447742462158206, 0.05913907241821289, 0.05907251358032226, 0.06083174514770508, 0.06084505462646484, 0.06077644729614258, 0.060709888458251954, 0.0603054084777832, 0.059133953094482425, 0.05898342514038086, 0.06038323211669922, 0.06078668975830078, 0.059170848846435545, 0.060978145599365235, 0.06088601684570313, 0.06031568145751953, 0.05907247924804687, 0.05908889770507812]",tokens/s,16.3764561555309,,,,,,,, -4bit-awq-gemm-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,databricks/dbrx-base,databricks/dbrx-base,cuda,0,42,,,True,,,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run - report = scenario.run(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run - self.run_model_loading_tracking(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking - backend.load() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load - self.load_transformers_model() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model - self.load_transformers_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights - self.load_transformers_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained - self.pretrained_model = self.automodel_loader.from_pretrained( - File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4034, in from_pretrained - dispatch_model(model, **device_map_kwargs) - File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 494, in dispatch_model - model.to(device) - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 2905, in to - return super().to(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1174, in to - return self._apply(convert) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply - module._apply(fn) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply - module._apply(fn) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply - module._apply(fn) - [Previous line repeated 3 more times] - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 805, in _apply - param_applied = fn(param) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1160, in convert - return t.to( -torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 1.97 GiB. GPU 0 has a total capacity of 22.18 GiB of which 950.50 MiB is free. Process 170958 has 21.25 GiB memory in use. Of the allocated memory 21.01 GiB is allocated by PyTorch, and 1.15 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) - -",dbrx,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemm-eager,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,Qwen/Qwen1.5-7B,Qwen/Qwen1.5-7B,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.40.2,,0.30.1,,,,1.19.2,,,,0.11.1,,,,,,,,,,,,,,,,,,,,,,,,,,,MB,2628.173824,8389.132288,0.0,7742.685184,7007.145472,s,10,5.8011931152343745,0.5801193115234377,0.0010375594919882804,0.5799924316406251,0.5812619140624999,0.5815937561035156,0.5818592297363281,"[0.5800941772460938, 0.5819255981445313, 0.5788403930664062, 0.5789609375, 0.5798906860351563, 0.5787822265625, 0.57971533203125, 0.5806090698242188, 0.5811865234375, 0.5811881713867187]",tokens/s,441.28853309110576,kWh,6.83268827420694e-06,3.744032605603327e-06,3.3321369249667614e-05,4.3898090129477875e-05,tokens/kWh,5831688.787483131,MB,2628.173824,8389.132288,0.0,7742.685184,7283.985408,s,10,337.06498046875,33.706498046875,0.006009418787724905,33.706857421875,33.7119703125,33.715088671875,33.717583359375006,"[33.70903515625, 33.70971875, 33.7039609375, 33.70134375, 33.71127734375, 33.70996875, 33.71820703125, 33.69992578125, 33.69686328125, 
33.7046796875]",tokens/s,1.869075805869452,kWh,0.0003979287963388143,0.0002180992035304977,0.0019156344121359354,0.0025316624120052473,tokens/kWh,24884.8344476149,,s,629,341.74955133056636,0.5433220211932693,0.06898772267341372,0.5349846801757813,0.5355790161132813,0.5358112670898437,1.1147944775390626,"[0.5351260375976562, 0.535109619140625, 0.534408203125, 0.5347737426757813, 0.5349273681640625, 0.5355181884765625, 0.5343795166015625, 0.5353154296875, 0.5344747314453125, 0.5348731079101563, 0.5341849365234375, 0.5352171630859375, 0.5348259887695312, 0.5350338745117188, 0.5346682739257812, 0.5355919189453126, 0.5350399780273437, 0.535109619140625, 0.53498779296875, 0.5350379638671875, 0.5342330932617188, 0.534898681640625, 0.5348997192382813, 0.535445556640625, 0.5350767822265625, 0.5357158203125, 0.5351649169921875, 0.53486181640625, 0.5343006591796875, 0.5351065673828125, 0.5353646240234375, 0.5352591552734375, 0.53469091796875, 0.5347808227539063, 0.5347430419921875, 0.5353215942382813, 0.5346262817382812, 0.5358970947265626, 0.5353533325195312, 0.535372802734375, 0.5349212646484375, 0.5354249877929688, 0.5347993774414063, 0.5352724609375, 0.5352263793945312, 0.5359462280273437, 0.5349867553710937, 0.5358069458007813, 0.5356390380859375, 0.5357721557617188, 0.5345842895507813, 0.5350502319335938, 0.5345679321289063, 0.5353901977539063, 0.534408203125, 0.5348423461914062, 0.5349427490234375, 0.5352744750976562, 0.5348515625, 0.5350000610351563, 0.5347225341796875, 0.5350891723632812, 1.117781005859375, 0.5343908081054688, 0.5352744750976562, 0.5350615234375, 0.5358694458007812, 0.5350860595703125, 0.5349007568359375, 0.5344706420898437, 0.5346253051757812, 0.534213623046875, 0.5346007080078125, 0.534765625, 0.5350972900390625, 0.5347195434570312, 0.5356298217773438, 0.5349805908203125, 0.5357711181640625, 0.5348290405273437, 0.5349857177734375, 0.5353421020507813, 0.5351044921875, 0.5350942993164063, 0.5355233154296875, 0.5351280517578125, 0.536111083984375, 0.53477783203125, 0.5354240112304688, 0.5351700439453125, 0.5359093627929687, 0.5350686645507813, 0.5352868041992187, 0.5346743774414062, 0.534993896484375, 0.534455322265625, 0.5347471313476563, 0.5344000244140625, 0.5349601440429688, 0.5342822265625, 0.5353840942382813, 0.5345341186523438, 0.5348598022460938, 0.5351505737304687, 0.5354915771484375, 0.5350184936523438, 0.535066650390625, 0.5345361328125, 0.53806591796875, 0.5347102661132812, 0.535362548828125, 0.5352243041992187, 0.5350154418945312, 0.5345475463867188, 0.5348187255859375, 0.5344368896484375, 0.5351710815429688, 0.5345422973632813, 0.5351874389648438, 0.5350225830078125, 0.5353594970703125, 0.5346467895507813, 0.53593701171875, 0.5349805908203125, 0.535109619140625, 1.11461376953125, 0.5346682739257812, 0.5355612182617188, 0.535363525390625, 0.5350051879882812, 0.5345147094726562, 0.5349580688476563, 0.5345648803710937, 0.5349765014648438, 0.5342883911132813, 0.534782958984375, 0.5348720703125, 0.5348485107421875, 0.5344696044921875, 0.5351680297851562, 0.535014404296875, 0.5347891235351563, 0.5343958740234375, 0.534729736328125, 0.5343323974609375, 0.5347850341796875, 0.534245361328125, 0.5351393432617187, 0.5362525024414062, 0.53484130859375, 0.5343948974609375, 0.53480859375, 0.5345147094726562, 0.535352294921875, 0.5345781860351563, 0.5349120483398437, 0.5347604370117187, 0.5352069091796875, 0.5347593994140625, 0.5351116943359375, 0.5349611206054687, 0.5350625, 0.5345709838867188, 0.535088134765625, 0.5350850830078125, 0.535235595703125, 0.53505126953125, 
0.5352868041992187, 0.5351475219726562, 0.5351701049804688, 0.535184326171875, 0.5351956176757813, 0.536447998046875, 0.5354301147460937, 0.53513427734375, 0.5351146850585937, 0.5348946533203125, 0.5352826538085937, 0.5349775390625, 0.5352734985351563, 0.5348341674804687, 0.5351444702148438, 0.5347440185546875, 0.53526220703125, 0.5349683227539063, 0.5353143920898438, 0.5344860229492188, 0.534940673828125, 1.1146219482421875, 0.5341777954101562, 0.5348720703125, 0.5346262817382812, 0.5350717163085937, 0.5346693725585937, 0.5351198120117188, 0.5349754638671875, 0.53484033203125, 0.5343754272460938, 0.5346856689453126, 0.534470703125, 0.5363936767578125, 0.5344901123046875, 0.5354598388671875, 0.5345167236328126, 0.535103515625, 0.5345525512695313, 0.5347041015625, 0.5342689208984375, 0.53492431640625, 0.5343795166015625, 0.5349335327148438, 0.5343723754882812, 0.53469287109375, 0.5351454467773438, 0.5353369750976562, 0.5347051391601563, 0.5347532958984375, 0.5346580200195312, 0.5352058715820313, 0.5351024780273438, 0.5352191772460938, 0.53522021484375, 0.5348075561523438, 0.534297607421875, 0.534724609375, 0.5345833129882812, 0.5349539794921875, 0.5344808959960937, 0.53492529296875, 0.5351454467773438, 0.5350830078125, 0.5344010009765625, 0.5376593627929688, 0.5356707763671875, 0.5352120361328125, 0.5348014526367187, 0.5355078735351563, 0.5344962768554687, 0.5350451049804688, 0.5349385986328125, 0.5351188354492188, 0.5344829711914062, 0.5351536865234375, 0.5346395874023437, 0.5351792602539063, 0.534635498046875, 0.5352366333007812, 0.534640625, 0.5355222778320312, 0.53492431640625, 0.5352960205078126, 1.1157667236328126, 0.5344829711914062, 0.5347286987304688, 0.53443994140625, 0.5350502319335938, 0.5349959716796875, 0.5357506713867187, 0.5345075073242187, 0.5348935546875, 0.5344307250976562, 0.5347952880859375, 0.5344050903320312, 0.5348792114257812, 0.5342566528320313, 0.5350963745117188, 0.5344285888671875, 0.534782958984375, 0.5359862060546875, 0.5356195678710938, 0.53477783203125, 0.5348853759765625, 0.534413330078125, 0.5349529418945312, 0.5347205200195313, 0.5349427490234375, 0.5349754638671875, 0.5349099731445313, 0.5344389038085937, 0.5352949829101562, 0.5348628540039062, 0.5350799560546875, 0.5344368896484375, 0.5352714233398438, 0.5348720703125, 0.5350604858398438, 0.534603759765625, 0.5353707275390625, 0.5352437744140625, 0.535731201171875, 0.5351393432617187, 0.535103515625, 0.5343733520507813, 0.534950927734375, 0.53507275390625, 0.535462890625, 0.5348690185546875, 0.5352427368164062, 0.5344849853515625, 0.5353912353515625, 0.5345740966796875, 0.5349765014648438, 0.5348229370117188, 0.5356441650390625, 0.5349171142578125, 0.5356072998046875, 0.5357066040039062, 0.5356267700195313, 0.5386843872070313, 0.535677978515625, 0.53519970703125, 0.5360302124023437, 0.5351813354492188, 0.5363681030273437, 1.114861572265625, 0.534814697265625, 0.5358141479492188, 0.5348894653320313, 0.535541748046875, 0.5344706420898437, 0.5349130249023437, 0.5350021362304688, 0.5350850830078125, 0.534813720703125, 0.5355775756835938, 0.5344389038085937, 0.53571484375, 0.5351659545898437, 0.5355847778320313, 0.53517822265625, 0.5354332275390625, 0.53538818359375, 0.5351044921875, 0.5344112548828125, 0.5350000610351563, 0.5343201293945312, 0.5348331298828125, 0.5342853393554687, 0.5347276611328124, 0.5348331298828125, 0.534877197265625, 0.534382568359375, 0.5348782348632812, 0.5349212036132812, 0.5350440673828125, 0.5346990356445313, 0.5352007446289062, 0.5345945434570313, 0.5349151000976563, 
0.5347184448242187, 0.5349703979492187, 0.5343866577148437, 0.5375672607421875, 0.5345842895507813, 0.535604248046875, 0.5345730590820312, 0.5350553588867187, 0.5347706909179688, 0.5352007446289062, 0.5347727661132813, 0.535562255859375, 0.5352611694335937, 0.5354332275390625, 0.5348392944335938, 0.5353809814453125, 0.5349007568359375, 0.5352796020507813, 0.5347973022460938, 0.535889892578125, 0.535098388671875, 0.5354475708007812, 0.5351219482421875, 0.5354977416992187, 0.5351229248046875, 0.5353697509765625, 0.5347573852539063, 0.5351884765625, 1.11545751953125, 0.534709228515625, 0.5355346069335938, 0.5345218505859375, 0.53505126953125, 0.5347532958984375, 0.5353421020507813, 0.5347676391601562, 0.535130126953125, 0.53502978515625, 0.5353748779296875, 0.5347379150390625, 0.53549462890625, 0.5350656127929687, 0.53538818359375, 0.5347123413085938, 0.53515673828125, 0.5347584228515625, 0.5352509155273437, 0.5351055297851562, 0.5358919677734375, 0.5351751708984375, 0.535287841796875, 0.5365626831054687, 0.5354188842773437, 0.5353696899414062, 0.5352734985351563, 0.5352540283203125, 0.5353543701171875, 0.5349550170898437, 0.5355130615234375, 0.5348792114257812, 0.535568359375, 0.5348311767578126, 0.5353768310546875, 0.5346519165039062, 0.53517822265625, 0.5345648803710937, 0.5352017822265625, 0.535309326171875, 0.5357240600585937, 0.5350317993164062, 0.5355181884765625, 0.5348699951171875, 0.5352345581054687, 0.5348505859375, 0.5359411010742188, 0.5351085815429687, 0.5355888671875, 0.5350133666992187, 0.5354229736328125, 0.5347727661132813, 0.5353400268554688, 0.5348945922851562, 0.5355601806640625, 0.5356461791992188, 0.5350768432617188, 0.5353328857421875, 0.5356503295898437, 0.5349284057617187, 0.53524169921875, 0.53501953125, 0.5352315063476563, 1.1161282958984375, 0.5346826171875, 0.53496630859375, 0.5344050903320312, 0.5346508178710937, 0.5344050903320312, 0.5348168334960938, 0.5343477172851563, 0.5350819702148437, 0.5347593994140625, 0.5352212524414063, 0.534572021484375, 0.5355878295898437, 0.53490380859375, 0.5348966674804687, 0.5346734008789062, 0.5364859008789062, 0.53452392578125, 0.5349498901367188, 0.5348843383789063, 0.5350963134765625, 0.5344942016601563, 0.5349078979492188, 0.5345894165039062, 0.5349498901367188, 0.5344747314453125, 0.53500927734375, 0.5347010498046875, 0.535119873046875, 0.5345147094726562, 0.53530419921875, 0.5354342651367188, 0.535141357421875, 0.5348782348632812, 0.5348505859375, 0.5344890747070312, 0.5351444702148438, 0.53507275390625, 0.535056396484375, 0.5347092895507812, 0.5349498291015625, 0.534508544921875, 0.5350236206054687, 0.5345904541015625, 0.535593994140625, 0.5347225341796875, 0.5352581176757812, 0.5349776000976563, 0.5351546020507812, 0.5346416625976562, 0.5357752075195312, 0.5347604370117187, 0.5349846801757813, 0.5344286499023437, 0.5350154418945312, 0.5346324462890625, 0.5349918823242188, 0.5349908447265626, 0.5351976928710938, 0.5346375732421875, 0.5349151000976563, 0.5348065185546875, 0.5351188354492188, 1.1169342041015624, 0.534445068359375, 0.5351669921875, 0.5345443725585938, 0.53528369140625, 0.534782958984375, 0.5349293823242187, 0.5344174194335938, 0.5348372192382812, 0.5343057861328125, 0.53471435546875, 0.534593505859375, 0.5350717163085937, 0.5345064697265625, 0.5349273681640625, 0.5345054931640625, 0.5349867553710937, 0.535751708984375, 0.5350215454101562, 0.5345771484375, 0.535593994140625, 0.534761474609375, 0.5349488525390625, 0.5347625122070313, 0.5349222412109375, 0.5344276733398438, 0.53481982421875, 
0.5344440307617188, 0.5348536376953125, 0.5344583740234375, 0.5348925170898438, 0.5345833129882812, 0.5350532836914063, 0.5344010009765625, 0.5347758178710937, 0.5343866577148437, 0.5347593994140625, 0.534382568359375, 0.5348362426757812, 0.53475634765625, 0.5349376831054687, 0.5345422973632813, 0.5351393432617187, 0.53452490234375, 0.5351270141601563, 0.5344603881835938, 0.5351065673828125, 0.5345259399414063, 0.5350799560546875, 0.5351802978515625, 0.5355919189453126, 0.5348382568359376, 0.5353246459960938, 0.5350021362304688, 0.535816162109375, 0.5346324462890625, 0.5352212524414063, 0.5346836547851562, 0.5353799438476563, 0.5346682739257812, 0.5351454467773438, 0.5346416625976562, 0.53532568359375, 1.116949462890625, 0.5343743896484375, 0.5350113525390625, 0.5348218994140626, 0.5349816284179687, 0.5344235229492188, 0.5348731079101563, 0.534445068359375, 0.534877197265625, 0.53441943359375, 0.5347359008789062, 0.5345535888671875, 0.5347891235351563, 0.5345515747070313, 0.5351004028320312, 0.53458740234375, 0.5349119873046875, 0.5347153930664063, 0.5349099731445313, 0.5344542846679687, 0.5348812866210938, 0.5343672485351563, 0.5352898559570313, 0.5349918823242188, 0.5351209106445313, 0.5350277099609375, 0.5352345581054687, 0.5350748291015625, 0.5352857666015625, 0.5347102661132812, 0.5349519653320313, 0.5349539794921875, 0.5350645751953125, 0.534740966796875, 0.5353809814453125, 0.5346959228515625, 0.5358069458007813, 0.53460888671875, 0.5353052368164063, 0.5350338745117188, 0.5352212524414063, 0.53507275390625, 0.5351802978515625, 0.5347666015625, 0.5349642333984375, 0.53477783203125, 0.535235595703125, 0.5350543212890625, 0.5352120361328125, 0.5350082397460938, 0.5354352416992187, 0.5352120361328125, 0.5354669799804688, 0.535140380859375, 0.535362548828125, 0.534782958984375, 0.5354803466796875, 0.5349232788085938, 0.5352908935546875, 0.535625732421875, 0.5352755126953125, 0.53500732421875, 0.5353860473632812]",tokens/s,1.8405291171591998,,,,,,,, -4bit-awq-gemm-eager,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,Qwen/Qwen2-beta-14B,Qwen/Qwen2-beta-14B,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.0,,0.30.1,,,,1.19.2,,,,0.11.1,,,,,,,,,,,,,,,,,,,,,,,,,,,MB,3905.671168,12732.33408,0.0,12085.886976,11337.501696,s,10,10.970613281250001,1.097061328125,0.0017840508518984434,1.097439453125,1.0989015869140624,1.0993332397460938,1.0996785620117187,"[1.0981041259765625, 1.099764892578125, 1.0942474365234376, 1.09466943359375, 1.09612890625, 1.0954654541015625, 1.0967747802734376, 1.0988056640625, 1.098486328125, 1.098166259765625]",tokens/s,233.35067369253872,kWh,1.2926500191291174e-05,7.083296092205274e-06,6.255157781898113e-05,8.256137410247758e-05,tokens/kWh,3100723.5863376665,MB,3909.746688,12732.33408,0.0,12085.886976,11686.806016,s,10,636.987296875,63.6987296875,0.008792231338471245,63.69969140625,63.70640390625,63.70996171875,63.71280796875,"[63.682359375, 63.687671875, 63.70561328125, 63.70497265625, 63.69943359375, 63.69994921875, 63.70396484375, 63.71351953125, 63.6979921875, 
63.6918203125]",tokens/s,0.9890307123716925,kWh,0.0007521740069488685,0.00041225789324771534,0.003629864737222821,0.0047942966374194046,tokens/kWh,13140.613684244328,,s,629,645.8482131958002,1.0267857125529425,0.13035148276579694,1.0110596313476563,1.01170830078125,1.0120460571289063,2.107026708984375,"[1.0111242065429686, 1.0104995727539063, 1.0103193359375, 1.0103952026367187, 1.0104954223632812, 1.0104678955078126, 1.0100704956054687, 1.0106204223632813, 1.0101810913085938, 1.0110658569335937, 1.0104442749023437, 1.0103756713867187, 1.0101524658203125, 1.0103828735351563, 1.0101749877929687, 1.0103490600585938, 1.0100950927734376, 1.0109429931640626, 1.0103602905273437, 1.0109215698242187, 1.0110667724609375, 1.0112665405273438, 1.0110863647460937, 1.01104638671875, 1.0108724365234374, 1.0105343017578126, 1.0103214111328125, 1.010872314453125, 1.0106912841796876, 1.0113380737304687, 1.01102490234375, 1.011188720703125, 1.0115625, 1.0118072509765625, 1.011420166015625, 1.0109522094726562, 1.010282470703125, 1.0109522094726562, 1.0106019897460938, 1.0113290405273438, 1.0105897216796875, 1.0110146484375, 1.0105538330078125, 1.0109419555664063, 1.0105497436523438, 1.0111878051757812, 1.0105476684570311, 1.0111273193359376, 1.0112327880859375, 1.0111181640625, 1.0113422241210936, 1.0108223266601561, 1.0107902221679688, 1.0107567749023438, 1.0107431030273437, 1.0107125854492187, 1.0108497924804687, 1.0110812377929688, 1.0113054809570312, 1.011472412109375, 1.010951171875, 1.011072998046875, 2.110793701171875, 1.0105712890625, 1.0109173583984374, 1.0111078491210939, 1.0105538940429688, 1.0114539184570313, 1.0104873046875, 1.0103736572265625, 1.01060302734375, 1.0104473876953124, 1.0103705444335938, 1.0103173217773438, 1.0103797607421876, 1.0106593017578125, 1.0105569458007813, 1.0109276123046875, 1.0109112548828125, 1.0105016479492188, 1.0110904541015624, 1.0104453125, 1.0106286010742187, 1.0105743408203125, 1.0106060791015625, 1.0110525512695312, 1.0107782592773438, 1.0118408813476563, 1.0113218383789062, 1.0111395874023437, 1.0107064208984375, 1.010524169921875, 1.0105466918945312, 1.0102415161132812, 1.010682861328125, 1.0110791625976563, 1.0108632202148438, 1.01125830078125, 1.0108221435546876, 1.0107473754882812, 1.0111160278320312, 1.0105282592773437, 1.0108026733398439, 1.0104678344726563, 1.0108098754882813, 1.0106849365234376, 1.0112696533203125, 1.0114283447265624, 1.01136181640625, 1.01125634765625, 1.0113484497070313, 1.0112225341796874, 1.0113013916015625, 1.0110914306640626, 1.0109788208007813, 1.011293212890625, 1.0114918212890625, 1.0115665893554688, 1.0111375122070312, 1.0111918334960937, 1.0110986328125, 1.0106838989257811, 1.011178466796875, 1.01096142578125, 1.0111539306640625, 2.106599365234375, 1.0114857177734375, 1.0112235717773437, 1.011198974609375, 1.01125732421875, 1.010946044921875, 1.0106123657226562, 1.010948974609375, 1.0108016357421874, 1.0105702514648438, 1.011140625, 1.0107391967773438, 1.0105272216796874, 1.010386962890625, 1.0104515991210938, 1.010242431640625, 1.0104063720703125, 1.010171875, 1.0105518188476563, 1.0103695068359375, 1.01096142578125, 1.0108364868164061, 1.0114160766601563, 1.0112081909179687, 1.0110771484375, 1.01035107421875, 1.0108743896484376, 1.01146728515625, 1.0117959594726562, 1.0120355834960937, 1.012463623046875, 1.0119403686523438, 1.01194140625, 1.0119802856445312, 1.011323974609375, 1.0110104370117188, 1.0110330810546875, 1.0113382568359375, 1.0116249389648437, 1.0108795166015625, 1.01194140625, 1.0112030639648437, 
1.0113484497070313, 1.011093505859375, 1.0115000610351563, 1.0112604370117189, 1.0109788208007813, 1.0104954833984374, 1.011493896484375, 1.01127783203125, 1.0117058715820313, 1.0108528442382811, 1.0114898071289062, 1.0111221923828124, 1.0114129638671876, 1.011178466796875, 1.0114631958007811, 1.0113863525390625, 1.0115481567382814, 1.01125732421875, 1.0124105834960937, 1.0113810424804688, 1.0117324829101562, 2.10709814453125, 1.0106634521484374, 1.0110371704101562, 1.0109081420898438, 1.0120171508789062, 1.0121226196289062, 1.011262451171875, 1.0106112060546875, 1.0108251953125, 1.0108651733398437, 1.0108108520507812, 1.0108057861328126, 1.0110238647460938, 1.0108446655273438, 1.01113037109375, 1.0110238647460938, 1.0114150390625, 1.01075146484375, 1.0109603881835938, 1.0105835571289064, 1.010745361328125, 1.0105211181640625, 1.0108262329101563, 1.01054052734375, 1.0108231811523438, 1.0112245483398437, 1.0115286865234374, 1.0111610717773438, 1.0113402709960937, 1.0108292846679687, 1.0113024291992188, 1.0110156860351562, 1.011251220703125, 1.0109337768554687, 1.0112122802734376, 1.0112041015625, 1.0106275634765625, 1.0104483642578126, 1.0109224853515626, 1.0123694458007813, 1.0121512451171875, 1.0115277099609374, 1.011831787109375, 1.0113003540039063, 1.0119905395507813, 1.0121932983398438, 1.01144677734375, 1.0107801513671875, 1.011119140625, 1.0112194213867187, 1.0110924682617188, 1.0110105590820313, 1.0112767944335936, 1.0110873413085937, 1.0113557739257812, 1.0116627807617187, 1.0114488525390626, 1.01161474609375, 1.0121482543945313, 1.0110945434570313, 1.0111918334960937, 1.0114703369140625, 1.011420166015625, 2.107243408203125, 1.011304443359375, 1.0116792602539062, 1.0110904541015624, 1.0111897583007812, 1.0108170166015624, 1.0107166748046874, 1.010555908203125, 1.0108446655273438, 1.0105784301757812, 1.0112839965820313, 1.0110791625976563, 1.011198974609375, 1.0106951904296875, 1.0111498413085938, 1.0108477172851562, 1.0110279541015625, 1.0106736450195313, 1.01078125, 1.0110596313476563, 1.0108917846679688, 1.0110576782226564, 1.0108016357421874, 1.01058251953125, 1.0113638305664063, 1.01103515625, 1.0112214965820312, 1.0109030151367187, 1.0114221801757812, 1.01172021484375, 1.011646484375, 1.0109634399414062, 1.0110392456054687, 1.01072998046875, 1.0113239135742187, 1.0107545776367188, 1.0113812255859376, 1.0107801513671875, 1.0113116455078126, 1.011267578125, 1.0112327880859375, 1.0109522094726562, 1.0110904541015624, 1.0106736450195313, 1.0117355346679688, 1.0115389404296875, 1.0110474243164063, 1.0108436279296875, 1.01097265625, 1.0110955810546876, 1.011800048828125, 1.0115277099609374, 1.0110167236328125, 1.0107422485351563, 1.0111610717773438, 1.0109552612304689, 1.01119384765625, 1.0112645263671876, 1.0109450073242188, 1.0111027221679687, 1.0116802368164062, 1.0114508666992188, 1.0115563354492187, 2.106843017578125, 1.01136181640625, 1.0113515625, 1.0108784790039063, 1.0107105102539062, 1.0108948364257813, 1.0118154296875, 1.0109542236328124, 1.0106787719726562, 1.0108887329101564, 1.0112225341796874, 1.0112604370117189, 1.0115020751953125, 1.010966552734375, 1.0110965576171875, 1.0111488037109375, 1.0121779174804688, 1.01123583984375, 1.0113885498046875, 1.011718017578125, 1.0113597412109374, 1.0111610717773438, 1.0113341674804688, 1.0110709838867187, 1.01110986328125, 1.0114406127929687, 1.0113106079101561, 1.0103971557617188, 1.0105989379882812, 1.0106224365234375, 1.010735107421875, 1.0104473876953124, 1.010703369140625, 1.0104063720703125, 1.01077099609375, 
1.0111979370117188, 1.01096240234375, 1.0108262329101563, 1.011472412109375, 1.011409912109375, 1.0115133666992187, 1.0109255981445313, 1.0116690063476563, 1.0109921264648438, 1.0113484497070313, 1.011146728515625, 1.0115604248046874, 1.0110126342773438, 1.0111477661132813, 1.0111273193359376, 1.01097265625, 1.0108907470703126, 1.0107822265625, 1.0106972045898437, 1.0111293334960938, 1.011146728515625, 1.0112236938476562, 1.0110370483398436, 1.0111190795898437, 1.0111826171875, 1.01117236328125, 1.0109869995117187, 1.0112276611328126, 2.108168212890625, 1.0113248901367187, 1.0116497192382812, 1.0111066284179688, 1.0111590576171876, 1.010862060546875, 1.0108671875, 1.0107658081054687, 1.0111181030273437, 1.010798583984375, 1.0113054809570312, 1.0110842895507812, 1.01054052734375, 1.0102753295898437, 1.0105231323242188, 1.0106941528320312, 1.0107473754882812, 1.0110699462890624, 1.01083544921875, 1.0106234741210938, 1.0105692138671876, 1.0113546142578125, 1.0107955322265625, 1.01106689453125, 1.011330078125, 1.0107811889648437, 1.010820068359375, 1.0105374755859375, 1.0116658935546874, 1.011794921875, 1.0121381225585937, 1.0115756225585937, 1.0113126220703126, 1.0110075073242188, 1.0110658569335937, 1.01071875, 1.0106624145507812, 1.0105538330078125, 1.0108600463867188, 1.0104791259765624, 1.0111047973632812, 1.010713623046875, 1.0108712768554688, 1.0103910522460937, 1.0111826171875, 1.0108528442382811, 1.0117273559570312, 1.0112481079101563, 1.0113095703125, 1.0112542724609375, 1.0117642211914062, 1.0118338623046874, 1.0117969970703125, 1.0115205078125, 1.0120662841796875, 1.0122158203125, 1.011726318359375, 1.0116546630859375, 1.011726318359375, 1.0113873901367187, 1.0115338134765626, 1.0122117309570313, 1.0120550537109374, 2.11171728515625, 1.0110648193359375, 1.0112655639648438, 1.011040283203125, 1.0112337646484375, 1.011103759765625, 1.0117447509765625, 1.011694580078125, 1.0115451049804687, 1.0108006591796874, 1.0108999633789062, 1.0106654663085937, 1.0113208618164062, 1.0110341186523437, 1.0105753784179687, 1.0110699462890624, 1.0127493286132812, 1.0114774780273437, 1.0117990112304687, 1.0114754638671875, 1.01243701171875, 1.0126602172851562, 1.0127821044921874, 1.012316162109375, 1.0116639404296874, 1.0120989990234375, 1.0121307983398438, 1.0107197265625, 1.0107975463867187, 1.0107320556640624, 1.0111702880859375, 1.0104524536132813, 1.0106388549804688, 1.01058251953125, 1.0108671875, 1.0108385009765626, 1.0115369262695313, 1.0105947875976562, 1.010924560546875, 1.0109214477539064, 1.0113648681640626, 1.0106675415039061, 1.0112532348632812, 1.0106183471679688, 1.0113659057617188, 1.0111826171875, 1.0120530395507812, 1.0109584350585938, 1.0110103759765625, 1.0113085327148437, 1.0110975952148438, 1.0110146484375, 1.01137109375, 1.0108394775390626, 1.0119804077148438, 1.01148046875, 1.0113474731445313, 1.0114826049804688, 1.0118276977539062, 1.0114006958007813, 1.011209228515625, 1.0110105590820313, 1.0114006958007813, 2.110498779296875, 1.0119219360351563, 1.01136181640625, 1.0108313598632812, 1.0110064697265626, 1.0115112915039062, 1.0112849731445313, 1.011282958984375, 1.0111334228515625, 1.01097265625, 1.011146728515625, 1.0113351440429688, 1.0111365356445312, 1.010808837890625, 1.010713623046875, 1.0107698974609376, 1.0108856201171874, 1.0107218017578126, 1.01076171875, 1.0107422485351563, 1.0111477661132813, 1.011072998046875, 1.01098291015625, 1.0104760131835937, 1.0106972045898437, 1.01064501953125, 1.0107012939453126, 1.010724853515625, 1.0107740478515626, 1.0106009521484376, 
1.010820068359375, 1.0108784790039063, 1.0107863159179689, 1.0108590698242188, 1.01104736328125, 1.0110914306640626, 1.0109132690429687, 1.0108549194335938, 1.0113065795898437, 1.0108507690429687, 1.0111047973632812, 1.0121011352539062, 1.0114979858398438, 1.0112440185546876, 1.0114396362304687, 1.0111139526367188, 1.0110658569335937, 1.01098291015625, 1.011177490234375, 1.0114918212890625, 1.0113802490234376, 1.0121666259765625, 1.0110453491210938, 1.0111324462890625, 1.0114744262695312, 1.0106736450195313, 1.0109685668945312, 1.0110310668945313, 1.0110238647460938, 1.0108549194335938, 1.0109911499023438, 1.0115542602539063, 1.010951171875, 2.10921875, 1.0104708862304688, 1.0106941528320312, 1.0109368286132812, 1.0107924194335938, 1.0110361328125, 1.0113505249023438, 1.0111631469726563, 1.010882568359375, 1.011051513671875, 1.0112184448242187, 1.0114017333984375, 1.0117243041992188, 1.0111027221679687, 1.0108477172851562, 1.0110914306640626, 1.0114437255859374, 1.01085595703125, 1.0109983520507813, 1.0107094116210937, 1.0110496215820313, 1.010912109375, 1.0108661499023437, 1.010777099609375, 1.0107576904296875, 1.0106285400390624, 1.0111324462890625, 1.011009521484375, 1.01085693359375, 1.0107545776367188, 1.0111273193359376, 1.010572265625, 1.0108016357421874, 1.0107443237304687, 1.0111344604492187, 1.010861083984375, 1.0115419921875, 1.0107218017578126, 1.0108344116210937, 1.0103900146484375, 1.0109389038085939, 1.0106685180664063, 1.0109644775390625, 1.0104258422851562, 1.0109030151367187, 1.0110167236328125, 1.0119915771484376, 1.011103759765625, 1.0113802490234376, 1.0111365356445312, 1.0113638305664063, 1.0108446655273438, 1.0109859619140624, 1.01060400390625, 1.0110167236328125, 1.0108129272460937, 1.0115399780273437, 1.0108231811523438, 1.0112481079101563, 1.0108765869140626, 1.0110595703125, 1.0108630981445312, 1.0109900512695313]",tokens/s,0.9739130451218064,,,,,,,, -4bit-awq-gemm-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,a,a,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 304, in hf_raise_for_status - response.raise_for_status() - File ""/usr/local/lib/python3.10/dist-packages/requests/models.py"", line 1024, in raise_for_status - raise HTTPError(http_error_msg, response=self) -requests.exceptions.HTTPError: 404 Client Error: Not Found for url: https://huggingface.co/a/resolve/main/config.json - -The above exception was the direct cause of the following 
exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1221, in hf_hub_download - return _hf_hub_download_to_cache_dir( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1325, in _hf_hub_download_to_cache_dir - _raise_on_head_call_error(head_call_error, force_download, local_files_only) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1823, in _raise_on_head_call_error - raise head_call_error - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1722, in _get_metadata_or_catch_error - metadata = get_hf_file_metadata(url=url, proxies=proxies, timeout=etag_timeout, headers=headers) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1645, in get_hf_file_metadata - r = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 372, in _request_wrapper - response = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 396, in _request_wrapper - hf_raise_for_status(response) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status - raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-66949147-39661df06dc372185a19cac3;64568921-0650-4593-88c7-3037a39950ba) - -Repository Not Found for url: https://huggingface.co/a/resolve/main/config.json. -Please make sure you specified the correct `repo_id` and `repo_type`. -If you are trying to access a private or gated repo, make sure you are authenticated. 
- -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 425, in cached_file - raise EnvironmentError( -OSError: a is not a local folder and is not a valid model identifier listed on 'https://huggingface.co/models' -If this is a private repository, make sure to pass a token having permission to this repo either by logging in with `huggingface-cli login` or by passing `token=` - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemm-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,-,-,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 106, in _inner_fn - validate_repo_id(arg_value) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 160, in validate_repo_id - raise HFValidationError( 
-huggingface_hub.errors.HFValidationError: Repo id must use alphanumeric chars or '-', '_', '.', '--' and '..' are forbidden, '-' and '.' cannot start or end the name, max length is 96: '-'. - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 466, in cached_file - raise EnvironmentError( -OSError: Incorrect path_or_model_id: '-'. Please provide either the path to a local folder or the repo_id of a model on the Hub. 
- -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemm-eager,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,facebook/opt-350m,facebook/opt-350m,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.0,,0.30.1,,,,1.19.2,,,,0.11.1,,,,,,,,,,,,,,,,,,,,,,,,,,,MB,1308.95872,1044.905984,0.0,398.45888,290.479104,s,10,0.7298112030029297,0.07298112030029298,0.0019203205238493336,0.07210595321655273,0.07568494338989258,0.07613730201721192,0.07649918891906739,"[0.07658966064453125, 0.07404208374023437, 0.0716014404296875, 0.07261046600341797, 0.07435145568847656, 0.07145033264160157, 0.07128550720214843, 0.07136418914794922, 0.07093164825439453, 0.07558441925048828]",tokens/s,3507.7565121862385,kWh,8.692573962940111e-07,4.763112833442766e-07,2.233504646279191e-06,3.579073325917479e-06,tokens/kWh,71526894.44672821,MB,1309.261824,1044.905984,0.0,398.45888,337.28256,s,10,45.10692724609375,4.510692724609375,0.02785183557667033,4.507988037109375,4.524690771484375,4.5543905517578125,4.578150375976563,"[4.4999814453125, 4.51792333984375, 4.49663525390625, 4.50728076171875, 4.5086953125, 4.47609033203125, 4.5180908203125, 4.4837802734375, 4.514359375, 4.58409033203125]",tokens/s,13.966812604255988,kWh,5.335015537424221e-05,2.9239079429191457e-05,0.0001294696562717161,0.00021205889107514978,tokens/kWh,297087.2840114681,,s,629,45.688359863281214,0.07263650216737877,0.008731184173797845,0.07209983825683594,0.0731078628540039,0.07340298156738281,0.14137104125976563,"[0.07185408020019532, 0.07194009399414063, 0.07217356872558593, 0.07269068908691406, 0.07360717010498047, 0.0735498275756836, 0.07288934326171875, 0.07259142303466797, 0.07237420654296875, 0.07188172912597657, 0.07213568115234376, 0.0719452133178711, 0.07213875579833984, 0.07207730865478515, 0.07207014465332032, 0.0719257583618164, 0.06970162963867188, 0.06967910766601562, 0.0696995849609375, 0.07190425872802735, 0.07216028594970703, 0.07225545501708984, 0.07087206268310547, 0.06890290832519531, 0.06927776336669922, 0.06949574279785156, 0.06937395477294922, 0.06961663818359375, 0.06992694091796875, 0.07223088073730469, 0.07267635345458984, 0.07262620544433594, 0.07189091491699219, 0.07197593688964844, 0.07230976104736328, 0.07216025543212891, 0.07246540832519531, 0.07132262420654296, 0.07190016174316406, 0.07246745300292969, 0.07053619384765625, 0.06935552215576171, 0.06963097381591797, 0.07147932434082031, 0.06940361785888671, 0.0716410903930664, 0.07243264007568359, 0.07219302368164063, 0.07221145629882812, 0.07042355346679688, 0.06942002868652344, 0.06980608367919922, 0.06986752319335937, 0.07069696044921875, 0.0722165756225586, 0.0733306884765625, 0.07286374664306641, 0.07210086059570313, 0.07077069091796875, 0.07022796630859375, 0.07062940979003907, 0.07200457763671875, 0.14657945251464843, 0.07262617492675781, 0.07222579193115235, 0.07230054473876953, 0.07205580902099609, 0.07209779357910157, 0.07216537475585938, 0.07003648376464844, 0.07229849243164063, 0.07284019470214843, 0.07227597045898437, 0.07243981170654297, 
0.07234457397460937, 0.07199948883056641, 0.07222169494628906, 0.07268147277832031, 0.07375667572021484, 0.07214591979980468, 0.07202201843261719, 0.07222271728515625, 0.07206604766845703, 0.0720025634765625, 0.07187251281738281, 0.0722913589477539, 0.072248291015625, 0.07194624328613282, 0.07201999664306641, 0.07205987548828124, 0.07217356872558593, 0.0712837142944336, 0.06940569305419922, 0.06932991790771484, 0.06942310333251953, 0.06952140808105468, 0.06934835052490235, 0.06939238739013671, 0.06940774536132813, 0.06954188537597657, 0.06909951782226563, 0.0694824981689453, 0.06964530944824218, 0.06932991790771484, 0.07233843231201172, 0.07286271667480469, 0.06985113525390625, 0.06934528350830078, 0.07116902160644531, 0.0729722900390625, 0.0716072998046875, 0.07257907104492188, 0.0729917449951172, 0.07322112274169922, 0.07285657501220703, 0.07319859313964844, 0.07252787017822265, 0.07272038269042969, 0.07294668579101563, 0.0729886703491211, 0.07285350036621094, 0.07272959899902344, 0.07295283508300782, 0.07268556976318359, 0.07238143920898438, 0.1415720977783203, 0.0693934097290039, 0.06967814636230468, 0.06940972900390625, 0.06931148529052734, 0.06929817962646484, 0.06965049743652343, 0.0701173095703125, 0.07237427520751953, 0.07200054168701171, 0.07237423706054688, 0.07208857727050781, 0.07215309143066406, 0.07220838165283203, 0.07202713775634766, 0.0719810562133789, 0.07221862030029297, 0.07206092834472656, 0.07223705291748046, 0.07211007690429687, 0.07236300659179687, 0.07231590270996094, 0.07095603179931641, 0.0694814682006836, 0.06937395477294922, 0.06936780548095703, 0.07176601409912109, 0.07184486389160157, 0.0720343017578125, 0.07231897735595703, 0.07199129486083984, 0.07193299102783203, 0.0723834228515625, 0.07228825378417969, 0.07213260650634766, 0.07228108978271484, 0.07208448028564453, 0.07200153350830078, 0.07205990600585938, 0.07209983825683594, 0.07197491455078125, 0.07196057891845703, 0.0720547866821289, 0.073133056640625, 0.07183769226074219, 0.07257807922363281, 0.0721899185180664, 0.07195340728759765, 0.0722677764892578, 0.07214284515380859, 0.07201487731933594, 0.06975177764892578, 0.06966681671142579, 0.06874931335449219, 0.06921011352539062, 0.06935346984863282, 0.06987059020996093, 0.07226265716552735, 0.07209164428710937, 0.07197081756591797, 0.07196057891845703, 0.0713338851928711, 0.07061196899414063, 0.1456865234375, 0.0722279052734375, 0.07271212768554687, 0.07285043334960938, 0.0725432357788086, 0.07223500823974609, 0.0720865249633789, 0.07014915466308594, 0.06968726348876954, 0.06959820556640625, 0.06964019012451172, 0.0694681625366211, 0.07034880065917969, 0.07004876708984376, 0.06967193603515626, 0.06940160369873047, 0.07108914947509766, 0.07227597045898437, 0.07151821136474609, 0.06967501068115234, 0.06957772827148437, 0.0698071060180664, 0.07123967742919922, 0.07210086059570313, 0.07223603057861328, 0.07214284515380859, 0.07249510192871093, 0.07254835510253907, 0.07294361877441406, 0.07138508605957031, 0.07067545318603516, 0.07234969329833985, 0.07242546844482421, 0.07169741058349609, 0.07204966735839843, 0.06960543823242188, 0.06955718231201172, 0.07074406433105469, 0.07213772583007813, 0.07239683532714844, 0.07193702697753906, 0.07239676666259766, 0.0722012176513672, 0.07219200134277344, 0.07279411315917969, 0.07149260711669922, 0.07210393524169922, 0.07228211212158203, 0.07218380737304687, 0.07210598754882812, 0.07205171203613281, 0.07216435241699219, 0.07028326416015625, 0.06974668884277344, 0.07239577484130859, 0.07243673706054687, 
0.07223094177246094, 0.07230358123779297, 0.07244390106201172, 0.07284838104248047, 0.07209062194824219, 0.072163330078125, 0.07201894378662109, 0.14699827575683594, 0.07176294708251953, 0.0707747802734375, 0.0720343017578125, 0.0722135009765625, 0.07226268768310547, 0.0720823974609375, 0.07234976196289063, 0.07255648040771484, 0.07198515319824218, 0.07197798156738282, 0.07256678771972656, 0.07222476959228516, 0.07208755493164062, 0.07251971435546875, 0.07147618865966797, 0.06935346984863282, 0.06952550506591797, 0.06944051361083985, 0.06955213165283203, 0.06944255828857422, 0.06934323120117188, 0.07061299133300782, 0.07203533172607422, 0.07238861083984376, 0.07203225708007813, 0.07213568115234376, 0.0720374755859375, 0.07249091339111328, 0.0709017562866211, 0.06946304321289062, 0.07154790496826172, 0.07224217224121093, 0.07243673706054687, 0.07215615844726563, 0.07217459106445312, 0.07221247863769531, 0.07201894378662109, 0.07098880004882813, 0.06991667175292969, 0.07119564819335937, 0.07190630340576172, 0.0721786880493164, 0.07199334716796875, 0.06973235321044922, 0.06972415924072266, 0.06972930908203125, 0.06958694458007812, 0.06966268920898437, 0.07221145629882812, 0.07257907104492188, 0.07218688201904297, 0.07213772583007813, 0.07218380737304687, 0.07208345794677734, 0.07205068969726562, 0.07378431701660157, 0.07249612426757812, 0.07221862030029297, 0.0720650863647461, 0.07227897644042969, 0.07233740997314453, 0.07240601348876953, 0.1438771514892578, 0.07196463775634766, 0.07234867095947266, 0.07229952239990234, 0.07298047637939453, 0.07293030548095703, 0.07180902099609375, 0.07185408020019532, 0.07234047698974609, 0.07194316864013672, 0.07218278503417969, 0.07320985412597657, 0.07245516967773437, 0.07180287933349609, 0.07000985717773438, 0.0729917449951172, 0.07227187347412109, 0.07225958251953125, 0.07234047698974609, 0.07209677124023438, 0.07210393524169922, 0.07231283569335938, 0.07199436950683594, 0.07164825439453125, 0.06944358062744141, 0.0694302749633789, 0.06980812835693359, 0.06952652740478515, 0.06984703826904297, 0.07122022247314454, 0.07212850952148438, 0.06986752319335937, 0.06965248107910156, 0.06977843475341797, 0.06978047943115234, 0.06932173156738282, 0.0697364501953125, 0.0695572509765625, 0.06944153594970703, 0.07092131042480469, 0.07262095642089844, 0.07213362884521485, 0.0723927001953125, 0.0719974365234375, 0.07194422149658203, 0.0721714859008789, 0.07187967681884766, 0.0721244125366211, 0.07195852661132812, 0.07079424285888672, 0.06962790679931641, 0.06972518157958985, 0.06952345275878906, 0.06946918487548828, 0.06943539428710938, 0.06939238739013671, 0.06983372497558593, 0.06957260894775391, 0.06938521575927735, 0.06923776245117187, 0.07017164611816407, 0.06932582092285157, 0.06944461059570313, 0.1413570556640625, 0.06928076934814453, 0.07196774291992188, 0.0723220443725586, 0.07226573181152343, 0.07199231719970703, 0.07206502532958985, 0.07222476959228516, 0.07213875579833984, 0.06940057373046875, 0.06947840118408204, 0.07286784362792968, 0.07213260650634766, 0.0724295654296875, 0.07200563049316407, 0.07263334655761719, 0.07273677062988282, 0.07225856018066407, 0.0693411865234375, 0.0695367660522461, 0.07020543670654297, 0.07206809234619141, 0.07203020477294922, 0.0722001953125, 0.07224012756347656, 0.07218688201904297, 0.07218694305419922, 0.07206291198730469, 0.07192678070068359, 0.0715857925415039, 0.07136051177978515, 0.0714076156616211, 0.07242240142822266, 0.07221453094482422, 0.07244499206542969, 0.07197689819335938, 0.07119974517822265, 
0.07206297302246094, 0.0721981430053711, 0.0724316177368164, 0.07192473602294921, 0.07172608184814454, 0.06988082885742188, 0.06962483215332031, 0.07035497283935546, 0.06945276641845703, 0.06980198669433593, 0.07191449737548829, 0.07256678771972656, 0.07234457397460937, 0.07288422393798828, 0.07266918182373047, 0.07230668640136718, 0.07237324523925781, 0.0723978271484375, 0.07207526397705079, 0.07200057220458984, 0.07223903656005859, 0.07222172546386718, 0.07205680084228516, 0.0722012176513672, 0.07250841522216797, 0.07297948455810546, 0.14137648010253906, 0.06947328186035157, 0.069607421875, 0.06972518157958985, 0.07188377380371094, 0.07261798095703124, 0.07253196716308594, 0.07225138854980469, 0.0723763198852539, 0.07227289581298828, 0.0724869155883789, 0.0720404510498047, 0.07224832153320312, 0.0722790756225586, 0.07235478210449219, 0.07247462463378906, 0.07243059539794922, 0.07224217224121093, 0.07231999969482422, 0.07176908874511718, 0.07236198425292968, 0.07177011108398437, 0.06941900634765626, 0.0709969940185547, 0.07273983764648438, 0.07211827087402344, 0.07241318511962891, 0.07507456207275391, 0.07245209503173829, 0.07226681518554688, 0.07216531372070313, 0.07257190704345703, 0.07180902099609375, 0.07219200134277344, 0.07004978942871094, 0.06961151885986328, 0.06961663818359375, 0.06986041259765625, 0.06977324676513671, 0.06963404846191407, 0.06956646728515625, 0.06976614379882813, 0.06975692749023438, 0.06940774536132813, 0.07025663757324219, 0.06947020721435547, 0.07162265777587891, 0.07189810943603515, 0.07244185638427734, 0.07230054473876953, 0.07227497863769532, 0.07341053009033204, 0.07264460754394532, 0.07066521453857422, 0.06972415924072266, 0.0694620132446289, 0.06928281402587891, 0.06948761749267578, 0.0691251220703125, 0.06890598297119141, 0.06905343627929687, 0.06960025787353516, 0.06945689392089843, 0.14089112854003907, 0.0694128646850586, 0.069570556640625, 0.06914662170410156, 0.06950399780273438, 0.07003545379638672, 0.06975794982910156, 0.06944870758056641, 0.070181884765625, 0.06986444854736328, 0.069607421875, 0.0728453140258789, 0.07321395111083985, 0.07279718780517579, 0.07399935913085938, 0.07330099487304688, 0.07279718780517579, 0.07354777526855469, 0.07338188934326172, 0.073301025390625, 0.07308284759521484, 0.0729917449951172, 0.07306854248046875, 0.0727388153076172, 0.07293440246582031, 0.0727357406616211, 0.07297023773193359, 0.07296102142333985, 0.07303270721435547, 0.07281462097167969, 0.07316886138916015, 0.073059326171875, 0.0725074234008789, 0.07300093078613282, 0.07273785400390625, 0.07161849975585938, 0.06959820556640625, 0.06925823974609376, 0.06970883178710938, 0.06981629180908203, 0.06969036865234375, 0.06949683380126953, 0.06930738830566406, 0.06938419342041016, 0.06971298980712891, 0.0707419204711914, 0.07269376373291016, 0.07251353454589844, 0.07328562927246093, 0.06910873413085937, 0.06949581146240234, 0.06906163024902344, 0.07005286407470702, 0.07312281799316406, 0.07338393402099609, 0.07292108917236328, 0.07231078338623047, 0.07324467468261718, 0.07336345672607422, 0.07308595275878907, 0.07299993896484375, 0.07279001617431641, 0.07310745239257813, 0.14934938049316407, 0.07343206024169922, 0.07323033905029297, 0.07366553497314453, 0.07251865386962891, 0.07295795440673829, 0.07302963256835937, 0.073480224609375, 0.07338902282714843, 0.07340748596191406, 0.0733655014038086, 0.07314534759521485, 0.07351602935791016, 0.07292825317382813, 0.07283715057373047, 0.07243465423583985, 0.07391846466064453, 0.07298047637939453, 0.07312384033203125, 
0.07325183868408203, 0.07271321868896484, 0.07309209442138671, 0.07304192352294922, 0.07337062072753907, 0.0729354248046875, 0.07303884887695312, 0.07325081634521484, 0.0731668472290039, 0.07339622497558594, 0.07318016052246094, 0.07310028839111328, 0.07337574768066406, 0.07324774169921874, 0.07300300598144531, 0.07360620880126953, 0.0735692138671875, 0.07393798065185547, 0.07294355010986328, 0.07357542419433594, 0.07405875396728516, 0.07427174377441406, 0.07364198303222656, 0.07310950469970703, 0.07334912109375, 0.0739277114868164, 0.07357746887207031, 0.07303472137451172, 0.07309414672851562, 0.07331635284423828, 0.07301529693603516, 0.0722135009765625, 0.07023104095458985, 0.07103794860839843, 0.06991667175292969, 0.0706355209350586, 0.0701685791015625, 0.06987264251708984, 0.0711445083618164, 0.0702627182006836, 0.0697343978881836, 0.06988800048828125, 0.07153561401367188, 0.07306034851074218]",tokens/s,13.767182754693582,,,,,,,, -4bit-awq-gemm-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,m,m,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 304, in hf_raise_for_status - response.raise_for_status() - File ""/usr/local/lib/python3.10/dist-packages/requests/models.py"", line 1024, in raise_for_status - raise HTTPError(http_error_msg, response=self) -requests.exceptions.HTTPError: 404 Client Error: Not Found for url: https://huggingface.co/m/resolve/main/config.json - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1221, in hf_hub_download - return _hf_hub_download_to_cache_dir( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1325, in _hf_hub_download_to_cache_dir - _raise_on_head_call_error(head_call_error, force_download, local_files_only) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1823, in _raise_on_head_call_error - raise head_call_error - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1722, in _get_metadata_or_catch_error - 
metadata = get_hf_file_metadata(url=url, proxies=proxies, timeout=etag_timeout, headers=headers) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1645, in get_hf_file_metadata - r = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 372, in _request_wrapper - response = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 396, in _request_wrapper - hf_raise_for_status(response) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status - raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-66948c2a-59aef85712156b732b4173de;1d74bf77-9e91-488e-9a34-a6e2b55a3aa0) - -Repository Not Found for url: https://huggingface.co/m/resolve/main/config.json. -Please make sure you specified the correct `repo_id` and `repo_type`. -If you are trying to access a private or gated repo, make sure you are authenticated. - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 425, in cached_file - raise EnvironmentError( -OSError: m is not a local folder and is not a valid model identifier listed on 'https://huggingface.co/models' -If this is a private repository, make sure to pass a token having permission to this repo either by logging in with `huggingface-cli login` or by passing `token=` - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 
-4bit-awq-gemm-eager,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,stabilityai/stablelm-base-alpha-7b,stabilityai/stablelm-base-alpha-7b,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.1,,0.30.1,,,,1.19.2,,,,0.11.1,,,,,,,,,,,,,,,,,,,,,,,,,,,MB,1435.398144,8196.194304,0.0,7549.7472,6940.714496,s,10,6.101695007324219,0.6101695007324219,0.0005946945898735176,0.610120330810547,0.6105357299804687,0.6111274536132812,0.6116008325195312,"[0.6117191772460937, 0.6094563598632813, 0.6096676025390625, 0.609661865234375, 0.6102354736328125, 0.6099964599609375, 0.6100880737304688, 0.610152587890625, 0.6103131713867187, 0.6104042358398437]",tokens/s,419.55554922477825,kWh,7.199752165211573e-06,3.945167113402825e-06,3.557207094129864e-05,4.671699021991304e-05,tokens/kWh,5479805.07294925,MB,1435.398144,8196.194304,0.0,7549.7472,7094.0672,s,10,359.3025859375,35.93025859375,0.004744165567684035,35.92844140625,35.93707578125,35.938278125,35.93924,"[35.92640625, 35.93118359375, 35.93375390625, 35.9282890625, 35.93948046875, 35.93680859375, 35.92859375, 35.92646484375, 35.927859375, 35.92374609375]",tokens/s,1.753396787713594,kWh,0.00042417653058966,0.00023248355862878645,0.0021270168535728984,0.002783676942791345,tokens/kWh,22631.93656977539,,s,629,364.2223344116213,0.5790498162346918,0.0724714604302608,0.570292236328125,0.5706096801757813,0.570741552734375,1.1803278564453126,"[0.570017822265625, 0.5699727172851563, 0.5701652221679687, 0.5702686767578125, 0.5701201782226563, 0.5703833618164063, 0.5702440795898438, 0.5700086059570313, 0.5700679931640625, 0.5700485229492187, 0.5702072143554687, 0.5704335327148438, 0.5702359008789063, 0.5702522583007813, 0.5702430419921874, 0.5702123413085938, 0.5699952392578125, 0.5701222534179687, 0.570018798828125, 0.5700730590820312, 0.5700311279296875, 0.5700730590820312, 0.5705441284179688, 0.5700914916992188, 0.5701908569335937, 0.570039306640625, 0.5701427001953125, 0.5701519165039063, 0.570018798828125, 0.5704693603515625, 0.5701949462890625, 0.5705523071289063, 0.5700341796875, 0.570376220703125, 0.5702338256835937, 0.5700792236328125, 0.5701652221679687, 0.5701939086914063, 0.5703106689453125, 0.5701212158203125, 0.570377197265625, 0.5703854370117187, 0.5702174682617187, 0.57017138671875, 0.5701898193359375, 0.570281982421875, 0.5704048461914063, 0.5703516235351562, 0.5703372802734376, 0.5703587646484375, 0.570261474609375, 0.5706219482421875, 0.570228759765625, 0.5703167724609375, 0.5704724731445312, 0.570250244140625, 0.570260498046875, 0.5705205688476562, 0.5706659545898437, 0.5705738525390625, 0.5702584228515625, 0.5703618774414062, 1.180654541015625, 0.5706793212890625, 0.5700546264648437, 0.5700679931640625, 0.5699635009765625, 0.5702379760742188, 0.5699317626953125, 0.570335205078125, 0.5702041625976563, 0.5703126831054688, 0.5702020874023438, 0.57031884765625, 0.5707837524414062, 0.5704570922851563, 0.5700587768554688, 0.5700219116210937, 0.5701283569335938, 0.5701038208007813, 0.570292236328125, 0.5705799560546875, 0.5701365966796875, 0.5702850341796875, 0.5703014526367187, 0.5703843994140625, 
0.5703741455078125, 0.570166259765625, 0.57010791015625, 0.5701795654296875, 0.5702543334960938, 0.5704069213867188, 0.5702564086914063, 0.5703895263671875, 0.5704017944335937, 0.5704888305664062, 0.5703884887695313, 0.5702901611328125, 0.5705441284179688, 0.5706096801757813, 0.5703915405273438, 0.5703915405273438, 0.5706311645507812, 0.5706148071289062, 0.5704232788085938, 0.5703792724609374, 0.5704356079101562, 0.5703280639648437, 0.5702430419921874, 0.5703812866210938, 0.5702645874023438, 0.5704130859375, 0.5704847412109375, 0.5704356079101562, 0.57042431640625, 0.5707622680664063, 0.5705584716796875, 0.5703618774414062, 0.5702379760742188, 0.5702461547851563, 0.5704683227539062, 0.5704161376953125, 0.5703577880859375, 0.5702789306640625, 0.570260498046875, 1.1801353759765625, 0.5700740966796874, 0.569970703125, 0.5700802612304687, 0.5700914916992188, 0.570197021484375, 0.5702625122070313, 0.57080322265625, 0.570260498046875, 0.5700802612304687, 0.5700341796875, 0.5702266845703124, 0.5705256958007813, 0.5703792724609374, 0.5704345703125, 0.570514404296875, 0.5704652709960938, 0.5702963256835938, 0.5707018432617188, 0.5703864135742187, 0.57019189453125, 0.5703259887695312, 0.5704099731445312, 0.5703536376953126, 0.5701396484375, 0.5705430908203125, 0.5701375732421875, 0.5701621704101563, 0.5701939086914063, 0.5704110107421875, 0.5704468383789062, 0.5702154541015625, 0.5702461547851563, 0.5703546752929688, 0.5702359008789063, 0.57044580078125, 0.5702901611328125, 0.5705093383789063, 0.5704324951171875, 0.5704232788085938, 0.5704314575195313, 0.570387451171875, 0.5705912475585937, 0.5705697021484375, 0.5704417114257813, 0.5703413696289062, 0.5704591064453125, 0.57055126953125, 0.5703720703125, 0.5705902099609375, 0.5705113525390625, 0.5704478759765625, 0.5705379638671875, 0.5709127807617187, 0.570514404296875, 0.5705072631835938, 0.5707048950195313, 0.5702860717773437, 0.5703311157226563, 0.5702573852539062, 0.5707479248046875, 0.570392578125, 0.5701949462890625, 1.1800667724609375, 0.5705154418945313, 0.5700843505859375, 0.5702154541015625, 0.5700464477539062, 0.5703096313476562, 0.570397705078125, 0.570355712890625, 0.5701539916992188, 0.5700249633789063, 0.5699932250976563, 0.5700464477539062, 0.5700894775390625, 0.57014990234375, 0.5704530029296875, 0.5702830200195312, 0.5701437377929688, 0.5704365844726562, 0.5703106689453125, 0.5702205200195313, 0.5703505859375, 0.5701375732421875, 0.5701458129882813, 0.5701437377929688, 0.570576904296875, 0.5702389526367188, 0.5702512817382812, 0.5702799072265625, 0.5704345703125, 0.570302490234375, 0.5701304321289062, 0.5706710815429688, 0.570281982421875, 0.5702225952148438, 0.5702225952148438, 0.5704365844726562, 0.570186767578125, 0.570102783203125, 0.5702564086914063, 0.5701857299804688, 0.57017138671875, 0.5702359008789063, 0.570239990234375, 0.5702092895507812, 0.5701734619140625, 0.5702164306640625, 0.5704601440429687, 0.570261474609375, 0.5702410278320312, 0.5704693603515625, 0.5702573852539062, 0.5701826782226562, 0.5703209228515626, 0.5703567504882813, 0.5702246704101562, 0.57021337890625, 0.5702625122070313, 0.5704263916015625, 0.5704110107421875, 0.5707018432617188, 0.5706383056640625, 0.5707294921875, 0.5706885375976563, 1.1809197998046874, 0.5703690185546875, 0.5707325439453125, 0.5704867553710937, 0.570355712890625, 0.57044482421875, 0.5706455078125, 0.5701949462890625, 0.5700352172851563, 0.5702830200195312, 0.5702727661132813, 0.5703720703125, 0.5707427978515625, 0.5703864135742187, 0.5704652709960938, 0.5703075561523437, 
0.5703997192382813, 0.57046630859375, 0.5704171752929688, 0.570387451171875, 0.5707837524414062, 0.5702307739257813, 0.5702277221679688, 0.5705277709960938, 0.5703301391601563, 0.5702706909179688, 0.5701836547851562, 0.5702860717773437, 0.5702338256835937, 0.5703905029296875, 0.5705430908203125, 0.5707396850585937, 0.5704151000976563, 0.570481689453125, 0.5704909057617188, 0.5703987426757813, 0.5704140625, 0.5709854736328125, 0.5703946533203125, 0.5703843994140625, 0.5704007568359375, 0.5707048950195313, 0.5705410766601563, 0.570271728515625, 0.5703301391601563, 0.5703751831054688, 0.5703905029296875, 0.5704356079101562, 0.570440673828125, 0.5706219482421875, 0.570377197265625, 0.5703679809570312, 0.5704591064453125, 0.5703720703125, 0.570461181640625, 0.5709004516601562, 0.5705205688476562, 0.5705410766601563, 0.5705912475585937, 0.570640380859375, 0.5711124267578125, 0.5708656616210938, 0.5706577758789062, 1.1808399658203126, 0.5708021850585937, 0.5704427490234375, 0.5701908569335937, 0.5700781860351563, 0.5701417236328125, 0.5700914916992188, 0.5700802612304687, 0.5703956298828124, 0.5703792724609374, 0.570260498046875, 0.5703670043945313, 0.570735595703125, 0.5702215576171875, 0.570071044921875, 0.5703117065429687, 0.5704857788085937, 0.5703362426757812, 0.5710018310546875, 0.570376220703125, 0.570514404296875, 0.5702471923828125, 0.5702748413085937, 0.5703157958984375, 0.5701990356445312, 0.5703075561523437, 0.5709547729492187, 0.5701570434570312, 0.5700372314453125, 0.5704427490234375, 0.5703782348632812, 0.5703587646484375, 0.5703833618164063, 0.5702738037109375, 0.57059228515625, 0.5702174682617187, 0.570513427734375, 0.570365966796875, 0.570503173828125, 0.5704345703125, 0.570229736328125, 0.5707151489257812, 0.5703434448242187, 0.5710581665039063, 0.5702789306640625, 0.5702758178710937, 0.57023486328125, 0.5705379638671875, 0.5704007568359375, 0.570302490234375, 0.5705543823242187, 0.5705687255859375, 0.570534912109375, 0.5706015014648438, 0.5708339233398437, 0.5707653198242187, 0.5704918823242188, 0.570392578125, 0.5703731079101563, 0.5703987426757813, 0.5705912475585937, 0.5709179077148437, 0.5703670043945313, 1.1807825927734374, 0.5701509399414062, 0.5701007080078125, 0.5700905151367187, 0.5701478271484375, 0.5702461547851563, 0.5700157470703126, 0.57044580078125, 0.570102783203125, 0.5701734619140625, 0.57012939453125, 0.5702072143554687, 0.5703987426757813, 0.5702000732421875, 0.5708021850585937, 0.57046630859375, 0.570323974609375, 0.5701529541015625, 0.5706157836914062, 0.5701539916992188, 0.5702246704101562, 0.570071044921875, 0.57004541015625, 0.570197998046875, 0.5702573852539062, 0.5706096801757813, 0.570081298828125, 0.5700628662109375, 0.57021337890625, 0.5700740966796874, 0.5700669555664063, 0.5701386108398437, 0.5707857666015625, 0.5702532958984375, 0.5702543334960938, 0.5701437377929688, 0.5706444702148438, 0.570166259765625, 0.570166259765625, 0.5702123413085938, 0.570260498046875, 0.5701417236328125, 0.5706701049804688, 0.5704171752929688, 0.5703782348632812, 0.5703321533203125, 0.5704345703125, 0.5704058837890625, 0.5704703979492187, 0.5707151489257812, 0.5706455078125, 0.5702062377929688, 0.5701334838867187, 0.5703731079101563, 0.5704519653320312, 0.5702543334960938, 0.5702492065429687, 0.5701478271484375, 0.5702041625976563, 0.5701406860351562, 0.5706015014648438, 0.5703741455078125, 0.5703331909179687, 1.1804027099609375, 0.5701099243164063, 0.570060791015625, 0.5707489013671875, 0.570060791015625, 0.5700116577148437, 0.57004541015625, 0.570176513671875, 
0.5700474853515625, 0.5701836547851562, 0.57000244140625, 0.570007568359375, 0.57016015625, 0.5703587646484375, 0.5702225952148438, 0.5700638427734375, 0.5701898193359375, 0.5701396484375, 0.5701703491210938, 0.5700423583984375, 0.5705799560546875, 0.5704273681640625, 0.570176513671875, 0.5700433959960938, 0.5704765625, 0.5706629028320312, 0.570208251953125, 0.5702266845703124, 0.5704099731445312, 0.5703434448242187, 0.5707540283203125, 0.5704099731445312, 0.5702256469726562, 0.570092529296875, 0.570076171875, 0.5700700073242188, 0.570123291015625, 0.5702697143554688, 0.5704888305664062, 0.5702543334960938, 0.5701212158203125, 0.5701160888671875, 0.57030859375, 0.5702307739257813, 0.57025537109375, 0.5702860717773437, 0.5701652221679687, 0.5702532958984375, 0.5705809936523437, 0.5704335327148438, 0.5703147583007813, 0.5702205200195313, 0.5703004150390625, 0.5702901611328125, 0.5703424072265625, 0.570323974609375, 0.57051953125, 0.570292236328125, 0.5702225952148438, 0.5703424072265625, 0.5705471801757812, 0.5703259887695312, 0.5703372802734376, 1.180564453125, 0.570250244140625, 0.5700833129882813, 0.570123291015625, 0.5700126953125, 0.5700361938476562, 0.570144775390625, 0.5701990356445312, 0.57012939453125, 0.570377197265625, 0.5702850341796875, 0.5701099243164063, 0.5703854370117187, 0.5702758178710937, 0.57021337890625, 0.5702860717773437, 0.5702338256835937, 0.57029833984375, 0.5704212646484375, 0.5705707397460937, 0.570218505859375, 0.57014990234375, 0.5701017456054688, 0.5704498901367188, 0.5705891723632812, 0.5702011108398437, 0.5701519165039063, 0.5703117065429687, 0.5702676391601562, 0.57019189453125, 0.570197998046875, 0.5702072143554687, 0.57019189453125, 0.5701068725585937, 0.5702052001953125, 0.5701652221679687, 0.5702225952148438, 0.5702748413085937, 0.5703250122070312, 0.5701754760742187, 0.5702564086914063, 0.57019189453125, 0.5701898193359375, 0.5701693725585938, 0.5705338745117188, 0.5704939575195312, 0.5702215576171875, 0.5701632080078125, 0.5703229370117188, 0.5706946411132813, 0.5702041625976563, 0.5702686767578125, 0.5705799560546875, 0.570397705078125, 0.5707550659179688, 0.5704202270507812, 0.5704949951171875, 0.5702778930664063, 0.5703250122070312, 0.5701703491210938, 0.5702062377929688, 0.5706311645507812, 0.57051953125, 1.1806351318359376, 0.5705595092773438, 0.5698693237304687, 0.5699307250976563, 0.570076171875, 0.5700352172851563, 0.5700628662109375, 0.5699164428710938, 0.5702594604492187, 0.57012939453125, 0.5700106201171875, 0.5700147094726562, 0.570076171875, 0.570018798828125, 0.5700126953125, 0.570271728515625, 0.5703526611328125, 0.5701048583984375, 0.5700587768554688, 0.5703884887695313, 0.570144775390625, 0.569975830078125, 0.5700567016601562, 0.5700003662109375, 0.5700485229492187, 0.5704652709960938, 0.570113037109375, 0.570461181640625, 0.5703075561523437, 0.5700106201171875, 0.5700986938476562, 0.570166259765625, 0.5703301391601563, 0.5704437866210937, 0.5701642456054687, 0.5700781860351563, 0.5700567016601562, 0.5701621704101563, 0.570197998046875, 0.570197021484375, 0.5700966186523437, 0.5701263427734375, 0.570017822265625, 0.5705093383789063, 0.57034033203125, 0.5703987426757813, 0.5703218994140625, 0.5703485717773438, 0.5702573852539062, 0.57031884765625, 0.5704970092773437, 0.570545166015625, 0.5701683349609376, 0.5701437377929688, 0.57034033203125, 0.5705051879882812, 0.5702041625976563, 0.5704263916015625, 0.5704058837890625, 0.5704570922851563, 0.5706854248046875, 0.5703546752929688, 0.5704683227539062]",tokens/s,1.7269671312609398,,,,,,,, 
-4bit-awq-gemm-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,M,M,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 304, in hf_raise_for_status - response.raise_for_status() - File ""/usr/local/lib/python3.10/dist-packages/requests/models.py"", line 1024, in raise_for_status - raise HTTPError(http_error_msg, response=self) -requests.exceptions.HTTPError: 404 Client Error: Not Found for url: https://huggingface.co/M/resolve/main/config.json - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1221, in hf_hub_download - return _hf_hub_download_to_cache_dir( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1325, in _hf_hub_download_to_cache_dir - _raise_on_head_call_error(head_call_error, force_download, local_files_only) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1823, in _raise_on_head_call_error - raise head_call_error - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1722, in _get_metadata_or_catch_error - metadata = get_hf_file_metadata(url=url, proxies=proxies, timeout=etag_timeout, headers=headers) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1645, in get_hf_file_metadata - r = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 372, in _request_wrapper - response = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 396, in _request_wrapper - hf_raise_for_status(response) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status - raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. 
(Request ID: Root=1-66948fa1-62746c825836de1161edb143;2d4885ee-5948-43c7-8ad8-d27f3d3ef705) - -Repository Not Found for url: https://huggingface.co/M/resolve/main/config.json. -Please make sure you specified the correct `repo_id` and `repo_type`. -If you are trying to access a private or gated repo, make sure you are authenticated. - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 425, in cached_file - raise EnvironmentError( -OSError: M is not a local folder and is not a valid model identifier listed on 'https://huggingface.co/models' -If this is a private repository, make sure to pass a token having permission to this repo either by logging in with `huggingface-cli login` or by passing `token=` - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemm-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,8,8,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 304, in hf_raise_for_status - 
response.raise_for_status() - File ""/usr/local/lib/python3.10/dist-packages/requests/models.py"", line 1024, in raise_for_status - raise HTTPError(http_error_msg, response=self) -requests.exceptions.HTTPError: 404 Client Error: Not Found for url: https://huggingface.co/8/resolve/main/config.json - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1221, in hf_hub_download - return _hf_hub_download_to_cache_dir( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1325, in _hf_hub_download_to_cache_dir - _raise_on_head_call_error(head_call_error, force_download, local_files_only) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1823, in _raise_on_head_call_error - raise head_call_error - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1722, in _get_metadata_or_catch_error - metadata = get_hf_file_metadata(url=url, proxies=proxies, timeout=etag_timeout, headers=headers) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1645, in get_hf_file_metadata - r = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 372, in _request_wrapper - response = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 396, in _request_wrapper - hf_raise_for_status(response) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status - raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-6694924f-3b04e21119c6c72c3d6c0267;30325367-f9c9-4612-be1a-4512abe6b463) - -Repository Not Found for url: https://huggingface.co/8/resolve/main/config.json. -Please make sure you specified the correct `repo_id` and `repo_type`. -If you are trying to access a private or gated repo, make sure you are authenticated. 
- -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 425, in cached_file - raise EnvironmentError( -OSError: 8 is not a local folder and is not a valid model identifier listed on 'https://huggingface.co/models' -If this is a private repository, make sure to pass a token having permission to this repo either by logging in with `huggingface-cli login` or by passing `token=` - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemm-eager,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,EleutherAI/gpt-neox-20b,,cuda,0,42,,,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.0,217063f5c507ed7cc255df7e1f64c4333a0b4dfe,4.40.2,,0.30.1,,,,1.19.2,,,,0.10.0,,,,,,,,,,,,,,,,,,,,,,,,,,,MB,1888.280576,15194.390528,0.0,14547.943424,13898.252288,s,10,16.972713500976564,1.6972713500976564,0.0011802396473078822,1.6969771728515626,1.6988580200195311,1.699245782470703,1.6995559924316406,"[1.6969825439453126, 1.6969718017578126, 1.6960919189453125, 1.6958106689453125, 1.6964619140625, 1.69635986328125, 1.6974122314453124, 1.6982171630859375, 1.699633544921875, 1.6987718505859375]",tokens/s,150.83033127570937,kWh,2.0026979611979592e-05,1.097496669508473e-05,9.603446571639917e-05,0.0001270364120234635,tokens/kWh,2015170.2643547354,MB,1888.280576,15194.390528,0.0,14547.943424,14315.97312,s,10,986.8612499999999,98.68612499999999,0.014850841233799409,98.68501953125,98.70599609374999,98.707103515625,98.707989453125,"[98.6797109375, 98.687671875, 98.6747890625, 98.6555546875, 98.6919140625, 98.6783203125, 98.6969609375, 98.7082109375, 98.70575, 
98.6823671875]",tokens/s,0.6383876152802637,kWh,0.0011649224273860455,0.0006384806384544572,0.00558826860950019,0.007391671675340693,tokens/kWh,8523.105836826315,,s,629,1000.5712542724599,1.5907333136287136,0.2015788363996436,1.5663739013671876,1.5674868164062499,1.5676927978515625,3.262457841796875,"[1.5671644287109374, 1.5661424560546875, 1.56664013671875, 1.5672913818359375, 1.566525390625, 1.5666636962890625, 1.56687158203125, 1.566798828125, 1.565517822265625, 1.56556494140625, 1.56592333984375, 1.56533251953125, 1.5655465087890625, 1.5658004150390625, 1.565998046875, 1.5670538330078125, 1.5664508056640625, 1.56584033203125, 1.5667855224609375, 1.5674542236328124, 1.567055908203125, 1.5672022705078126, 1.5673907470703126, 1.56657666015625, 1.5663001708984374, 1.566665771484375, 1.5659376220703125, 1.5659520263671876, 1.5661055908203125, 1.566899169921875, 1.5660892333984375, 1.566604248046875, 1.5656611328125, 1.5667138671875, 1.565654052734375, 1.566857177734375, 1.5661802978515624, 1.5664854736328124, 1.5667210693359375, 1.5670538330078125, 1.56666162109375, 1.566857177734375, 1.5659581298828125, 1.565955078125, 1.5662294921875, 1.5661065673828125, 1.566191650390625, 1.5659642333984376, 1.5664527587890624, 1.5659898681640625, 1.5656510009765625, 1.5664803466796875, 1.566614501953125, 1.566317626953125, 1.5665059814453124, 1.565919189453125, 1.5657093505859374, 1.5657308349609376, 1.5665244140625, 1.565981689453125, 1.5660799560546874, 1.56632373046875, 3.263318115234375, 1.5650416259765625, 1.5667425537109374, 1.5671868896484376, 1.5674766845703125, 1.566899169921875, 1.5670477294921874, 1.567247314453125, 1.5669462890625, 1.566482421875, 1.56721044921875, 1.567352783203125, 1.56718603515625, 1.5668541259765625, 1.565632568359375, 1.5655035400390624, 1.565454345703125, 1.566171142578125, 1.5655423583984376, 1.5656632080078126, 1.565739013671875, 1.5655546875, 1.56594384765625, 1.565739990234375, 1.5656090087890624, 1.5654093017578126, 1.5656141357421876, 1.5657379150390625, 1.565550537109375, 1.5656468505859376, 1.566182373046875, 1.56573388671875, 1.5660994873046874, 1.566614501953125, 1.5671705322265626, 1.5668858642578125, 1.5673907470703126, 1.5655372314453124, 1.5656827392578125, 1.567224853515625, 1.5674173583984374, 1.567141845703125, 1.567562744140625, 1.56704150390625, 1.566593994140625, 1.5660902099609375, 1.56824267578125, 1.56689404296875, 1.5673763427734375, 1.5663564453125, 1.566992431640625, 1.567836181640625, 1.5679334716796876, 1.5667803955078126, 1.5659478759765626, 1.566908447265625, 1.566277587890625, 1.5658690185546875, 1.5664803466796875, 1.5664271240234375, 1.5662049560546876, 1.5662171630859374, 1.5660595703125, 3.26247021484375, 1.5656990966796875, 1.5657799072265626, 1.566017578125, 1.5667598876953126, 1.565744140625, 1.566393310546875, 1.56580859375, 1.56586083984375, 1.5654072265625, 1.56617529296875, 1.5659202880859375, 1.5659725341796875, 1.5665029296875, 1.5664486083984375, 1.5655260009765626, 1.5660780029296875, 1.5663062744140626, 1.5667117919921876, 1.566983154296875, 1.5657728271484375, 1.565895751953125, 1.566373779296875, 1.5677890625, 1.566457763671875, 1.5663277587890625, 1.566962646484375, 1.566697509765625, 1.5665489501953125, 1.566899169921875, 1.56623046875, 1.566688232421875, 1.5666124267578125, 1.5657093505859374, 1.566341064453125, 1.5660462646484374, 1.5663482666015625, 1.566123046875, 1.5662838134765624, 1.5664619140625, 1.56621826171875, 1.566017578125, 1.5659622802734374, 1.565760498046875, 1.5661669921875, 1.566396484375, 
1.5663912353515625, 1.5682620849609374, 1.5664476318359375, 1.5661475830078124, 1.56608203125, 1.56626123046875, 1.5659765625, 1.5659632568359374, 1.565811767578125, 1.56657763671875, 1.5658956298828124, 1.5657799072265626, 1.5660042724609375, 1.566275634765625, 1.5663533935546874, 1.56682958984375, 1.566255126953125, 3.26236865234375, 1.5657420654296874, 1.5653990478515625, 1.5662427978515625, 1.565697021484375, 1.5651962890625, 1.56592333984375, 1.5652525634765626, 1.5654676513671875, 1.5651666259765624, 1.56598583984375, 1.5653150634765625, 1.5663011474609374, 1.5655894775390624, 1.5655516357421875, 1.565338623046875, 1.5655577392578126, 1.5656806640625, 1.56554345703125, 1.56598779296875, 1.5655843505859375, 1.5654718017578124, 1.5656263427734376, 1.5660400390625, 1.5658824462890626, 1.5657850341796875, 1.5657625732421876, 1.5657196044921875, 1.56554443359375, 1.566350341796875, 1.5656375732421874, 1.5655997314453125, 1.566192626953125, 1.565496337890625, 1.5660390625, 1.566271484375, 1.5658936767578124, 1.565685791015625, 1.5660216064453125, 1.5656141357421876, 1.565828125, 1.566434326171875, 1.5664906005859376, 1.5654307861328125, 1.56860107421875, 1.56617529296875, 1.566096435546875, 1.5658311767578126, 1.5665806884765625, 1.5662591552734375, 1.5666104736328126, 1.56617724609375, 1.566076904296875, 1.5665244140625, 1.566587890625, 1.56634521484375, 1.5658741455078125, 1.5661434326171875, 1.566371826171875, 1.5663533935546874, 1.566328857421875, 1.5666011962890625, 1.5661240234375, 3.263153076171875, 1.56556494140625, 1.565706298828125, 1.5666165771484375, 1.566034912109375, 1.566002197265625, 1.5660831298828124, 1.565319091796875, 1.5665797119140625, 1.5655751953125, 1.5664332275390624, 1.5658896484375, 1.5659744873046875, 1.5658916015625, 1.566130126953125, 1.5668695068359375, 1.5666226806640624, 1.5660933837890625, 1.566509033203125, 1.5661363525390626, 1.5657738037109374, 1.5658199462890625, 1.5663206787109376, 1.5657840576171875, 1.566213134765625, 1.5667742919921874, 1.5661240234375, 1.5659315185546876, 1.565498291015625, 1.5660472412109374, 1.5656407470703124, 1.56573486328125, 1.566254150390625, 1.56568359375, 1.5663575439453126, 1.566123046875, 1.5667579345703124, 1.5663963623046875, 1.5667947998046876, 1.5661865234375, 1.5662919921875, 1.5661158447265624, 1.5670897216796875, 1.56710302734375, 1.567573974609375, 1.5670128173828124, 1.5670006103515626, 1.5675535888671874, 1.568489501953125, 1.5677030029296875, 1.5673804931640625, 1.5675074462890626, 1.5675924072265626, 1.56819970703125, 1.56786181640625, 1.5679241943359374, 1.5680113525390624, 1.5678955078125, 1.56760888671875, 1.5664681396484375, 1.5662049560546876, 1.5662213134765626, 1.5661905517578125, 3.262426025390625, 1.5655628662109375, 1.56493212890625, 1.5661669921875, 1.5657431640625, 1.5655537109375, 1.5660482177734374, 1.5657359619140625, 1.5666063232421874, 1.5661004638671876, 1.5656785888671876, 1.5670230712890625, 1.5668284912109376, 1.5666944580078126, 1.566755859375, 1.566581787109375, 1.5667763671875, 1.5658946533203124, 1.5655372314453124, 1.5659530029296875, 1.565708251953125, 1.5669320068359376, 1.56744091796875, 1.566341064453125, 1.5660360107421876, 1.5659837646484376, 1.5658629150390626, 1.5660892333984375, 1.565887451171875, 1.56593359375, 1.5656898193359374, 1.5661455078125, 1.566086181640625, 1.5658076171875, 1.5659263916015624, 1.5658218994140625, 1.56609033203125, 1.56613525390625, 1.5673641357421875, 1.5665848388671875, 1.5666175537109375, 1.5664681396484375, 1.5664619140625, 1.566224365234375, 
1.5664588623046876, 1.5661363525390626, 1.5667579345703124, 1.5667978515625, 1.5667178955078125, 1.5658916015625, 1.5658680419921875, 1.5657728271484375, 1.5670528564453126, 1.567161376953125, 1.5674490966796875, 1.567394775390625, 1.567477783203125, 1.56653369140625, 1.5658792724609376, 1.56665966796875, 1.5675699462890624, 1.56628076171875, 1.5665899658203124, 3.26359765625, 1.5656744384765624, 1.565328369140625, 1.566392333984375, 1.565917236328125, 1.566066650390625, 1.567247314453125, 1.56674560546875, 1.567057861328125, 1.566623779296875, 1.5667916259765624, 1.5669647216796876, 1.56680810546875, 1.5664117431640625, 1.566813232421875, 1.5673231201171876, 1.566376953125, 1.5661844482421876, 1.567056884765625, 1.567025146484375, 1.566773193359375, 1.5668214111328125, 1.5673548583984376, 1.568668701171875, 1.567552490234375, 1.566614501953125, 1.566899169921875, 1.567447021484375, 1.5675135498046875, 1.5669381103515625, 1.5667794189453126, 1.567139892578125, 1.5667916259765624, 1.566656494140625, 1.566287841796875, 1.5665806884765625, 1.5664046630859374, 1.5664854736328124, 1.56649169921875, 1.56577587890625, 1.5662652587890624, 1.5666688232421875, 1.5665521240234375, 1.56607080078125, 1.5668214111328125, 1.5663809814453125, 1.5665684814453125, 1.5666226806640624, 1.566630859375, 1.5663360595703124, 1.566734375, 1.5659100341796874, 1.566339111328125, 1.5664271240234375, 1.5667056884765624, 1.5661854248046876, 1.5660308837890624, 1.56668310546875, 1.5665264892578126, 1.5664742431640626, 1.5667752685546874, 1.5658568115234375, 1.566477294921875, 3.26474853515625, 1.5660062255859375, 1.5659560546875, 1.5659263916015624, 1.5657471923828126, 1.5661905517578125, 1.5662623291015625, 1.5657779541015624, 1.5659345703125, 1.5662991943359375, 1.5659427490234374, 1.565487060546875, 1.566613525390625, 1.56559765625, 1.56596533203125, 1.566318603515625, 1.566983154296875, 1.5657728271484375, 1.5661322021484374, 1.566572509765625, 1.5666114501953126, 1.5659100341796874, 1.56704150390625, 1.56611376953125, 1.5677716064453124, 1.5673876953125, 1.5676068115234374, 1.5671654052734374, 1.5670262451171875, 1.5657738037109374, 1.5667547607421874, 1.5669217529296875, 1.5676558837890624, 1.56761083984375, 1.567972412109375, 1.567614990234375, 1.5675115966796875, 1.5676702880859374, 1.56752587890625, 1.567363037109375, 1.567826904296875, 1.567635498046875, 1.5676488037109375, 1.5662724609375, 1.5680296630859376, 1.567309814453125, 1.567677490234375, 1.567458251953125, 1.5668489990234375, 1.565865966796875, 1.566993408203125, 1.5673487548828124, 1.567581298828125, 1.566005126953125, 1.5679825439453126, 1.567009765625, 1.566161865234375, 1.5676138916015625, 1.56758935546875, 1.5667568359375, 1.5663258056640625, 1.566224365234375, 1.5670302734375, 3.26736279296875, 1.5667579345703124, 1.5664261474609376, 1.5665531005859374, 1.5668193359375, 1.566329833984375, 1.5663739013671876, 1.566214111328125, 1.5671173095703126, 1.565612060546875, 1.5653099365234375, 1.565854736328125, 1.5658486328125, 1.5660902099609375, 1.566256103515625, 1.5667547607421874, 1.5674736328125, 1.56900244140625, 1.5678065185546874, 1.567677490234375, 1.5673057861328126, 1.5676702880859374, 1.5678648681640626, 1.566625732421875, 1.566234619140625, 1.5665244140625, 1.565811767578125, 1.566467041015625, 1.5663380126953126, 1.5663995361328125, 1.56670458984375, 1.566562255859375, 1.56580859375, 1.5658076171875, 1.567499267578125, 1.566665771484375, 1.5675023193359374, 1.5676190185546874, 1.5666585693359374, 1.5654676513671875, 1.5674849853515624, 
1.5671900634765625, 1.567220703125, 1.5676558837890624, 1.5666514892578125, 1.5663369140625, 1.5666441650390626, 1.566841796875, 1.566866455078125, 1.56685107421875, 1.567130615234375, 1.5664896240234376, 1.5665152587890625, 1.5667691650390625, 1.56735693359375, 1.5668797607421876, 1.5669544677734375, 1.565843505859375, 1.5657564697265625, 1.5662509765625, 1.5679661865234376, 1.567494140625, 1.5670753173828125, 3.26488671875, 1.56588232421875, 1.5666165771484375, 1.56659912109375, 1.5660400390625, 1.565919189453125, 1.5653775634765625, 1.5651685791015626, 1.5665255126953126, 1.5655526123046875, 1.56554443359375, 1.565875244140625, 1.565812744140625, 1.56569091796875, 1.56594384765625, 1.566340087890625, 1.565750244140625, 1.5661793212890625, 1.5670374755859375, 1.5664691162109374, 1.5666708984375, 1.5669124755859376, 1.5670743408203125, 1.565885498046875, 1.567392822265625, 1.56645068359375, 1.567561767578125, 1.5667598876953126, 1.566843994140625, 1.566246826171875, 1.56635546875, 1.5664219970703126, 1.565961181640625, 1.5663524169921874, 1.5675914306640626, 1.56609326171875, 1.5661405029296875, 1.5667711181640624, 1.566587890625, 1.5662694091796876, 1.566982177734375, 1.566482421875, 1.566496826171875, 1.5665531005859374, 1.5663370361328126, 1.5661158447265624, 1.566561279296875, 1.566202880859375, 1.566286865234375, 1.5659674072265626, 1.5661629638671875, 1.5660155029296876, 1.566634033203125, 1.5668284912109376, 1.5664476318359375, 1.5665438232421875, 1.5667864990234375, 1.5668736572265625, 1.56661865234375, 1.5671204833984376, 1.566720947265625, 1.5667332763671875, 1.566159912109375]",tokens/s,0.6286408862079101,,,,,,main,False,False -4bit-awq-gemm-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,s,s,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 304, in hf_raise_for_status - response.raise_for_status() - File ""/usr/local/lib/python3.10/dist-packages/requests/models.py"", line 1024, in raise_for_status - raise HTTPError(http_error_msg, response=self) -requests.exceptions.HTTPError: 404 Client Error: Not Found for url: https://huggingface.co/s/resolve/main/config.json - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = hf_hub_download( - File 
""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1221, in hf_hub_download - return _hf_hub_download_to_cache_dir( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1325, in _hf_hub_download_to_cache_dir - _raise_on_head_call_error(head_call_error, force_download, local_files_only) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1823, in _raise_on_head_call_error - raise head_call_error - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1722, in _get_metadata_or_catch_error - metadata = get_hf_file_metadata(url=url, proxies=proxies, timeout=etag_timeout, headers=headers) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1645, in get_hf_file_metadata - r = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 372, in _request_wrapper - response = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 396, in _request_wrapper - hf_raise_for_status(response) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status - raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-66948ccc-4aa524e44773ac6e2fbb1a0d;7fc4bb38-2856-4d9d-a5cd-537123b54f21) - -Repository Not Found for url: https://huggingface.co/s/resolve/main/config.json. -Please make sure you specified the correct `repo_id` and `repo_type`. -If you are trying to access a private or gated repo, make sure you are authenticated. 
- -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 425, in cached_file - raise EnvironmentError( -OSError: s is not a local folder and is not a valid model identifier listed on 'https://huggingface.co/models' -If this is a private repository, make sure to pass a token having permission to this repo either by logging in with `huggingface-cli login` or by passing `token=` - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemm-eager,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,EleutherAI/pythia-12b,EleutherAI/pythia-12b,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.40.2,,0.30.1,,,,1.19.2,,,,0.11.0,,,,,,,,,,,,,,,,,,,,,,,,,,,MB,1585.569792,9676.783616,0.0,9030.336512,8583.573504,s,10,9.573078491210937,0.9573078491210938,0.0009060771854647526,0.9570158996582031,0.9584730651855469,0.958728140258789,0.9589322003173828,"[0.9589832153320312, 0.956114501953125, 0.95637109375, 0.9565009765625, 0.9567921142578125, 0.9570035400390625, 0.9570282592773437, 0.95793017578125, 0.957938232421875, 0.9584163818359375]",tokens/s,267.41658938139295,kWh,1.1295311258296775e-05,6.189367993647465e-06,5.412797259527177e-05,7.161265184721602e-05,tokens/kWh,3574787.323141869,MB,1585.569792,9676.783616,0.0,9030.336512,8872.967168,s,10,567.96033984375,56.796033984375,0.005905783414625482,56.796150390625,56.802221875,56.803097265625,56.803797578125,"[56.78812109375, 56.796453125, 56.795734375, 56.80109765625, 56.800734375, 56.79584765625, 56.80202734375, 56.80397265625, 56.7909765625, 
56.785375]",tokens/s,1.1092323808618707,kWh,0.0006704276505549146,0.00036745356669733154,0.0031837375343617275,0.004221618751613973,tokens/kWh,14923.185561442368,,s,629,575.6730700073241,0.9152195071658572,0.113749717069217,0.901496826171875,0.9019905639648437,0.9021755371093749,1.8586116552734375,"[0.9012899780273438, 0.9015009155273438, 0.9013851928710938, 0.9014036254882812, 0.9017272338867187, 0.9014722290039062, 0.9017364501953125, 0.9016832275390625, 0.901676025390625, 0.9014937744140625, 0.9013677978515625, 0.90115380859375, 0.9009315795898437, 0.9009326171875, 0.9006499633789062, 0.9008291625976562, 0.9008558349609375, 0.9008486328125, 0.9011026000976563, 0.9014824829101562, 0.901602294921875, 0.901296142578125, 0.9016278686523438, 0.9010196533203125, 0.901043212890625, 0.9009264526367188, 0.9010216674804687, 0.9008496704101563, 0.901128173828125, 0.9008875732421875, 0.901443603515625, 0.9013667602539063, 0.9012459716796875, 0.90106982421875, 0.9016514282226562, 0.9009028930664063, 0.9010933837890625, 0.9009028930664063, 0.9010616455078125, 0.9009520874023438, 0.901349365234375, 0.9014415283203125, 0.901707763671875, 0.9015367431640625, 0.9014681396484375, 0.9018613891601562, 0.901622802734375, 0.9018931274414063, 0.901707763671875, 0.9014057006835937, 0.901602294921875, 0.90210302734375, 0.9018941650390625, 0.9015654296875, 0.9017302856445313, 0.9016248168945312, 0.9020457153320313, 0.9013043212890625, 0.9014906616210937, 0.9013822021484375, 0.9013165283203125, 0.901349365234375, 1.86075439453125, 0.901201904296875, 0.9012777099609375, 0.9009407958984375, 0.9011107788085938, 0.9012059936523438, 0.901411865234375, 0.9011742553710937, 0.9014906616210937, 0.9008762817382813, 0.9011650390625, 0.9011548461914063, 0.9017159423828125, 0.9009899291992187, 0.9010339965820312, 0.9014087524414063, 0.901095458984375, 0.9012418212890625, 0.9011476440429688, 0.90099609375, 0.901496826171875, 0.9016145629882812, 0.9013197021484375, 0.9020405883789062, 0.9015439453125, 0.9012941284179687, 0.9013165893554688, 0.9014087524414063, 0.9009602661132813, 0.9014149169921875, 0.901086181640625, 0.901359619140625, 0.9010995483398437, 0.9015562133789062, 0.9015613403320313, 0.9021737060546875, 0.9016637573242188, 0.9020088500976563, 0.9014948120117188, 0.90166064453125, 0.901233642578125, 0.901607421875, 0.90140673828125, 0.9016708984375, 0.9019443359375, 0.9020333862304688, 0.90393701171875, 0.9017435913085937, 0.90208154296875, 0.9017467041015625, 0.901749755859375, 0.9017620239257812, 0.9014108276367188, 0.901612548828125, 0.9014630126953125, 0.901897216796875, 0.9016043701171875, 0.9021522216796874, 0.9015429077148438, 0.9017200927734375, 0.9018798217773437, 0.901327880859375, 0.9020098266601563, 1.8579189453125, 0.9009674072265625, 0.9019514770507813, 0.9013217163085937, 0.9013043212890625, 0.9010616455078125, 0.901369873046875, 0.9010882568359375, 0.9010647583007813, 0.9008700561523437, 0.901254150390625, 0.9011005249023437, 0.9012029418945312, 0.901781494140625, 0.9017036743164063, 0.9013206787109375, 0.901180419921875, 0.9015664672851562, 0.9012275390625, 0.901060546875, 0.9013350219726562, 0.9012828369140625, 0.9011179809570312, 0.902530029296875, 0.901454833984375, 0.9017108764648437, 0.9014998779296876, 0.9013156127929688, 0.9013903198242188, 0.901664794921875, 0.9012408447265625, 0.9015879516601563, 0.9012008666992187, 0.9010565185546875, 0.9010083618164062, 0.9016893310546875, 0.9017681884765625, 0.9018941650390625, 0.9012459716796875, 0.9014087524414063, 0.9018050537109376, 0.9015992431640625, 
0.9015572509765625, 0.9015726318359375, 0.9014558715820312, 0.9014138793945312, 0.901760009765625, 0.9032969970703125, 0.9014159545898438, 0.9015982055664062, 0.90126953125, 0.9023529052734375, 0.901634033203125, 0.9018787841796875, 0.9014353637695313, 0.9016453247070313, 0.9016279296875, 0.901738525390625, 0.9017559204101563, 0.902043701171875, 0.9015582275390625, 0.9019248657226563, 0.9020723266601562, 1.8581309814453124, 0.9010073852539062, 0.9013156127929688, 0.9008977661132812, 0.9008128051757812, 0.9016401977539062, 0.9013319702148438, 0.9011097412109375, 0.9010739135742187, 0.9013370971679687, 0.9015664672851562, 0.9017160034179688, 0.9023712768554687, 0.9015736083984375, 0.9013422241210938, 0.901080078125, 0.9018327026367188, 0.90148046875, 0.9011190185546875, 0.901212158203125, 0.9018624267578125, 0.9014783935546875, 0.9012479858398438, 0.9013688354492188, 0.90138623046875, 0.9015767211914063, 0.90112109375, 0.901693359375, 0.901707763671875, 0.9020232543945312, 0.901571533203125, 0.9019771118164063, 0.901349365234375, 0.9013718872070312, 0.9014671630859376, 0.9014220581054687, 0.9012838134765625, 0.9017538452148437, 0.9016350708007812, 0.9019105224609375, 0.9021767578125, 0.901813232421875, 0.9018296508789062, 0.9015695190429688, 0.903024658203125, 0.902118408203125, 0.9014558715820312, 0.9017098388671875, 0.901375, 0.901796875, 0.90182861328125, 0.9018388671875, 0.901644287109375, 0.9020845947265625, 0.9015941162109375, 0.9019043579101562, 0.9019207763671875, 0.901602294921875, 0.9014589233398438, 0.9020272827148438, 0.9018408813476563, 0.9020088500976563, 0.9018572998046875, 1.8591446533203124, 0.9011109008789062, 0.9014906005859376, 0.9012500610351563, 0.9013309326171876, 0.9015132446289063, 0.9010267944335938, 0.90169140625, 0.9015234375, 0.9012705078125, 0.90149169921875, 0.90165869140625, 0.90100830078125, 0.9009920043945312, 0.9015848999023437, 0.9015347290039063, 0.9014989013671875, 0.9024532470703125, 0.9021880493164063, 0.9016944580078124, 0.9015951538085938, 0.9013145751953126, 0.90147021484375, 0.9014896850585937, 0.9010872192382813, 0.9012111206054687, 0.9012377319335938, 0.9011046142578125, 0.90134423828125, 0.9021696166992188, 0.9015510864257813, 0.9017293090820313, 0.9014609985351563, 0.9018982543945312, 0.9016985473632813, 0.9015664672851562, 0.9012459716796875, 0.9016145629882812, 0.9012357177734375, 0.9017845458984375, 0.9017640991210938, 0.9023355102539062, 0.9016002807617187, 0.9013688354492188, 0.9013800659179687, 0.9019535522460937, 0.9013340454101563, 0.9017763671875, 0.9015776977539063, 0.90165966796875, 0.9015429077148438, 0.9014620361328125, 0.9018777465820312, 0.9020886840820312, 0.9014589233398438, 0.9016187133789062, 0.901876708984375, 0.903267333984375, 0.9016299438476563, 0.9016832275390625, 0.901592041015625, 0.9018306274414063, 0.9016535034179688, 1.8594969482421875, 0.90172314453125, 0.901623779296875, 0.9014261474609375, 0.9010974731445313, 0.9016514282226562, 0.9012612915039062, 0.9012162475585938, 0.9010811157226563, 0.900874267578125, 0.9016196899414063, 0.9018163452148438, 0.9015050048828125, 0.9010811157226563, 0.9016350708007812, 0.9010995483398437, 0.9011435546875, 0.9012008666992187, 0.9010196533203125, 0.90096337890625, 0.9009110717773438, 0.9015501098632812, 0.9013790893554687, 0.901444580078125, 0.9009868774414063, 0.9019638061523437, 0.9014251708984375, 0.9013463134765625, 0.9010083618164062, 0.9014886474609375, 0.9014528198242188, 0.9012612915039062, 0.902129638671875, 0.9023948974609375, 0.9013688354492188, 0.9017241821289063, 
0.90169140625, 0.9015562133789062, 0.9032264404296875, 0.90172509765625, 0.9013514404296875, 0.9015767211914063, 0.9013986206054687, 0.9023374633789063, 0.9012766723632812, 0.90123876953125, 0.9009448852539063, 0.9015643920898437, 0.9012295532226563, 0.9011148681640625, 0.9011896362304688, 0.9011199951171875, 0.9018316650390625, 0.9017835693359375, 0.9014640502929687, 0.9016084594726562, 0.90189208984375, 0.901802978515625, 0.9020620727539063, 0.9019801635742187, 0.9017937622070312, 0.9018091430664062, 0.90191357421875, 1.858798583984375, 0.9011435546875, 0.9014241333007813, 0.90132275390625, 0.9010053100585937, 0.9013289184570312, 0.9013585815429688, 0.9011660766601562, 0.9014292602539062, 0.901317626953125, 0.901381103515625, 0.901228515625, 0.9015480346679687, 0.9016514282226562, 0.9015521240234375, 0.90144873046875, 0.9019647827148437, 0.9018716430664062, 0.9012531127929687, 0.9012572021484375, 0.901212158203125, 0.9014169311523438, 0.901138427734375, 0.9026416625976562, 0.9016320190429687, 0.9017907104492188, 0.9014876098632812, 0.9016504516601562, 0.901855224609375, 0.9019913940429688, 0.9016053466796875, 0.9015654296875, 0.9016565551757812, 0.9016135864257813, 0.901591064453125, 0.9022617797851562, 0.901928955078125, 0.9018091430664062, 0.901897216796875, 0.9014537963867187, 0.9010780029296875, 0.9012725830078125, 0.9011988525390625, 0.9014630126953125, 0.90176416015625, 0.9015735473632812, 0.9017302856445313, 0.9021542358398438, 0.9014528198242188, 0.9019176635742188, 0.9015859375, 0.9015582275390625, 0.901423095703125, 0.9016873168945313, 0.9015879516601563, 0.9017313232421875, 0.9018091430664062, 0.902240234375, 0.9021122436523438, 0.9021419677734375, 0.9018674926757813, 0.9018511352539063, 0.9022258911132812, 1.8597550048828124, 0.9009326171875, 0.90142822265625, 0.901043212890625, 0.9013135375976562, 0.9019094848632813, 0.9014773559570313, 0.9016790771484375, 0.9016719360351563, 0.9012367553710937, 0.9012531127929687, 0.9010974731445313, 0.901064697265625, 0.90119677734375, 0.9015040283203125, 0.901970947265625, 0.90260888671875, 0.9018121948242187, 0.9016063842773437, 0.9018624267578125, 0.9015449829101563, 0.901992431640625, 0.9017988891601563, 0.901696533203125, 0.9012930297851562, 0.9012428588867187, 0.90197607421875, 0.9014159545898438, 0.90151220703125, 0.9018674926757813, 0.9016678466796875, 0.9016135864257813, 0.9015623779296875, 0.9015643920898437, 0.9014937744140625, 0.902150146484375, 0.9015090942382813, 0.9018839111328125, 0.9017354125976562, 0.9015787353515625, 0.9013851928710938, 0.9017507934570312, 0.9015869140625, 0.9017579345703125, 0.901917724609375, 0.9019903564453124, 0.90163916015625, 0.9023303833007813, 0.901960693359375, 0.9023068237304688, 0.9017538452148437, 0.9017886962890626, 0.901560302734375, 0.9016637573242188, 0.9014384765625, 0.9020631103515625, 0.9018910522460938, 0.9014251708984375, 0.9013934326171875, 0.9016893310546875, 0.9019873657226563, 0.9017251586914062, 0.901432373046875, 1.8595020751953124, 0.9010237426757812, 0.9012930297851562, 0.9010022583007813, 0.9012469482421875, 0.9015675048828125, 0.9011814575195313, 0.9016350708007812, 0.9019985961914062, 0.9014097900390625, 0.9013739624023438, 0.9014691772460938, 0.901528564453125, 0.9014323120117187, 0.90157568359375, 0.9015634155273438, 0.9018613891601562, 0.9021675415039062, 0.9016709594726563, 0.9019002685546875, 0.9019852905273438, 0.9018562622070313, 0.900979736328125, 0.90132373046875, 0.901507080078125, 0.9013309936523437, 0.9011680908203125, 0.9011712036132813, 0.9011783447265626, 
0.9020948486328125, 0.9016350708007812, 0.9018470458984374, 0.9016832275390625, 0.901180419921875, 0.9011251220703125, 0.901518310546875, 0.9013657836914063, 0.9012398071289063, 0.90106982421875, 0.9011405029296875, 0.901254150390625, 0.9014476928710937, 0.9010718994140625, 0.9012920532226563, 0.90113330078125, 0.9011609497070312, 0.901159912109375, 0.9013043212890625, 0.9009213256835937, 0.9009336547851563, 0.90114453125, 0.9015357666015625, 0.9015296020507813, 0.9014456176757812, 0.9014261474609375, 0.9017907104492188, 0.9016565551757812, 0.9016832275390625, 0.90138623046875, 0.9014343872070313, 0.9012725830078125, 0.9016371459960938, 0.902470703125, 1.8604583740234375, 0.9015695190429688, 0.9012644653320312, 0.9009939575195313, 0.9012254638671875, 0.9013217163085937, 0.9012008666992187, 0.9012828369140625, 0.9010513916015624, 0.9015951538085938, 0.9012674560546875, 0.9009285278320313, 0.9009203491210938, 0.9009633178710937, 0.9009141845703125, 0.9008394165039062, 0.9009039306640625, 0.901094482421875, 0.9009048461914062, 0.9008230590820312, 0.9013002319335938, 0.9019085083007813, 0.9010206909179688, 0.9013463134765625, 0.9014251708984375, 0.9011159057617187, 0.9022166748046875, 0.9010892944335938, 0.900806640625, 0.9012715454101563, 0.9008599243164063, 0.9013524780273438, 0.9015029907226563, 0.9012828369140625, 0.9010565185546875, 0.9016555786132813, 0.9012265014648437, 0.90136474609375, 0.9013237915039063, 0.9012940673828125, 0.90121728515625, 0.9013165893554688, 0.901396484375, 0.9020753784179687, 0.9011875610351563, 0.9012091064453125, 0.9015101318359375, 0.9015930786132812, 0.90141796875, 0.9013585815429688, 0.9015480346679687, 0.9018736572265625, 0.9019412231445313, 0.9015439453125, 0.9019586791992188, 0.9018480834960938, 0.9013289184570312, 0.9018142700195313, 0.9015265502929688, 0.901791748046875, 0.9013944091796875, 0.9017569580078125, 0.9013258056640625]",tokens/s,1.0926340535470893,,,,,,,, -4bit-awq-gemm-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,2,2,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 304, in hf_raise_for_status - response.raise_for_status() - File ""/usr/local/lib/python3.10/dist-packages/requests/models.py"", line 1024, in raise_for_status - raise HTTPError(http_error_msg, response=self) -requests.exceptions.HTTPError: 404 Client Error: Not Found for url: https://huggingface.co/2/resolve/main/config.json - -The above exception was the direct cause 
of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1221, in hf_hub_download - return _hf_hub_download_to_cache_dir( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1325, in _hf_hub_download_to_cache_dir - _raise_on_head_call_error(head_call_error, force_download, local_files_only) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1823, in _raise_on_head_call_error - raise head_call_error - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1722, in _get_metadata_or_catch_error - metadata = get_hf_file_metadata(url=url, proxies=proxies, timeout=etag_timeout, headers=headers) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1645, in get_hf_file_metadata - r = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 372, in _request_wrapper - response = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 396, in _request_wrapper - hf_raise_for_status(response) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status - raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-6694934c-552c89d22edaa57c5d86bf4f;e5416f28-8c5b-4dea-ab57-2ffa4c6dbce2) - -Repository Not Found for url: https://huggingface.co/2/resolve/main/config.json. -Please make sure you specified the correct `repo_id` and `repo_type`. -If you are trying to access a private or gated repo, make sure you are authenticated. 
- -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 425, in cached_file - raise EnvironmentError( -OSError: 2 is not a local folder and is not a valid model identifier listed on 'https://huggingface.co/models' -If this is a private repository, make sure to pass a token having permission to this repo either by logging in with `huggingface-cli login` or by passing `token=` - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemm-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,tiiuae/falcon-40b,tiiuae/falcon-40b,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run - report = scenario.run(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 105, in run - _ = backend.generate(self.inputs, self.config.generate_kwargs) - File 
""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context - return func(*args, **kwargs) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 400, in generate - return self.pretrained_model.generate(**inputs, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context - return func(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 1914, in generate - result = self._sample( - File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2651, in _sample - outputs = self( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/root/.cache/huggingface/modules/transformers_modules/tiiuae/falcon-40b/4a70170c215b36a3cce4b4253f6d0612bb7d4146/modeling_falcon.py"", line 900, in forward - transformer_outputs = self.transformer( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/root/.cache/huggingface/modules/transformers_modules/tiiuae/falcon-40b/4a70170c215b36a3cce4b4253f6d0612bb7d4146/modeling_falcon.py"", line 797, in forward - outputs = block( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/root/.cache/huggingface/modules/transformers_modules/tiiuae/falcon-40b/4a70170c215b36a3cce4b4253f6d0612bb7d4146/modeling_falcon.py"", line 477, in forward - mlp_output = self.mlp(mlp_layernorm_out) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/root/.cache/huggingface/modules/transformers_modules/tiiuae/falcon-40b/4a70170c215b36a3cce4b4253f6d0612bb7d4146/modeling_falcon.py"", line 409, in forward - x = self.act(self.dense_h_to_4h(x)) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/gemm.py"", line 242, in forward - out = WQLinearMMFunction.apply( - File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 598, in apply - return super().apply(*args, **kwargs) # type: ignore[misc] - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/gemm.py"", line 50, in forward - out = dequantize_gemm(qweight, qzeros, scales, w_bit, group_size) - File ""/usr/local/lib/python3.10/dist-packages/awq/utils/packing_utils.py"", line 85, in dequantize_gemm - iweight, izeros 
= unpack_awq(qweight, qzeros, bits) - File ""/usr/local/lib/python3.10/dist-packages/awq/utils/packing_utils.py"", line 12, in unpack_awq - iweights = torch.bitwise_right_shift(qweight[:, :, None], shifts[None, None, :]).to( -torch.cuda.OutOfMemoryError: CUDA out of memory. Tried to allocate 2.00 GiB. GPU - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemm-eager,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,EleutherAI/pythia-160m,EleutherAI/pythia-160m,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.40.2,,0.30.1,,,,1.19.2,,,,0.11.1,,,,,,,,,,,,,,,,,,,,,,,,,,,MB,1303.977984,1030.22592,0.0,383.778816,312.280064,s,10,0.30782016181945804,0.030782016181945804,0.001617235684294077,0.030563695907592774,0.031164259910583494,0.03323989057540893,0.03490039510726929,"[0.03531552124023438, 0.03054502487182617, 0.030670495986938478, 0.029116479873657227, 0.030544319152832032, 0.02917731285095215, 0.030620607376098632, 0.030559423446655274, 0.030567968368530273, 0.030703008651733397]",tokens/s,8316.544260351227,kWh,3.557098667864181e-07,1.9491237936146868e-07,8.339715507108577e-07,1.3845937968587447e-06,tokens/kWh,184891771.5656334,MB,1303.977984,1030.22592,0.0,383.778816,347.090432,s,10,18.765849975585937,1.8765849975585938,0.01629298714952931,1.8788928833007814,1.8852921142578125,1.8978253173828123,1.9078518798828124,"[1.9103585205078124, 1.8825069580078124, 1.879499267578125, 1.8580986328125, 1.8786011962890625, 1.8434554443359374, 1.8807445068359374, 1.8754886474609376, 1.8791845703125, 1.8779122314453125]",tokens/s,33.5716208335684,kWh,2.201072483317281e-05,1.2059537590589184e-05,4.825437327548859e-05,8.232463569925058e-05,tokens/kWh,765263.0280703872,,s,629,19.00581683158875,0.030215925010474952,0.003670631507539437,0.02977996826171875,0.03002470397949219,0.03029995498657227,0.05998026794433601,"[0.03094118309020996, 0.03138150405883789, 0.03138355255126953, 0.031524864196777344, 0.030980096817016602, 0.03207884979248047, 0.032363521575927735, 0.03149004745483398, 0.03120844841003418, 0.031268863677978515, 0.03084492874145508, 0.032194561004638675, 0.031160320281982422, 0.03057868766784668, 0.030236671447753907, 0.03517747116088867, 0.030717952728271485, 0.030108671188354492, 0.03002470397949219, 0.029833215713500977, 0.029899776458740233, 0.02977177619934082, 0.02977791976928711, 0.02977382469177246, 0.030176256179809572, 0.0299683837890625, 0.029740032196044923, 0.02977894401550293, 0.029844480514526366, 0.029732864379882814, 0.02974412727355957, 0.029797376632690428, 0.029791231155395507, 0.029726720809936522, 0.029648895263671874, 0.029749248504638674, 0.02977791976928711, 0.029949951171875, 0.02977996826171875, 0.02976255989074707, 0.02981990432739258, 0.029797376632690428, 0.0297728328704834, 0.02997039985656738, 0.029839359283447265, 0.029833215713500977, 0.029870080947875976, 0.02978508758544922, 0.02968780708312988, 0.029784063339233398, 0.02979327964782715, 0.029755392074584962, 0.02972774314880371, 0.029976640701293945, 0.02982700729370117, 0.029852672576904295, 
0.029896703720092774, 0.029731840133666993, 0.029747200012207032, 0.02983628845214844, 0.029772800445556642, 0.029739007949829102, 0.06072115325927734, 0.029829120635986327, 0.02977689552307129, 0.029834239959716798, 0.030591999053955078, 0.03021414375305176, 0.029755392074584962, 0.02983628845214844, 0.02980352020263672, 0.029842432022094727, 0.029707263946533204, 0.029665279388427734, 0.029698047637939453, 0.02979430389404297, 0.029593599319458007, 0.02993152046203613, 0.0301527042388916, 0.029792255401611328, 0.029764608383178712, 0.029861888885498046, 0.03102822494506836, 0.030027776718139648, 0.02980147171020508, 0.030087167739868165, 0.02975129508972168, 0.030086143493652344, 0.029826047897338868, 0.02974617576599121, 0.030042112350463866, 0.029730815887451172, 0.029899776458740233, 0.029838336944580077, 0.02994790458679199, 0.02980659294128418, 0.02976972770690918, 0.029682687759399414, 0.02978816032409668, 0.029817855834960938, 0.029734912872314452, 0.029831167221069335, 0.02974208068847656, 0.02978508758544922, 0.029848575592041016, 0.029800447463989257, 0.029785120010375976, 0.029689823150634766, 0.03014553642272949, 0.029886463165283202, 0.03018035125732422, 0.029938688278198244, 0.02973695945739746, 0.030068735122680663, 0.02973695945739746, 0.029897727966308595, 0.0297891845703125, 0.0298024959564209, 0.02995814323425293, 0.02976051139831543, 0.02976870346069336, 0.02997555160522461, 0.029975584030151367, 0.029692895889282228, 0.029708288192749024, 0.061895679473876954, 0.030073856353759764, 0.02974412727355957, 0.029680639266967773, 0.029848575592041016, 0.029882368087768556, 0.029543424606323244, 0.02978713607788086, 0.029755392074584962, 0.029875200271606447, 0.029856767654418945, 0.029831167221069335, 0.029770751953125, 0.02977791976928711, 0.02973695945739746, 0.029897727966308595, 0.030079999923706056, 0.029894655227661132, 0.029703168869018554, 0.0297574405670166, 0.029837312698364257, 0.029813760757446288, 0.029814783096313476, 0.029743167877197267, 0.029764543533325194, 0.029707263946533204, 0.02977791976928711, 0.029730815887451172, 0.030044160842895507, 0.029833215713500977, 0.02974617576599121, 0.029815807342529296, 0.029740032196044923, 0.029815807342529296, 0.029816831588745117, 0.029799423217773437, 0.029713407516479492, 0.0297891845703125, 0.02983628845214844, 0.029896703720092774, 0.029834239959716798, 0.0297574405670166, 0.029868032455444334, 0.029748224258422853, 0.029849599838256836, 0.02977382469177246, 0.02986604881286621, 0.029807552337646485, 0.030026752471923827, 0.02976255989074707, 0.02976972770690918, 0.029892608642578124, 0.029701120376586915, 0.02976870346069336, 0.02976255989074707, 0.029797376632690428, 0.02977177619934082, 0.029867008209228517, 0.029808639526367187, 0.02974515151977539, 0.029740032196044923, 0.029821952819824218, 0.030018560409545897, 0.05799935913085937, 0.028490751266479493, 0.028519424438476562, 0.028368896484375, 0.028404735565185548, 0.02834022331237793, 0.028285951614379884, 0.02834739112854004, 0.028308479309082032, 0.028387327194213868, 0.028321792602539062, 0.028298240661621094, 0.028923904418945313, 0.031113216400146484, 0.030136320114135744, 0.02981990432739258, 0.029868032455444334, 0.029728832244873046, 0.02970515251159668, 0.029386751174926756, 0.0297523193359375, 0.029822975158691405, 0.02971238327026367, 0.02978508758544922, 0.02980352020263672, 0.02977484893798828, 0.029677600860595704, 0.029713375091552734, 0.029651968002319336, 0.02973798370361328, 0.029881343841552735, 0.029772800445556642, 
0.02979532814025879, 0.029672447204589843, 0.029861888885498046, 0.029646848678588866, 0.029551616668701174, 0.029764608383178712, 0.02973695945739746, 0.029662208557128908, 0.02970419120788574, 0.029772800445556642, 0.02975436782836914, 0.02972876739501953, 0.02979430389404297, 0.029699071884155274, 0.02981888008117676, 0.029753376007080078, 0.02983011245727539, 0.029718528747558592, 0.029489152908325194, 0.029640703201293944, 0.029736991882324218, 0.02938057518005371, 0.029748224258422853, 0.029775871276855468, 0.02980659294128418, 0.029814783096313476, 0.029716512680053712, 0.029680608749389648, 0.02979635238647461, 0.029783039093017577, 0.02978816032409668, 0.06087680053710937, 0.029916160583496092, 0.030342144012451173, 0.029867008209228517, 0.029744159698486327, 0.029726688385009765, 0.02975846481323242, 0.029857791900634766, 0.0297891845703125, 0.029784063339233398, 0.02979020881652832, 0.029849599838256836, 0.029449216842651366, 0.02939084815979004, 0.029541376113891602, 0.029874176025390626, 0.029816831588745117, 0.029724672317504884, 0.029915136337280275, 0.029921279907226563, 0.029829120635986327, 0.029734912872314452, 0.02981888008117676, 0.02980147171020508, 0.029860864639282225, 0.029874176025390626, 0.029817855834960938, 0.029855743408203125, 0.029718528747558592, 0.029854719161987304, 0.029833215713500977, 0.02995199966430664, 0.02972979164123535, 0.02980147171020508, 0.029864959716796875, 0.030071807861328126, 0.029874208450317383, 0.02984239959716797, 0.02975027275085449, 0.02978508758544922, 0.029832191467285156, 0.029860864639282225, 0.02976255989074707, 0.029702144622802733, 0.029920255661010742, 0.02975948715209961, 0.02994790458679199, 0.02977382469177246, 0.02973388862609863, 0.029643775939941407, 0.029703168869018554, 0.029852672576904295, 0.029839359283447265, 0.02983628845214844, 0.029784063339233398, 0.029769792556762695, 0.029781951904296874, 0.02998784065246582, 0.02981068801879883, 0.02998784065246582, 0.0297574405670166, 0.029809696197509766, 0.029711328506469726, 0.05807513427734375, 0.02834329605102539, 0.028271615982055662, 0.02834329605102539, 0.028402687072753906, 0.028421119689941408, 0.028279808044433592, 0.028318719863891603, 0.028318719863891603, 0.02834022331237793, 0.02835148811340332, 0.02838118362426758, 0.02836172866821289, 0.028310527801513673, 0.02832793617248535, 0.02815795135498047, 0.028197887420654297, 0.02831667137145996, 0.028222463607788087, 0.028233728408813476, 0.028296192169189452, 0.028251136779785156, 0.028318719863891603, 0.02973798370361328, 0.030136320114135744, 0.029817855834960938, 0.029731840133666993, 0.02978099250793457, 0.02972979164123535, 0.029930496215820314, 0.029886463165283202, 0.029709312438964845, 0.029739007949829102, 0.029848575592041016, 0.02972774314880371, 0.02976870346069336, 0.029648895263671874, 0.029732864379882814, 0.029813760757446288, 0.029820928573608397, 0.029755392074584962, 0.029713407516479492, 0.029643775939941407, 0.02977996826171875, 0.029856767654418945, 0.029802528381347657, 0.029723615646362306, 0.029940736770629882, 0.029823007583618163, 0.029761503219604492, 0.02976972770690918, 0.029691904067993165, 0.02968783950805664, 0.02980246353149414, 0.029711360931396483, 0.029895679473876953, 0.029684736251831056, 0.029894655227661132, 0.029791231155395507, 0.029692928314208986, 0.029740032196044923, 0.029820928573608397, 0.030217216491699218, 0.06100377655029297, 0.029852672576904295, 0.02978508758544922, 0.029661184310913087, 0.029660160064697266, 0.02972368049621582, 0.029649887084960937, 
0.02981171226501465, 0.02997555160522461, 0.02975846481323242, 0.02975846481323242, 0.029682687759399414, 0.029800447463989257, 0.02975129508972168, 0.030038015365600586, 0.02983526420593262, 0.02981888008117676, 0.029861888885498046, 0.029864959716796875, 0.029840383529663086, 0.030003200531005858, 0.0307906551361084, 0.03020697593688965, 0.029899776458740233, 0.02975846481323242, 0.02978508758544922, 0.029917184829711913, 0.02975129508972168, 0.029702144622802733, 0.02978713607788086, 0.02976972770690918, 0.02976665687561035, 0.029708288192749024, 0.029823999404907226, 0.029826047897338868, 0.02983628845214844, 0.03013222312927246, 0.03099545669555664, 0.030038015365600586, 0.029878271102905272, 0.029697023391723632, 0.02977791976928711, 0.029929471969604493, 0.029862911224365234, 0.029932544708251952, 0.029714431762695313, 0.02996940803527832, 0.02980659294128418, 0.02976051139831543, 0.02951372718811035, 0.029684736251831056, 0.029820928573608397, 0.02977996826171875, 0.029778976440429688, 0.029754335403442383, 0.029816831588745117, 0.02976563262939453, 0.029755456924438477, 0.02987615966796875, 0.029709312438964845, 0.029723743438720703, 0.029749151229858398, 0.02977382469177246, 0.06091059112548828, 0.03012403106689453, 0.0297574405670166, 0.029799423217773437, 0.02974515151977539, 0.02982809638977051, 0.029861888885498046, 0.029743104934692382, 0.02978201675415039, 0.029713407516479492, 0.029759519577026366, 0.029883359909057616, 0.02977484893798828, 0.02979532814025879, 0.029874176025390626, 0.029875200271606447, 0.029850624084472657, 0.02984351921081543, 0.02972358322143555, 0.029660160064697266, 0.02992742347717285, 0.02974515151977539, 0.02968780708312988, 0.029831167221069335, 0.029741056442260744, 0.029732864379882814, 0.029642751693725586, 0.029642751693725586, 0.029895679473876953, 0.029628416061401368, 0.029723648071289063, 0.029618175506591796, 0.029715456008911133, 0.029864959716796875, 0.029732864379882814, 0.02978201675415039, 0.029748224258422853, 0.029813760757446288, 0.02982707214355469, 0.0297574405670166, 0.029854719161987304, 0.02972876739501953, 0.029657087326049804, 0.029668352127075196, 0.029611007690429687, 0.029412351608276367, 0.02972876739501953, 0.029885440826416015, 0.029694976806640624, 0.02976870346069336, 0.029775871276855468, 0.02993152046203613, 0.02977894401550293, 0.029700096130371095, 0.02977382469177246, 0.029710336685180663, 0.029688831329345702, 0.029684736251831056, 0.029700096130371095, 0.02970419120788574, 0.029667327880859375, 0.029730815887451172, 0.029863935470581054, 0.060827648162841794, 0.02976563262939453, 0.029716480255126954, 0.029770751953125, 0.02973388862609863, 0.02977689552307129, 0.02975846481323242, 0.029849599838256836, 0.0298024959564209, 0.029850624084472657, 0.029688831329345702, 0.029731840133666993, 0.029740032196044923, 0.029731840133666993, 0.029859840393066408, 0.02975027275085449, 0.029707263946533204, 0.029718528747558592, 0.02973388862609863, 0.029772800445556642, 0.029853696823120116, 0.02976563262939453, 0.029611007690429687, 0.02977996826171875, 0.02979430389404297, 0.029807615280151366, 0.029343807220458984, 0.029495231628417967, 0.029894655227661132, 0.03171123123168945, 0.030046207427978516, 0.03002470397949219, 0.029706239700317383, 0.02974412727355957, 0.029679616928100585, 0.029911039352416992, 0.02976051139831543, 0.029823999404907226, 0.029944831848144532, 0.02991926383972168, 0.029790176391601562, 0.02977382469177246, 0.029692928314208986, 0.029726720809936522, 0.029773855209350587, 
0.029737951278686524, 0.02981990432739258, 0.029906944274902345, 0.029869056701660155, 0.029907968521118163, 0.029767679214477538, 0.02978201675415039, 0.02968780708312988, 0.02976563262939453, 0.029872127532958984, 0.02996428871154785, 0.02977689552307129, 0.02996326446533203, 0.029944831848144532, 0.029889568328857422, 0.029706207275390625, 0.029854719161987304, 0.029902847290039062, 0.06100787353515625, 0.029770816802978516, 0.029812671661376952, 0.02975027275085449, 0.029656063079833983, 0.029666303634643554, 0.02972979164123535, 0.0297256965637207, 0.029929471969604493, 0.029775871276855468, 0.029690879821777344, 0.029693952560424806, 0.02958028793334961, 0.029640703201293944, 0.02977996826171875, 0.029739007949829102, 0.03000115203857422, 0.029897727966308595, 0.029840383529663086, 0.029814783096313476, 0.0297523193359375, 0.029708288192749024, 0.02975027275085449, 0.02981068801879883, 0.029732864379882814, 0.029838336944580077, 0.029718528747558592, 0.02970419120788574, 0.0297205753326416, 0.029740032196044923, 0.02974617576599121, 0.02969599914550781, 0.029757503509521485, 0.0298853759765625, 0.02979840087890625, 0.029700096130371095, 0.029752351760864257, 0.02980656051635742, 0.02973695945739746, 0.029663232803344725, 0.02972159957885742, 0.030027776718139648, 0.029971519470214845, 0.029904832839965822, 0.029829120635986327, 0.02974412727355957, 0.030027776718139648, 0.029853696823120116, 0.029837312698364257, 0.029840383529663086, 0.029850624084472657, 0.02986911964416504, 0.030047168731689455, 0.030050304412841795, 0.029815807342529296, 0.029775871276855468, 0.02998784065246582, 0.029890560150146486, 0.02975334358215332, 0.029883392333984377, 0.02983526420593262, 0.029847551345825195, 0.02976972770690918]",tokens/s,33.09513111557333,,,,,,,, -4bit-awq-gemm-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,meta-llama/Meta-Llama-3-70B,meta-llama/Meta-Llama-3-70B,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - self.load_model_from_pretrained() - File 
""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3904, in from_pretrained - dispatch_model(model, **device_map_kwargs) - File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 489, in dispatch_model - model.to(device) - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 2796, in to - return super().to(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1173, in to - return self._apply(convert) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 779, in _apply - module._apply(fn) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 779, in _apply - module._apply(fn) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 779, in _apply - module._apply(fn) - [Previous line repeated 2 more times] - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 853, in _apply - self._buffers[key] = fn(buf) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1159, in convert - return t.to( -torch.cuda.OutOfMemoryError: CUDA out of memory. Tried to allocate 20.00 MiB. GPU - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemm-eager,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,Qwen/Qwen1.5-14B,Qwen/Qwen1.5-14B,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.40.2,,0.30.1,,,,1.19.2,,,,0.11.1,,,,,,,,,,,,,,,,,,,,,,,,,,,MB,3951.190016,12732.33408,0.0,12085.886976,11337.496064,s,10,10.983638916015623,1.0983638916015628,0.002145812483257245,1.0982380981445312,1.1009110717773436,1.1018160705566407,1.1025400695800782,"[1.1027210693359375, 1.1007099609375, 1.0960345458984375, 1.0957255859375, 1.0965640869140625, 1.096655029296875, 1.0976143798828124, 1.09886181640625, 1.0996507568359375, 1.0991016845703125]",tokens/s,233.07394021003134,kWh,1.2942184060811996e-05,7.0910633564744784e-06,6.339727294000231e-05,8.343052035728877e-05,tokens/kWh,3068421.4709879244,MB,3951.190016,12732.33408,0.0,12085.886976,11686.800384,s,10,637.80165234375,63.780165234375,0.009100497531913991,63.777451171875,63.784855859375,63.7953341796875,63.8037168359375,"[63.775953125, 63.7747265625, 63.78252734375, 63.77445703125, 63.77937109375, 63.7818203125, 63.8058125, 63.7783125, 63.77208203125, 63.77658984375]",tokens/s,0.9877679019565393,kWh,0.0007529117455250688,0.00041266306092793453,0.003710265523765599,0.004875840330218602,tokens/kWh,12920.849686063342,,s,629,646.6700090332033,1.028092224218129,0.13044395391192767,1.0122936401367189,1.0129029296874998,1.0132424438476564,2.1090928125,"[1.0118082275390625, 
1.0120591430664063, 1.0121963500976563, 1.0122721557617187, 1.0119987182617187, 1.0119086303710938, 1.0127564697265625, 1.0119772338867188, 1.013127197265625, 1.0117877807617188, 1.0121246948242189, 1.0118850708007812, 1.0127083740234375, 1.0122250366210936, 1.0122311401367188, 1.01222705078125, 1.0121298217773438, 1.01207958984375, 1.0122537231445312, 1.0121769409179688, 1.0120580444335938, 1.0120304565429687, 1.0126674194335938, 1.0122168579101563, 1.0119823608398437, 1.0118645629882812, 1.0125404052734375, 1.0122362670898437, 1.01228955078125, 1.012083740234375, 1.0118092651367188, 1.0122362670898437, 1.0126837768554688, 1.0127236938476563, 1.0127257690429687, 1.0131476440429688, 1.0123878784179687, 1.0122587280273438, 1.0124882202148437, 1.0123243408203124, 1.0121123657226563, 1.0124574584960937, 1.012696044921875, 1.0123038940429687, 1.012063232421875, 1.01218408203125, 1.0126653442382811, 1.012890625, 1.0124677124023438, 1.0129090576171875, 1.0120939331054688, 1.01304931640625, 1.0123591918945312, 1.0120345458984374, 1.0119608154296875, 1.0121820068359375, 1.0124882202148437, 1.0120724487304686, 1.01193115234375, 1.0123868408203125, 1.0123858642578125, 1.0128014526367188, 2.113512451171875, 1.0119444580078125, 1.0120970458984375, 1.0116249389648437, 1.0117980346679687, 1.0118410034179688, 1.0118922119140625, 1.0117406616210938, 1.012063232421875, 1.0121236572265624, 1.0118225708007813, 1.0117457885742187, 1.012200439453125, 1.0118594360351563, 1.0123018188476562, 1.0119075927734376, 1.0119393310546876, 1.0118450927734375, 1.0121697387695312, 1.0119772338867188, 1.0117940063476563, 1.0117733764648438, 1.0121502685546875, 1.0117877807617188, 1.0121226196289062, 1.0117345581054686, 1.011778564453125, 1.0116874389648438, 1.0122762451171874, 1.0118328247070312, 1.0120519409179687, 1.0118645629882812, 1.0123099975585939, 1.0122506103515625, 1.012801513671875, 1.0120089721679688, 1.0126571655273438, 1.0122014770507812, 1.0129817504882812, 1.0126458740234374, 1.012490234375, 1.0121615600585938, 1.0127646484375, 1.0119393310546876, 1.0128711547851563, 1.011999755859375, 1.0165934448242187, 1.0120037841796874, 1.0129735717773438, 1.0124287719726563, 1.0122291259765626, 1.0124595336914062, 1.01273291015625, 1.0122619018554688, 1.0127390747070313, 1.0128394165039063, 1.0125383911132813, 1.01269091796875, 1.013369873046875, 1.0124830932617188, 1.0126510009765626, 1.0123509521484375, 1.0127933349609375, 2.1089033203125, 1.0117857055664063, 1.0120253295898438, 1.012158447265625, 1.0124718017578125, 1.0120816650390625, 1.0120447998046875, 1.0125680541992188, 1.0123571166992187, 1.0120724487304686, 1.0127575073242188, 1.01252001953125, 1.0122987060546875, 1.0124451904296874, 1.0127728881835938, 1.011794921875, 1.011989501953125, 1.0122485961914063, 1.0122066040039062, 1.0119823608398437, 1.012210693359375, 1.0119772338867188, 1.01199462890625, 1.0123724975585937, 1.0123673706054688, 1.0120867919921874, 1.0119280395507813, 1.0121994018554688, 1.0121431274414063, 1.0121226196289062, 1.0124666748046875, 1.0119382934570313, 1.012031494140625, 1.0129346313476562, 1.012621337890625, 1.0127984619140624, 1.0124257202148437, 1.012552734375, 1.0124932861328124, 1.0126151733398439, 1.0124769287109374, 1.0124185791015625, 1.0123202514648437, 1.0127390747070313, 1.0145853271484375, 1.0121594848632813, 1.0121994018554688, 1.016111083984375, 1.01210009765625, 1.012337646484375, 1.0126919555664062, 1.0125444946289062, 1.0131015625, 1.0132469482421875, 1.0124186401367188, 1.0119146728515624, 1.0125578002929687, 
1.0123294677734376, 1.0121595458984376, 1.0121768798828126, 1.0121963500976563, 1.0120345458984374, 1.0120714111328124, 2.108083251953125, 1.0117611694335937, 1.0117723999023438, 1.0123253784179687, 1.0121656494140625, 1.011751953125, 1.011820556640625, 1.0120601806640626, 1.0119454956054688, 1.01188916015625, 1.0119761962890625, 1.0117877807617188, 1.0130258178710938, 1.0123018188476562, 1.0121328735351562, 1.01246875, 1.0121953125, 1.0118369140625, 1.0122199096679687, 1.0124267578125, 1.0122546997070312, 1.0124503173828125, 1.0120703735351562, 1.0122537231445312, 1.0119536743164061, 1.0123397216796874, 1.0123960571289063, 1.0120325317382812, 1.0122772216796876, 1.012115478515625, 1.012005859375, 1.011937255859375, 1.0124103393554686, 1.0128527221679688, 1.012220947265625, 1.0126182250976563, 1.0130596313476563, 1.012464599609375, 1.0132828369140625, 1.0123468627929688, 1.0119547119140626, 1.0120048828125, 1.0124175415039063, 1.012010986328125, 1.0151219482421876, 1.0123069458007812, 1.0124124145507813, 1.0122076416015624, 1.0128138427734374, 1.0126878662109375, 1.0123939819335936, 1.0121318359375, 1.01235302734375, 1.011895263671875, 1.0121062622070311, 1.0122885131835937, 1.0123724975585937, 1.012242431640625, 1.012421630859375, 1.0123786010742188, 1.0121113891601563, 1.0120017700195312, 1.0122393798828124, 2.10916650390625, 1.0120038452148437, 1.0127247314453125, 1.0125916137695312, 1.0125660400390626, 1.0119761962890625, 1.0121748657226564, 1.0119721069335939, 1.0121370239257812, 1.0121942138671876, 1.0123048706054687, 1.0119403686523438, 1.0117099609375, 1.01187890625, 1.0121195678710937, 1.0117509155273436, 1.0118615112304687, 1.013433349609375, 1.012052978515625, 1.0117959594726562, 1.0120274047851563, 1.0121123657226563, 1.012421630859375, 1.012052978515625, 1.0121298217773438, 1.0119987182617187, 1.0118973388671875, 1.0125066528320312, 1.0120836791992187, 1.0121307983398438, 1.01264697265625, 1.0120714111328124, 1.0120857543945312, 1.0123540649414062, 1.0124544067382812, 1.0121564331054687, 1.0122383422851562, 1.0122454833984376, 1.0119935913085938, 1.0119239501953126, 1.0125363159179688, 1.0117755126953125, 1.011968017578125, 1.0119495239257812, 1.0124840698242188, 1.0122025146484375, 1.0122854614257812, 1.01258447265625, 1.0125885620117188, 1.0125301513671876, 1.012516845703125, 1.012400146484375, 1.0123970336914063, 1.0127892456054688, 1.01247998046875, 1.01256396484375, 1.0124237060546875, 1.0165504150390625, 1.0126458740234374, 1.0126848754882813, 1.0138418579101562, 1.0126428833007812, 1.0132418212890626, 2.10956298828125, 1.0119423828125, 1.0119608154296875, 1.0120847778320312, 1.011984375, 1.0120007934570312, 1.0124031982421875, 1.01182568359375, 1.0117908325195313, 1.0122680053710937, 1.012263916015625, 1.0119035034179686, 1.0124677734375, 1.0120057983398438, 1.0118276977539062, 1.011904541015625, 1.0125209350585938, 1.012252685546875, 1.0120653076171875, 1.0124205932617187, 1.0122495727539063, 1.0123724975585937, 1.0126407470703125, 1.012590576171875, 1.0119659423828125, 1.0125209350585938, 1.0127493286132812, 1.0124779663085937, 1.0127708129882813, 1.0121123657226563, 1.0121646118164063, 1.0116229248046875, 1.0122229614257812, 1.0120335083007812, 1.0120325317382812, 1.0120078735351563, 1.0126233520507812, 1.0124185791015625, 1.01650634765625, 1.0135787353515624, 1.012727783203125, 1.0122936401367189, 1.0128947143554687, 1.0121277465820313, 1.012600830078125, 1.0126612548828124, 1.0123960571289063, 1.012105224609375, 1.0127769775390625, 1.01275341796875, 
1.012274169921875, 1.012279296875, 1.0123386840820312, 1.0121942749023438, 1.0125885620117188, 1.012947998046875, 1.0124779663085937, 1.0123930053710937, 1.0128373413085938, 1.0122721557617187, 1.01233251953125, 1.0123386840820312, 1.0127564697265625, 2.110841796875, 1.0128209838867188, 1.0126233520507812, 1.012595703125, 1.0131865844726562, 1.01278515625, 1.0128342895507811, 1.0131220703125, 1.0128527221679688, 1.012337646484375, 1.0124810180664063, 1.0129080200195313, 1.012747314453125, 1.0124318237304688, 1.0134036254882812, 1.0129408569335938, 1.0123612060546876, 1.0120929565429688, 1.0134886474609375, 1.0132265014648438, 1.0129080200195313, 1.0132152099609375, 1.0127984619140624, 1.0145208129882812, 1.013728271484375, 1.0132162475585937, 1.0127083740234375, 1.0127789916992187, 1.012833251953125, 1.0120120239257813, 1.0122465209960938, 1.0125762329101562, 1.0127605590820312, 1.0129448852539062, 1.01275341796875, 1.0117826538085937, 1.0119331665039062, 1.0120591430664063, 1.0131445922851563, 1.0131220703125, 1.0128814086914062, 1.0128875732421876, 1.0127513427734376, 1.013012451171875, 1.0131896362304686, 1.0130308837890625, 1.0130473022460937, 1.0133401489257812, 1.0132930297851563, 1.0126981201171874, 1.0128568115234375, 1.012906982421875, 1.0124503173828125, 1.0124984130859376, 1.0132520751953125, 1.0128076782226563, 1.0122936401367189, 1.0126827392578126, 1.0122035522460937, 1.0120509033203124, 1.0123540649414062, 1.0125946655273437, 1.0123171997070313, 2.112021484375, 1.0123406982421874, 1.0120253295898438, 1.0121513061523437, 1.0124144897460938, 1.012274169921875, 1.012401123046875, 1.0119710693359374, 1.012263916015625, 1.0128281860351562, 1.0122537231445312, 1.0122373046875, 1.0121615600585938, 1.0122373046875, 1.0120325317382812, 1.0120929565429688, 1.01382861328125, 1.0119669799804687, 1.0119239501953126, 1.0123489379882813, 1.0119403686523438, 1.0117539672851563, 1.0119976806640625, 1.0121533203125, 1.0122137451171875, 1.0119065551757813, 1.0129019165039062, 1.0124779052734374, 1.0123939819335936, 1.0126571655273438, 1.0125834350585938, 1.0119721069335939, 1.0120038452148437, 1.012474853515625, 1.0120499267578125, 1.0122383422851562, 1.0124830932617188, 1.0124758911132812, 1.0127083740234375, 1.012632568359375, 1.0122034912109374, 1.0120222778320314, 1.012178955078125, 1.0122977294921875, 1.0122034912109374, 1.01212158203125, 1.0132428588867188, 1.0123274536132814, 1.0125435180664062, 1.0123755493164062, 1.0122383422851562, 1.01226904296875, 1.0125250854492187, 1.0119833374023437, 1.0120601806640626, 1.0121134033203125, 1.012974609375, 1.0124656372070313, 1.01243798828125, 1.012580322265625, 1.0127124633789062, 1.0127247314453125, 1.0126704711914063, 2.11262353515625, 1.0118778686523437, 1.0124564208984375, 1.0118143920898437, 1.0125772705078124, 1.0121441040039063, 1.0118225708007813, 1.0119639892578125, 1.0122833251953125, 1.011894287109375, 1.0119280395507813, 1.0119198608398436, 1.0127401123046875, 1.01235205078125, 1.012570068359375, 1.011726318359375, 1.0117744750976563, 1.013375, 1.0122465209960938, 1.01218505859375, 1.0120325317382812, 1.011989501953125, 1.0127339477539063, 1.0121031494140624, 1.0123878173828125, 1.0119874267578124, 1.011962890625, 1.0119680786132812, 1.0123140258789063, 1.0120407104492188, 1.012220947265625, 1.0121410522460939, 1.0122772216796876, 1.011979248046875, 1.0125343017578126, 1.01197412109375, 1.01228955078125, 1.0118973388671875, 1.0122465209960938, 1.0119423828125, 1.012041748046875, 1.0123131103515626, 1.0124677124023438, 
1.0119342041015624, 1.012621337890625, 1.012105224609375, 1.0122291259765626, 1.0119669799804687, 1.0125332641601563, 1.0122034912109374, 1.0122045288085937, 1.0123519897460938, 1.012875244140625, 1.0126397705078125, 1.0126233520507812, 1.0123182373046875, 1.0127708129882813, 1.0124646606445313, 1.0125987548828126, 1.0120775756835938, 1.012442138671875, 1.012552734375, 1.012252685546875, 2.11210546875, 1.011620849609375, 1.0120519409179687, 1.0119721069335939, 1.0122485961914063, 1.0120714111328124, 1.0120621948242188, 1.0127001342773438, 1.0125178833007813, 1.012548583984375, 1.01243701171875, 1.0118604736328125, 1.0117130126953124, 1.0120304565429687, 1.0122587890625, 1.01167822265625, 1.0118010864257811, 1.0119813232421875, 1.011894287109375, 1.0122905883789062, 1.01184716796875, 1.0117959594726562, 1.0116761474609375, 1.0118410034179688, 1.0117744750976563, 1.0119515991210937, 1.0144215087890625, 1.0122987670898438, 1.012021240234375, 1.0126315307617189, 1.0124862060546875, 1.0122158203125, 1.0121727905273437, 1.0125875244140625, 1.0124472045898438, 1.0122403564453124, 1.0122117309570313, 1.0127349853515626, 1.0121431274414063, 1.012495361328125, 1.012738037109375, 1.0124758911132812, 1.0128773193359375, 1.0123079833984374, 1.0123099975585939, 1.0121298217773438, 1.0122557373046874, 1.0126878662109375, 1.0124810180664063, 1.013000244140625, 1.012516845703125, 1.0124298095703126, 1.012358154296875, 1.0124656372070313, 1.0123171997070313, 1.01239501953125, 1.01268994140625, 1.0127656860351562, 1.01236328125, 1.0124308471679688, 1.012527099609375, 1.0123038940429687, 1.0129132080078125]",tokens/s,0.972675384993313,,,,,,,, -4bit-awq-gemm-eager,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,mistralai/Mistral-7B-v0.1,mistralai/Mistral-7B-v0.1,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.0,,0.30.1,,,,1.19.2,,,,0.11.1,,,,,,,,,,,,,,,,,,,,,,,,,,,MB,2173.579264,6201.802752,0.0,5555.355648,5274.11712,s,10,6.188704040527344,0.6188704040527343,0.001630006512575581,0.6182249450683593,0.6205656127929687,0.6214197998046874,0.6221031494140624,"[0.6199492797851562, 0.6180035400390625, 0.6172139282226563, 0.6179977416992187, 0.618110595703125, 0.6165477905273438, 0.6183392944335937, 0.6222739868164062, 0.61989208984375, 0.6203757934570312]",tokens/s,413.65687924896486,kWh,7.2883394029405375e-06,3.9920504876135965e-06,3.5213917060001095e-05,4.649430695055522e-05,tokens/kWh,5506050.456290174,MB,2173.579264,6201.802752,0.0,5555.355648,5324.909056,s,10,366.72262109375004,36.672262109375005,0.03669260702690663,36.653501953125,36.74228671875,36.744877734375,36.746950546875006,"[36.7417109375, 36.74746875, 36.65041015625, 36.64756640625, 36.6498203125, 36.6483046875, 36.65363671875, 36.6533671875, 36.662453125, 36.6678828125]",tokens/s,1.7179196585174519,kWh,0.0004329811916251978,0.000237312961880707,0.002063029955978196,0.0027333241094841007,tokens/kWh,23048.858267997675,,s,629,371.7092334594727,0.5909526764061569,0.0735102746996128,0.581843994140625,0.5835925537109375,0.584360986328125,1.1999008935546875,"[0.5815029907226562, 
0.58220849609375, 0.58231396484375, 0.5818121948242188, 0.581771240234375, 0.581728271484375, 0.581939208984375, 0.5820538940429687, 0.5821102294921875, 0.581634033203125, 0.5816616821289062, 0.5819156494140625, 0.581291015625, 0.5822781372070313, 0.5820303344726563, 0.5819361572265624, 0.581734375, 0.5818613891601563, 0.5819801635742188, 0.582466552734375, 0.5818736572265625, 0.581818359375, 0.5841039428710938, 0.58378955078125, 0.584427490234375, 0.58435791015625, 0.5840650024414062, 0.5840220336914063, 0.584052734375, 0.584026123046875, 0.5835888671875, 0.5834075927734375, 0.583383056640625, 0.5840967407226563, 0.5844940795898438, 0.5843302612304687, 0.5861007080078126, 0.5830584106445312, 0.5844940795898438, 0.5844203491210938, 0.5834240112304687, 0.58357861328125, 0.5838602294921875, 0.5845339965820312, 0.5830901489257813, 0.5837598876953125, 0.5830502319335937, 0.58431591796875, 0.5842994995117188, 0.5837005004882813, 0.5831137084960938, 0.5829529418945313, 0.5845821533203125, 0.5848340454101563, 0.5842728881835938, 0.5823283081054688, 0.5856133422851563, 0.5826621704101562, 0.5840762939453125, 0.5840148315429687, 0.5838244018554688, 0.583173095703125, 1.2010567626953126, 0.5827809448242187, 0.58383154296875, 0.58437939453125, 0.5838694458007813, 0.58402099609375, 0.5823805541992187, 0.5836072998046875, 0.5824061279296875, 0.5832857666015625, 0.5830154418945312, 0.5828423461914063, 0.5844193115234375, 0.583710693359375, 0.5833215942382812, 0.5818582763671875, 0.5821634521484375, 0.5816187133789062, 0.5839032592773438, 0.5848248291015625, 0.5831044921875, 0.5835745239257812, 0.5840752563476562, 0.58448486328125, 0.5853388671875, 0.5839749145507812, 0.5822197875976562, 0.5817006225585938, 0.581855224609375, 0.5835755615234375, 0.5830338745117187, 0.5852354736328125, 0.584363037109375, 0.5837445068359375, 0.58349365234375, 0.58421142578125, 0.585006103515625, 0.5831065673828125, 0.5816985473632813, 0.5817743530273437, 0.5817098388671875, 0.5845473022460937, 0.5823252563476562, 0.5827501831054688, 0.5815951538085937, 0.5826806030273437, 0.5863434448242187, 0.5844859008789063, 0.5844592895507813, 0.584052734375, 0.5841151733398438, 0.5830891723632813, 0.5820047607421875, 0.5847992553710938, 0.5855047607421875, 0.582703125, 0.5821572875976563, 0.5824225463867188, 0.5825310668945313, 0.58260888671875, 0.5822177124023438, 0.5819852905273437, 0.5815715942382812, 1.1993958740234374, 0.5825433349609375, 0.5819463500976563, 0.5820119018554688, 0.581875732421875, 0.5813340454101562, 0.5827901611328125, 0.5819699096679688, 0.581412841796875, 0.5816166381835938, 0.5814906616210938, 0.5816627197265625, 0.5826836547851563, 0.5817589721679688, 0.5819658203125, 0.5817825317382812, 0.5814906616210938, 0.5817098388671875, 0.582255615234375, 0.5815142211914063, 0.5814528198242187, 0.5813032836914063, 0.5811845092773438, 0.5815469970703125, 0.5824461059570313, 0.5814261474609375, 0.58144970703125, 0.5815429077148437, 0.5813892822265625, 0.5822146606445312, 0.5814312744140625, 0.581739501953125, 0.5815429077148437, 0.5813053588867187, 0.5817047119140625, 0.58222900390625, 0.5819566040039063, 0.5814466552734375, 0.5814537963867188, 0.5814927368164062, 0.581306396484375, 0.5819422607421875, 0.581738525390625, 0.5812838134765625, 0.5814528198242187, 0.5814886474609375, 0.5819361572265624, 0.5819166870117187, 0.5818040161132813, 0.5816985473632813, 0.5815736083984375, 0.5821388549804688, 0.5815367431640625, 0.582192138671875, 0.5816207275390625, 0.5815726318359375, 0.5814241333007812, 0.5814957885742188, 
0.582708251953125, 0.581718017578125, 0.58149169921875, 0.5818613891601563, 0.5822637939453125, 1.2004178466796875, 0.5815613403320312, 0.5825177612304687, 0.5818429565429688, 0.5822146606445312, 0.5813770141601563, 0.5820886840820313, 0.5818121948242188, 0.5816586303710938, 0.5821511840820313, 0.58155517578125, 0.582277099609375, 0.5820498046875, 0.5820149536132813, 0.5815562133789063, 0.5816012573242187, 0.58168115234375, 0.5823037719726563, 0.5814323120117187, 0.5812920532226562, 0.5816371459960937, 0.5818091430664063, 0.5820221557617188, 0.5824839477539062, 0.5814835205078125, 0.581549072265625, 0.5811885986328125, 0.5815357666015625, 0.5823446655273438, 0.5814722290039063, 0.5815715942382812, 0.5821900634765625, 0.5817610473632813, 0.5821563110351563, 0.5818327026367187, 0.5814814453125, 0.5814220581054688, 0.5811742553710938, 0.5813903198242187, 0.5816350708007813, 0.581591064453125, 0.5814251708984375, 0.5813258056640624, 0.5812254638671875, 0.5822904052734375, 0.5816094970703125, 0.58172314453125, 0.58190234375, 0.5816903686523438, 0.581370849609375, 0.5813626708984375, 0.5817210693359375, 0.581518310546875, 0.5814149169921875, 0.5811558227539062, 0.581285888671875, 0.5823262939453125, 0.5823775024414063, 0.5814466552734375, 0.5813217163085938, 0.5813084106445312, 0.581475341796875, 0.5812244262695313, 1.2000972900390625, 0.5818818359375, 0.5816494140625, 0.5814087524414062, 0.5819525146484374, 0.5813268432617188, 0.5813104858398438, 0.5814395141601563, 0.5813831787109375, 0.5815377807617188, 0.5815398559570313, 0.5813851928710938, 0.5812265014648438, 0.5813248291015625, 0.581285888671875, 0.5814814453125, 0.58180712890625, 0.58216650390625, 0.5817006225585938, 0.581665771484375, 0.581285888671875, 0.5814927368164062, 0.5819913940429687, 0.5817118530273437, 0.58148046875, 0.5813248291015625, 0.5815654296875, 0.5814271850585937, 0.5818184204101563, 0.581286865234375, 0.5811497192382813, 0.58169140625, 0.5815244750976563, 0.5822689208984375, 0.5820487670898438, 0.5819586791992187, 0.5819259033203125, 0.5818101806640625, 0.582055908203125, 0.5828423461914063, 0.5817907104492187, 0.5819053955078125, 0.5818429565429688, 0.5818910522460937, 0.5818316650390625, 0.5823355102539063, 0.5815357666015625, 0.5818480834960937, 0.5816524658203125, 0.5814456176757813, 0.5823866577148438, 0.5815685424804687, 0.5816985473632813, 0.5815982055664063, 0.5815838623046875, 0.5819381713867188, 0.5823416137695312, 0.5835530395507813, 0.5818255615234375, 0.581444580078125, 0.5817507934570313, 0.58191259765625, 0.5820006103515625, 1.1981956787109376, 0.5812254638671875, 0.58144873046875, 0.5819668579101562, 0.5815818481445313, 0.5812940673828125, 0.5811456298828125, 0.5813626708984375, 0.5811497192382813, 0.58141796875, 0.5813995361328125, 0.5813135375976562, 0.58134423828125, 0.58155517578125, 0.5815787353515625, 0.5828638916015625, 0.5816575927734375, 0.5815111694335937, 0.5813616943359375, 0.5820538940429687, 0.5820927734375, 0.5819678955078125, 0.5817692260742188, 0.5816145629882813, 0.5816954956054687, 0.5814374389648438, 0.5818951416015625, 0.5821614379882812, 0.5817313232421875, 0.5813452758789063, 0.581381103515625, 0.5819310302734375, 0.5822279663085937, 0.5815029907226562, 0.5817210693359375, 0.5818112182617188, 0.5817722778320312, 0.5819627685546875, 0.583836669921875, 0.5821726684570312, 0.5814364013671875, 0.5819780883789063, 0.5817313232421875, 0.5815214233398438, 0.5822955322265625, 0.5814681396484375, 0.5818040161132813, 0.58149169921875, 0.5819668579101562, 0.5824163818359375, 
0.5818613891601563, 0.5814384765625, 0.581412841796875, 0.5816555786132812, 0.58149169921875, 0.5826007080078125, 0.5815326538085938, 0.5814886474609375, 0.5814763793945312, 0.5815326538085938, 0.5814620361328126, 0.5819299926757813, 0.5814906616210938, 1.2004239501953125, 0.5820057373046875, 0.5815797729492187, 0.5823160400390625, 0.5817426147460938, 0.5813114624023438, 0.5819115600585938, 0.5817979125976562, 0.5821522216796875, 0.5816882934570312, 0.5816770629882813, 0.581875732421875, 0.5819259033203125, 0.5819658203125, 0.582761474609375, 0.582223876953125, 0.5820333862304687, 0.5817845458984375, 0.5826467895507812, 0.5818388061523437, 0.5823938598632813, 0.5820436401367187, 0.581734375, 0.5818245239257812, 0.5811896362304687, 0.5817927856445313, 0.5819688720703124, 0.5817190551757813, 0.5813688354492188, 0.5810667724609375, 0.5812234497070312, 0.581843994140625, 0.5815643920898438, 0.58111181640625, 0.5813903198242187, 0.5819085083007812, 0.58183984375, 0.582118408203125, 0.5814773559570312, 0.5814302978515625, 0.5815347290039062, 0.581191650390625, 0.5816688842773438, 0.5815234375, 0.5817190551757813, 0.5818490600585937, 0.5814508056640625, 0.5813779907226563, 0.5813463134765625, 0.5819893798828125, 0.581802978515625, 0.5817333984375, 0.5816944580078125, 0.58290380859375, 0.582845458984375, 0.5820845947265625, 0.5818582763671875, 0.5815060424804688, 0.5813718872070313, 0.5813084106445312, 0.58233447265625, 0.58221875, 0.582023193359375, 1.2042802734375, 0.5814743041992188, 0.5829151000976562, 0.581739501953125, 0.582096923828125, 0.5818674926757812, 0.5823180541992188, 0.5825535888671876, 0.581813232421875, 0.5818327026367187, 0.5814989013671875, 0.5814200439453125, 0.58134326171875, 0.5823580322265625, 0.5821480712890625, 0.5814957885742188, 0.581401611328125, 0.58191357421875, 0.5817169799804688, 0.5821388549804688, 0.5816801147460937, 0.581734375, 0.58136474609375, 0.5820723266601563, 0.5818541870117188, 0.5822105712890625, 0.5819094848632812, 0.5814200439453125, 0.5814589233398437, 0.5813892822265625, 0.5821788330078125, 0.5815685424804687, 0.5816401977539063, 0.5814948120117187, 0.5815224609375, 0.58134326171875, 0.5816104736328125, 0.5815562133789063, 0.5814251708984375, 0.5814835205078125, 0.5819678955078125, 0.5815869140625, 0.5825003662109375, 0.5818951416015625, 0.5816432495117188, 0.5817241821289062, 0.5819412231445312, 0.5827870483398437, 0.5818347778320313, 0.5818951416015625, 0.5817241821289062, 0.5815101318359375, 0.5815992431640625, 0.5828341674804688, 0.58174462890625, 0.5817559204101562, 0.5814732666015625, 0.5818153076171875, 0.5818726196289062, 0.5815941162109375, 0.5818357543945313, 0.5814814453125, 0.581423095703125, 1.20260302734375, 0.5818193969726563, 0.581865478515625, 0.581950439453125, 0.5817681884765625, 0.5818214111328125, 0.5814395141601563, 0.5819514770507812, 0.5817221069335937, 0.5816934204101563, 0.5817293090820312, 0.581970947265625, 0.5828628540039062, 0.5817088012695313, 0.5820282592773437, 0.5821061401367188, 0.5815398559570313, 0.5825218505859375, 0.5821337890625, 0.5817528076171875, 0.5816156005859375, 0.5817528076171875, 0.5815244750976563, 0.5816012573242187, 0.5817876586914063, 0.5815234375, 0.5816299438476562, 0.5816361083984375, 0.58243994140625, 0.5818275756835938, 0.581982177734375, 0.5820805053710938, 0.5815951538085937, 0.5817456665039062, 0.5822423095703125, 0.5822945556640625, 0.5821808471679687, 0.5818050537109375, 0.58201806640625, 0.5817753295898438, 0.5820897216796875, 0.5823733520507812, 0.5820651245117188, 0.5818265380859375, 
0.581950439453125, 0.5819739990234375, 0.5821327514648438, 0.5821265869140625, 0.5822382202148437, 0.5820354614257812, 0.5818521728515625, 0.582096923828125, 0.5824255981445312, 0.5819852905273437, 0.58187060546875, 0.581559326171875, 0.5813483276367187, 0.5814466552734375, 0.5824675903320312, 0.5822116088867187, 0.582044677734375, 0.5817651977539062, 0.5824183959960938, 1.2028917236328125, 0.5816196899414062, 0.5820651245117188, 0.58216650390625, 0.5816453247070312, 0.5824542846679688, 0.58161767578125, 0.5813831787109375, 0.5814927368164062, 0.5813555297851563, 0.5818265380859375, 0.5822269287109375, 0.5816893310546875, 0.5816524658203125, 0.5816483764648438, 0.582150146484375, 0.5817559204101562, 0.5817753295898438, 0.5819832153320312, 0.58170263671875, 0.5822689208984375, 0.58190234375, 0.5830420532226562, 0.5826693115234375, 0.5818050537109375, 0.5818674926757812, 0.5821439819335937, 0.5822811889648437, 0.582197265625, 0.5828679809570313, 0.5820733642578125, 0.5823170776367188, 0.582361083984375, 0.5818163452148437, 0.5824215087890625, 0.5816258544921875, 0.5819381713867188, 0.5818787841796875, 0.5817630615234375, 0.5824501953125, 0.5823272705078125, 0.5820313720703125, 0.5818060913085937, 0.5822310180664062, 0.58191259765625, 0.5820538940429687, 0.5824081420898437, 0.5817354125976563, 0.5822463989257812, 0.5826129760742188, 0.5819381713867188, 0.5827451171875, 0.58174462890625, 0.5820057373046875, 0.5819617309570313, 0.581875732421875, 0.5822545776367187, 0.5817784423828125, 0.5824000244140625, 0.5813032836914063, 0.5822054443359375, 0.5817518310546875, 0.5821992797851563]",tokens/s,1.6921828767769354,,,,,,,, -4bit-awq-gemm-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,google/gemma-7b,google/gemma-7b,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 304, in hf_raise_for_status - response.raise_for_status() - File ""/usr/local/lib/python3.10/dist-packages/requests/models.py"", line 1024, in raise_for_status - raise HTTPError(http_error_msg, response=self) -requests.exceptions.HTTPError: 403 Client Error: Forbidden for url: https://huggingface.co/google/gemma-7b/resolve/main/config.json - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1722, in _get_metadata_or_catch_error - metadata = get_hf_file_metadata(url=url, 
proxies=proxies, timeout=etag_timeout, headers=headers) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1645, in get_hf_file_metadata - r = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 372, in _request_wrapper - response = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 396, in _request_wrapper - hf_raise_for_status(response) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 367, in hf_raise_for_status - raise HfHubHTTPError(message, response=response) from e -huggingface_hub.utils._errors.HfHubHTTPError: (Request ID: Root=1-66948147-454b740b381979b0360e98be;996817f2-d6f1-477f-813a-0f54ae537c23) - -403 Forbidden: Please enable access to public gated repositories in your fine-grained token settings to view this repository.. -Cannot access content at: https://huggingface.co/google/gemma-7b/resolve/main/config.json. -If you are trying to create or update content,make sure you have a token with the `write` role. - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1221, in hf_hub_download - return _hf_hub_download_to_cache_dir( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1325, in _hf_hub_download_to_cache_dir - _raise_on_head_call_error(head_call_error, force_download, local_files_only) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1826, in _raise_on_head_call_error - raise LocalEntryNotFoundError( -huggingface_hub.utils._errors.LocalEntryNotFoundError: An error happened while trying to locate the file on the Hub and we cannot find the requested files in the local cache. Please check your connection and try again or make sure your Internet connection is on. 
- -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 445, in cached_file - raise EnvironmentError( -OSError: We couldn't connect to 'https://huggingface.co' to load this file, couldn't find it in the cached files and it looks like google/gemma-7b is not the path to a directory containing a file named config.json. -Checkout your internet connection or see how to run the library in offline mode at 'https://huggingface.co/docs/transformers/installation#offline-mode'. 
- -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemm-eager,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,Qwen/Qwen1.5-1.8B,Qwen/Qwen1.5-1.8B,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.40.2,,0.30.1,,,,1.19.2,,,,0.11.1,,,,,,,,,,,,,,,,,,,,,,,,,,,MB,1868.201984,3349.676032,0.0,2703.228928,2578.238464,s,10,1.410495346069336,0.1410495346069336,0.0017689689177857657,0.14056432342529296,0.14309543151855467,0.14402269134521484,0.14476449920654297,"[0.144949951171875, 0.1412518768310547, 0.1397279357910156, 0.13956182861328126, 0.13982972717285155, 0.13892410278320313, 0.13987677001953125, 0.14288937377929686, 0.1413212127685547, 0.14216256713867187]",tokens/s,1814.9652227737006,kWh,1.6439394082552122e-06,9.00797525271931e-07,6.874183740083411e-06,9.418920673610555e-06,tokens/kWh,27179334.96533712,MB,1868.201984,3349.676032,0.0,2703.228928,2667.098624,s,10,83.44500195312501,8.3445001953125,0.03302115511867202,8.3336259765625,8.37593193359375,8.403280419921876,8.425159208984375,"[8.33203515625, 8.335076171875, 8.333888671875, 8.3144501953125, 8.32368359375, 8.3150146484375, 8.3698544921875, 8.43062890625, 8.3570068359375, 8.33336328125]",tokens/s,7.549882979856609,kWh,9.832901556366757e-05,5.38916025818177e-05,0.0003962753054459165,0.0005484959235914018,tokens/kWh,114859.55918777513,,s,629,84.57831109619147,0.13446472352335676,0.016756456539684012,0.13199974060058595,0.1339588592529297,0.13428590698242188,0.27203026611328124,"[0.131915771484375, 0.13208883666992188, 0.13340570068359375, 0.13261517333984374, 0.1320437774658203, 0.13189222717285157, 0.13191372680664062, 0.13180621337890625, 0.13191270446777345, 0.1318174743652344, 0.13188607788085938, 0.13165977478027344, 0.13220761108398438, 0.13279539489746095, 0.132642822265625, 0.1319086151123047, 0.13183590698242187, 0.13183692932128907, 0.13191372680664062, 0.1319710693359375, 0.13185536193847655, 0.13166387939453125, 0.13191888427734375, 0.13172015380859375, 0.1317058563232422, 0.13175704956054687, 0.1318830108642578, 0.13170687866210937, 0.1325506591796875, 0.13195468139648436, 0.13218412780761718, 0.13190444946289062, 0.1321625671386719, 0.13238067626953126, 0.13378150939941405, 0.13233255004882813, 0.13199667358398437, 0.13365965270996094, 0.13226495361328125, 0.13205401611328124, 0.13234483337402345, 0.13211033630371094, 0.13187481689453126, 0.13180210876464843, 0.13335859680175782, 0.13234176635742187, 0.13278311157226563, 0.13272679138183593, 0.1324042205810547, 0.13250457763671875, 0.13250662231445312, 0.13336883544921874, 0.13242880249023437, 0.13245030212402345, 0.13245234680175783, 0.13265408325195313, 0.1332162628173828, 0.13244825744628907, 0.1322782745361328, 0.13222093200683593, 0.13194239807128907, 0.1318461456298828, 0.2743060607910156, 0.13452493286132813, 0.13292338562011718, 0.13269094848632812, 0.13221580505371094, 0.13191372680664062, 0.13204888916015625, 0.1319393310546875, 0.13207244873046875, 0.13193624877929688, 0.1332346954345703, 0.13184307861328126, 0.13165977478027344, 0.13174169921875, 
0.1319331817626953, 0.13257011413574218, 0.13245234680175783, 0.13182669067382813, 0.13220249938964843, 0.1319772186279297, 0.13190451049804688, 0.13176934814453126, 0.13176832580566405, 0.13202943420410157, 0.131842041015625, 0.13187992858886718, 0.13245132446289062, 0.1319014434814453, 0.13174374389648438, 0.13188096618652342, 0.1329971160888672, 0.1325875244140625, 0.1321922607421875, 0.13208473205566407, 0.1319751739501953, 0.13210829162597656, 0.132173828125, 0.1320202178955078, 0.13244108581542968, 0.1323520050048828, 0.13193624877929688, 0.1318707275390625, 0.13215335083007812, 0.1319833526611328, 0.13199871826171874, 0.13258546447753905, 0.1363056640625, 0.13410917663574218, 0.132347900390625, 0.13224557495117187, 0.13346809387207031, 0.1320499267578125, 0.1319823303222656, 0.13196595764160157, 0.13193113708496093, 0.13199974060058595, 0.13258444213867188, 0.13302787780761718, 0.13199562072753906, 0.13191885375976561, 0.13196493530273437, 0.1318461456298828, 0.13196800231933595, 0.27239935302734375, 0.1318348846435547, 0.13182975769042968, 0.13207449340820313, 0.13185638427734375, 0.1317724151611328, 0.13244415283203126, 0.13195263671875, 0.1320099792480469, 0.13171302795410156, 0.13188505554199217, 0.13182054138183594, 0.13208575439453124, 0.1338101806640625, 0.13198439025878905, 0.13201919555664063, 0.13190348815917968, 0.13199871826171874, 0.13165977478027344, 0.13169357299804688, 0.13205708312988282, 0.13223014831542967, 0.13255679321289063, 0.13336166381835937, 0.1323274230957031, 0.1321175079345703, 0.13262745666503906, 0.13243597412109376, 0.13281074523925782, 0.13219737243652344, 0.13270425415039064, 0.1319403839111328, 0.13225782775878905, 0.13328172302246094, 0.13259266662597657, 0.13259056091308594, 0.13216152954101562, 0.1320622100830078, 0.13242469787597655, 0.1341696014404297, 0.13195263671875, 0.13167718505859374, 0.13170278930664062, 0.13178675842285156, 0.13180108642578126, 0.13196902465820312, 0.13176319885253907, 0.1331517791748047, 0.1319188232421875, 0.13173248291015624, 0.13183795166015624, 0.13228440856933593, 0.1320273895263672, 0.13176934814453126, 0.13183897399902345, 0.13179600524902343, 0.13171708679199218, 0.13180108642578126, 0.13352243041992187, 0.1350635528564453, 0.13252915954589845, 0.13388493347167968, 0.1324451904296875, 0.2718658447265625, 0.1323663330078125, 0.1318041534423828, 0.1317375946044922, 0.13224755859375, 0.13172837829589842, 0.13183692932128907, 0.13180825805664062, 0.13196493530273437, 0.13201612854003905, 0.13178060913085937, 0.13186866760253907, 0.13176422119140624, 0.13189222717285157, 0.13185331726074218, 0.13174476623535156, 0.13189631652832032, 0.1319833526611328, 0.13183180236816405, 0.13173965454101563, 0.13178880310058594, 0.13191474914550783, 0.13178163146972657, 0.13190553283691406, 0.132674560546875, 0.13195161437988281, 0.13180313110351563, 0.13180108642578126, 0.13176524353027344, 0.13174578857421876, 0.1317232666015625, 0.13185740661621093, 0.1315359344482422, 0.13187271118164062, 0.13189427185058594, 0.13176217651367186, 0.13183999633789062, 0.131842041015625, 0.13198028564453124, 0.1318461456298828, 0.13185740661621093, 0.13189222717285157, 0.1318656005859375, 0.13177548217773438, 0.13433549499511718, 0.1318778839111328, 0.13178163146972657, 0.1320099792480469, 0.13189529418945312, 0.13173043823242186, 0.13199974060058595, 0.1317908477783203, 0.13167718505859374, 0.1318707580566406, 0.1319085693359375, 0.1320079345703125, 0.1317375946044922, 0.1329776611328125, 0.13234278869628907, 0.13224960327148438, 
0.13266841125488282, 0.13238578796386719, 0.13234994506835937, 0.2727505798339844, 0.13219532775878906, 0.1322977294921875, 0.1321318359375, 0.13251072692871094, 0.13448908996582032, 0.13275852966308593, 0.1344153594970703, 0.13294796752929688, 0.1335828552246094, 0.13194137573242187, 0.1318604736328125, 0.13178469848632812, 0.13185023498535156, 0.13183282470703125, 0.13243084716796874, 0.13204582214355468, 0.13287628173828125, 0.13193522644042968, 0.13247999572753907, 0.13198028564453124, 0.13178880310058594, 0.1318225860595703, 0.13189529418945312, 0.1317969970703125, 0.1317898254394531, 0.13174783325195313, 0.13180825805664062, 0.1317611541748047, 0.13181951904296876, 0.13173043823242186, 0.1318901824951172, 0.1316822967529297, 0.1317908477783203, 0.13169664001464843, 0.1318656005859375, 0.1333289031982422, 0.1319536590576172, 0.13187890625, 0.13186457824707032, 0.13181951904296876, 0.1317969970703125, 0.1317580871582031, 0.13171200561523438, 0.13176934814453126, 0.13164851379394532, 0.13176524353027344, 0.13194239807128907, 0.13188812255859375, 0.13191270446777345, 0.1322117156982422, 0.13192909240722656, 0.13183999633789062, 0.1317693786621094, 0.13185020446777343, 0.13187583923339843, 0.1317611541748047, 0.13403237915039062, 0.13205503845214844, 0.13186151123046874, 0.13183795166015624, 0.13185536193847655, 0.13172531127929688, 0.2711296081542969, 0.13194137573242187, 0.131852294921875, 0.13176217651367186, 0.13171609497070313, 0.13177548217773438, 0.13163827514648438, 0.1318164520263672, 0.1318707275390625, 0.13187277221679689, 0.13203762817382814, 0.13187174987792968, 0.13186968994140624, 0.13186866760253907, 0.13186972045898437, 0.13196592712402344, 0.1320099792480469, 0.1333217315673828, 0.13214207458496094, 0.13328793334960937, 0.13255679321289063, 0.13287628173828125, 0.1319505920410156, 0.13182566833496093, 0.13165158081054687, 0.13177650451660156, 0.13287423706054688, 0.13261311340332033, 0.13195878601074218, 0.1316505584716797, 0.1316495361328125, 0.13174989318847657, 0.13171302795410156, 0.13187686157226564, 0.13190553283691406, 0.13175296020507812, 0.13174989318847657, 0.13191474914550783, 0.13393820190429687, 0.13187989807128905, 0.13174681091308593, 0.1316864013671875, 0.13167205810546875, 0.13172019958496095, 0.13174783325195313, 0.1315952606201172, 0.13168435668945314, 0.13165362548828125, 0.13327769470214842, 0.13195161437988281, 0.1319024963378906, 0.1318594207763672, 0.13180313110351563, 0.13170381164550782, 0.13171507263183593, 0.13190451049804688, 0.13232640075683594, 0.13192294311523436, 0.13172940063476563, 0.1317406768798828, 0.13174578857421876, 0.13170381164550782, 0.13169561767578125, 0.2720942077636719, 0.1323653106689453, 0.1319772186279297, 0.13174887084960937, 0.1318461456298828, 0.13169049072265626, 0.13180825805664062, 0.13170994567871094, 0.13199974060058595, 0.13190348815917968, 0.13172940063476563, 0.13172735595703125, 0.13171612548828124, 0.13159523010253907, 0.13188914489746092, 0.13179904174804688, 0.13198130798339844, 0.13195578002929687, 0.13164845275878906, 0.13183590698242187, 0.13194752502441406, 0.13176524353027344, 0.13180621337890625, 0.13325619506835937, 0.1319086456298828, 0.13182666015625, 0.13163929748535155, 0.13185125732421876, 0.1320396728515625, 0.13438668823242186, 0.13385317993164061, 0.13466111755371094, 0.13423411560058593, 0.133897216796875, 0.1338357696533203, 0.1337620849609375, 0.13375177001953126, 0.1338419189453125, 0.1338470458984375, 0.13377127075195314, 0.13373440551757812, 0.1337507781982422, 0.13362892150878905, 
0.13362687683105468, 0.133718017578125, 0.1337139129638672, 0.13433139038085937, 0.1335930938720703, 0.13229158020019532, 0.13321932983398438, 0.1331998748779297, 0.1333289031982422, 0.13277183532714842, 0.13196185302734376, 0.13300531005859376, 0.13356031799316406, 0.13304013061523437, 0.13298892211914062, 0.1336432647705078, 0.13445120239257813, 0.13455258178710938, 0.1344040985107422, 0.13423922729492188, 0.27732583618164064, 0.1339084777832031, 0.13382144165039062, 0.1346867218017578, 0.13416653442382812, 0.13392076110839843, 0.13295001220703126, 0.13354495239257813, 0.13410508728027343, 0.1338173370361328, 0.13408869934082032, 0.1340200958251953, 0.1339043884277344, 0.13345074462890624, 0.1340712890625, 0.133653564453125, 0.1348218231201172, 0.13366886901855468, 0.13357466125488282, 0.1335029754638672, 0.1333729248046875, 0.13342617797851564, 0.1340518341064453, 0.1341563262939453, 0.13418496704101562, 0.13429244995117187, 0.13391769409179688, 0.13419314575195312, 0.13331149291992186, 0.13398121643066407, 0.1340333709716797, 0.13403135681152345, 0.13413682556152343, 0.13416447448730467, 0.1341071319580078, 0.13356748962402343, 0.13255885314941407, 0.1329449005126953, 0.13408767700195312, 0.13382655334472657, 0.13446556091308592, 0.134451171875, 0.1342740478515625, 0.13432524108886718, 0.13397196960449217, 0.133928955078125, 0.13388800048828126, 0.13408154296875, 0.1339463653564453, 0.13414707946777343, 0.1341204528808594, 0.1346938934326172, 0.13456895446777345, 0.13381427001953125, 0.13382553100585937, 0.13402316284179688, 0.13395558166503907, 0.13448602294921874, 0.13234994506835937, 0.13215437316894532, 0.13213081359863282, 0.13221376037597657, 0.13244825744628907, 0.27383807373046876, 0.13208677673339844, 0.1318748779296875, 0.13186143493652344, 0.1319342041015625, 0.1316986846923828, 0.13210009765625, 0.13194956970214844, 0.1318338623046875, 0.13179904174804688, 0.13189427185058594, 0.1324390411376953, 0.1331261444091797, 0.13414399719238282, 0.13440205383300782, 0.13397196960449217, 0.13388394165039064, 0.13371900939941406, 0.13379379272460937, 0.1336944580078125, 0.13435395812988282, 0.133012451171875, 0.1330391082763672, 0.13337496948242186, 0.13211033630371094, 0.1332316131591797, 0.1331374053955078, 0.1329827880859375, 0.13283839416503906, 0.13333401489257812, 0.1330401611328125, 0.1329510040283203, 0.13266636657714845, 0.132642822265625, 0.1325813751220703, 0.1328547821044922, 0.13313536071777343, 0.13244313049316406, 0.13259365844726562, 0.13253018188476562, 0.1329459228515625, 0.13234585571289062, 0.13299200439453124, 0.13250764465332032, 0.13244210815429688, 0.13175296020507812, 0.1317580871582031, 0.1318338623046875, 0.1318666229248047, 0.1317580871582031, 0.13186358642578125, 0.13192803955078125, 0.13181849670410156, 0.13414501953125, 0.13427609252929687, 0.13285580444335937, 0.13174476623535156, 0.1318656005859375, 0.13187277221679689, 0.13214002990722656, 0.13324185180664064, 0.13186151123046874, 0.13166592407226563, 0.27445761108398437, 0.13168025207519532, 0.1318492126464844, 0.13196595764160157, 0.13203353881835939, 0.13289573669433594, 0.13374771118164064, 0.13365350341796875, 0.13377433776855469, 0.13357772827148437, 0.13372621154785155, 0.1335900115966797, 0.1336494140625, 0.13360946655273437, 0.1339095001220703, 0.13365863037109374, 0.13385317993164061, 0.1333053436279297, 0.13203660583496094, 0.13213388061523437, 0.13183897399902345, 0.132063232421875, 0.13191372680664062, 0.13187992858886718, 0.13165875244140626, 0.1317560272216797, 0.13234994506835937, 
0.13201510620117188, 0.13194342041015625, 0.13241856384277345, 0.13182566833496093, 0.1321871337890625, 0.13201715087890625, 0.13212979125976562, 0.13185331726074218, 0.13186764526367187, 0.1317969970703125, 0.13172940063476563, 0.13185433959960938, 0.1317959747314453, 0.13192909240722656, 0.1318144073486328, 0.1319772186279297, 0.13174783325195313, 0.13176422119140624, 0.13171916198730468, 0.13182464599609375, 0.1319086151123047, 0.13198745727539063, 0.13206431579589845, 0.13207244873046875, 0.1318757781982422, 0.13179391479492186, 0.13187379455566406, 0.13178060913085937, 0.1318113250732422, 0.13175196838378905, 0.13170889282226564, 0.13177754211425782, 0.13175091552734375, 0.1317611541748047, 0.13186968994140624, 0.1329213409423828]",tokens/s,7.43689477654188,,,,,,,, -4bit-awq-gemm-eager,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,meta-llama/Meta-Llama-3-8B,meta-llama/Meta-Llama-3-8B,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.0,,0.30.1,,,,1.19.2,,,,0.11.1,,,,,,,,,,,,,,,,,,,,,,,,,,,MB,1612.562432,7598.505984,0.0,6952.05888,6314.304512,s,10,6.256621520996093,0.6256621520996093,0.00177274849590911,0.6261759033203125,0.6270674194335938,0.6281453430175781,0.6290076818847656,"[0.6292232666015625, 0.6262135009765625, 0.6265690307617188, 0.6241492919921875, 0.6230667114257813, 0.6232349853515625, 0.624854736328125, 0.6261383056640625, 0.6263438110351562, 0.626827880859375]",tokens/s,409.16651125677043,kWh,7.363675038019816e-06,4.0350039173812114e-06,3.4213050246233903e-05,4.561172920163493e-05,tokens/kWh,5612591.420691497,MB,1612.562432,7598.505984,0.0,6952.05888,6464.047616,s,10,369.81373437499997,36.981373437500004,0.03993526863753842,36.962308593749995,37.0309203125,37.048473828125,37.062516640625,"[37.02584375, 37.06602734375, 37.02701953125, 36.95202734375, 36.96666796875, 36.9451328125, 36.9648984375, 36.9521875, 36.95971875, 36.9542109375]",tokens/s,1.7035603100699468,kWh,0.0004362355767852731,0.00023909502275260607,0.0019567083202673705,0.0026320389198052495,tokens/kWh,23935.816270019863,,s,629,374.855053039551,0.5959539793951523,0.07432782764438475,0.5866659545898437,0.589058251953125,0.5894471923828125,1.2107634912109375,"[0.586392578125, 0.5868226318359375, 0.5859829711914063, 0.5863291015625, 0.5865410766601562, 0.5872220458984375, 0.586271728515625, 0.5857536010742187, 0.5860116577148438, 0.585923583984375, 0.5870950317382813, 0.5866148071289062, 0.5868349609375, 0.5866342163085938, 0.5861621704101563, 0.586134521484375, 0.5858928833007813, 0.58690869140625, 0.586186767578125, 0.5873684692382812, 0.5866659545898437, 0.5863311157226563, 0.588506103515625, 0.5890938720703125, 0.5862461547851563, 0.5873582153320313, 0.5887150268554687, 0.5900718383789062, 0.5890580444335938, 0.589464599609375, 0.5892474975585937, 0.5888132934570313, 0.5892208862304688, 0.5892362060546875, 0.58881640625, 0.587177978515625, 0.5861478271484375, 0.5872230224609375, 0.5890713500976562, 0.5897615356445313, 0.5891819458007812, 0.5891215209960937, 0.588368896484375, 0.5868963623046874, 0.5877462768554688, 0.587673583984375, 
0.5883515014648437, 0.5891358642578125, 0.5892372436523438, 0.5901947021484375, 0.5882511596679687, 0.5863966674804687, 0.5867079467773437, 0.5862789306640624, 0.5875824584960937, 0.589075439453125, 0.5904906005859375, 0.5885787963867187, 0.5882501220703125, 0.5885009765625, 0.5887518920898438, 0.5892843627929687, 1.21577880859375, 0.5866813354492187, 0.58638232421875, 0.5860689697265625, 0.586250244140625, 0.5862062377929688, 0.5863833618164063, 0.5862236328125, 0.5872701416015625, 0.5891921997070313, 0.5893621826171875, 0.5892301025390625, 0.5891163940429688, 0.589286376953125, 0.58905908203125, 0.5892198486328125, 0.5870059814453125, 0.58844775390625, 0.5890242309570313, 0.5887774658203125, 0.5890150146484375, 0.5887242431640625, 0.5887866821289063, 0.5895270385742187, 0.5885030517578125, 0.5896734619140624, 0.5887754516601562, 0.5896263427734375, 0.5871104125976563, 0.5881671752929688, 0.5891235961914062, 0.5891962890625, 0.5888031005859375, 0.5896325073242188, 0.5887764282226563, 0.5869895629882812, 0.5890662231445313, 0.5892987060546875, 0.5882552490234375, 0.5869424438476563, 0.588790771484375, 0.5888245849609375, 0.5896591186523438, 0.5890518798828125, 0.589486083984375, 0.5889976196289063, 0.5883146362304688, 0.58652978515625, 0.5864765625, 0.5863751831054688, 0.586809326171875, 0.5878477172851563, 0.5889166870117187, 0.589169677734375, 0.5894891357421875, 0.5896304931640625, 0.5890713500976562, 0.5871114501953125, 0.5880750122070313, 0.5888890991210938, 0.588980224609375, 0.588822509765625, 0.588010498046875, 1.2160072021484376, 0.5894573974609375, 0.5897778930664063, 0.5893355712890626, 0.589739013671875, 0.5899202270507813, 0.5896151123046875, 0.58777294921875, 0.5865891723632812, 0.5862645874023438, 0.5884047241210938, 0.5887549438476563, 0.5893632202148438, 0.587030517578125, 0.589475830078125, 0.5881303100585937, 0.588000244140625, 0.5885706176757812, 0.5890109252929687, 0.5895772094726562, 0.5876244506835937, 0.5894738159179688, 0.58935400390625, 0.5891471557617187, 0.589496337890625, 0.5882972412109375, 0.5862020874023437, 0.5883658447265625, 0.58893310546875, 0.589431884765625, 0.5892074584960938, 0.5897041625976562, 0.5888942260742187, 0.5887662353515625, 0.589075439453125, 0.5871063232421875, 0.5874237670898438, 0.586166259765625, 0.5869127807617187, 0.5871524047851563, 0.5860792236328125, 0.5860894775390625, 0.5864212646484375, 0.5861089477539062, 0.5861775512695313, 0.586134521484375, 0.586650634765625, 0.586608642578125, 0.5868236694335938, 0.5863117065429687, 0.5860730590820312, 0.5861652221679687, 0.5863720703125, 0.587325439453125, 0.5869486083984375, 0.5867796630859375, 0.586829833984375, 0.5859512329101563, 0.586598388671875, 0.5858897705078125, 0.5857484741210938, 0.5861038208007813, 0.5859328002929688, 1.21084521484375, 0.5868482666015625, 0.58640283203125, 0.5865072631835937, 0.58753125, 0.5866895141601562, 0.5865697021484375, 0.5866485595703125, 0.5865379638671875, 0.5872005004882812, 0.5859409790039063, 0.5867479248046875, 0.5860208740234375, 0.5864622192382812, 0.5861642456054688, 0.5862113037109375, 0.5866629028320313, 0.5866127319335938, 0.5868892211914063, 0.5860433959960938, 0.5860228881835937, 0.585712646484375, 0.5860034790039063, 0.5866076049804687, 0.5860700073242188, 0.5865840454101563, 0.5865113525390625, 0.5863505859375, 0.5864140625, 0.5859491577148438, 0.586281982421875, 0.5866659545898437, 0.5862092895507812, 0.5860966186523437, 0.586608642578125, 0.5861068725585937, 0.5864765625, 0.585987060546875, 0.5858989868164063, 0.5861160888671875, 
0.587757568359375, 0.586156005859375, 0.5871318969726562, 0.5865809936523437, 0.5864468383789062, 0.5861632080078125, 0.5858989868164063, 0.586124267578125, 0.5864806518554687, 0.5860403442382812, 0.5868748779296875, 0.586625, 0.5871697998046875, 0.586534912109375, 0.5867571411132813, 0.5872445678710938, 0.5864386596679687, 0.5876326293945312, 0.588031982421875, 0.5878978271484375, 0.5873828735351563, 0.5863617553710937, 0.5862686767578125, 1.20973828125, 0.5866116943359375, 0.5870663452148438, 0.5871646728515625, 0.586650634765625, 0.5869865112304687, 0.586355712890625, 0.5865973510742187, 0.5867642822265625, 0.5863895263671876, 0.586640380859375, 0.5864683227539063, 0.5873387451171875, 0.587441162109375, 0.5863936157226562, 0.5867008056640625, 0.5865328369140625, 0.5866751708984375, 0.5866547241210938, 0.5867018432617187, 0.5871431884765625, 0.5867120361328125, 0.5885173950195313, 0.58661376953125, 0.5860853881835938, 0.5864765625, 0.5864909057617187, 0.587452392578125, 0.587109375, 0.5869700927734375, 0.5880678100585938, 0.5873899536132813, 0.5867468872070313, 0.5864356079101563, 0.5865267333984375, 0.586555419921875, 0.5870673828125, 0.5869865112304687, 0.5871329345703125, 0.5876347045898438, 0.5866997680664062, 0.586439697265625, 0.5864110107421875, 0.5860280151367188, 0.586935302734375, 0.5874688110351562, 0.5872630004882813, 0.5882040405273438, 0.5861519165039063, 0.5861754760742187, 0.5864959716796875, 0.5864253540039063, 0.5865482177734375, 0.586439697265625, 0.58705615234375, 0.586955810546875, 0.5866649169921875, 0.586872802734375, 0.5867396850585938, 0.5856685791015624, 0.585691162109375, 0.5862932739257812, 0.5861509399414062, 1.2105533447265624, 0.5868963623046874, 0.5862307739257813, 0.5858088989257813, 0.5863147583007813, 0.5863117065429687, 0.5865164794921875, 0.5869424438476563, 0.5864171752929688, 0.5864099731445312, 0.5860372314453125, 0.5859174194335938, 0.5860126953125, 0.5862420654296875, 0.5864284057617187, 0.5860905151367187, 0.5861795654296875, 0.5876797485351563, 0.5864632568359375, 0.5860034790039063, 0.5856358642578126, 0.5858897705078125, 0.5856849975585937, 0.5860556640625, 0.5864069213867188, 0.5858795776367187, 0.5864683227539063, 0.5857034301757813, 0.586197998046875, 0.5860546264648437, 0.5868114013671875, 0.5869946899414062, 0.586625, 0.5865584716796876, 0.5874636840820312, 0.5859491577148438, 0.586603515625, 0.5859368896484375, 0.5866004638671874, 0.585970703125, 0.5859840087890625, 0.5862932739257812, 0.5863259887695312, 0.58724658203125, 0.586335205078125, 0.5863598022460937, 0.5860515747070313, 0.5862676391601562, 0.586872802734375, 0.5865533447265625, 0.5867427978515625, 0.5866065673828125, 0.5869660034179688, 0.5869803466796875, 0.5866997680664062, 0.5868963623046874, 0.5864785766601562, 0.58616015625, 0.5860546264648437, 0.5872752685546875, 0.5869076538085938, 0.5865799560546875, 0.586840087890625, 1.211283447265625, 0.5869813842773437, 0.5866997680664062, 0.5866414184570312, 0.587345947265625, 0.5873807373046875, 0.5871063232421875, 0.5870315551757812, 0.5875353393554688, 0.58697216796875, 0.5863075561523438, 0.586588134765625, 0.5869383544921875, 0.587821044921875, 0.5868165283203125, 0.5868810424804688, 0.5863833618164063, 0.5862512817382812, 0.5872967529296875, 0.5871339721679687, 0.5874821166992188, 0.5877125244140625, 0.5862809448242188, 0.5868349609375, 0.58678271484375, 0.5863117065429687, 0.5862789306640624, 0.586708984375, 0.5875824584960937, 0.5870919799804688, 0.5866690673828125, 0.586181640625, 0.5861775512695313, 0.5859921875, 
0.5867694091796875, 0.58703564453125, 0.5866270751953125, 0.586419189453125, 0.5869496459960938, 0.5864642333984375, 0.5868062744140625, 0.5862686767578125, 0.5867919311523437, 0.5862195434570312, 0.58638232421875, 0.5867694091796875, 0.5868318481445313, 0.586829833984375, 0.5861826782226562, 0.586302490234375, 0.5865001220703125, 0.5866393432617187, 0.5861959838867188, 0.5873704833984374, 0.5870489501953124, 0.5865164794921875, 0.5868236694335938, 0.5866116943359375, 0.58705712890625, 0.5863731079101563, 0.58650830078125, 0.5867161865234375, 0.5862952880859374, 1.2128675537109375, 0.586335205078125, 0.5866659545898437, 0.5865216064453125, 0.586155029296875, 0.5867110595703126, 0.587025390625, 0.5866659545898437, 0.5868226318359375, 0.5863649291992188, 0.5859788818359375, 0.5858211669921874, 0.5860321044921875, 0.5860249633789063, 0.5860086059570313, 0.5864939575195313, 0.586608642578125, 0.586387451171875, 0.586119140625, 0.5861437377929688, 0.5861488647460937, 0.586313720703125, 0.5860751342773437, 0.5867427978515625, 0.5866076049804687, 0.5862164306640625, 0.5868421020507812, 0.5864365844726562, 0.5864765625, 0.5864837036132813, 0.5860372314453125, 0.586925048828125, 0.5860485229492187, 0.58665673828125, 0.5867642822265625, 0.5860065307617187, 0.5863987426757813, 0.586060791015625, 0.586392578125, 0.5863106689453125, 0.5877749633789062, 0.58640283203125, 0.5868861694335937, 0.5868216552734375, 0.5870684204101563, 0.586323974609375, 0.5864427490234375, 0.5864703979492187, 0.5862952880859374, 0.586693603515625, 0.5876336669921876, 0.5873316040039063, 0.5868585205078125, 0.58693017578125, 0.5867694091796875, 0.5865830688476562, 0.586429443359375, 0.5870018310546875, 0.5871503295898437, 0.5864939575195313, 0.5877985229492187, 0.586144775390625, 0.5863424072265625, 1.2129935302734376, 0.5862727661132813, 0.5863915405273438, 0.5866475219726562, 0.5876971435546875, 0.5864058837890626, 0.5863065795898438, 0.5865758666992188, 0.5865635986328125, 0.5862164306640625, 0.5869281005859375, 0.586840087890625, 0.5862543334960938, 0.5875742797851562, 0.5865205688476562, 0.5862706909179688, 0.5863649291992188, 0.5861734619140625, 0.5863147583007813, 0.5860198364257813, 0.58655126953125, 0.5865543823242187, 0.58614990234375, 0.5865379638671875, 0.5857679443359375, 0.5859266357421875, 0.58583447265625, 0.5874698486328125, 0.5878701782226563, 0.5867161865234375, 0.5876551513671875, 0.5866875, 0.587071533203125, 0.5868062133789063, 0.586630126953125, 0.5871063232421875, 0.5867059326171875, 0.5871452026367188, 0.5867632446289063, 0.587125732421875, 0.5870151977539062, 0.5868308715820313, 0.5868052368164063, 0.5859778442382813, 0.5877913818359375, 0.5867008056640625, 0.5866905517578125, 0.58863818359375, 0.5863485717773438, 0.5870551147460937, 0.5870684204101563, 0.586407958984375, 0.58616015625, 0.5859154052734376, 0.5867008056640625, 0.5863854370117187, 0.5862307739257813, 0.5860966186523437, 0.5860198364257813, 0.5865277709960938, 0.5865287475585937, 0.5874104614257812, 0.586376220703125, 1.215204345703125, 0.586377197265625, 0.5866434326171875, 0.586534912109375, 0.5863895263671876, 0.5870858154296875, 0.5862635498046875, 0.5872025756835938, 0.586982421875, 0.5865861206054688, 0.5865164794921875, 0.586608642578125, 0.5866710815429688, 0.5860003662109375, 0.5871441650390625, 0.5867550659179688, 0.586144775390625, 0.5870264282226563, 0.5859225463867187, 0.5859358520507812, 0.5861325073242187, 0.5858846435546875, 0.58669873046875, 0.5869383544921875, 0.5876326293945312, 0.587109375, 0.5869526977539062, 
0.5863679809570312, 0.5862573852539062, 0.5862011108398437, 0.5859225463867187, 0.586871826171875, 0.5864949951171875, 0.5862635498046875, 0.5871820678710937, 0.5862850341796875, 0.5865062255859375, 0.5861990356445312, 0.5862440795898437, 0.5863117065429687, 0.58606591796875, 0.5864724731445312, 0.5863618774414062, 0.5869076538085938, 0.5863720703125, 0.5866680297851562, 0.5867079467773437, 0.5861099243164063, 0.5866577758789062, 0.5864990844726562, 0.5863792724609375, 0.5876592407226563, 0.586555419921875, 0.5861253051757812, 0.5861632080078125, 0.586450927734375, 0.5867694091796875, 0.5861539916992188, 0.5869639892578125, 0.58669775390625, 0.5874268188476562, 0.586555419921875, 0.5859317626953126]",tokens/s,1.6779819156756426,,,,,,,, -4bit-awq-gemm-eager,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,microsoft/phi-1_5,microsoft/phi-1_5,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.0,,0.30.1,,,,1.19.2,,,,0.11.1,,,,,,,,,,,,,,,,,,,,,,,,,,,MB,1358.589952,2103.967744,0.0,1457.52064,1272.881664,s,10,1.3631298065185549,0.1363129806518555,0.0008682167068706195,0.13635552215576174,0.1370564453125,0.13756546936035158,0.13797268859863282,"[0.13807449340820313, 0.13481919860839844, 0.13663923645019532, 0.13648768615722656, 0.1363610534667969, 0.13634999084472657, 0.13634962463378905, 0.1369433288574219, 0.13605657958984374, 0.13504861450195313]",tokens/s,1878.030975302537,kWh,1.5893511683852586e-06,8.706513991564859e-07,6.58916082688051e-06,9.049163394422252e-06,tokens/kWh,28289908.010479063,MB,1358.589952,2103.967744,0.0,1457.52064,1369.424896,s,10,80.1509423828125,8.01509423828125,0.04758558328935424,8.03790185546875,8.060850390625001,8.06196572265625,8.06285798828125,"[7.943890625, 7.97273681640625, 8.0463369140625, 8.0606025390625, 8.06037841796875, 8.0630810546875, 8.0572548828125, 8.029466796875, 7.960978515625, 7.9562158203125]",tokens/s,7.860169590907975,kWh,9.384463308144499e-05,5.143388635707014e-05,0.00037368961561811727,0.0005189681350566323,tokens/kWh,121394.73648632616,,s,629,81.24969168090827,0.12917280076455995,0.016234788877661135,0.127678466796875,0.12822896728515623,0.12854743347167968,0.26253706176757813,"[0.12623462677001954, 0.12626534271240233, 0.12749107360839843, 0.12655411529541016, 0.12602880096435548, 0.12595507049560548, 0.12608409881591798, 0.125949951171875, 0.12577792358398437, 0.1258967056274414, 0.12588851165771484, 0.12596736145019533, 0.12578099060058595, 0.1260912628173828, 0.12604723358154296, 0.12598169708251952, 0.12593971252441405, 0.12601139068603515, 0.12579634857177735, 0.12603392028808594, 0.12579840087890626, 0.12579532623291015, 0.1258967056274414, 0.1258086395263672, 0.1257297897338867, 0.12582093048095702, 0.1259315185546875, 0.12595609283447265, 0.12572783660888673, 0.12588227081298828, 0.12573081970214844, 0.12737945556640626, 0.1259468765258789, 0.12588646697998046, 0.12581478118896483, 0.12592435455322265, 0.1258792953491211, 0.1258588180541992, 0.12746444702148438, 0.12731187438964844, 0.12609945678710938, 0.12609843444824217, 0.12581478118896483, 0.12597452545166016, 
0.12607078552246093, 0.12630118560791015, 0.12647833251953125, 0.12594483184814453, 0.12569087982177735, 0.12633702087402343, 0.12602880096435548, 0.1259898910522461, 0.126023681640625, 0.12634214019775392, 0.12730879974365233, 0.12593357086181642, 0.1258250274658203, 0.12601036834716797, 0.1260175323486328, 0.12602060699462891, 0.1258598403930664, 0.1257185287475586, 0.2625024108886719, 0.12606771087646484, 0.1269903335571289, 0.12595507049560548, 0.12662681579589843, 0.1257164764404297, 0.12573081970214844, 0.12580966186523437, 0.1257676773071289, 0.12584345245361328, 0.12604723358154296, 0.12586700439453125, 0.125876220703125, 0.12575027465820313, 0.12595507049560548, 0.12601856231689454, 0.12643840026855468, 0.12581068420410157, 0.12602265930175782, 0.12638412475585936, 0.12576153564453124, 0.12624486541748048, 0.12597964477539061, 0.12647833251953125, 0.12601856231689454, 0.12584652709960936, 0.12577792358398437, 0.12563763427734376, 0.12597145843505858, 0.1260359649658203, 0.12596121978759767, 0.1258055648803711, 0.12600422668457031, 0.1259345932006836, 0.12581273651123046, 0.12602674865722657, 0.12610355377197266, 0.12577279663085938, 0.12581375885009766, 0.12614963531494142, 0.1260062713623047, 0.12613426971435546, 0.1257359390258789, 0.1258567657470703, 0.12633190155029297, 0.1259694061279297, 0.12657254028320314, 0.12820480346679688, 0.12799180603027344, 0.12775526428222655, 0.127710205078125, 0.127857666015625, 0.1278545913696289, 0.12770918273925783, 0.12889190673828124, 0.1281597442626953, 0.12789145660400392, 0.12811878967285156, 0.12782284545898437, 0.128, 0.12832972717285157, 0.12828466796875, 0.12807577514648438, 0.26479617309570314, 0.12780953979492188, 0.1277501449584961, 0.12807475280761718, 0.12764979553222655, 0.1277470703125, 0.12904141235351563, 0.12810957336425782, 0.12779315185546875, 0.12776959991455078, 0.12783103942871094, 0.128247802734375, 0.1280174102783203, 0.12772045135498047, 0.12871270751953126, 0.12824371337890625, 0.12798976135253906, 0.12790579223632811, 0.12783103942871094, 0.12777574157714844, 0.1258086395263672, 0.12573798370361328, 0.12613222503662108, 0.12614860534667968, 0.12590592193603517, 0.12583219146728516, 0.12566630554199218, 0.12582093048095702, 0.12807679748535156, 0.1276159973144531, 0.12811672973632812, 0.1279078369140625, 0.1277327346801758, 0.12819967651367187, 0.12767539215087892, 0.1281658935546875, 0.12766515350341798, 0.12815565490722655, 0.12822528076171874, 0.12830618286132814, 0.12780748748779297, 0.1277286376953125, 0.12833279418945312, 0.1282723846435547, 0.12803890991210937, 0.12805836486816408, 0.12844236755371094, 0.12783411407470704, 0.12770611572265625, 0.12779724884033203, 0.1281546173095703, 0.1276159973144531, 0.12784230041503905, 0.12783206176757814, 0.12786688232421875, 0.128142333984375, 0.12768051147460938, 0.12766822052001953, 0.12811264038085937, 0.1277491226196289, 0.12756582641601563, 0.1288970184326172, 0.1281495056152344, 0.26441317749023435, 0.12781977844238282, 0.12796927642822264, 0.12775628662109376, 0.12817613220214844, 0.12858061218261718, 0.12771942138671874, 0.12783513641357422, 0.1279508514404297, 0.12757606506347657, 0.12777369689941406, 0.12777164459228516, 0.12801126098632812, 0.12763545227050782, 0.12789043426513672, 0.1276436462402344, 0.1276559371948242, 0.12776959991455078, 0.1282856903076172, 0.12782080078125, 0.12882330322265625, 0.1280614471435547, 0.12808090209960937, 0.12776959991455078, 0.1277675552368164, 0.12821708679199217, 0.12763033294677734, 0.12757708740234375, 
0.1277347869873047, 0.12844032287597656, 0.12804812622070313, 0.12764979553222655, 0.12763545227050782, 0.12835430908203124, 0.12770918273925783, 0.12769075012207032, 0.12814131164550782, 0.1277491226196289, 0.12796934509277344, 0.1278842239379883, 0.12797337341308593, 0.1286594543457031, 0.12793036651611328, 0.12776448059082032, 0.12818226623535156, 0.12790886688232422, 0.12799488067626952, 0.12788121795654298, 0.1278054428100586, 0.12845158386230468, 0.12768870544433594, 0.12787404632568358, 0.12859596252441408, 0.1280563201904297, 0.1278013458251953, 0.12784435272216796, 0.12770508575439454, 0.12821197509765625, 0.12766719818115235, 0.12801228332519532, 0.12829901123046875, 0.1277675552368164, 0.12806965637207032, 0.2643558044433594, 0.12766515350341798, 0.12768051147460938, 0.12765286254882813, 0.12751462554931642, 0.12763545227050782, 0.12779519653320312, 0.12767129516601564, 0.12799078369140626, 0.1276590042114258, 0.1277655029296875, 0.12870144653320312, 0.12818226623535156, 0.12776242828369141, 0.1278699493408203, 0.1276272659301758, 0.128184326171875, 0.12763545227050782, 0.12879769897460938, 0.12881100463867187, 0.12820684814453126, 0.12788735961914063, 0.1276211166381836, 0.12761087799072265, 0.12819456481933594, 0.12811878967285156, 0.12794572448730468, 0.12788735961914063, 0.12781465911865234, 0.12768563079833983, 0.1276211166381836, 0.12834815979003905, 0.12801945495605468, 0.1278720016479492, 0.12835430908203124, 0.1289707489013672, 0.12773990631103516, 0.12778495788574218, 0.12773990631103516, 0.1278167037963867, 0.12815155029296876, 0.12765081787109375, 0.1277675552368164, 0.12948786926269532, 0.12794265747070313, 0.1282826232910156, 0.1276600341796875, 0.12820378112792968, 0.12769792175292968, 0.1278238754272461, 0.12797235107421875, 0.12779212951660157, 0.12791193389892577, 0.12792217254638671, 0.12759859466552734, 0.12793548583984374, 0.12773580932617187, 0.12768563079833983, 0.12795597076416015, 0.12788633728027343, 0.12800306701660155, 0.1278238754272461, 0.127710205078125, 0.264237060546875, 0.12841676330566407, 0.12879667663574218, 0.12799590301513672, 0.12780032348632814, 0.12786688232421875, 0.1277480926513672, 0.12770201873779297, 0.1276231689453125, 0.12800204467773438, 0.12798976135253906, 0.12791295623779297, 0.12839730834960938, 0.1277163543701172, 0.12764569854736327, 0.12788428497314452, 0.12782284545898437, 0.12786585235595704, 0.12791705322265626, 0.12781362915039063, 0.12769792175292968, 0.12772147369384765, 0.12825599670410157, 0.1278382110595703, 0.12759859466552734, 0.12937522888183595, 0.1279283218383789, 0.12773990631103516, 0.127857666015625, 0.12759859466552734, 0.12825804138183594, 0.1278167037963867, 0.12803482055664062, 0.12878028869628907, 0.128005126953125, 0.1279477767944336, 0.12773785400390625, 0.12787814331054687, 0.12846284484863282, 0.12783001708984376, 0.12776345825195312, 0.12814437866210937, 0.12812594604492186, 0.1279324188232422, 0.12761395263671876, 0.12771737670898436, 0.128427001953125, 0.12770508575439454, 0.1278883819580078, 0.12929638671875, 0.1278699493408203, 0.12793856048583985, 0.12772147369384765, 0.12795597076416015, 0.12789965057373046, 0.1279477767944336, 0.127678466796875, 0.1288642578125, 0.12818534851074218, 0.1278371810913086, 0.12756070709228515, 0.1280880584716797, 0.1277829132080078, 0.26441522216796876, 0.12775321960449218, 0.1284147186279297, 0.12843110656738282, 0.12777062225341798, 0.12808601379394532, 0.12776242828369141, 0.12883660888671875, 0.12807986450195313, 0.1278924789428711, 0.12769280242919923, 
0.1283819580078125, 0.12761497497558594, 0.12829286193847655, 0.1278545913696289, 0.12783411407470704, 0.12800921630859374, 0.12785151672363282, 0.12760985565185548, 0.1278935012817383, 0.127604736328125, 0.12804197692871094, 0.12766207885742187, 0.1277276153564453, 0.12755661010742186, 0.12782284545898437, 0.12766310119628907, 0.12764876556396484, 0.1277317123413086, 0.1276006393432617, 0.12769280242919923, 0.12792729949951173, 0.12772249603271485, 0.1276211166381836, 0.1279477767944336, 0.12771942138671874, 0.12809625244140624, 0.12782080078125, 0.12788121795654298, 0.1286420440673828, 0.12810751342773438, 0.12785151672363282, 0.12791603088378906, 0.12781362915039063, 0.1283072052001953, 0.12790271759033203, 0.1278935012817383, 0.12861235046386718, 0.1277870101928711, 0.12774092864990233, 0.12795699310302736, 0.1275709457397461, 0.12798976135253906, 0.12764262390136719, 0.128, 0.12771942138671874, 0.12753817749023438, 0.12765081787109375, 0.12802047729492189, 0.12789657592773437, 0.1276610565185547, 0.12777369689941406, 0.12768972778320312, 0.2650091552734375, 0.12773990631103516, 0.1276764144897461, 0.12778598022460938, 0.1281781768798828, 0.12770611572265625, 0.12761190032958986, 0.12885093688964844, 0.12839730834960938, 0.12804608154296876, 0.12754431915283204, 0.12804301452636718, 0.128110595703125, 0.12830003356933595, 0.1278771209716797, 0.1279293441772461, 0.12791705322265626, 0.1275125732421875, 0.12755865478515624, 0.12813107299804688, 0.12789657592773437, 0.12766413116455078, 0.12822528076171874, 0.12799488067626952, 0.1279774703979492, 0.12805223083496095, 0.1276610565185547, 0.12801023864746094, 0.12790271759033203, 0.12759347534179688, 0.12863999938964843, 0.1278760986328125, 0.12768768310546874, 0.127783935546875, 0.12769075012207032, 0.12832972717285157, 0.127678466796875, 0.12767948913574217, 0.12722994995117187, 0.1263267822265625, 0.12628173065185547, 0.1263114242553711, 0.12657766723632813, 0.12635852813720702, 0.12587315368652344, 0.12590592193603517, 0.12597657775878907, 0.12586598205566407, 0.1257543716430664, 0.1258301467895508, 0.12576563262939452, 0.12673535919189452, 0.12682342529296875, 0.12621414184570312, 0.12802662658691405, 0.12777574157714844, 0.12764569854736327, 0.12707532501220703, 0.12799078369140626, 0.12770816040039062, 0.12756787109375, 0.12807475280761718, 0.12647731018066405, 0.262550537109375, 0.1261670379638672, 0.12625305938720705, 0.12649779510498046, 0.12646707153320313, 0.12611174774169923, 0.12602777862548828, 0.12611993408203126, 0.12626739501953124, 0.12624588775634765, 0.12608512115478515, 0.12602572631835937, 0.12627763366699218, 0.12701081848144533, 0.12849766540527344, 0.12709273529052734, 0.12640460968017578, 0.1259356155395508, 0.12617932891845704, 0.12601856231689454, 0.12573184204101562, 0.1274439697265625, 0.1281474609375, 0.12596428680419922, 0.12658790588378907, 0.12630016326904298, 0.12737535858154297, 0.1271398391723633, 0.12651622772216797, 0.12802867126464842, 0.127494140625, 0.12671385955810546, 0.12621517181396485, 0.12586495971679687, 0.125949951171875, 0.12607692718505858, 0.12581990051269532, 0.1269585952758789, 0.12647628784179688, 0.1260400619506836, 0.1257011184692383, 0.1262888946533203, 0.12585062408447265, 0.1261506576538086, 0.1261619186401367, 0.12615885162353516, 0.12634521484375, 0.1262909469604492, 0.1259857940673828, 0.12619468688964844, 0.1256785888671875, 0.1258751983642578, 0.12573184204101562, 0.12563251495361327, 0.1259898910522461, 0.12623462677001954, 0.12567244720458984, 0.12609228515625, 
0.12659916687011719, 0.12599807739257812, 0.12612505340576172, 0.1267558364868164, 0.12647731018066405, 0.2608926696777344, 0.12610150146484375, 0.1264158706665039, 0.12645785522460937, 0.126740478515625, 0.12682240295410158, 0.1262356491088867, 0.1261957092285156, 0.1260359649658203, 0.12633805084228517, 0.12569292449951172, 0.1258086395263672, 0.12725145721435546, 0.12759859466552734, 0.12629708862304687, 0.1263790054321289, 0.12595916748046876, 0.12631346893310547, 0.12617113494873047, 0.12567961883544923, 0.12705587005615235, 0.12577689361572267, 0.1257011184692383, 0.12581581115722656, 0.12577792358398437, 0.1261629409790039, 0.1258905563354492, 0.12567244720458984, 0.1256079330444336, 0.1280921630859375, 0.1263073272705078, 0.12712857818603515, 0.12635340881347656, 0.1265233917236328, 0.12599603271484375, 0.1261506576538086, 0.1261844482421875, 0.1280061492919922, 0.12627763366699218, 0.12625305938720705, 0.12565094757080078, 0.12626022338867188, 0.12611686706542968, 0.12614860534667968, 0.12619161224365236, 0.12624486541748048, 0.12653772735595703, 0.12628070068359376, 0.1262581787109375, 0.12623872375488282, 0.12604415893554688, 0.12649472045898438, 0.12680703735351562, 0.12633395385742188, 0.1260031967163086, 0.12610867309570312, 0.1264230422973633, 0.12621107482910157, 0.12615679931640625, 0.12624486541748048, 0.1259898910522461, 0.12594892883300782, 0.1265080337524414]",tokens/s,7.741567838438954,,,,,,,, -4bit-awq-gemm-eager,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,togethercomputer/RedPajama-INCITE-Base-3B-v1,togethercomputer/RedPajama-INCITE-Base-3B-v1,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.1,,0.30.1,,,,1.19.2,,,,0.11.1,,,,,,,,,,,,,,,,,,,,,,,,,,,MB,2276.958208,3364.356096,0.0,2717.908992,2483.645952,s,10,2.418925704956055,0.24189257049560547,0.0014063564969548042,0.2416701126098633,0.24295570678710937,0.24411388549804686,0.24504042846679686,"[0.24527206420898437, 0.24195974731445313, 0.2402183074951172, 0.2404288330078125, 0.24130831909179687, 0.2407073974609375, 0.24138047790527345, 0.24269833374023436, 0.24266143798828124, 0.24229078674316407]",tokens/s,1058.3210533316103,kWh,2.845328939812524e-06,1.5591126709426003e-06,1.357947514928401e-05,1.7983916760039136e-05,tokens/kWh,14234941.332070695,MB,2281.037824,3364.356096,0.0,2717.908992,2632.492032,s,10,138.94498046874997,13.894498046874997,0.004182595458186898,13.892857421875,13.8990578125,13.90156796875,13.903576093749999,"[13.894330078125, 13.904078125, 13.891740234375, 13.8913271484375, 13.8928662109375, 13.8928486328125, 13.890017578125, 13.8911748046875, 13.8985, 13.89809765625]",tokens/s,4.534168833408796,kWh,0.00016402044498967748,8.989529760271189e-05,0.0007398838220653231,0.0009937995646577126,tokens/kWh,63393.06459819053,,s,629,140.89449887084956,0.22399761346716945,0.028708018380316665,0.22048768615722655,0.22092697143554688,0.2211510284423828,0.4614811267089844,"[0.22103858947753907, 0.22030642700195313, 0.2203074493408203, 0.2203627471923828, 0.22044467163085937, 0.2201466827392578, 0.22047232055664062, 0.2204610595703125, 
0.22034637451171876, 0.22052249145507813, 0.22052249145507813, 0.2210744323730469, 0.22037094116210937, 0.22095564270019533, 0.2205102081298828, 0.220400634765625, 0.2204180450439453, 0.22016717529296875, 0.22055328369140625, 0.22030943298339845, 0.22044979858398436, 0.2203740234375, 0.22033509826660155, 0.22029107666015624, 0.22042008972167967, 0.220221435546875, 0.22049996948242187, 0.22039552307128907, 0.2202757110595703, 0.2207139892578125, 0.22043545532226563, 0.2203248596191406, 0.22038528442382813, 0.22035661315917968, 0.22044058227539062, 0.2203135986328125, 0.2203863067626953, 0.22037504577636718, 0.22030848693847657, 0.22094744873046876, 0.22068121337890625, 0.22063002014160157, 0.2204917755126953, 0.22049693298339842, 0.22050607299804686, 0.22025112915039063, 0.2205347900390625, 0.22040882873535156, 0.2204559326171875, 0.22036376953125, 0.22044160461425782, 0.22035763549804688, 0.22027468872070313, 0.2205716552734375, 0.22072114562988282, 0.22048255920410156, 0.22129356384277343, 0.22104063415527345, 0.22131712341308593, 0.22103141784667968, 0.22057676696777342, 0.22042828369140624, 0.4625459289550781, 0.22034739685058594, 0.22049996948242187, 0.22023680114746094, 0.22024295043945313, 0.2206351318359375, 0.22068838500976562, 0.220474365234375, 0.22047129821777345, 0.22064947509765626, 0.22041087341308593, 0.22091673278808593, 0.22036787414550782, 0.22061465454101561, 0.22062591552734376, 0.22061363220214844, 0.2205153350830078, 0.2204753875732422, 0.22044979858398436, 0.22052864074707032, 0.22069862365722656, 0.22137957763671876, 0.22033920288085937, 0.22062387084960938, 0.220695556640625, 0.22112973022460938, 0.22057472229003905, 0.2208757781982422, 0.22068634033203124, 0.22061567687988282, 0.220548095703125, 0.22069247436523437, 0.2205368347167969, 0.22067916870117188, 0.22079180908203125, 0.22078463745117188, 0.22062591552734376, 0.2209423370361328, 0.2213263397216797, 0.22097305297851563, 0.22086349487304688, 0.22097613525390625, 0.22042726135253907, 0.2206965789794922, 0.22049996948242187, 0.22054502868652343, 0.22163456726074218, 0.22095359802246095, 0.22068019104003905, 0.2211655731201172, 0.2206842803955078, 0.22073651123046875, 0.22048973083496093, 0.22058706665039063, 0.22057977294921874, 0.22068844604492188, 0.22081939697265626, 0.2213744659423828, 0.2205665283203125, 0.22082252502441407, 0.2207262725830078, 0.22075187683105468, 0.22038937377929688, 0.4611839904785156, 0.22023577880859374, 0.22089727783203125, 0.22050405883789062, 0.22021734619140626, 0.22054400634765625, 0.22038528442382813, 0.2205655059814453, 0.2203248596191406, 0.22023884582519532, 0.22023583984375, 0.22047123718261719, 0.22023577880859374, 0.22024191284179687, 0.2201917419433594, 0.22043443298339843, 0.22027162170410156, 0.2202449951171875, 0.22028492736816407, 0.22072422790527343, 0.220980224609375, 0.2206771240234375, 0.22129458618164063, 0.22060134887695312, 0.2206730194091797, 0.2208123779296875, 0.22065858459472656, 0.22089830017089843, 0.22067507934570313, 0.2204436492919922, 0.22042930603027344, 0.22058087158203124, 0.22041497802734375, 0.22024908447265626, 0.22027162170410156, 0.22030540466308593, 0.22031564331054687, 0.220368896484375, 0.22108876037597655, 0.22054092407226564, 0.22035353088378906, 0.22040576171875, 0.22026138305664061, 0.22044467163085937, 0.2204559326171875, 0.22032179260253906, 0.22044773864746095, 0.22073036193847656, 0.22113075256347656, 0.22058393859863282, 0.22032691955566405, 0.22068531799316407, 0.22055833435058594, 0.22046412658691406, 0.22032383728027344, 
0.22034637451171876, 0.22055322265625, 0.22052455139160157, 0.22036480712890624, 0.2204047393798828, 0.22033612060546875, 0.2206771240234375, 0.2206044158935547, 0.4615966796875, 0.22076824951171875, 0.220295166015625, 0.22087271118164062, 0.2203504638671875, 0.22047232055664062, 0.220189697265625, 0.22043238830566406, 0.22019378662109376, 0.22050816345214844, 0.220579833984375, 0.22056346130371093, 0.22019276428222656, 0.2204375, 0.22046617126464843, 0.2205716552734375, 0.22037196350097657, 0.22026649475097657, 0.22035661315917968, 0.22112562561035157, 0.22030950927734375, 0.22041293334960937, 0.2203873291015625, 0.22026853942871094, 0.22054502868652343, 0.22050201416015625, 0.22024806213378906, 0.2203146209716797, 0.22042930603027344, 0.22102117919921874, 0.22032896423339843, 0.22048768615722655, 0.22014976501464845, 0.22047232055664062, 0.22042112731933594, 0.2204569549560547, 0.2205122528076172, 0.22034226989746095, 0.22057778930664063, 0.22053375244140624, 0.220442626953125, 0.22042214965820311, 0.22033203125, 0.22048664855957031, 0.22074981689453124, 0.2204917755126953, 0.22120652770996094, 0.2206904296875, 0.22041497802734375, 0.22121676635742188, 0.2204784698486328, 0.2206033935546875, 0.22039450073242187, 0.22054400634765625, 0.22060850524902345, 0.22071705627441407, 0.22043443298339843, 0.22050816345214844, 0.2202931213378906, 0.2204794921875, 0.22053887939453126, 0.2205982666015625, 0.22036787414550782, 0.46206668090820313, 0.22080613708496094, 0.2205716552734375, 0.22023782348632812, 0.22025421142578125, 0.22019590759277344, 0.22057875061035156, 0.22045901489257813, 0.22017330932617188, 0.22030950927734375, 0.2208921661376953, 0.22050201416015625, 0.22088294982910156, 0.2204538879394531, 0.2203197479248047, 0.22046310424804688, 0.22013133239746094, 0.22019071960449219, 0.2202828826904297, 0.22072320556640626, 0.22030438232421876, 0.2206730194091797, 0.2204559326171875, 0.22029209899902344, 0.22052659606933595, 0.22039657592773437, 0.22047030639648438, 0.22110202026367187, 0.22078463745117188, 0.22039654541015624, 0.22048768615722655, 0.22048153686523436, 0.22034637451171876, 0.22068325805664063, 0.22042623901367187, 0.22046208190917968, 0.2204989471435547, 0.22047129821777345, 0.22049690246582032, 0.22055526733398437, 0.22030848693847657, 0.22025625610351562, 0.2204047393798828, 0.2205102081298828, 0.22039756774902344, 0.22056448364257814, 0.22109797668457032, 0.2207406005859375, 0.22061260986328124, 0.220548095703125, 0.2202931213378906, 0.22047334289550782, 0.22042828369140624, 0.22047334289550782, 0.22030032348632814, 0.22054191589355468, 0.22119935607910157, 0.22169293212890626, 0.2205419464111328, 0.220557373046875, 0.2205152587890625, 0.2205655059814453, 0.22039756774902344, 0.46115838623046873, 0.22026853942871094, 0.22028492736816407, 0.22010981750488282, 0.22020608520507812, 0.22032077026367186, 0.22018765258789064, 0.2204375, 0.2203811798095703, 0.22077133178710937, 0.22063206481933595, 0.22066073608398437, 0.22049075317382813, 0.22043034362792968, 0.22024191284179687, 0.22040882873535156, 0.22035865783691405, 0.2205982666015625, 0.22046514892578126, 0.220400634765625, 0.22039039611816405, 0.2204047393798828, 0.22018048095703124, 0.22042726135253907, 0.22025625610351562, 0.2204016571044922, 0.2204917755126953, 0.22091775512695314, 0.22068736267089845, 0.2210478057861328, 0.22050714111328126, 0.22068022155761718, 0.220601318359375, 0.2203811798095703, 0.22034022521972657, 0.22041497802734375, 0.22021632385253906, 0.22114303588867187, 0.22130073547363283, 
0.22058905029296874, 0.22039961242675782, 0.22047027587890625, 0.22025830078125, 0.22062284851074218, 0.2205102081298828, 0.22040780639648438, 0.22047232055664062, 0.22069349670410157, 0.22081024169921876, 0.22071807861328124, 0.22037811279296876, 0.22054502868652343, 0.2205306854248047, 0.22061567687988282, 0.22122189331054687, 0.22058087158203124, 0.22052557373046874, 0.2210048065185547, 0.22053785705566406, 0.22060646057128908, 0.22035865783691405, 0.22059622192382813, 0.220516357421875, 0.46187826538085935, 0.22027877807617188, 0.2201661376953125, 0.22008934020996093, 0.22014874267578124, 0.22040780639648438, 0.220221435546875, 0.22034739685058594, 0.22072422790527343, 0.22045184326171874, 0.2204436798095703, 0.22067196655273438, 0.2206525421142578, 0.22057676696777342, 0.22060032653808595, 0.22078976440429687, 0.22035661315917968, 0.2203740234375, 0.22085427856445314, 0.2204600372314453, 0.220337158203125, 0.22026240539550782, 0.22036480712890624, 0.22081741333007812, 0.22036172485351563, 0.2203177032470703, 0.22025421142578125, 0.22035763549804688, 0.22030335998535155, 0.2207139892578125, 0.22059213256835938, 0.22072525024414064, 0.2207406005859375, 0.22064537048339844, 0.22042008972167967, 0.22125669860839844, 0.22061567687988282, 0.220474365234375, 0.22071807861328124, 0.2205347900390625, 0.2203627471923828, 0.2203146209716797, 0.2203248596191406, 0.22029823303222656, 0.22036376953125, 0.22047232055664062, 0.2202255401611328, 0.22048664855957031, 0.22040576171875, 0.22032179260253906, 0.22034025573730467, 0.2205460205078125, 0.22030950927734375, 0.22042317199707032, 0.22106214904785157, 0.22057267761230467, 0.220516357421875, 0.22052761840820312, 0.2204600372314453, 0.22071705627441407, 0.22034124755859374, 0.2203822021484375, 0.22037196350097657, 0.4634490966796875, 0.22028901672363282, 0.22034124755859374, 0.22013848876953124, 0.22024806213378906, 0.22021836853027343, 0.22028901672363282, 0.22026860046386718, 0.22030227661132812, 0.22021530151367188, 0.22014463806152343, 0.22054911804199218, 0.22034431457519532, 0.2202439727783203, 0.22034226989746095, 0.22027775573730468, 0.22059519958496093, 0.22043852233886718, 0.22060032653808595, 0.2206351318359375, 0.2210744323730469, 0.22051123046875, 0.22054092407226564, 0.2203863067626953, 0.22021427917480468, 0.22092697143554688, 0.22021324157714844, 0.22043136596679688, 0.2202408905029297, 0.22046208190917968, 0.2203074493408203, 0.2203504638671875, 0.2204190673828125, 0.2203514862060547, 0.2202593231201172, 0.2208204803466797, 0.2203146209716797, 0.22043034362792968, 0.22032896423339843, 0.22037196350097657, 0.22022860717773438, 0.22044058227539062, 0.22041293334960937, 0.2202449951171875, 0.22045082092285156, 0.22114303588867187, 0.22066893005371094, 0.2209300537109375, 0.22127410888671875, 0.22070477294921875, 0.2205368347167969, 0.22061158752441407, 0.2204927978515625, 0.22057676696777342, 0.2206525421142578, 0.22057066345214843, 0.22053884887695313, 0.22044467163085937, 0.22082867431640624, 0.22062899780273437, 0.22075289916992188, 0.2207139892578125, 0.2211584014892578, 0.4632709045410156, 0.22081024169921876, 0.22034022521972657, 0.22049996948242187, 0.2202265625, 0.22050099182128907, 0.2203822021484375, 0.2204600372314453, 0.22082150268554687, 0.2206904296875, 0.22041087341308593, 0.22061567687988282, 0.22044979858398436, 0.22075698852539063, 0.22043341064453126, 0.22061465454101561, 0.2210682830810547, 0.22072218322753906, 0.2204805145263672, 0.2206699523925781, 0.22046514892578126, 0.22043238830566406, 0.2206177215576172, 
0.22051840209960938, 0.22042726135253907, 0.22032383728027344, 0.22085324096679687, 0.22037709045410156, 0.22035661315917968, 0.22064743041992188, 0.22058802795410157, 0.22054296875, 0.22046310424804688, 0.22043238830566406, 0.2203453369140625, 0.22068736267089845, 0.2204180450439453, 0.22066175842285157, 0.22066073608398437, 0.22049484252929688, 0.22063923645019531, 0.22058700561523437, 0.22040985107421876, 0.22107341003417968, 0.22086553955078125, 0.22104371643066406, 0.22082867431640624, 0.22134066772460936, 0.2208573455810547, 0.22092697143554688, 0.22046310424804688, 0.2206208038330078, 0.22063002014160157, 0.22059111022949218, 0.22057061767578126, 0.22050918579101564, 0.22044671630859375, 0.220727294921875, 0.22051840209960938, 0.22058802795410157, 0.22056243896484376, 0.22115635681152343, 0.22068940734863282, 0.46320025634765627, 0.22056346130371093, 0.22062899780273437, 0.22051840209960938, 0.22040882873535156, 0.22072012329101562, 0.22032691955566405, 0.2210713653564453, 0.22036070251464843, 0.220400634765625, 0.22021324157714844, 0.22049075317382813, 0.22033509826660155, 0.22090547180175782, 0.22041293334960937, 0.22086656188964843, 0.22052761840820312, 0.2203197479248047, 0.22071296691894532, 0.22047232055664062, 0.22075698852539063, 0.22052659606933595, 0.22043238830566406, 0.22035968017578125, 0.22096896362304688, 0.22059928894042968, 0.2214615020751953, 0.2207467498779297, 0.22179942321777343, 0.22081228637695313, 0.22052146911621093, 0.22039961242675782, 0.22029209899902344, 0.2204805145263672, 0.22017433166503905, 0.2204436492919922, 0.2202204132080078, 0.22042930603027344, 0.2203811798095703, 0.22039654541015624, 0.2202408905029297, 0.22037196350097657, 0.22073855590820313, 0.22145228576660156, 0.22041395568847658, 0.22063308715820312, 0.220368896484375, 0.22088499450683594, 0.22040882873535156, 0.22045901489257813, 0.22026649475097657, 0.22042726135253907, 0.22040985107421876, 0.22130482482910158, 0.22058700561523437, 0.22063002014160157, 0.22062693786621093, 0.22053887939453126, 0.22047640991210937, 0.22061158752441407, 0.2204375, 0.22050611877441406, 0.22054911804199218]",tokens/s,4.464333278026493,,,,,,,, -4bit-awq-gemm-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,Qwen/Qwen2-beta-72B,Qwen/Qwen2-beta-72B,cuda,0,42,,,True,,,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run - 
report = scenario.run(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run - self.run_model_loading_tracking(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking - backend.load() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load - self.load_transformers_model() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model - self.load_transformers_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights - self.load_transformers_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained - self.pretrained_model = self.automodel_loader.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4034, in from_pretrained - dispatch_model(model, **device_map_kwargs) - File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 494, in dispatch_model - model.to(device) - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 2905, in to - return super().to(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1174, in to - return self._apply(convert) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply - module._apply(fn) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply - module._apply(fn) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply - module._apply(fn) - [Previous line repeated 2 more times] - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 854, in _apply - self._buffers[key] = fn(buf) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1160, in convert - return t.to( -torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 96.00 MiB. GPU 0 has a total capacity of 22.18 GiB of which 68.50 MiB is free. Process 122534 has 22.11 GiB memory in use. Of the allocated memory 21.86 GiB is allocated by PyTorch, and 10.74 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) - -",qwen2,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemm-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,meta-llama/Llama-2-70b-hf,meta-llama/Llama-2-70b-hf,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - self.load_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3904, in from_pretrained - dispatch_model(model, **device_map_kwargs) - File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 489, in dispatch_model - model.to(device) - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 2796, in to - return super().to(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1173, in to - return self._apply(convert) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 779, in _apply - module._apply(fn) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 779, in _apply - module._apply(fn) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 779, in _apply - module._apply(fn) - [Previous line repeated 2 more times] - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 853, in _apply - self._buffers[key] = fn(buf) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1159, in convert - return t.to( -torch.cuda.OutOfMemoryError: CUDA out of memory. Tried to allocate 112.00 MiB. 
GPU - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemm-eager,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,meta-llama/Llama-2-7b-hf,meta-llama/Llama-2-7b-hf,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.0,,0.30.1,,,,1.19.2,,,,0.11.1,,,,,,,,,,,,,,,,,,,,,,,,,,,MB,2001.69472,5480.382464,0.0,4833.93536,4503.41376,s,10,5.738609375,0.5738609375000001,0.0014211968686511217,0.5739980163574219,0.575327734375,0.5758431579589843,0.5762554968261718,"[0.5748433227539063, 0.5763585815429687, 0.5723970336914063, 0.5719782104492187, 0.5729453125, 0.57201806640625, 0.5736748046875, 0.5743212280273438, 0.5752131958007812, 0.574859619140625]",tokens/s,446.1011079012499,kWh,6.756110654936897e-06,3.7020347229372414e-06,3.163201604633459e-05,4.209016142420872e-05,tokens/kWh,6082181.472764753,MB,2003.341312,5480.382464,0.0,4833.93536,4688.700416,s,10,334.8389140625,33.48389140625,0.006002777931550277,33.4837109375,33.491801171875004,33.4921896484375,33.4925004296875,"[33.49171484375, 33.492578125, 33.48669921875, 33.4832421875, 33.47827734375, 33.4841796875, 33.4782109375, 33.47846875, 33.4750390625, 33.49050390625]",tokens/s,1.8815017417073754,kWh,0.000395271455562777,0.00021664300892132815,0.0018250813026564506,0.0024369957671405555,tokens/kWh,25851.501610903877,,s,629,339.4681906738281,0.539695056715148,0.0682067154879699,0.5314457397460938,0.5320167358398438,0.5323411499023437,1.1045171826171876,"[0.5315440673828125, 0.5327882080078125, 0.5320939331054687, 0.531873779296875, 0.5308590087890624, 0.5313822631835937, 0.530882568359375, 0.5313515625, 0.530988037109375, 0.531061767578125, 0.530872314453125, 0.5314385986328125, 0.5312184448242188, 0.531462158203125, 0.531262451171875, 0.5317355346679687, 0.5311897583007813, 0.5321195678710937, 0.5312982788085937, 0.5320591430664062, 0.5310802001953125, 0.531641357421875, 0.5311631469726562, 0.5312388916015625, 0.5309296875, 0.5314119873046875, 0.5308661499023437, 0.53139453125, 0.5312368774414062, 0.5312471313476562, 0.5308395385742187, 0.5312962646484375, 0.5309542236328125, 0.531441650390625, 0.531462158203125, 0.532063232421875, 0.5323673706054688, 0.5327810668945312, 0.5322188720703125, 0.5317089233398438, 0.5315245971679687, 0.531757080078125, 0.5315543212890625, 0.5318953247070313, 0.5315266723632812, 0.5318656005859375, 0.5314826049804687, 0.53178369140625, 0.5316423950195313, 0.5318696899414063, 0.5316649169921875, 0.53180517578125, 0.531388427734375, 0.5319229736328125, 0.5317713623046875, 0.5329234008789062, 0.5328568115234374, 0.5318778686523438, 0.531589111328125, 0.5319567260742187, 0.5317099609375, 0.5322915649414063, 1.1084400634765625, 0.53127783203125, 0.5318246459960938, 0.53123583984375, 0.5316290283203124, 0.531357666015625, 0.5316055297851563, 0.5315020751953125, 0.531830810546875, 0.5315850219726562, 0.532305908203125, 0.5320980224609375, 0.5330299072265625, 0.5324052734375, 0.5326130981445313, 0.53216357421875, 0.5326397705078125, 0.5320242919921875, 0.5325066528320312, 0.532401123046875, 0.5316433715820312, 
0.5314949340820313, 0.5316167602539063, 0.5315225830078125, 0.5317222290039062, 0.5312593994140625, 0.5317662963867188, 0.5315317993164063, 0.5316321411132813, 0.5320345458984375, 0.5328107299804687, 0.5314774780273438, 0.531272705078125, 0.530861083984375, 0.53119384765625, 0.5307944946289063, 0.5313526000976563, 0.530988037109375, 0.5313013916015625, 0.530966552734375, 0.5314918212890625, 0.5308528442382813, 0.5315717163085938, 0.53110888671875, 0.5315563354492188, 0.530924560546875, 0.533138427734375, 0.531162109375, 0.5319669799804687, 0.5311057739257813, 0.5323171997070313, 0.53113037109375, 0.5316034545898437, 0.5314006958007812, 0.5314744262695312, 0.5310115966796874, 0.5314641723632813, 0.53139453125, 0.5315348510742187, 0.5311682739257813, 0.53148876953125, 0.5309757690429687, 0.5314242553710937, 1.1040880126953125, 0.5311743774414063, 0.5314990234375, 0.531357666015625, 0.53203662109375, 0.5313760986328125, 0.531758056640625, 0.5313689575195313, 0.53195263671875, 0.5313187866210938, 0.5315809326171875, 0.5311826171875, 0.5317130126953125, 0.5317140502929687, 0.5317130126953125, 0.5313382568359375, 0.5315819702148438, 0.53187890625, 0.5318358764648438, 0.531926025390625, 0.5318963012695312, 0.5314027709960938, 0.53145703125, 0.531620849609375, 0.5316331787109375, 0.5310167236328125, 0.5312348022460938, 0.5310382080078125, 0.5314088745117187, 0.5308682250976563, 0.5315399780273438, 0.5312174072265625, 0.5312706298828125, 0.5310443725585937, 0.5318563842773437, 0.531294189453125, 0.5317816162109374, 0.5314345092773437, 0.5318461303710937, 0.5316024169921875, 0.5316915283203125, 0.5309102172851563, 0.531857421875, 0.5316290283203124, 0.5315747680664062, 0.5310914306640625, 0.5317744750976563, 0.5314283447265625, 0.53144677734375, 0.5316792602539062, 0.5314396362304687, 0.531156982421875, 0.5315000610351562, 0.5313597412109375, 0.531751953125, 0.5312440185546875, 0.5317304077148437, 0.5317755126953125, 0.5315819702148438, 0.531620849609375, 0.5316690063476562, 0.5314795532226563, 0.531746826171875, 1.10517041015625, 0.5310320434570313, 0.5314959106445313, 0.531240966796875, 0.5316321411132813, 0.5310750732421875, 0.5315758056640625, 0.5309490966796875, 0.5315245971679687, 0.531431396484375, 0.5313065185546875, 0.5310504760742187, 0.53180517578125, 0.5314457397460938, 0.5317089233398438, 0.53155224609375, 0.5318389892578125, 0.53121435546875, 0.5315266723632812, 0.5316557006835938, 0.5317037963867187, 0.5315112915039063, 0.5319659423828125, 0.5315870971679687, 0.5313024291992188, 0.5310637817382813, 0.53197412109375, 0.5312553100585937, 0.5316925659179688, 0.5314713745117188, 0.53150927734375, 0.5313689575195313, 0.531694580078125, 0.5312471313476562, 0.5318174438476563, 0.5313341674804688, 0.5315020751953125, 0.5311109008789062, 0.5313853149414063, 0.5311262817382812, 0.53142529296875, 0.531198974609375, 0.53186767578125, 0.5312348022460938, 0.5325199584960938, 0.5314805908203125, 0.5316055297851563, 0.531357666015625, 0.5316454467773437, 0.5315297241210938, 0.5319075927734375, 0.5314221801757812, 0.5315809326171875, 0.531282958984375, 0.531325927734375, 0.5312184448242188, 0.5315061645507813, 0.5310894165039063, 0.5316116333007812, 0.5311078491210938, 0.5315430297851562, 0.5313955688476563, 0.5316587524414063, 1.10468408203125, 0.5308170166015626, 0.5314140014648437, 0.5310218505859375, 0.53187890625, 0.5309481201171875, 0.5317294311523437, 0.5310084838867187, 0.5319915771484375, 0.530977783203125, 0.5312849731445313, 0.5309696044921876, 0.531294189453125, 0.5310863647460937, 
0.5315205078125, 0.5311488037109375, 0.531873779296875, 0.5310392456054688, 0.5315072021484375, 0.531240966796875, 0.53151025390625, 0.531162109375, 0.531541015625, 0.5314877319335938, 0.5313935546875, 0.5315809326171875, 0.5316167602539063, 0.531219482421875, 0.53136279296875, 0.5309470825195313, 0.5313136596679687, 0.5309798583984375, 0.5316341552734375, 0.5309798583984375, 0.5316966552734375, 0.5313157348632812, 0.531535888671875, 0.5310628051757813, 0.5316270141601562, 0.5313607788085938, 0.5316013793945312, 0.5313177490234375, 0.5321758422851562, 0.53148876953125, 0.5317089233398438, 0.53140478515625, 0.5316423950195313, 0.5311181030273437, 0.5316239624023438, 0.5310228271484375, 0.5313597412109375, 0.5311314086914063, 0.5313966064453125, 0.5312317504882812, 0.531378173828125, 0.5308600463867188, 0.5313966064453125, 0.5326162109375, 0.5314959106445313, 0.5313351440429688, 0.5318082275390625, 0.5313423461914063, 0.5320233154296875, 1.1039805908203124, 0.530788330078125, 0.5315082397460937, 0.5307863159179688, 0.53119384765625, 0.5309398803710937, 0.5312583618164063, 0.5307330322265625, 0.5312890625, 0.5308733520507812, 0.5311170654296875, 0.5309942016601562, 0.5312973022460937, 0.5311661987304688, 0.5314498291015625, 0.5309091796875, 0.532263916015625, 0.531462158203125, 0.5319485473632812, 0.531884033203125, 0.531937255859375, 0.5315133666992188, 0.532031494140625, 0.5308630981445313, 0.5312614135742187, 0.5308876953125, 0.5313873901367188, 0.5314078979492187, 0.5316280517578125, 0.531314697265625, 0.53157373046875, 0.5310853271484375, 0.5314744262695312, 0.5311897583007813, 0.5319854125976563, 0.53146728515625, 0.5319290771484375, 0.5310658569335938, 0.533580810546875, 0.5315614624023437, 0.5317345581054688, 0.5312808837890625, 0.531472412109375, 0.531282958984375, 0.5319321899414062, 0.5314600830078124, 0.53201611328125, 0.5309706420898438, 0.5315635375976563, 0.53127783203125, 0.5316904907226563, 0.5316259765625, 0.5321615600585937, 0.5318072509765625, 0.5314979858398438, 0.5314017333984375, 0.5320017700195312, 0.5319782104492188, 0.5316690063476562, 0.5314088745117187, 0.5318072509765625, 0.5312388916015625, 0.531979248046875, 1.1056363525390625, 0.5310975952148438, 0.5316484985351563, 0.5309685668945312, 0.5316218872070313, 0.5310679321289062, 0.53184716796875, 0.5311968994140625, 0.5316198120117187, 0.5313331298828124, 0.53146728515625, 0.5315911865234375, 0.5316013793945312, 0.5314119873046875, 0.5317488403320313, 0.5311682739257813, 0.5313904418945312, 0.5310167236328125, 0.5313966064453125, 0.5309951782226563, 0.5314457397460938, 0.5311436767578125, 0.5312788696289062, 0.5310361328125, 0.5313966064453125, 0.5309634399414063, 0.531441650390625, 0.5314631958007813, 0.5321011352539062, 0.5310320434570313, 0.53186865234375, 0.5309337768554687, 0.5312890625, 0.5309788208007813, 0.53139453125, 0.53129931640625, 0.5314539794921875, 0.5309747314453125, 0.5315932006835937, 0.5313382568359375, 0.5317478637695312, 0.5309747314453125, 0.53136279296875, 0.5310310668945313, 0.5313843383789062, 0.5313546142578125, 0.531884033203125, 0.5320816650390625, 0.5317007446289063, 0.5317181396484375, 0.5314928588867187, 0.531162109375, 0.53182568359375, 0.5310238647460938, 0.5315983276367188, 0.5310853271484375, 0.5314877319335938, 0.5318953247070313, 0.5316690063476562, 0.5312655639648437, 0.531473388671875, 0.5310628051757813, 0.5314242553710937, 1.1063428955078125, 0.5311477661132813, 0.5320335083007812, 0.5312921752929688, 0.5317335205078125, 0.530977783203125, 0.5313136596679687, 
0.5312286987304687, 0.5314478149414062, 0.5310218505859375, 0.5314529418945313, 0.5309644775390625, 0.5314129638671875, 0.5308477172851562, 0.5313074951171874, 0.5311713256835937, 0.5313065185546875, 0.5310802001953125, 0.531578857421875, 0.5314457397460938, 0.5317069091796875, 0.53136279296875, 0.531314697265625, 0.531267578125, 0.5314457397460938, 0.5310955810546875, 0.5312737426757812, 0.53108837890625, 0.5317027587890625, 0.5308856201171875, 0.5313976440429687, 0.5313341674804688, 0.5313167114257813, 0.5309122314453125, 0.5314457397460938, 0.5309849853515625, 0.5314765014648437, 0.5310156860351563, 0.5319966430664063, 0.5313443603515625, 0.5320386352539063, 0.531367919921875, 0.5315552978515625, 0.530850830078125, 0.5315706787109375, 0.5309685668945312, 0.5314590454101562, 0.5309583129882812, 0.5313720092773437, 0.5309685668945312, 0.5319751586914062, 0.531578857421875, 0.5315164184570312, 0.5312655639648437, 0.5314447631835938, 0.5310504760742187, 0.5314877319335938, 0.5322485961914063, 0.5322516479492188, 0.531857421875, 0.5321328735351563, 0.5316331787109375, 0.531778564453125, 1.1072911376953125, 0.5309224853515625, 0.5317345581054688, 0.5315655517578125, 0.531462158203125, 0.53136279296875, 0.5313065185546875, 0.530951171875, 0.5313351440429688, 0.5309603881835937, 0.53127783203125, 0.5310310668945313, 0.5318154296875, 0.531683349609375, 0.5316904907226563, 0.5310494995117188, 0.5312767944335938, 0.5310556030273438, 0.5316259765625, 0.53110986328125, 0.5314088745117187, 0.5309767456054687, 0.5314846801757812, 0.5312081909179688, 0.5313607788085938, 0.5309849853515625, 0.53131982421875, 0.5310699462890625, 0.5314191284179688, 0.5309747314453125, 0.5317734375, 0.531103759765625, 0.5316280517578125, 0.5309747314453125, 0.531689453125, 0.531009521484375, 0.5314703369140625, 0.53123583984375, 0.53159423828125, 0.5309818725585937, 0.5314191284179688, 0.5310842895507812, 0.5318082275390625, 0.531125244140625, 0.5315932006835937, 0.531051513671875, 0.5314324340820312, 0.5311846313476563, 0.5314447631835938, 0.5312880859375, 0.5315921630859375, 0.5316597900390625, 0.5315440673828125, 0.53146826171875, 0.53159423828125, 0.5313331298828124, 0.5315133666992188, 0.5311201171875, 0.5314662475585937, 0.5311539306640625, 0.5316423950195313, 0.531114990234375, 0.5314866943359375, 1.1065169677734374, 0.5308661499023437, 0.5313402709960937, 0.5308170166015626, 0.5315850219726562, 0.5312532348632812, 0.5315512084960937, 0.53100341796875, 0.5315338134765625, 0.531156982421875, 0.5314345092773437, 0.5309490966796875, 0.5311846313476563, 0.5308651733398437, 0.5314539794921875, 0.5309276123046875, 0.531431396484375, 0.5310126342773438, 0.5312388916015625, 0.5309061279296875, 0.531294189453125, 0.5308026733398438, 0.531325927734375, 0.5314857177734374, 0.5316792602539062, 0.5311539306640625, 0.5321492309570313, 0.5309573364257812, 0.5314765014648437, 0.5313320922851562, 0.5315184936523437, 0.5314457397460938, 0.5313597412109375, 0.530850830078125, 0.5313320922851562, 0.531251220703125, 0.5314334716796875, 0.5309030151367188, 0.531431396484375, 0.531162109375, 0.5314017333984375, 0.53098291015625, 0.5322833862304688, 0.532005859375, 0.5324338989257813, 0.53241650390625, 0.5326049194335938, 0.5322711181640625, 0.5329356689453125, 0.5318450927734375, 0.5316024169921875, 0.5314273071289063, 0.5318809814453125, 0.53153076171875, 0.5320017700195312, 0.5319137573242188, 0.5322250366210938, 0.5320192260742187, 0.5323571166992187, 0.5319556884765625, 0.53264794921875, 0.5321123657226563, 
0.5325609130859374]",tokens/s,1.8528982016001712,,,,,,,, -4bit-awq-gemm-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,r,r,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 304, in hf_raise_for_status - response.raise_for_status() - File ""/usr/local/lib/python3.10/dist-packages/requests/models.py"", line 1024, in raise_for_status - raise HTTPError(http_error_msg, response=self) -requests.exceptions.HTTPError: 404 Client Error: Not Found for url: https://huggingface.co/r/resolve/main/config.json - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1221, in hf_hub_download - return _hf_hub_download_to_cache_dir( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1325, in _hf_hub_download_to_cache_dir - _raise_on_head_call_error(head_call_error, force_download, local_files_only) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1823, in _raise_on_head_call_error - raise head_call_error - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1722, in _get_metadata_or_catch_error - metadata = get_hf_file_metadata(url=url, proxies=proxies, timeout=etag_timeout, headers=headers) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1645, in get_hf_file_metadata - r = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 372, in _request_wrapper - response = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 396, in _request_wrapper - hf_raise_for_status(response) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status - raise RepositoryNotFoundError(message, response) from e 
-huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-669490f6-54ff6a371973c24632aecd5c;051c96c8-4369-4d30-b801-0f3cd954464d) - -Repository Not Found for url: https://huggingface.co/r/resolve/main/config.json. -Please make sure you specified the correct `repo_id` and `repo_type`. -If you are trying to access a private or gated repo, make sure you are authenticated. - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 425, in cached_file - raise EnvironmentError( -OSError: r is not a local folder and is not a valid model identifier listed on 'https://huggingface.co/models' -If this is a private repository, make sure to pass a token having permission to this repo either by logging in with `huggingface-cli login` or by passing `token=` - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemm-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,google/recurrentgemma-7b,google/recurrentgemma-7b,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File 
""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 304, in hf_raise_for_status - response.raise_for_status() - File ""/usr/local/lib/python3.10/dist-packages/requests/models.py"", line 1024, in raise_for_status - raise HTTPError(http_error_msg, response=self) -requests.exceptions.HTTPError: 404 Client Error: Not Found for url: https://huggingface.co/google/recurrentgemma-7b/resolve/main/config.json - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1221, in hf_hub_download - return _hf_hub_download_to_cache_dir( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1325, in _hf_hub_download_to_cache_dir - _raise_on_head_call_error(head_call_error, force_download, local_files_only) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1823, in _raise_on_head_call_error - raise head_call_error - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1722, in _get_metadata_or_catch_error - metadata = get_hf_file_metadata(url=url, proxies=proxies, timeout=etag_timeout, headers=headers) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1645, in get_hf_file_metadata - r = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 372, in _request_wrapper - response = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 396, in _request_wrapper - hf_raise_for_status(response) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status - raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-66948216-2de7a4964a6a23231ae7f83f;92c8aeb9-9cc7-41d1-b668-691f8b6b77cc) - -Repository Not Found for url: https://huggingface.co/google/recurrentgemma-7b/resolve/main/config.json. -Please make sure you specified the correct `repo_id` and `repo_type`. -If you are trying to access a private or gated repo, make sure you are authenticated. 
- -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 425, in cached_file - raise EnvironmentError( -OSError: google/recurrentgemma-7b is not a local folder and is not a valid model identifier listed on 'https://huggingface.co/models' -If this is a private repository, make sure to pass a token having permission to this repo either by logging in with `huggingface-cli login` or by passing `token=` - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemm-eager,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,huggyllama/llama-13b,huggyllama/llama-13b,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.0,,0.30.1,,,,1.19.2,,,,0.11.1,,,,,,,,,,,,,,,,,,,,,,,,,,,MB,3015.59808,9299.296256,0.0,8652.849152,8210.185216,s,10,11.00354541015625,1.100354541015625,0.0021995629105345756,1.10037060546875,1.1026337646484374,1.103404345703125,1.104020810546875,"[1.1041749267578125, 1.1024625244140625, 1.0971700439453125, 1.0986126708984374, 1.0993076171875, 1.097432861328125, 1.099380126953125, 1.101361083984375, 1.1021866455078124, 1.1014569091796875]",tokens/s,232.65228656548507,kWh,1.2965490321318309e-05,7.10465502763327e-06,5.986921456200278e-05,7.993935991095436e-05,tokens/kWh,3202427.443566751,MB,3016.822784,9299.296256,0.0,8652.849152,8503.104,s,10,641.11232421875,64.11123242187502,0.005190298699693196,64.11128124999999,64.116784765625,64.1185056640625,64.11988238281249,"[64.1162265625, 64.11091796875, 64.10715234375, 64.11164453125, 64.1140703125, 64.10447265625, 64.1202265625, 64.106796875, 64.1044140625, 
64.11640234375]",tokens/s,0.9826671180712498,kWh,0.0007569437074826825,0.00041487128047605436,0.0034860815944185997,0.004657896582377336,tokens/kWh,13525.418369818235,,s,629,649.9943001708987,1.0333772657724936,0.13070199113792613,1.0176091918945311,1.0182221801757814,1.0184349731445312,2.1165633984375,"[1.0176849975585938, 1.0179225463867188, 1.0177464599609376, 1.0181263427734375, 1.01766552734375, 1.0177433471679687, 1.0174985961914063, 1.0180587768554688, 1.0172999877929687, 1.0173552856445311, 1.017459716796875, 1.0184939575195313, 1.017143310546875, 1.0182799072265625, 1.0176931762695312, 1.0179122924804687, 1.0178191528320313, 1.0179092407226562, 1.0176112670898438, 1.0177853393554688, 1.0181171264648436, 1.0180086059570312, 1.0171443481445313, 1.0179840087890626, 1.0173839111328125, 1.0177720336914062, 1.0175538940429687, 1.0174218139648437, 1.0173317260742187, 1.0178887939453125, 1.0179092407226562, 1.0181570434570313, 1.0174976196289063, 1.0177576904296874, 1.017449462890625, 1.017692138671875, 1.0180843505859376, 1.0179747924804687, 1.0176112670898438, 1.0182471923828125, 1.0175887451171874, 1.017427978515625, 1.0171924438476563, 1.0172886962890626, 1.0171688842773436, 1.0173245239257813, 1.0170091552734375, 1.0176368408203125, 1.0171146240234374, 1.0184765625, 1.0177085571289062, 1.017554931640625, 1.0172211303710939, 1.0182522583007811, 1.017290771484375, 1.0179625244140624, 1.0176573486328124, 1.0178734130859375, 1.0176276245117188, 1.0185001220703125, 1.0183004150390624, 1.0180914916992188, 2.120390625, 1.0169886474609375, 1.0174678955078125, 1.0169763793945312, 1.017365478515625, 1.0172262573242188, 1.0176307373046876, 1.017291748046875, 1.0174207763671874, 1.017438232421875, 1.0176419677734374, 1.01682275390625, 1.0177566528320312, 1.0169026489257813, 1.0171187133789064, 1.0169517822265626, 1.0179747924804687, 1.0175006713867187, 1.0174310302734375, 1.0172119140625, 1.018013671875, 1.0172262573242188, 1.0173501586914062, 1.0170921020507813, 1.0172692260742187, 1.0172661743164062, 1.01758154296875, 1.0180413208007812, 1.017764892578125, 1.0175068359375, 1.0178314208984376, 1.0169129028320312, 1.0174238891601564, 1.01768701171875, 1.0177402954101562, 1.0183987426757812, 1.01777099609375, 1.0177843017578125, 1.0177607421875, 1.0173163452148437, 1.0173081665039063, 1.017702392578125, 1.0177484741210938, 1.0175764770507814, 1.01732763671875, 1.018228759765625, 1.018945556640625, 1.0175508422851562, 1.0177587280273437, 1.0177536010742188, 1.0183505859375, 1.0178427124023437, 1.01798095703125, 1.0178457641601562, 1.0182246704101563, 1.0184437866210938, 1.0184017944335937, 1.0174044189453124, 1.01778125, 1.0175907592773437, 1.0179645385742186, 1.0172713012695314, 1.0177116088867189, 2.116588623046875, 1.0172682495117187, 1.017439208984375, 1.0171781005859375, 1.0176378784179687, 1.0172579956054688, 1.0172876586914064, 1.0171351318359374, 1.017439208984375, 1.0174095458984376, 1.0178488159179688, 1.017796630859375, 1.0178846435546876, 1.0169200439453125, 1.0174443359375, 1.0173378295898436, 1.0173532104492187, 1.0172938232421875, 1.0175784912109376, 1.0172467041015625, 1.0177372436523437, 1.01789697265625, 1.0177136840820313, 1.0170101928710937, 1.01732763671875, 1.0172815551757812, 1.0176215209960937, 1.0174166870117187, 1.0176153564453125, 1.017275390625, 1.0179368896484375, 1.0172160034179687, 1.0177505493164063, 1.0171678466796874, 1.01737060546875, 1.01718017578125, 1.0172098388671875, 1.0171494140625, 1.0173306884765625, 1.0171054077148438, 1.0177413330078124, 
1.017786376953125, 1.0182041625976563, 1.0170777587890625, 1.0175405883789062, 1.0175836181640625, 1.0175068359375, 1.017169921875, 1.0173778076171875, 1.0175529174804687, 1.0178037719726563, 1.0177454223632814, 1.0187315063476563, 1.01823486328125, 1.0180054931640625, 1.0178140258789063, 1.0181683349609374, 1.017670654296875, 1.0181611328125, 1.0182564086914063, 1.0183587646484376, 1.0175396118164062, 1.0181222534179688, 2.11649853515625, 1.0170214233398438, 1.0172620849609375, 1.0174719848632812, 1.0172548828125, 1.0174484252929688, 1.017628662109375, 1.0173849487304687, 1.0173992919921875, 1.0177689819335938, 1.0176215209960937, 1.01728564453125, 1.0174402465820314, 1.0171627807617187, 1.0172057495117188, 1.0171904296875, 1.0177679443359375, 1.0173214721679686, 1.0170664672851562, 1.0171207885742188, 1.0171893920898438, 1.0169364624023438, 1.0172098388671875, 1.0170009765625, 1.0171842651367187, 1.01760205078125, 1.0179368896484375, 1.0174985961914063, 1.0172743530273438, 1.0170787963867187, 1.0172272338867188, 1.0170316772460937, 1.0173040771484374, 1.0175529174804687, 1.0179287109375, 1.0178242797851562, 1.0183321533203125, 1.01816015625, 1.0178682861328125, 1.0174054565429687, 1.0174771118164063, 1.0178682861328125, 1.0177188110351563, 1.017955322265625, 1.0186710815429687, 1.0180361938476563, 1.018166259765625, 1.017849853515625, 1.0181478271484374, 1.0177095947265624, 1.017891845703125, 1.0177259521484374, 1.0180966186523437, 1.0177996826171876, 1.0185390014648437, 1.0180700073242188, 1.0183905029296876, 1.0182389526367188, 1.0185400390625, 1.0179686279296876, 1.0179194946289063, 1.0175518798828125, 1.0181058349609375, 2.11787255859375, 1.017354248046875, 1.0174566650390624, 1.017512939453125, 1.0176153564453125, 1.0173112182617188, 1.0177638549804688, 1.0170715942382813, 1.017206787109375, 1.0171371459960938, 1.0176399536132812, 1.0181683349609374, 1.0179246215820312, 1.017238525390625, 1.0177177734375, 1.01699072265625, 1.01747509765625, 1.0175672607421875, 1.0181283569335937, 1.01778125, 1.01810791015625, 1.018156005859375, 1.018429443359375, 1.0175344848632812, 1.0176460571289063, 1.0172333984375, 1.0177321166992188, 1.01758154296875, 1.0172559204101563, 1.0175887451171874, 1.0181652221679687, 1.0181017456054688, 1.0174576416015626, 1.0177464599609376, 1.017849853515625, 1.0172507934570312, 1.0176378784179687, 1.0174627685546875, 1.0175211791992187, 1.017702392578125, 1.0177321166992188, 1.01766552734375, 1.0176245727539062, 1.017238525390625, 1.0175139770507813, 1.0173101806640625, 1.0175518798828125, 1.01707568359375, 1.0175723266601562, 1.0176614379882813, 1.018265625, 1.0177587280273437, 1.0177955932617186, 1.017280517578125, 1.0177669067382813, 1.01758056640625, 1.0177003784179688, 1.0202439575195312, 1.0180403442382813, 1.0180946044921875, 1.0176676025390625, 1.0172211303710939, 1.0179778442382812, 2.115203125, 1.0167890014648437, 1.0172354736328124, 1.0170439453125, 1.0171873168945313, 1.0175703125, 1.0176470947265626, 1.0175744018554687, 1.0177515258789063, 1.0173716430664062, 1.0174197998046874, 1.0172938232421875, 1.0174003295898437, 1.0167725830078125, 1.0175344848632812, 1.0169354248046876, 1.017470947265625, 1.017322509765625, 1.017417724609375, 1.0173368530273437, 1.0175313720703125, 1.0170009765625, 1.0170460205078125, 1.0174044189453124, 1.017523193359375, 1.0177791748046876, 1.0174197998046874, 1.0170818481445312, 1.0176419677734374, 1.0173101806640625, 1.0176409301757812, 1.0174095458984376, 1.0175293579101563, 1.0173173828125, 1.0182144165039062, 
1.0182215576171876, 1.0176747436523437, 1.01743310546875, 1.0186895141601562, 1.0174781494140626, 1.0176266479492186, 1.0174013671875, 1.0175191040039062, 1.0173880615234374, 1.017828369140625, 1.0177105712890624, 1.0176378784179687, 1.0176849975585938, 1.017650146484375, 1.0173193969726562, 1.0175958862304688, 1.017544677734375, 1.01745458984375, 1.0172119140625, 1.017970703125, 1.0179348754882813, 1.017543701171875, 1.0179287109375, 1.0183065795898438, 1.017802734375, 1.0182492065429687, 1.0176123046875, 1.0174044189453124, 2.1172890625, 1.0174003295898437, 1.0177330932617188, 1.0177269897460937, 1.0172897338867188, 1.0172108764648438, 1.0175191040039062, 1.0172661743164062, 1.0171361083984376, 1.0175949096679688, 1.0182318115234374, 1.0174924926757813, 1.0179317626953126, 1.0172047119140626, 1.017650146484375, 1.017218017578125, 1.0178754272460937, 1.017364501953125, 1.017511962890625, 1.0177156982421875, 1.0180003662109376, 1.0175949096679688, 1.01768603515625, 1.0176327514648438, 1.0180106201171875, 1.0180044555664063, 1.0175570068359374, 1.0175354614257812, 1.0178191528320313, 1.0182748413085938, 1.0180044555664063, 1.0177034301757812, 1.0174884033203124, 1.0172713012695314, 1.0173092041015626, 1.01715966796875, 1.0175641479492188, 1.0176747436523437, 1.0178734130859375, 1.0186076049804687, 1.0178191528320313, 1.017248779296875, 1.0178734130859375, 1.0178088989257812, 1.0177566528320312, 1.0173737182617189, 1.0184867553710937, 1.0182072143554688, 1.018197998046875, 1.0183782348632813, 1.0181672973632812, 1.0175877075195312, 1.0179911499023437, 1.0176522216796875, 1.0181212158203126, 1.017575439453125, 1.0183485717773437, 1.01770751953125, 1.0183301391601562, 1.0184693603515624, 1.0186875, 1.0184386596679686, 1.0182625122070312, 2.120072265625, 1.017439208984375, 1.0174781494140626, 1.0172333984375, 1.0177269897460937, 1.0175928344726564, 1.0170654907226562, 1.0174505004882812, 1.0172559204101563, 1.0168995971679688, 1.0167019653320313, 1.0168186645507813, 1.0172682495117187, 1.0168565673828125, 1.0178058471679687, 1.0173900756835939, 1.0174361572265624, 1.0177945556640624, 1.0173880615234374, 1.0177044677734375, 1.0172129516601562, 1.0170511474609376, 1.0175641479492188, 1.0171637573242187, 1.0182041625976563, 1.0174515380859375, 1.0173992919921875, 1.017302001953125, 1.01718017578125, 1.0170521850585938, 1.017697265625, 1.0175518798828125, 1.0177699584960938, 1.0171688842773436, 1.0180577392578125, 1.0177750854492187, 1.0180413208007812, 1.0171555786132813, 1.0174617309570313, 1.0173552856445311, 1.0172088623046875, 1.0174453735351563, 1.0177587280273437, 1.0179358520507813, 1.017807861328125, 1.0180464477539062, 1.01760205078125, 1.01793994140625, 1.01802392578125, 1.0177402954101562, 1.01783349609375, 1.01783447265625, 1.0175006713867187, 1.017565185546875, 1.0181806030273437, 1.01722314453125, 1.0179891357421875, 1.0179143676757811, 1.0177699584960938, 1.018265625, 1.018239990234375, 1.0177269897460937, 1.01764404296875, 2.12052685546875, 1.0178191528320313, 1.0170664672851562, 1.0168914184570312, 1.016933349609375, 1.0168248291015625, 1.0170194091796876, 1.0169968872070312, 1.0170706176757813, 1.01739111328125, 1.0174085083007813, 1.0172446899414063, 1.01743408203125, 1.0172498168945312, 1.0173173828125, 1.0169630737304687, 1.0172764282226563, 1.017670654296875, 1.0172395629882813, 1.0175570068359374, 1.01774951171875, 1.0171514892578124, 1.0176829223632813, 1.0178232421875, 1.0177474365234376, 1.0177638549804688, 1.0182195434570311, 1.0177802124023438, 1.01798193359375, 
1.0181058349609375, 1.0178447265625, 1.0170623779296875, 1.017565185546875, 1.01713818359375, 1.017654296875, 1.0177515258789063, 1.0174115600585938, 1.0176266479492186, 1.01768603515625, 1.0180618286132812, 1.0176409301757812, 1.01743408203125, 1.0178948974609374, 1.0173562622070313, 1.0175979614257813, 1.0175078125, 1.0172713012695314, 1.0172344360351562, 1.0179164428710938, 1.0178744506835937, 1.0179573974609375, 1.01726513671875, 1.017871337890625, 1.0172272338867188, 1.017660400390625, 1.01722314453125, 1.0177474365234376, 1.0172640991210937, 1.0182471923828125, 1.0183259887695313, 1.0176481323242188, 1.0172948608398438, 1.0174535522460937, 2.119248779296875, 1.0171259155273438, 1.0173378295898436, 1.0171361083984376, 1.0176768188476562, 1.0173480834960937, 1.0173060913085938, 1.0175313720703125, 1.0172876586914064, 1.0169313354492187, 1.0169517822265626, 1.0170767211914062, 1.016911865234375, 1.0168883056640625, 1.0176091918945311, 1.0169405517578125, 1.0172006225585937, 1.0173839111328125, 1.0174832763671875, 1.0169149169921874, 1.0174115600585938, 1.017101318359375, 1.0171627807617187, 1.0175191040039062, 1.0177177734375, 1.0175150146484375, 1.0177362060546875, 1.0174299926757813, 1.0178037719726563, 1.0174238891601564, 1.0178887939453125, 1.0173532104492187, 1.0172507934570312, 1.01740234375, 1.0190120849609374, 1.0181201782226563, 1.018503173828125, 1.0183167724609374, 1.0181693725585939, 1.0181580810546875, 1.0182164306640624, 1.0177146606445313, 1.0178099365234374, 1.0184202270507812, 1.0194851684570312, 1.019615234375, 1.0187745361328124, 1.018102783203125, 1.0178682861328125, 1.01852978515625, 1.018186767578125, 1.017744384765625, 1.0179573974609375, 1.0179799194335937, 1.0177484741210938, 1.01732861328125, 1.0178109741210937, 1.0174668579101562, 1.017786376953125, 1.017491455078125, 1.0179041137695313, 1.0180321044921874, 1.0186588134765624]",tokens/s,0.9677007934294523,,,,,,,, -4bit-awq-gemm-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,Deci/DeciCoder-1b,Deci/DeciCoder-1b,cuda,0,42,,,True,,,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run - report = scenario.run(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run - self.run_model_loading_tracking(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, 
in run_model_loading_tracking - backend.load() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load - self.load_transformers_model() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model - self.load_transformers_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights - self.load_transformers_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained - self.pretrained_model = self.automodel_loader.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 559, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained - model = cls(config, *model_args, **model_kwargs) - File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 248, in __init__ - self.model = DeciCoderModel(config) - File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 215, in __init__ - self.layers = nn.ModuleList([DeciCoderDecoderLayer(config) for _ in range(config.num_hidden_layers)]) - File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 215, in - self.layers = nn.ModuleList([DeciCoderDecoderLayer(config) for _ in range(config.num_hidden_layers)]) - File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 181, in __init__ - self.self_attn = DeciCoderAttention(config=config) - File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 54, in __init__ - self._init_rope() - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1729, in __getattr__ - raise AttributeError(f""'{type(self).__name__}' object has no attribute '{name}'"") -AttributeError: 'DeciCoderAttention' object has no attribute '_init_rope' - -",llama,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemm-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,google/recurrentgemma-2b,google/recurrentgemma-2b,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File 
""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 304, in hf_raise_for_status - response.raise_for_status() - File ""/usr/local/lib/python3.10/dist-packages/requests/models.py"", line 1024, in raise_for_status - raise HTTPError(http_error_msg, response=self) -requests.exceptions.HTTPError: 403 Client Error: Forbidden for url: https://huggingface.co/google/recurrentgemma-2b/resolve/main/config.json - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1722, in _get_metadata_or_catch_error - metadata = get_hf_file_metadata(url=url, proxies=proxies, timeout=etag_timeout, headers=headers) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1645, in get_hf_file_metadata - r = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 372, in _request_wrapper - response = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 396, in _request_wrapper - hf_raise_for_status(response) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 367, in hf_raise_for_status - raise HfHubHTTPError(message, response=response) from e -huggingface_hub.utils._errors.HfHubHTTPError: (Request ID: Root=1-669481a9-5155662c5726c493241271e2;0705f56b-e52c-41fd-8d8e-723918dbbe14) - -403 Forbidden: Please enable access to public gated repositories in your fine-grained token settings to view this repository.. -Cannot access content at: https://huggingface.co/google/recurrentgemma-2b/resolve/main/config.json. -If you are trying to create or update content,make sure you have a token with the `write` role. - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1221, in hf_hub_download - return _hf_hub_download_to_cache_dir( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1325, in _hf_hub_download_to_cache_dir - _raise_on_head_call_error(head_call_error, force_download, local_files_only) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1826, in _raise_on_head_call_error - raise LocalEntryNotFoundError( -huggingface_hub.utils._errors.LocalEntryNotFoundError: An error happened while trying to locate the file on the Hub and we cannot find the requested files in the local cache. Please check your connection and try again or make sure your Internet connection is on. 
- -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 445, in cached_file - raise EnvironmentError( -OSError: We couldn't connect to 'https://huggingface.co' to load this file, couldn't find it in the cached files and it looks like google/recurrentgemma-2b is not the path to a directory containing a file named config.json. -Checkout your internet connection or see how to run the library in offline mode at 'https://huggingface.co/docs/transformers/installation#offline-mode'. 
- -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemm-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,v,v,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 304, in hf_raise_for_status - response.raise_for_status() - File ""/usr/local/lib/python3.10/dist-packages/requests/models.py"", line 1024, in raise_for_status - raise HTTPError(http_error_msg, response=self) -requests.exceptions.HTTPError: 404 Client Error: Not Found for url: https://huggingface.co/v/resolve/main/config.json - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1221, in hf_hub_download - return _hf_hub_download_to_cache_dir( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1325, in _hf_hub_download_to_cache_dir - _raise_on_head_call_error(head_call_error, force_download, local_files_only) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1823, in _raise_on_head_call_error - raise head_call_error - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1722, in _get_metadata_or_catch_error - metadata = get_hf_file_metadata(url=url, proxies=proxies, timeout=etag_timeout, headers=headers) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1645, in get_hf_file_metadata - r = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 372, in _request_wrapper - response = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 396, in _request_wrapper - hf_raise_for_status(response) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status - raise RepositoryNotFoundError(message, response) from 
e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-66949463-026faf42475103197641a8df;55e13455-78b3-4a93-b731-e264c355deb4) - -Repository Not Found for url: https://huggingface.co/v/resolve/main/config.json. -Please make sure you specified the correct `repo_id` and `repo_type`. -If you are trying to access a private or gated repo, make sure you are authenticated. - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 425, in cached_file - raise EnvironmentError( -OSError: v is not a local folder and is not a valid model identifier listed on 'https://huggingface.co/models' -If this is a private repository, make sure to pass a token having permission to this repo either by logging in with `huggingface-cli login` or by passing `token=` - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemm-eager,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,EleutherAI/pythia-2.7b,EleutherAI/pythia-2.7b,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.40.2,,0.30.1,,,,1.19.2,,,,0.11.1,,,,,,,,,,,,,,,,,,,,,,,,,,,MB,2265.919488,3330.801664,0.0,2684.35456,2447.59552,s,10,2.4030111236572265,0.24030111236572266,0.0012440053970201484,0.24010930633544922,0.24119976043701172,0.24230707931518555,0.24319293441772463,"[0.2434143981933594, 0.2409536895751953, 0.23907139587402343, 0.23946762084960938, 0.2396122589111328, 0.23890797424316407, 0.23970700073242188, 0.2405301055908203, 0.24051161193847656, 
0.24083506774902344]",tokens/s,1065.330066430923,kWh,2.8237744456245787e-06,1.5472998550649208e-06,1.2583984934904673e-05,1.6955059235594172e-05,tokens/kWh,15098738.166751605,MB,2265.919488,3330.801664,0.0,2684.35456,2597.681664,s,10,139.56764257812497,13.9567642578125,0.0037662168729373563,13.95505322265625,13.962588671875,13.96323818359375,13.963757792968751,"[13.95483984375, 13.95481640625, 13.9549306640625, 13.95517578125, 13.9599765625, 13.9638876953125, 13.9624443359375, 13.953060546875, 13.952154296875, 13.9563564453125]",tokens/s,4.513940254076789,kWh,0.00016469605360239274,9.026671937368518e-05,0.0007221266028330957,0.0009770893758091737,tokens/kWh,64477.21320050864,,s,629,141.50236668396,0.22496401698562793,0.028578568615475336,0.22146662902832032,0.22197882690429688,0.222200830078125,0.46145660278320316,"[0.222501953125, 0.22133958435058593, 0.22126080322265626, 0.22115533447265626, 0.22122291564941407, 0.22115737915039063, 0.22111334228515625, 0.2211881561279297, 0.22123001098632813, 0.22108671569824218, 0.22202674865722657, 0.2213621826171875, 0.22130892944335936, 0.221412353515625, 0.22178816223144532, 0.22125669860839844, 0.22154550170898438, 0.22125155639648436, 0.22184141540527344, 0.22194586181640624, 0.22131817626953126, 0.22113481140136718, 0.22139187622070314, 0.22203596496582031, 0.22120550537109376, 0.22182707214355468, 0.22148197937011718, 0.22121574401855468, 0.22116249084472656, 0.22113999938964843, 0.22153315734863283, 0.22137344360351563, 0.22128536987304687, 0.22147686767578126, 0.22139497375488282, 0.2215167694091797, 0.22115020751953124, 0.22119526672363282, 0.2216222686767578, 0.22131507873535156, 0.2215731201171875, 0.22197042846679688, 0.2214871063232422, 0.22137344360351563, 0.22149530029296874, 0.2211829833984375, 0.22195916748046876, 0.22144102478027344, 0.22115122985839844, 0.22134988403320313, 0.2216417236328125, 0.22153318786621093, 0.2213396453857422, 0.22140824890136718, 0.2213949432373047, 0.22160794067382814, 0.22140109252929688, 0.22149017333984375, 0.22158233642578126, 0.2214686737060547, 0.2215290832519531, 0.2215116729736328, 0.4632596435546875, 0.22171238708496094, 0.221233154296875, 0.22143283081054688, 0.2215679931640625, 0.22207183837890626, 0.22199087524414063, 0.2218137664794922, 0.22159461975097655, 0.2211604766845703, 0.22112969970703125, 0.22134375, 0.22146156311035156, 0.2211706237792969, 0.2218250274658203, 0.2215004119873047, 0.2212454376220703, 0.22122291564941407, 0.22121881103515625, 0.22129049682617188, 0.22144000244140624, 0.22122496032714845, 0.22121574401855468, 0.22197247314453125, 0.2214799346923828, 0.22136012268066407, 0.22134066772460936, 0.22136422729492186, 0.22131097412109374, 0.22127923583984374, 0.22128128051757812, 0.22142156982421876, 0.22127308654785155, 0.22122598266601562, 0.22121676635742188, 0.22201344299316406, 0.2224384002685547, 0.22226841735839845, 0.22168576049804686, 0.22131507873535156, 0.2211666259765625, 0.22145021057128905, 0.2213509063720703, 0.2210908203125, 0.2211420135498047, 0.2215116729736328, 0.22274458312988282, 0.22148300170898438, 0.2212454376220703, 0.22115327453613282, 0.22183013916015626, 0.22141746520996095, 0.22183628845214845, 0.2215557098388672, 0.22127104187011717, 0.2220482635498047, 0.2213693389892578, 0.22153421020507813, 0.22129356384277343, 0.22213938903808594, 0.22136114501953125, 0.22106112670898437, 0.22137957763671876, 0.4608409729003906, 0.22114405822753908, 0.2210918426513672, 0.22119833374023437, 0.22196018981933593, 0.2215413818359375, 0.22137139892578125, 
0.2214246368408203, 0.22122802734375, 0.22138983154296876, 0.2213478698730469, 0.2211327667236328, 0.22113591003417968, 0.2211645050048828, 0.22161613464355467, 0.22142771911621092, 0.22118502807617188, 0.22131715393066406, 0.22171746826171876, 0.22127410888671875, 0.22118911743164063, 0.22119740295410156, 0.22118185424804687, 0.22219775390625, 0.22114816284179686, 0.22126797485351563, 0.22138265991210937, 0.22202163696289062, 0.221623291015625, 0.22125567626953124, 0.2214297637939453, 0.22170930480957032, 0.22169293212890626, 0.22206874084472655, 0.22159257507324218, 0.2214799346923828, 0.22199090576171876, 0.22157720947265624, 0.22174208068847656, 0.22161613464355467, 0.2212843475341797, 0.22151271057128907, 0.22158233642578126, 0.22143283081054688, 0.22131404113769532, 0.22166323852539063, 0.22163046264648437, 0.22240870666503906, 0.22161407470703126, 0.22181785583496094, 0.22200831604003907, 0.22142874145507813, 0.22144102478027344, 0.22163661193847656, 0.22149530029296874, 0.22142668151855469, 0.22134169006347656, 0.2213683166503906, 0.2215076141357422, 0.2218208923339844, 0.221412353515625, 0.2214072265625, 0.22158233642578126, 0.46169601440429686, 0.2212136993408203, 0.22105702209472655, 0.2211266632080078, 0.2211031036376953, 0.22123622131347656, 0.22111846923828124, 0.22110105895996093, 0.22219468688964844, 0.22128536987304687, 0.2216058807373047, 0.22124237060546875, 0.22226022338867188, 0.22136323547363282, 0.2218741455078125, 0.22145741271972658, 0.22111538696289063, 0.22111231994628905, 0.22129766845703125, 0.22105702209472655, 0.22113792419433595, 0.22142874145507813, 0.2212351989746094, 0.22122496032714845, 0.22117068481445312, 0.22113279724121093, 0.22115122985839844, 0.22112973022460938, 0.2212024383544922, 0.2211102752685547, 0.2212956085205078, 0.22164582824707033, 0.2216058807373047, 0.22137548828125, 0.22140524291992186, 0.22150752258300782, 0.22146560668945312, 0.22119526672363282, 0.22161715698242188, 0.22124032592773438, 0.22137651062011718, 0.221770751953125, 0.22136524963378906, 0.22113690185546875, 0.22289407348632811, 0.22195814514160156, 0.22175027465820313, 0.22173286437988282, 0.2216048583984375, 0.221559814453125, 0.22163661193847656, 0.22160383605957032, 0.22145741271972658, 0.22292991638183593, 0.22169804382324218, 0.22141644287109374, 0.22166732788085938, 0.22217625427246093, 0.2222335968017578, 0.22209843444824218, 0.22188134765625, 0.2213939208984375, 0.22153727722167968, 0.46185061645507813, 0.2211788787841797, 0.22111436462402342, 0.22129049682617188, 0.22150553894042968, 0.22128128051757812, 0.2212024383544922, 0.22144000244140624, 0.22146969604492187, 0.22136114501953125, 0.22112562561035157, 0.2214481964111328, 0.2222335968017578, 0.22209536743164063, 0.22137548828125, 0.2215034942626953, 0.22125567626953124, 0.22210150146484375, 0.22135398864746095, 0.22115122985839844, 0.22146560668945312, 0.22135501098632812, 0.2211973114013672, 0.2217144317626953, 0.22152809143066407, 0.22151266479492188, 0.2218260498046875, 0.22138777160644532, 0.22115327453613282, 0.22133042907714845, 0.2219438018798828, 0.22181068420410155, 0.22151679992675782, 0.22163967895507813, 0.22198374938964843, 0.222202880859375, 0.2216089630126953, 0.22158848571777343, 0.22197964477539062, 0.2219304962158203, 0.2216058807373047, 0.22188236999511718, 0.2217144317626953, 0.2216990966796875, 0.22185162353515625, 0.22161509704589843, 0.22144825744628907, 0.2215761260986328, 0.22152294921875, 0.22147789001464843, 0.22122189331054687, 0.22149017333984375, 0.22142054748535156, 
0.2215905303955078, 0.2212833251953125, 0.22145535278320314, 0.2214256591796875, 0.22370611572265625, 0.22159666442871093, 0.22157209777832032, 0.22164889526367187, 0.22127622985839843, 0.2215331268310547, 0.4607232055664062, 0.2214246368408203, 0.22150758361816406, 0.22157212829589842, 0.22139695739746093, 0.22143283081054688, 0.22193766784667968, 0.22149017333984375, 0.22146456909179688, 0.22140824890136718, 0.22149533081054687, 0.22149014282226562, 0.22156288146972655, 0.22154342651367187, 0.22170623779296875, 0.2220062713623047, 0.22157107543945312, 0.22163865661621093, 0.22165196228027345, 0.22187519836425781, 0.22198477172851563, 0.22197862243652344, 0.22181272888183592, 0.22163456726074218, 0.22144921875, 0.2217902069091797, 0.22170623779296875, 0.22131404113769532, 0.22122291564941407, 0.22189158630371095, 0.22152499389648436, 0.2215362548828125, 0.22123417663574219, 0.2213365783691406, 0.2216407012939453, 0.22166015625, 0.22149221801757812, 0.22126182556152343, 0.22392626953125, 0.2219438018798828, 0.22167347717285157, 0.22165606689453124, 0.22170930480957032, 0.22157626342773437, 0.2213846435546875, 0.22152499389648436, 0.2214307861328125, 0.2218014678955078, 0.22188134765625, 0.22133042907714845, 0.22165298461914062, 0.22161407470703126, 0.22176870727539064, 0.22166835021972656, 0.22149017333984375, 0.22150656127929688, 0.22176768493652343, 0.22180863952636717, 0.22148101806640624, 0.221297607421875, 0.22151373291015625, 0.222308349609375, 0.22171034240722656, 0.46220184326171876, 0.22253363037109375, 0.22171034240722656, 0.22214041137695312, 0.22114816284179686, 0.2212351989746094, 0.22153932189941405, 0.22129664611816408, 0.2211778564453125, 0.22154853820800782, 0.22134176635742187, 0.22164781188964844, 0.22147378540039062, 0.22127615356445313, 0.2212843475341797, 0.22165913391113282, 0.2215885467529297, 0.22137234497070313, 0.22157516479492187, 0.22207180786132813, 0.22170623779296875, 0.22145126342773438, 0.22118502807617188, 0.22266777038574218, 0.22152601623535156, 0.2213744659423828, 0.22128947448730468, 0.22164991760253908, 0.2218076171875, 0.22161920166015625, 0.22146969604492187, 0.22127513122558592, 0.22119424438476562, 0.22168269348144531, 0.22124441528320313, 0.22097100830078126, 0.221085693359375, 0.22113381958007813, 0.22170008850097656, 0.22122700500488282, 0.22127923583984374, 0.22145330810546876, 0.22255923461914062, 0.22140007019042968, 0.22144613647460937, 0.22161509704589843, 0.222023681640625, 0.22214041137695312, 0.22231552124023438, 0.2217574462890625, 0.22175949096679687, 0.22195610046386718, 0.22176666259765626, 0.221739013671875, 0.22195001220703126, 0.22145529174804687, 0.2223953857421875, 0.22163661193847656, 0.22160281372070312, 0.22152294921875, 0.22183322143554687, 0.2217840576171875, 0.2217410583496094, 0.46331597900390625, 0.22162431335449218, 0.22123930358886718, 0.2212843475341797, 0.2211409912109375, 0.22146662902832032, 0.22136729431152344, 0.22115020751953124, 0.22153216552734376, 0.22155059814453126, 0.22213427734375, 0.2212034606933594, 0.221412353515625, 0.22128640747070313, 0.22130995178222657, 0.22144102478027344, 0.22194688415527344, 0.22158131408691406, 0.22149632263183594, 0.22149427795410156, 0.2216222686767578, 0.22129664611816408, 0.22110617065429689, 0.22211993408203126, 0.2216058807373047, 0.22146456909179688, 0.22136012268066407, 0.22197760009765624, 0.2215905303955078, 0.22179327392578124, 0.2214993896484375, 0.22128230285644532, 0.2213017578125, 0.22127410888671875, 0.22146456909179688, 0.22121778869628905, 
0.22168576049804686, 0.22120037841796875, 0.22118406677246094, 0.22135084533691407, 0.22131301879882812, 0.22129869079589845, 0.22116659545898437, 0.2210508728027344, 0.2215045166015625, 0.22192332458496095, 0.2215854034423828, 0.22149325561523436, 0.22160076904296874, 0.2215669708251953, 0.22166323852539063, 0.22128536987304687, 0.22132333374023438, 0.22139077758789064, 0.22124549865722656, 0.22128941345214845, 0.221665283203125, 0.22161613464355467, 0.22151577758789062, 0.22136524963378906, 0.2216407012939453, 0.22126693725585939, 0.22150860595703126, 0.4624425048828125, 0.22111744689941407, 0.22104473876953126, 0.2211829833984375, 0.22187826538085936, 0.22165196228027345, 0.2212290496826172, 0.22156083679199218, 0.22200729370117187, 0.22158950805664063, 0.22172671508789063, 0.221380615234375, 0.221306884765625, 0.22107449340820312, 0.22128428649902343, 0.22132121276855468, 0.221306884765625, 0.22197042846679688, 0.22143589782714843, 0.2213027801513672, 0.22142771911621092, 0.22136524963378906, 0.22115225219726561, 0.22165811157226561, 0.22134988403320313, 0.22114303588867187, 0.22162124633789063, 0.22177484130859376, 0.22148812866210937, 0.2213570556640625, 0.22117170715332032, 0.22172569274902343, 0.221444091796875, 0.22128640747070313, 0.22114816284179686, 0.22123423767089845, 0.22163040161132813, 0.22124134826660155, 0.22113900756835939, 0.22141433715820313, 0.22133351135253906, 0.22119833374023437, 0.22116659545898437, 0.2211727294921875, 0.2214256591796875, 0.22142771911621092, 0.22107034301757814, 0.2211973114013672, 0.22134579467773438, 0.22134988403320313, 0.2222950439453125, 0.22165298461914062, 0.22142361450195314, 0.22213938903808594, 0.22171749877929686, 0.2223206329345703, 0.2216816711425781, 0.22124032592773438, 0.22122700500488282, 0.22170623779296875, 0.22179840087890626, 0.22177587890625, 0.2214297637939453, 0.4629862365722656, 0.22168269348144531, 0.22135910034179687, 0.22179840087890626, 0.22143487548828125, 0.2214686737060547, 0.22133042907714845, 0.22189260864257812, 0.22169088745117188, 0.22153421020507813, 0.22115327453613282, 0.22121165466308593, 0.22176153564453124, 0.221380615234375, 0.22127308654785155, 0.22127206420898438, 0.2213959655761719, 0.22139085388183594, 0.22160592651367186, 0.2212945556640625, 0.22128128051757812, 0.2216611785888672, 0.22154751586914062, 0.22151373291015625, 0.2215004119873047, 0.22142361450195314, 0.22282957458496094, 0.22140313720703125, 0.22110617065429689, 0.22118502807617188, 0.22146456909179688, 0.2219325408935547, 0.22170828247070312, 0.22170008850097656, 0.22203187561035156, 0.22157005310058595, 0.22241897583007814, 0.22136521911621093, 0.22132940673828125, 0.22154444885253907, 0.22126284790039064, 0.22154444885253907, 0.22126080322265626, 0.22116966247558595, 0.22158335876464844, 0.22131199645996094, 0.2215188446044922, 0.22126797485351563, 0.22128128051757812, 0.22140518188476563, 0.22145126342773438, 0.221559814453125, 0.22204620361328126, 0.22181068420410155, 0.22146456909179688, 0.22160794067382814, 0.22161715698242188, 0.22150553894042968, 0.22128640747070313, 0.22156185913085938, 0.22152806091308594, 0.22130995178222657, 0.2214297637939453]",tokens/s,4.445155333725598,,,,,,,, 
-4bit-awq-gemm-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,Qwen/Qwen-7B,Qwen/Qwen-7B,cuda,0,42,,,True,,,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run - report = scenario.run(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run - self.run_model_loading_tracking(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking - backend.load() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load - self.load_transformers_model() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model - self.load_transformers_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights - self.load_transformers_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained - self.pretrained_model = self.automodel_loader.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 551, in from_pretrained - model_class = get_class_from_dynamic_module( - File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 502, in get_class_from_dynamic_module - final_module = get_cached_module_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 327, in get_cached_module_file - modules_needed = check_imports(resolved_module_file) - File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 182, in check_imports - raise ImportError( -ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. 
Run `pip install transformers_stream_generator` - -",qwen,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemm-eager,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,microsoft/rho-math-1b-v0.1,microsoft/rho-math-1b-v0.1,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.0,,0.30.1,,,,1.19.2,,,,0.11.1,,,,,,,,,,,,,,,,,,,,,,,,,,,MB,1441.001472,1646.788608,0.0,1000.341504,901.382144,s,10,1.2824376068115233,0.12824376068115234,0.0013381948349874154,0.12786808013916018,0.12959590759277345,0.1305943862915039,0.13139316925048827,"[0.13159286499023437, 0.12686815643310548, 0.1274053421020508, 0.1281833953857422, 0.12710332489013673, 0.12755276489257814, 0.12878483581542968, 0.12818998718261718, 0.1273829116821289, 0.1293740234375]",tokens/s,1996.1984788989712,kWh,1.5084334378001057e-06,8.263029694510034e-07,5.418130213543536e-06,7.752866620794645e-06,tokens/kWh,33020044.394077398,MB,1441.001472,1646.788608,0.0,1000.341504,932.76416,s,10,76.77896875,7.677896875,0.023249628538630225,7.67915625,7.703863232421875,7.706249243164062,7.708158051757812,"[7.64836474609375, 7.62882421875, 7.67977490234375, 7.69081591796875, 7.67853759765625, 7.7033330078125, 7.70863525390625, 7.67114794921875, 7.67421923828125, 7.69531591796875]",tokens/s,8.205371995179345,kWh,9.095814451596237e-05,4.985198124346679e-05,0.0003132890969742542,0.00045409922273368346,tokens/kWh,138736.19871168057,,s,629,77.8087404022218,0.12370228998763382,0.015267564948821125,0.12151500701904297,0.12351795043945313,0.12392367401123047,0.24871362792968751,"[0.12091289520263672, 0.12134502410888671, 0.1254635543823242, 0.12273868560791015, 0.12172492980957031, 0.12116480255126953, 0.12129894256591797, 0.12113715362548828, 0.12124364471435548, 0.12089241790771485, 0.12106547546386719, 0.12196761322021485, 0.12150886535644531, 0.12106854248046875, 0.12139520263671875, 0.12140544128417968, 0.12128768157958984, 0.12113919830322266, 0.12091187286376953, 0.12113407897949219, 0.1221427230834961, 0.12104601287841797, 0.12148633575439453, 0.12152934265136718, 0.1209886703491211, 0.12108493041992187, 0.12132966613769532, 0.1210931167602539, 0.12157440185546875, 0.12151500701904297, 0.12138393402099609, 0.12142899322509766, 0.12107263946533203, 0.12096717071533203, 0.12105113220214844, 0.1210091552734375, 0.12110643005371094, 0.12125798034667969, 0.12106854248046875, 0.12104908752441407, 0.12095283508300782, 0.12088114929199219, 0.12084735870361328, 0.12105830383300781, 0.12114125061035157, 0.12377804565429687, 0.12163993835449219, 0.12108595275878906, 0.12111154937744141, 0.12169010925292968, 0.12106240081787109, 0.12132556915283203, 0.12277247619628906, 0.12324864196777344, 0.12130406188964844, 0.12083404541015624, 0.12118937683105468, 0.12106240081787109, 0.1208463363647461, 0.1210245132446289, 0.12098560333251954, 0.12108799743652343, 0.2477936706542969, 0.12073062133789063, 0.12093030548095703, 0.12108595275878906, 0.12114227294921875, 0.12072959899902344, 0.12111666870117188, 0.12076544189453126, 0.12109209442138671, 
0.12101427459716797, 0.12082688140869141, 0.12099072265625, 0.12134092712402343, 0.12110028839111328, 0.12076236724853516, 0.12246937561035157, 0.12178125, 0.12127129364013672, 0.12102041625976563, 0.12081561279296875, 0.12112179565429687, 0.12079513549804688, 0.12220722961425781, 0.12136243438720704, 0.12094873809814453, 0.12121804809570312, 0.12110131072998047, 0.12102553558349609, 0.12125389099121094, 0.12094258880615234, 0.12103270721435547, 0.12157030487060547, 0.12122316741943359, 0.1208832015991211, 0.12120269012451172, 0.12115455627441406, 0.12125081634521484, 0.12102861022949218, 0.1209169921875, 0.12092518615722657, 0.12124262237548829, 0.1210808334350586, 0.12104806518554688, 0.12156313323974609, 0.12109209442138671, 0.12071321868896484, 0.12101837158203126, 0.12109721374511719, 0.12089241790771485, 0.1209139175415039, 0.1210613784790039, 0.12095283508300782, 0.12115455627441406, 0.12093030548095703, 0.12107366180419922, 0.1208770523071289, 0.12091187286376953, 0.12103679656982422, 0.12085862731933594, 0.12101427459716797, 0.12090982055664062, 0.12072550201416016, 0.12163686370849609, 0.25078988647460937, 0.12208128356933594, 0.12245606231689453, 0.12228813171386718, 0.12169420623779297, 0.12159283447265624, 0.12156620788574218, 0.12150067138671874, 0.12323635101318359, 0.12156723022460937, 0.12253695678710938, 0.12134194946289062, 0.12189183807373047, 0.12104499053955078, 0.12240998077392579, 0.12145868682861329, 0.12170240020751953, 0.12159999847412109, 0.1212907485961914, 0.12119961547851563, 0.12125183868408203, 0.12093644714355468, 0.12158975982666016, 0.1212252197265625, 0.12197376251220703, 0.12266598510742187, 0.12199116516113281, 0.12252272033691407, 0.12169821166992187, 0.12251750183105468, 0.12150272369384765, 0.12135321807861328, 0.12139315032958985, 0.12348518371582032, 0.12360601806640625, 0.12340531158447265, 0.12198707580566406, 0.12240486145019532, 0.12276121520996094, 0.12286975860595703, 0.1258751983642578, 0.12238240051269532, 0.12162143707275391, 0.1215283203125, 0.12173107147216797, 0.12092723083496094, 0.12131942749023437, 0.12141260528564453, 0.12081356811523437, 0.12177101135253907, 0.12151705932617188, 0.12153241729736328, 0.12177305603027344, 0.12143001556396485, 0.1215836181640625, 0.1209692153930664, 0.12140850830078125, 0.12112588500976562, 0.1214576644897461, 0.12091085052490234, 0.12239871978759766, 0.1221212158203125, 0.12117196655273438, 0.25012428283691407, 0.12173414611816406, 0.12145049285888672, 0.12212838745117187, 0.12329779052734376, 0.12234649658203126, 0.12200857543945312, 0.12356403350830078, 0.12401152038574219, 0.12281446075439453, 0.12228300476074219, 0.12225945281982421, 0.12321791839599609, 0.1226219482421875, 0.1216880645751953, 0.12208537292480469, 0.12178636932373046, 0.122355712890625, 0.12368793487548828, 0.12342374420166016, 0.1231800308227539, 0.1235077133178711, 0.1234524154663086, 0.12143923187255859, 0.1212968978881836, 0.12111052703857422, 0.12108185577392579, 0.12117913818359374, 0.12229325103759765, 0.12225638580322265, 0.12117196655273438, 0.12148838043212891, 0.12117196655273438, 0.1209886703491211, 0.12110131072998047, 0.121565185546875, 0.12196659088134766, 0.12180480194091797, 0.1214382095336914, 0.12103884887695313, 0.1212221450805664, 0.12177510070800782, 0.1223034896850586, 0.12251136016845703, 0.12103372955322265, 0.12178125, 0.12100198364257812, 0.12146482849121094, 0.12118016052246093, 0.12082892608642579, 0.121312255859375, 0.1234698257446289, 0.12242534637451172, 0.12148735809326172, 0.12146790313720703, 
0.12168396759033204, 0.12169216156005859, 0.12237516784667969, 0.12246527862548828, 0.12407603454589844, 0.1232701416015625, 0.1230223388671875, 0.1217976303100586, 0.24860671997070313, 0.12128256225585937, 0.121275390625, 0.12294041442871094, 0.12173926544189453, 0.12173004913330078, 0.12162873840332031, 0.12132447814941406, 0.12121600341796875, 0.12143103790283204, 0.12095078277587891, 0.12180377960205079, 0.12157952117919922, 0.12169216156005859, 0.12112179565429687, 0.12152320098876954, 0.12086579132080078, 0.12098252868652344, 0.12098047637939453, 0.12137574768066406, 0.12174646759033203, 0.12155900573730469, 0.12151193237304687, 0.12159999847412109, 0.12110540771484375, 0.121670654296875, 0.12152217864990235, 0.12107469177246094, 0.12170137786865234, 0.12137165069580078, 0.12135935974121094, 0.12147097778320312, 0.12331520080566406, 0.1227540512084961, 0.1215498275756836, 0.12133171081542969, 0.12187238311767579, 0.12289638519287109, 0.12396749114990234, 0.12359884643554687, 0.12377497863769531, 0.1236858901977539, 0.12344831848144532, 0.12326502227783204, 0.12415385437011718, 0.12243865966796875, 0.12206182098388672, 0.12256665802001954, 0.12256153869628907, 0.12152934265136718, 0.12154163360595703, 0.12200857543945312, 0.12149555206298829, 0.12161023712158203, 0.12224614715576172, 0.12173312377929688, 0.1210224609375, 0.12114022064208985, 0.12110438537597656, 0.1216542739868164, 0.12151398468017578, 0.1216204833984375, 0.12237004852294922, 0.24945152282714844, 0.12142694091796875, 0.12127232360839844, 0.12121907043457031, 0.12172697448730468, 0.12396646118164062, 0.1225902099609375, 0.12338483428955078, 0.12398796844482422, 0.12242227172851562, 0.12293631744384766, 0.12390502166748046, 0.12467814636230469, 0.12454605102539062, 0.12382822418212891, 0.12355379486083984, 0.12169728088378906, 0.12186624145507813, 0.12168396759033204, 0.12192768096923828, 0.12244377899169921, 0.1223526382446289, 0.12162764739990234, 0.1217228775024414, 0.12147097778320312, 0.12159078216552735, 0.1216215057373047, 0.12143309020996093, 0.12141465759277344, 0.12173312377929688, 0.12229631805419922, 0.12157337951660156, 0.12169830322265625, 0.12177817535400391, 0.12134502410888671, 0.12178329467773437, 0.1214607391357422, 0.12167884826660157, 0.12179558563232422, 0.12148326110839844, 0.12133785247802735, 0.12141158294677734, 0.12147814178466797, 0.12156723022460937, 0.121670654296875, 0.12136140441894532, 0.12141875457763672, 0.12145868682861329, 0.12202496337890625, 0.12198502349853516, 0.12148429107666016, 0.1211678695678711, 0.12209049224853516, 0.12175667572021484, 0.12315750122070312, 0.1227315216064453, 0.1232210235595703, 0.12416713714599609, 0.12378214263916015, 0.12348108673095703, 0.12337356567382812, 0.12387942504882812, 0.12446208190917969, 0.2528962554931641, 0.12143718719482421, 0.12210176086425781, 0.12319334411621094, 0.12263116455078125, 0.12150784301757812, 0.12382720184326172, 0.12240589141845704, 0.12118323516845703, 0.12321485137939453, 0.1212907485961914, 0.12293119812011719, 0.12185600280761719, 0.12230451202392578, 0.12115968322753906, 0.12163276672363281, 0.12134092712402343, 0.12267520141601562, 0.1209169921875, 0.12166963195800781, 0.1208616943359375, 0.12096511840820312, 0.12128562927246093, 0.12440268707275391, 0.12370124816894532, 0.12368694305419922, 0.12331721496582031, 0.12332236480712891, 0.12322611236572266, 0.12365824127197265, 0.12440985870361328, 0.1228779525756836, 0.121491455078125, 0.12277964782714844, 0.12328857421875, 0.12425523376464843, 0.12316365051269532, 
0.12288307189941407, 0.12309913635253907, 0.1242460174560547, 0.1239900131225586, 0.12403404998779297, 0.12373197174072266, 0.12393062591552734, 0.1235763168334961, 0.12192972564697266, 0.12147814178466797, 0.12157234954833984, 0.12149964904785156, 0.12162457275390624, 0.12195123291015625, 0.12152934265136718, 0.12136447906494141, 0.12115865325927734, 0.1212200927734375, 0.1212938232421875, 0.12141465759277344, 0.12101734161376954, 0.12137983703613281, 0.12099174499511718, 0.12123955535888672, 0.12125389099121094, 0.12133273315429688, 0.24995942687988282, 0.12144742584228516, 0.12333875274658203, 0.12174336242675782, 0.12292915344238281, 0.12289024353027343, 0.12305101013183593, 0.12317286682128906, 0.12351487731933594, 0.12141567993164062, 0.12154163360595703, 0.12127436828613282, 0.12144435119628906, 0.12115968322753906, 0.12119142150878906, 0.121849853515625, 0.12192972564697266, 0.12124877166748046, 0.12119551849365234, 0.12158566284179688, 0.12136243438720704, 0.1219583969116211, 0.12119245147705078, 0.12171263885498047, 0.12150169372558593, 0.12132761383056641, 0.12123442840576172, 0.12193587493896485, 0.12132659149169922, 0.12116377258300781, 0.12158566284179688, 0.12126924896240235, 0.12131635284423828, 0.12112076568603515, 0.12137574768066406, 0.12258201599121094, 0.12156825256347656, 0.12157337951660156, 0.12132966613769532, 0.12138905334472656, 0.12101427459716797, 0.12355891418457031, 0.12150784301757812, 0.12143103790283204, 0.12133683013916016, 0.12091596984863281, 0.12126207733154297, 0.12289024353027343, 0.12268339538574219, 0.12567449951171875, 0.1217976303100586, 0.12148326110839844, 0.12115455627441406, 0.12108697509765624, 0.12136038208007813, 0.12146892547607421, 0.1212200927734375, 0.12286054229736328, 0.12166451263427734, 0.12133785247802735, 0.1213829116821289, 0.12138700866699219, 0.12124569702148437, 0.24875520324707032, 0.12118118286132812, 0.12122726440429688, 0.1210931167602539, 0.12280934143066406, 0.1244927978515625, 0.12353024291992187, 0.12354764556884766, 0.12378112030029297, 0.12381798553466797, 0.12268851470947266, 0.12156416320800781, 0.12156313323974609, 0.12137062072753907, 0.12162252807617188, 0.12152524566650391, 0.12139520263671875, 0.12147917175292969, 0.12140338897705077, 0.12144435119628906, 0.12111154937744141, 0.12134502410888671, 0.12145254516601563, 0.12136140441894532, 0.12134809875488281, 0.12144332885742187, 0.12182937622070313, 0.12134092712402343, 0.12142694091796875, 0.12099378967285156, 0.12107981109619141, 0.12137677001953125, 0.12088524627685547, 0.12261273956298828, 0.12443750762939453, 0.12207820892333984, 0.12129280090332031, 0.12143718719482421, 0.12140646362304687, 0.12147711944580078, 0.12159385681152343, 0.12174336242675782, 0.1218243179321289, 0.12172179412841796, 0.12106547546386719, 0.1214750747680664, 0.12177203369140625, 0.12269875335693359, 0.12165017700195313, 0.12121907043457031, 0.12128665924072266, 0.12140748596191406, 0.12142899322509766, 0.1210439682006836, 0.12137471771240234, 0.12203622436523437, 0.12110848236083985, 0.12105216217041016, 0.12070809936523437, 0.12304691314697265, 0.12252262115478516, 0.12272537231445313, 0.12301721954345703, 0.25320652770996094, 0.12138803100585938, 0.12191948699951172, 0.12489830780029297, 0.12382514953613281, 0.12366745758056641, 0.12364390563964844, 0.12391324615478516, 0.1236274871826172, 0.12293836975097656, 0.12138086700439453, 0.12145868682861329, 0.12145561981201172, 0.12148531341552735, 0.1216358413696289, 0.12158668518066407, 0.12107981109619141, 0.12146482849121094, 
0.12212019348144532, 0.12342272186279298, 0.12273356628417968, 0.12340838623046875, 0.1230417938232422, 0.12280012512207031, 0.121997314453125, 0.1230387191772461, 0.1221560287475586, 0.12209356689453126, 0.12225433349609376, 0.12141567993164062, 0.12371968078613281, 0.12368895721435547, 0.12325888061523438, 0.12346470642089843, 0.1220495376586914, 0.12130815887451171, 0.12168089294433594, 0.12150169372558593, 0.12148121643066406, 0.12139315032958985, 0.12125389099121094, 0.12131123352050781, 0.1211156463623047, 0.12303667449951172, 0.12199116516113281, 0.12166553497314453, 0.1215989761352539, 0.12130406188964844, 0.12145254516601563, 0.12136243438720704, 0.1214331512451172, 0.12362643432617187, 0.12173107147216797, 0.12140748596191406, 0.12129177856445313, 0.12127334594726563, 0.12132249450683594, 0.12149964904785156, 0.12110438537597656, 0.12128460693359375, 0.12160307312011719, 0.12139417266845703, 0.121133056640625]",tokens/s,8.083924720390927,,,,,,,, -4bit-awq-gemm-eager,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,01-ai/Yi-6B,,cuda,0,42,,,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.0,217063f5c507ed7cc255df7e1f64c4333a0b4dfe,4.40.2,,0.30.1,,,,1.19.2,,,,0.10.0,,,,,,,,,,,,,,,,,,,,,,,,,,,MB,1575.784448,5448.925184,0.0,4802.47808,4489.252352,s,10,5.114569244384766,0.5114569244384766,0.0014045752365005437,0.51150537109375,0.5130841125488281,0.5135362274169921,0.5138979193115234,"[0.512983642578125, 0.5139883422851562, 0.5091934204101562, 0.5101927795410156, 0.5104193725585937, 0.5101881103515625, 0.5112746887207031, 0.5121220092773437, 0.5117360534667968, 0.5124708251953125]",tokens/s,500.5309103617276,kWh,6.020330099595917e-06,3.2988774182740595e-06,2.7782869448500503e-05,3.710207696637048e-05,tokens/kWh,6899883.266158921,MB,1575.784448,5448.925184,0.0,4802.47808,4557.794816,s,10,301.333625,30.133362499999997,0.011965382718741807,30.1356015625,30.146014453125,30.1513353515625,30.1555920703125,"[30.12017578125, 30.120552734375, 30.1369375, 30.140724609375, 30.15665625, 30.137947265625, 30.123359375, 30.14483203125, 30.118173828125, 30.134265625]",tokens/s,2.0907059409649356,kWh,0.00035569889715148347,0.00019495335105611957,0.001580753417379499,0.002131405665587102,tokens/kWh,29557.958401431977,,s,629,305.4536612854004,0.48561790347440437,0.060780789003776696,0.4781598815917969,0.47936102294921873,0.4797579284667969,0.9888164648437501,"[0.47731610107421873, 0.4777103271484375, 0.4780533752441406, 0.4779223022460938, 0.47746868896484373, 0.47739285278320315, 0.4777676696777344, 0.47874050903320314, 0.47792022705078124, 0.4776212463378906, 0.47766937255859376, 0.4774625244140625, 0.4779018249511719, 0.47790591430664064, 0.47894528198242187, 0.47799398803710935, 0.47878759765625, 0.47878964233398436, 0.47852645874023436, 0.47817422485351563, 0.4778905334472656, 0.47780557250976563, 0.47809637451171877, 0.47767245483398435, 0.477765625, 0.4778649597167969, 0.4779622497558594, 0.47779531860351565, 0.47874456787109376, 0.47817214965820315, 0.4782950439453125, 0.47745944213867186, 0.47759051513671874, 0.4785008544921875, 0.47783013916015626, 
0.4780267639160156, 0.4782438354492187, 0.4785858459472656, 0.4793190307617187, 0.47905487060546875, 0.47873635864257813, 0.47829400634765623, 0.4777697143554688, 0.4778240661621094, 0.47742047119140624, 0.4780851135253906, 0.4782008361816406, 0.47801651000976564, 0.47831243896484377, 0.47876199340820313, 0.4778547058105469, 0.47859405517578124, 0.4782591857910156, 0.47776461791992186, 0.47824075317382814, 0.4776304626464844, 0.4778383483886719, 0.4776642456054688, 0.47760385131835936, 0.478160888671875, 0.47876300048828124, 0.4783943786621094, 0.9927915649414063, 0.47756494140625, 0.4774000549316406, 0.47765298461914063, 0.47747378540039065, 0.4782704772949219, 0.477907958984375, 0.4783944091796875, 0.477813720703125, 0.47758950805664063, 0.47759051513671874, 0.4776069030761719, 0.47729867553710936, 0.4773724060058594, 0.47751473999023436, 0.47745742797851565, 0.4780113525390625, 0.4786903076171875, 0.47860427856445314, 0.4783482971191406, 0.47794790649414065, 0.47776461791992186, 0.4778291320800781, 0.4779427795410156, 0.4779376525878906, 0.47769189453125, 0.47908352661132814, 0.47773797607421875, 0.47901287841796875, 0.4777134094238281, 0.4779151306152344, 0.47777587890625, 0.47748709106445314, 0.47771136474609377, 0.4781527099609375, 0.47798886108398436, 0.47750042724609376, 0.47809127807617186, 0.47905892944335937, 0.4782591857910156, 0.4781598815917969, 0.4782981262207031, 0.4776468505859375, 0.47786392211914064, 0.4776560668945313, 0.478023681640625, 0.48116427612304685, 0.4793456726074219, 0.4777492370605469, 0.47873126220703127, 0.47907122802734375, 0.4783626098632813, 0.4779949951171875, 0.47790286254882813, 0.4776908874511719, 0.478065673828125, 0.4777738342285156, 0.477770751953125, 0.47832369995117185, 0.47939175415039065, 0.47875686645507814, 0.47773284912109376, 0.478023681640625, 0.988906494140625, 0.47783013916015626, 0.4776437683105469, 0.4778649597167969, 0.4783902587890625, 0.47753521728515624, 0.4795494384765625, 0.47817214965820315, 0.4789770202636719, 0.4776386413574219, 0.47787213134765627, 0.47758950805664063, 0.47774514770507814, 0.47762841796875, 0.47790386962890624, 0.479541259765625, 0.4791797790527344, 0.47914700317382813, 0.4779294738769531, 0.4777625732421875, 0.47816497802734376, 0.4782356567382812, 0.47807180786132814, 0.4797726745605469, 0.4777687072753906, 0.4803246154785156, 0.4800747375488281, 0.4793231506347656, 0.4796539001464844, 0.48039935302734377, 0.4796252136230469, 0.4790927429199219, 0.477655029296875, 0.4791510925292969, 0.4787394409179688, 0.47877017211914064, 0.47938558959960936, 0.47959552001953126, 0.47838516235351564, 0.47773593139648435, 0.478129150390625, 0.4781404113769531, 0.4776857604980469, 0.4779018249511719, 0.47747378540039065, 0.4776847229003906, 0.4781803588867187, 0.4799620971679687, 0.4786053161621094, 0.47794790649414065, 0.4777123718261719, 0.4776683654785156, 0.4774696960449219, 0.47712460327148437, 0.4774481811523438, 0.47752908325195315, 0.47732632446289064, 0.4782438354492187, 0.47871487426757814, 0.47837286376953125, 0.4777123718261719, 0.47754238891601564, 0.4779929504394531, 0.9885849609375, 0.47834625244140627, 0.47808615112304687, 0.47836468505859375, 0.47836468505859375, 0.4776908874511719, 0.47762738037109376, 0.4779254455566406, 0.477751220703125, 0.4784117736816406, 0.47790286254882813, 0.4781741943359375, 0.4787138671875, 0.47869439697265626, 0.47841998291015625, 0.4790876159667969, 0.47936920166015623, 0.4789770202636719, 0.4778157958984375, 0.47849676513671874, 0.4780349426269531, 0.4785776672363281, 
0.4781475830078125, 0.4785581970214844, 0.4792135620117188, 0.47874969482421875, 0.4787026062011719, 0.47837899780273435, 0.47872308349609377, 0.47839334106445314, 0.4786411437988281, 0.4790169677734375, 0.47768063354492185, 0.4778465270996094, 0.4781793212890625, 0.47906610107421876, 0.47883367919921876, 0.4782213134765625, 0.4785172424316406, 0.47826739501953125, 0.478087158203125, 0.4777277526855469, 0.4778680419921875, 0.47746456909179685, 0.4802908020019531, 0.47867086791992186, 0.47896060180664063, 0.4792279052734375, 0.4781783142089844, 0.47810867309570315, 0.4786063232421875, 0.4785162353515625, 0.4785848388671875, 0.4778659973144531, 0.4776212463378906, 0.4777840576171875, 0.4788787231445312, 0.47973171997070313, 0.47971328735351565, 0.4792842102050781, 0.477655029296875, 0.47770315551757814, 0.4779049377441406, 0.9883463134765625, 0.4791357421875, 0.4782458801269531, 0.4794173583984375, 0.478166015625, 0.47800833129882814, 0.4775157775878906, 0.4780421142578125, 0.47852032470703126, 0.4791224365234375, 0.47801651000976564, 0.47799398803710935, 0.4782530517578125, 0.47967129516601564, 0.47834417724609374, 0.4779847717285156, 0.4790302734375, 0.47956378173828124, 0.4773304443359375, 0.4774420471191406, 0.4774143981933594, 0.4773447570800781, 0.47814349365234377, 0.47828582763671873, 0.479710205078125, 0.4788910217285156, 0.479056884765625, 0.47832369995117185, 0.4782438354492187, 0.4783964233398437, 0.479025146484375, 0.4786606140136719, 0.4782909545898438, 0.47821823120117185, 0.4802129821777344, 0.47790286254882813, 0.47835751342773436, 0.4782561340332031, 0.478635009765625, 0.47870156860351565, 0.4777001037597656, 0.4775802917480469, 0.47765914916992186, 0.479578125, 0.4784527282714844, 0.47922894287109374, 0.47923199462890625, 0.4788213806152344, 0.47869439697265626, 0.4787804260253906, 0.47939993286132815, 0.47935186767578125, 0.4787639770507813, 0.4786268310546875, 0.4793589782714844, 0.48041677856445314, 0.48034817504882815, 0.482777099609375, 0.4785745849609375, 0.478571533203125, 0.47958123779296874, 0.47865029907226564, 0.4787384338378906, 0.990581787109375, 0.4781793212890625, 0.4788521423339844, 0.47797039794921875, 0.47851007080078123, 0.47790286254882813, 0.4778321838378906, 0.47813427734375, 0.4789043273925781, 0.47835134887695313, 0.47826226806640626, 0.47895858764648436, 0.4790947875976562, 0.4783759460449219, 0.47830938720703126, 0.4781803588867187, 0.477991943359375, 0.47756903076171875, 0.47764480590820313, 0.47726080322265624, 0.47773797607421875, 0.47950848388671874, 0.47766015625, 0.47899853515625, 0.47830322265625, 0.4781537170410156, 0.47825204467773436, 0.4785592346191406, 0.479025146484375, 0.47944705200195314, 0.47835134887695313, 0.478445556640625, 0.47876199340820313, 0.47891659545898435, 0.4791101379394531, 0.47820391845703125, 0.4782581787109375, 0.47862374877929686, 0.4814622802734375, 0.47808819580078127, 0.47835751342773436, 0.47878964233398436, 0.4785551452636719, 0.4777082824707031, 0.4787271728515625, 0.4784322509765625, 0.47835546875, 0.4775679931640625, 0.4779346008300781, 0.47834521484375, 0.478160888671875, 0.47767962646484374, 0.47781170654296873, 0.4783585205078125, 0.4791285705566406, 0.4783697814941406, 0.47800216674804685, 0.47763250732421875, 0.4782294921875, 0.47773696899414064, 0.47801651000976564, 0.4774718017578125, 0.47802670288085936, 0.9896888427734375, 0.47829608154296877, 0.47802975463867187, 0.47756497192382813, 0.47750039672851563, 0.47811380004882814, 0.4781025390625, 0.4778936767578125, 0.47798370361328124, 
0.47788851928710935, 0.4786268310546875, 0.4782899169921875, 0.4779632568359375, 0.47777484130859377, 0.47753521728515624, 0.4779007873535156, 0.47758544921875, 0.47753726196289065, 0.47787213134765627, 0.47847628784179685, 0.4782909545898438, 0.4778014831542969, 0.47860223388671874, 0.47986483764648435, 0.47783935546875, 0.47765298461914063, 0.4779980773925781, 0.47794073486328126, 0.4787517395019531, 0.4781506652832031, 0.4780155029296875, 0.47900875854492186, 0.47818548583984377, 0.47805029296875, 0.4782233581542969, 0.47802471923828127, 0.4779346008300781, 0.47827557373046875, 0.478497802734375, 0.47849063110351564, 0.47899237060546873, 0.4786196594238281, 0.47815167236328127, 0.4799846801757813, 0.47795709228515626, 0.47788851928710935, 0.4778270568847656, 0.4778547058105469, 0.478060546875, 0.47782608032226564, 0.4778434143066406, 0.477601806640625, 0.4788572082519531, 0.47828274536132814, 0.47832986450195314, 0.4779417724609375, 0.477633544921875, 0.477907958984375, 0.478065673828125, 0.4780482482910156, 0.47808819580078127, 0.47858380126953126, 0.47811892700195313, 0.9911572265625, 0.4780707702636719, 0.47788134765625, 0.4775536499023437, 0.4774696960449219, 0.47756185913085936, 0.47758642578125, 0.4777615356445313, 0.47794790649414065, 0.4787118225097656, 0.47779736328125, 0.47749325561523437, 0.4775475158691406, 0.4777697143554688, 0.4781363220214844, 0.4779141235351563, 0.4792012939453125, 0.4778874816894531, 0.47856845092773437, 0.47768267822265625, 0.4781311950683594, 0.4785070190429688, 0.47809127807617186, 0.4776365966796875, 0.4780707702636719, 0.47816497802734376, 0.47828582763671873, 0.47777279663085936, 0.4780728454589844, 0.47835751342773436, 0.47851211547851563, 0.47822235107421873, 0.4775055236816406, 0.4781465454101563, 0.47790386962890624, 0.4779018249511719, 0.478359619140625, 0.4794295654296875, 0.4784394226074219, 0.4795248718261719, 0.4780544128417969, 0.477949951171875, 0.48094821166992185, 0.4794306640625, 0.4784814147949219, 0.4792197265625, 0.4782981262207031, 0.47831655883789065, 0.47831243896484377, 0.4784998474121094, 0.47935488891601563, 0.47914599609375, 0.48005630493164064, 0.48013311767578126, 0.47997030639648436, 0.48005426025390624, 0.4799140625, 0.4794142150878906, 0.4800153503417969, 0.4793067626953125, 0.47961505126953125, 0.47790789794921873, 0.4778670043945312, 0.9900185546875, 0.4775372924804687, 0.47779840087890624, 0.477633544921875, 0.4782950439453125, 0.47761920166015626, 0.47800421142578126, 0.478803955078125, 0.4791029663085937, 0.4782643127441406, 0.47842098999023436, 0.4776908874511719, 0.4780185546875, 0.47770932006835937, 0.4777851257324219, 0.477681640625, 0.477812744140625, 0.47979006958007814, 0.47791717529296873, 0.4780451965332031, 0.4788193359375, 0.477955078125, 0.47790591430664064, 0.47803903198242187, 0.47824697875976563, 0.4786861572265625, 0.47816192626953125, 0.477655029296875, 0.47835134887695313, 0.478919677734375, 0.47831039428710936, 0.4777205810546875, 0.4773253173828125, 0.47773284912109376, 0.4778260498046875, 0.4776069030761719, 0.47773492431640624, 0.4777461853027344, 0.478587890625, 0.47767041015625, 0.4789801330566406, 0.47915618896484374, 0.4780257263183594, 0.47762841796875, 0.4778168334960938, 0.4776714172363281, 0.47783526611328125, 0.47757107543945315, 0.4779704284667969, 0.4786677856445313, 0.47908148193359373, 0.4783206481933594, 0.4781240234375, 0.47756494140625, 0.4773294067382812, 0.47757208251953126, 0.477348876953125, 0.47757720947265625, 0.47773492431640624, 0.47816192626953125, 
0.47770932006835937, 0.4789381103515625, 0.47819366455078127, 0.9904609375, 0.47848550415039065, 0.478013427734375, 0.4775751647949219, 0.4774912109375, 0.4782847900390625, 0.4794931335449219, 0.4796549072265625, 0.47851318359375, 0.4785254211425781, 0.47859506225585935, 0.4788695068359375, 0.47816293334960935, 0.47827969360351563, 0.4785244140625, 0.4797358093261719, 0.4782438354492187, 0.4776570739746094, 0.4789125061035156, 0.47882650756835937, 0.4777082824707031, 0.47753216552734373, 0.47809637451171877, 0.47831655883789065, 0.4776488952636719, 0.4774471740722656, 0.4796170349121094, 0.47986483764648435, 0.4793456726074219, 0.479599609375, 0.4791654357910156, 0.4776488952636719, 0.4775475158691406, 0.47792538452148436, 0.4778240051269531, 0.4774819946289062, 0.4782612915039062, 0.4777738037109375, 0.4777543640136719, 0.47963134765625, 0.47828070068359374, 0.4778486022949219, 0.4777625427246094, 0.477601806640625, 0.4777185363769531, 0.47762432861328125, 0.47770932006835937, 0.4777062377929687, 0.47948800659179686, 0.4784066467285156, 0.47847015380859376, 0.4775106506347656, 0.4777420654296875, 0.47842098999023436, 0.47805233764648436, 0.47766015625, 0.47758746337890623, 0.47821926879882815, 0.4781087036132812, 0.4776806640625, 0.47913876342773437, 0.47851828002929686, 0.4783575744628906]",tokens/s,2.059232151132392,,,,,,main,False,False -4bit-awq-gemm-eager,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,EleutherAI/gpt-neo-2.7B,,cuda,0,42,,,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.0,217063f5c507ed7cc255df7e1f64c4333a0b4dfe,4.40.2,,0.30.1,,,,1.19.2,,,,0.10.0,,,,,,,,,,,,,,,,,,,,,,,,,,,MB,2236.862464,2932.342784,0.0,2285.89568,2082.706944,s,10,2.5091812438964842,0.25091812438964844,0.0016359490817563227,0.25024176025390626,0.25317278594970705,0.2535381980895996,0.25383052780151366,"[0.25309158325195313, 0.2539036102294922, 0.24987110900878906, 0.24888946533203124, 0.2497696075439453, 0.24914796447753906, 0.2504080047607422, 0.25179104614257813, 0.25223333740234377, 0.2500755157470703]",tokens/s,1020.253122897013,kWh,2.948670335578402e-06,1.6157431971317612e-06,1.3066561943753649e-05,1.763097547646381e-05,tokens/kWh,14519899.953450851,MB,2238.38208,2959.60576,0.0,2313.158656,2180.685312,s,10,143.8805810546875,14.38805810546875,0.011353783216855234,14.382828125,14.40145888671875,14.403668896484376,14.405436904296876,"[14.4009677734375, 14.40587890625, 14.39833203125, 14.3805361328125, 14.374423828125, 14.3830166015625, 14.400140625, 14.373748046875, 14.3808974609375, 14.3826396484375]",tokens/s,4.378631191102458,kWh,0.000169606630340582,9.295822156634463e-05,0.0007504743544440634,0.0010130392063509902,tokens/kWh,62189.10344736671,,s,629,145.90565167236326,0.2319644700673502,0.029875164751919897,0.22824755859375,0.22927770080566406,0.22946876831054688,0.47820606079101563,"[0.22992076110839843, 0.22815129089355468, 0.22794752502441407, 0.22817485046386718, 0.2277969970703125, 0.22800997924804686, 0.22908108520507814, 0.22838169860839844, 0.2285506591796875, 0.2286049346923828, 0.22830694580078126, 0.22801516723632811, 0.22815020751953125, 
0.22767308044433593, 0.22776422119140624, 0.2276741180419922, 0.22829362487792967, 0.22767205810546876, 0.22779493713378907, 0.2277058563232422, 0.22765670776367186, 0.22813594055175782, 0.22820147705078125, 0.2283335723876953, 0.2275809326171875, 0.22812364196777343, 0.2283008575439453, 0.22861715698242188, 0.2281564178466797, 0.22812979125976562, 0.2289090576171875, 0.2288476104736328, 0.22834585571289062, 0.22857522583007814, 0.2290155487060547, 0.22790553283691406, 0.22831922912597657, 0.22821784973144532, 0.22858444213867188, 0.2286612548828125, 0.22839808654785157, 0.22902169799804686, 0.22916812133789063, 0.22946917724609375, 0.22901350402832032, 0.22913536071777343, 0.22909747314453124, 0.229138427734375, 0.22936679077148436, 0.22892851257324218, 0.22936268615722658, 0.22932582092285156, 0.22920909118652344, 0.22904730224609374, 0.2291425323486328, 0.22898892211914063, 0.22877183532714843, 0.2286202850341797, 0.229064697265625, 0.22876876831054688, 0.2292131805419922, 0.22902476501464844, 0.48329931640625, 0.2282854461669922, 0.228822021484375, 0.2279741516113281, 0.22834687805175782, 0.22893157958984375, 0.22888038635253907, 0.22889677429199218, 0.22806629943847656, 0.22842678833007812, 0.22791778564453125, 0.22817791748046876, 0.22819737243652344, 0.228274169921875, 0.22817791748046876, 0.2278656005859375, 0.22779391479492186, 0.22778982543945311, 0.22769561767578125, 0.22855783081054687, 0.2280273895263672, 0.22766490173339843, 0.2277734375, 0.2279331817626953, 0.22785125732421874, 0.2278594512939453, 0.227852294921875, 0.2281492462158203, 0.2276505584716797, 0.22739762878417968, 0.22808883666992188, 0.22794650268554686, 0.22817485046386718, 0.228094970703125, 0.22861415100097657, 0.22928793334960937, 0.22927769470214843, 0.22896333312988282, 0.22915072631835937, 0.22908006286621094, 0.22935040283203126, 0.22938829040527345, 0.22915583801269532, 0.22902578735351561, 0.22928282165527344, 0.22933914184570312, 0.23262924194335938, 0.22968319702148438, 0.22875648498535156, 0.22789529418945312, 0.2278666229248047, 0.2295767059326172, 0.22941900634765625, 0.22929306030273439, 0.22928282165527344, 0.22947532653808594, 0.2292490234375, 0.22923365783691407, 0.22946815490722655, 0.22925106811523438, 0.22942207336425782, 0.22925315856933592, 0.2293155517578125, 0.4786954345703125, 0.22789324951171874, 0.22778778076171874, 0.22853427124023437, 0.2276505584716797, 0.22818611145019532, 0.22808677673339844, 0.22816160583496095, 0.22942201232910156, 0.22774887084960938, 0.22915277099609374, 0.2292162628173828, 0.22927772521972656, 0.22797821044921876, 0.2286940155029297, 0.22868377685546876, 0.22756658935546875, 0.22776832580566406, 0.22760652160644532, 0.22787379455566406, 0.2284400634765625, 0.22826905822753907, 0.22876876831054688, 0.22959616088867188, 0.22788198852539063, 0.22754815673828124, 0.2290493469238281, 0.22905958557128905, 0.22827314758300782, 0.22785842895507813, 0.22869094848632812, 0.2282854461669922, 0.22885580444335937, 0.22893157958984375, 0.2289971160888672, 0.22875340270996095, 0.22852301025390626, 0.22836224365234375, 0.22938829040527345, 0.2279147491455078, 0.22865306091308593, 0.22844825744628905, 0.22864588928222657, 0.22837759399414062, 0.2290636749267578, 0.2289213409423828, 0.22904115295410157, 0.23109939575195312, 0.22880665588378907, 0.22881996154785156, 0.2284707794189453, 0.229232666015625, 0.22840829467773438, 0.2283520050048828, 0.23005081176757813, 0.22963821411132812, 0.22803654479980467, 0.22828851318359375, 0.22792909240722656, 0.22795878601074218, 
0.22777548217773438, 0.22785331726074218, 0.22797004699707032, 0.47824075317382814, 0.22801408386230468, 0.22845132446289063, 0.22803660583496094, 0.22865408325195313, 0.22791679382324218, 0.22993101501464844, 0.22821376037597657, 0.22777548217773438, 0.2276874237060547, 0.22872781372070314, 0.22810418701171875, 0.22863462829589845, 0.2276290588378906, 0.22807244873046875, 0.2276546630859375, 0.22767514038085937, 0.22787481689453126, 0.22811648559570313, 0.22791270446777342, 0.22781336975097657, 0.22764236450195313, 0.2276822967529297, 0.22771916198730469, 0.22807347106933593, 0.22767718505859375, 0.22773248291015624, 0.2274396209716797, 0.2276177978515625, 0.2276126708984375, 0.22771405029296876, 0.2276884460449219, 0.22757273864746094, 0.22881689453125, 0.22877593994140624, 0.22816461181640624, 0.22797311401367187, 0.22859878540039064, 0.22825677490234375, 0.22843699645996093, 0.22875750732421876, 0.22935040283203126, 0.22820352172851563, 0.22832333374023436, 0.23120999145507812, 0.22912101745605468, 0.22907904052734376, 0.22970060729980468, 0.22873907470703125, 0.22823014831542968, 0.22959414672851564, 0.22853219604492186, 0.22784101867675782, 0.22805708312988282, 0.22899507141113282, 0.2288885803222656, 0.227852294921875, 0.2276669464111328, 0.2279219207763672, 0.22771916198730469, 0.22784921264648436, 0.2281492462158203, 0.22833561706542968, 0.47811685180664065, 0.22757478332519532, 0.2276433868408203, 0.227662841796875, 0.22771510314941407, 0.22770684814453124, 0.22767001342773438, 0.2279884796142578, 0.22770176696777344, 0.2278707275390625, 0.22784512329101564, 0.22757171630859374, 0.22767922973632812, 0.22853836059570312, 0.22817181396484376, 0.22842568969726562, 0.22801408386230468, 0.2303057861328125, 0.22843084716796874, 0.22846669006347656, 0.22829670715332032, 0.227631103515625, 0.22766796875, 0.22799462890625, 0.22806732177734376, 0.22791270446777342, 0.22781747436523436, 0.22777548217773438, 0.22781234741210937, 0.22849331665039063, 0.22844009399414061, 0.22815536499023437, 0.22805509948730468, 0.22854751586914063, 0.22936679077148436, 0.2276741180419922, 0.22772940063476563, 0.22759014892578125, 0.22804173278808593, 0.22774169921875, 0.22859373474121095, 0.22844819641113281, 0.22795266723632812, 0.22765052795410157, 0.22817893981933593, 0.22788096618652343, 0.2275768280029297, 0.227915771484375, 0.22789631652832032, 0.22774783325195314, 0.22783692932128907, 0.22787890625, 0.22841958618164063, 0.22780006408691406, 0.22799154663085938, 0.2284862060546875, 0.22839187622070312, 0.23194111633300782, 0.22911180114746094, 0.22951731872558595, 0.22850355529785157, 0.22849740600585938, 0.22812570190429687, 0.47752294921875, 0.2276556854248047, 0.22764544677734375, 0.22774169921875, 0.22828953552246095, 0.22794444274902342, 0.22778880310058594, 0.2285332489013672, 0.22908927917480468, 0.2287073211669922, 0.2291087341308594, 0.22870118713378906, 0.22815948486328125, 0.22893466186523437, 0.2281328582763672, 0.22798233032226561, 0.22857215881347656, 0.22788812255859375, 0.2276177978515625, 0.22820658874511718, 0.22790963745117188, 0.2278912353515625, 0.22779286193847656, 0.22789631652832032, 0.2286622772216797, 0.2280079345703125, 0.22793728637695312, 0.22807142639160155, 0.22804888916015625, 0.2280335388183594, 0.22812159729003906, 0.2278830108642578, 0.2276884460449219, 0.22792807006835938, 0.22778675842285157, 0.22810009765625, 0.22793215942382813, 0.22936473083496095, 0.23167283630371094, 0.22877081298828125, 0.22863258361816408, 0.22877593994140624, 0.22783282470703126, 
0.2277181396484375, 0.22791270446777342, 0.22783183288574219, 0.22780720520019532, 0.22802841186523437, 0.2279536590576172, 0.2279619140625, 0.22795872497558595, 0.22792909240722656, 0.22810009765625, 0.22938214111328126, 0.22828031921386718, 0.228242431640625, 0.2287636413574219, 0.22942311096191406, 0.22908114624023437, 0.22857618713378905, 0.22855372619628905, 0.22922035217285155, 0.2285117492675781, 0.4789893188476563, 0.22813900756835936, 0.22828134155273438, 0.22873292541503906, 0.2284707794189453, 0.22896640014648437, 0.22858956909179687, 0.2285096893310547, 0.22846669006347656, 0.22824858093261718, 0.2286878662109375, 0.2290083770751953, 0.22912716674804687, 0.22828851318359375, 0.22853836059570312, 0.228384765625, 0.22901145935058595, 0.2284523468017578, 0.22929306030273439, 0.22884352111816406, 0.2288916473388672, 0.22799874877929688, 0.22763005065917968, 0.2294476776123047, 0.22789324951171874, 0.22849740600585938, 0.2280437774658203, 0.22870835876464843, 0.228890625, 0.228600830078125, 0.22893466186523437, 0.22901248168945312, 0.22887936401367187, 0.22905445861816406, 0.2291025848388672, 0.22861824035644532, 0.2289040069580078, 0.22949267578125, 0.22833255004882813, 0.22794650268554686, 0.2277928924560547, 0.2285117492675781, 0.2278656005859375, 0.22826905822753907, 0.22805914306640626, 0.22932992553710937, 0.2291025848388672, 0.2288046112060547, 0.22843597412109376, 0.22823526000976563, 0.2282977294921875, 0.22911488342285155, 0.22893772888183594, 0.2283100128173828, 0.22832127380371095, 0.22796287536621093, 0.22796389770507813, 0.22786151123046874, 0.22802024841308594, 0.22792703247070312, 0.2290247344970703, 0.2290872344970703, 0.2293729248046875, 0.48112127685546874, 0.22774476623535156, 0.22767205810546876, 0.22760140991210936, 0.22770790100097657, 0.228706298828125, 0.22839602661132813, 0.22828953552246095, 0.22751437377929687, 0.22827008056640624, 0.22855577087402343, 0.22760858154296876, 0.22853733825683595, 0.2290882568359375, 0.22853427124023437, 0.22787174987792969, 0.22959922790527343, 0.22843597412109376, 0.22793830871582033, 0.22853427124023437, 0.22780621337890625, 0.22782566833496093, 0.2280755157470703, 0.22808781433105468, 0.22815948486328125, 0.22787686157226564, 0.22864691162109374, 0.2278481903076172, 0.22766387939453125, 0.22787174987792969, 0.22767514038085937, 0.22795161437988282, 0.22794650268554686, 0.22880563354492187, 0.22775808715820312, 0.22767514038085937, 0.22767718505859375, 0.2277734375, 0.22772122192382813, 0.22761984252929687, 0.2289653778076172, 0.22932582092285156, 0.2285506591796875, 0.22796083068847656, 0.2276986846923828, 0.22787583923339844, 0.2282608642578125, 0.2280263671875, 0.22794137573242187, 0.2278461456298828, 0.22779391479492186, 0.22773248291015624, 0.2276259765625, 0.22793215942382813, 0.22887014770507813, 0.2283756103515625, 0.22759724426269531, 0.22863462829589845, 0.22802841186523437, 0.228279296875, 0.22952755737304686, 0.22824755859375, 0.2283223114013672, 0.4810905456542969, 0.22825372314453124, 0.22767202758789062, 0.22804582214355468, 0.22862745666503906, 0.227810302734375, 0.22774989318847655, 0.22795980834960938, 0.22772940063476563, 0.22866943359375, 0.22906982421875, 0.2284390411376953, 0.22901657104492187, 0.22875852966308594, 0.22855270385742188, 0.22873805236816405, 0.22831513977050782, 0.22964530944824219, 0.2282782745361328, 0.228890625, 0.22893772888183594, 0.22941389465332032, 0.22863871765136717, 0.22856192016601562, 0.2286868438720703, 0.22785536193847655, 0.22773554992675782, 0.22780723571777345, 
0.22774887084960938, 0.22783795166015625, 0.2286028747558594, 0.2292316131591797, 0.22855474853515625, 0.2284277801513672, 0.22815129089355468, 0.22762086486816407, 0.22760345458984374, 0.2294599609375, 0.228251708984375, 0.22796998596191406, 0.22916300964355468, 0.22799667358398437, 0.22786355590820312, 0.2276986846923828, 0.22783282470703126, 0.22780621337890625, 0.22774681091308593, 0.22811955261230468, 0.22787890625, 0.22774374389648439, 0.2275000305175781, 0.22767001342773438, 0.2276884460449219, 0.22773554992675782, 0.2278778839111328, 0.22794650268554686, 0.22774476623535156, 0.22773452758789062, 0.22781234741210937, 0.22782975769042968, 0.22782054138183594, 0.22786151123046874, 0.22886604309082031, 0.4816783447265625, 0.22901145935058595, 0.22866943359375, 0.2290145263671875, 0.22938316345214843, 0.22859571838378906, 0.22785023498535156, 0.22841139221191406, 0.22864895629882812, 0.2287615966796875, 0.22899302673339844, 0.2279720916748047, 0.2287073211669922, 0.22850355529785157, 0.22853631591796875, 0.22866021728515626, 0.22911077880859376, 0.22943026733398436, 0.22803558349609376, 0.22808781433105468, 0.2292725830078125, 0.22914457702636717, 0.22839295959472655, 0.22797314453125, 0.22791267395019532, 0.22795161437988282, 0.22988902282714843, 0.2280447998046875, 0.22778163146972658, 0.2276444091796875, 0.2277928924560547, 0.2285015106201172, 0.22870835876464843, 0.22830181884765624, 0.22769766235351563, 0.22775193786621092, 0.22775091552734375, 0.22768333435058594, 0.22769664001464843, 0.22772633361816405, 0.22788607788085938, 0.22782054138183594, 0.22759837341308595, 0.22872572326660157, 0.2291568603515625, 0.22867762756347657, 0.228094970703125, 0.2280990753173828, 0.22783795166015625, 0.2277724151611328, 0.22763827514648438, 0.2278164520263672, 0.22774681091308593, 0.2276433868408203, 0.2277181396484375, 0.22805708312988282, 0.2277232666015625, 0.228068359375, 0.2278154296875, 0.2276864013671875, 0.22811546325683593, 0.22927462768554688, 0.2289459228515625]",tokens/s,4.311005041891343,,,,,,main,False,False -4bit-awq-gemm-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,facebook/opt-66b,facebook/opt-66b,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File 
""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - self.load_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3904, in from_pretrained - dispatch_model(model, **device_map_kwargs) - File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 489, in dispatch_model - model.to(device) - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 2796, in to - return super().to(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1173, in to - return self._apply(convert) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 779, in _apply - module._apply(fn) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 779, in _apply - module._apply(fn) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 779, in _apply - module._apply(fn) - [Previous line repeated 2 more times] - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 853, in _apply - self._buffers[key] = fn(buf) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1159, in convert - return t.to( -torch.cuda.OutOfMemoryError: CUDA out of memory. Tried to allocate 162.00 MiB. 
GPU - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemm-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,0,0,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 304, in hf_raise_for_status - response.raise_for_status() - File ""/usr/local/lib/python3.10/dist-packages/requests/models.py"", line 1024, in raise_for_status - raise HTTPError(http_error_msg, response=self) -requests.exceptions.HTTPError: 404 Client Error: Not Found for url: https://huggingface.co/0/resolve/main/config.json - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1221, in hf_hub_download - return _hf_hub_download_to_cache_dir( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1325, in _hf_hub_download_to_cache_dir - _raise_on_head_call_error(head_call_error, force_download, local_files_only) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1823, in _raise_on_head_call_error - raise head_call_error - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1722, in _get_metadata_or_catch_error - metadata = get_hf_file_metadata(url=url, proxies=proxies, timeout=etag_timeout, headers=headers) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1645, in get_hf_file_metadata - r = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 372, in _request_wrapper - response = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 396, in _request_wrapper - hf_raise_for_status(response) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status - raise RepositoryNotFoundError(message, response) 
from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-669494c4-56dde3854d36ea97282aa5c4;24779085-20a3-4e9d-9a98-3929db5b97e3) - -Repository Not Found for url: https://huggingface.co/0/resolve/main/config.json. -Please make sure you specified the correct `repo_id` and `repo_type`. -If you are trying to access a private or gated repo, make sure you are authenticated. - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 425, in cached_file - raise EnvironmentError( -OSError: 0 is not a local folder and is not a valid model identifier listed on 'https://huggingface.co/models' -If this is a private repository, make sure to pass a token having permission to this repo either by logging in with `huggingface-cli login` or by passing `token=` - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemm-eager,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,facebook/xglm-4.5B,facebook/xglm-4.5B,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.0,,0.30.1,,,,1.19.2,,,,0.11.1,,,,,,,,,,,,,,,,,,,,,,,,,,,MB,3047.755776,4521.984,0.0,3875.536896,3575.121408,s,10,3.925874938964844,0.3925874938964844,0.0032003814064900295,0.39131011962890627,0.3974839904785156,0.39779729309082035,0.39804793518066406,"[0.398110595703125, 0.3898521728515625, 0.39046603393554685, 0.39330859375, 0.38967181396484374, 0.38878271484375, 0.39181430053710936, 0.3956484069824219, 0.39741436767578125, 
0.3908059387207031]",tokens/s,652.0839404718809,kWh,4.606836531152074e-06,2.5243247530642504e-06,1.9845015875998338e-05,2.697617716021466e-05,tokens/kWh,9489854.640247436,MB,3047.755776,4521.984,0.0,3875.536896,3800.975872,s,10,231.19119335937498,23.119119335937498,0.024652283207228468,23.1104677734375,23.1560455078125,23.16491630859375,23.17201294921875,"[23.104484375, 23.106837890625, 23.11409765625, 23.1257734375, 23.09865234375, 23.091490234375, 23.10330859375, 23.173787109375, 23.15407421875, 23.1186875]",tokens/s,2.7250172934602097,kWh,0.0002730506428477602,0.00014965489141425045,0.0011581440098478151,0.0015808495441098258,tokens/kWh,39851.989858702975,,s,629,234.351453125,0.3725778269077901,0.04661763996043978,0.3668070373535156,0.3683815368652344,0.36919970703125,0.7562686254882812,"[0.3660308532714844, 0.3664025573730469, 0.36736306762695314, 0.3682928771972656, 0.3671849060058594, 0.366424072265625, 0.36691455078125, 0.36642098999023437, 0.3666820983886719, 0.36670053100585936, 0.3671910400390625, 0.3671593017578125, 0.3675473937988281, 0.367383544921875, 0.36641281127929687, 0.3659335632324219, 0.366244873046875, 0.3652311096191406, 0.3665745849609375, 0.3662633056640625, 0.3673968505859375, 0.36597760009765623, 0.3667660827636719, 0.3654379577636719, 0.3664527282714844, 0.3673671569824219, 0.36636468505859376, 0.3656898498535156, 0.3663196105957031, 0.3655997314453125, 0.36726272583007813, 0.36568780517578126, 0.3668695068359375, 0.36599508666992187, 0.3665059204101562, 0.3655762023925781, 0.36666366577148435, 0.36643328857421875, 0.3669176330566406, 0.3657113647460937, 0.36776141357421877, 0.3660533752441406, 0.36670053100585936, 0.36581787109375, 0.36618853759765624, 0.3657318420410156, 0.36655206298828125, 0.3656663818359375, 0.36818527221679687, 0.36655206298828125, 0.3670487060546875, 0.366587890625, 0.3682109375, 0.3672012939453125, 0.3683788757324219, 0.3662612609863281, 0.36835430908203126, 0.3673917541503906, 0.3714969482421875, 0.36665652465820314, 0.3664425048828125, 0.36596124267578123, 0.7560806274414062, 0.36733746337890627, 0.36763238525390624, 0.3667384338378906, 0.3690987548828125, 0.3665008544921875, 0.3659049072265625, 0.3675392150878906, 0.36642611694335936, 0.36708966064453125, 0.36646810913085937, 0.3664025573730469, 0.3674142761230469, 0.36601959228515624, 0.36743270874023437, 0.36704461669921873, 0.36616806030273436, 0.36657357788085937, 0.3658526611328125, 0.3659263916015625, 0.3657093200683594, 0.3664773254394531, 0.3665899658203125, 0.3660308532714844, 0.36575845336914065, 0.3663247375488281, 0.3659059143066406, 0.36675994873046874, 0.3660185546875, 0.3673917541503906, 0.36654180908203127, 0.36684698486328127, 0.367072265625, 0.3671255187988281, 0.36628070068359375, 0.3660789794921875, 0.36613528442382814, 0.3671644287109375, 0.3661455383300781, 0.36799591064453124, 0.36629299926757813, 0.36816485595703125, 0.36593869018554687, 0.3678883972167969, 0.3660892028808594, 0.3684013977050781, 0.36708865356445314, 0.36752896118164063, 0.36656741333007814, 0.3682099304199219, 0.36648345947265626, 0.36751565551757814, 0.3659735107421875, 0.3680624694824219, 0.3659202575683594, 0.3667906494140625, 0.36607489013671873, 0.3667957763671875, 0.36641485595703127, 0.36750439453125, 0.36619058227539064, 0.36707431030273435, 0.36645068359375, 0.7576248168945312, 0.3653406677246094, 0.3671552124023437, 0.3671224365234375, 0.3667906494140625, 0.3663052673339844, 0.36621209716796876, 0.36776141357421877, 0.3668439025878906, 0.36665139770507815, 0.3658373107910156, 
0.36659506225585936, 0.36775115966796873, 0.36711117553710937, 0.36632986450195315, 0.36725247192382815, 0.36715213012695314, 0.36897998046875, 0.3676968994140625, 0.3678535766601562, 0.366065673828125, 0.367072265625, 0.36664523315429687, 0.3668899841308594, 0.366271484375, 0.3664445495605469, 0.3664271240234375, 0.366392333984375, 0.36568576049804685, 0.36705279541015623, 0.36768154907226563, 0.36760678100585936, 0.36624383544921874, 0.3671849060058594, 0.36741018676757814, 0.36703640747070315, 0.36581375122070314, 0.36656536865234374, 0.3663052673339844, 0.36876492309570313, 0.366455810546875, 0.3671715698242187, 0.36698419189453124, 0.3677542419433594, 0.36644046020507814, 0.36671795654296874, 0.36616293334960937, 0.36698828125, 0.3662899169921875, 0.36686746215820315, 0.3674449768066406, 0.36675994873046874, 0.366519287109375, 0.3665111083984375, 0.36600628662109375, 0.36778289794921876, 0.3663800354003906, 0.36820684814453125, 0.3660175476074219, 0.36745419311523436, 0.3680143432617187, 0.3678023681640625, 0.3672965087890625, 0.76101123046875, 0.36763442993164064, 0.3688806457519531, 0.3682867126464844, 0.3672862854003906, 0.36792831420898436, 0.36681729125976564, 0.36853555297851565, 0.36648446655273437, 0.36719000244140626, 0.3671961669921875, 0.3679938659667969, 0.3675965576171875, 0.3683921813964844, 0.3668418579101563, 0.3667855224609375, 0.36585470581054685, 0.36641998291015626, 0.3668428649902344, 0.3661527099609375, 0.3681187744140625, 0.3663144836425781, 0.367393798828125, 0.36705484008789063, 0.36674969482421876, 0.36767642211914064, 0.3666595764160156, 0.36638516235351565, 0.3659837341308594, 0.3676375122070312, 0.36701901245117186, 0.36602264404296875, 0.36724429321289065, 0.36727194213867187, 0.3677163391113281, 0.36646194458007814, 0.3668899841308594, 0.36752896118164063, 0.3668070373535156, 0.36688385009765623, 0.3675699157714844, 0.36781158447265627, 0.3655679931640625, 0.3665203247070313, 0.3669176330566406, 0.3664250793457031, 0.36553421020507815, 0.3678494567871094, 0.367056884765625, 0.3667271728515625, 0.3659151306152344, 0.36611276245117186, 0.3676180419921875, 0.3684751281738281, 0.3661475830078125, 0.3675002746582031, 0.3667466125488281, 0.36979815673828126, 0.3675893859863281, 0.3668500366210937, 0.36594073486328127, 0.3664721984863281, 0.3660421142578125, 0.7563417358398438, 0.36537957763671874, 0.3666237487792969, 0.3666851806640625, 0.36812799072265623, 0.3660308532714844, 0.3663882141113281, 0.3670702209472656, 0.3671562194824219, 0.3671490478515625, 0.3683932189941406, 0.36664523315429687, 0.3658895263671875, 0.36612710571289064, 0.36652340698242186, 0.3672749938964844, 0.3655301208496094, 0.366635009765625, 0.3657994384765625, 0.36625100708007813, 0.36786483764648437, 0.3671715698242187, 0.36642816162109376, 0.3672166442871094, 0.3667108459472656, 0.36772549438476565, 0.36702822875976565, 0.367720458984375, 0.3670384521484375, 0.3671142272949219, 0.3670169677734375, 0.369112060546875, 0.36612506103515624, 0.3668643798828125, 0.3657052307128906, 0.3660328979492187, 0.3655086059570313, 0.3667589111328125, 0.3669944458007813, 0.36692889404296875, 0.3655577697753906, 0.3665080261230469, 0.36726373291015624, 0.36662066650390623, 0.3658455505371094, 0.36661654663085935, 0.365918212890625, 0.368047119140625, 0.3663114318847656, 0.36607794189453124, 0.36666778564453123, 0.36623870849609375, 0.36541131591796877, 0.36611993408203125, 0.36540826416015626, 0.36666470336914064, 0.3660861511230469, 0.3677306823730469, 0.3666360168457031, 0.36710296630859374, 
0.366376953125, 0.3661721496582031, 0.36556698608398436, 0.75503515625, 0.36811160278320315, 0.3666186218261719, 0.3668746337890625, 0.36598681640625, 0.36689202880859373, 0.3665131530761719, 0.36666983032226563, 0.3660205993652344, 0.3664025573730469, 0.3666462707519531, 0.3671009216308594, 0.3655587768554687, 0.3668203430175781, 0.36630117797851564, 0.36666366577148435, 0.36585574340820315, 0.36627865600585935, 0.3661414489746094, 0.36637799072265625, 0.36607998657226565, 0.3658045349121094, 0.3679395751953125, 0.36621517944335935, 0.36576153564453123, 0.3663124389648437, 0.3655833740234375, 0.3661219787597656, 0.3678760986328125, 0.36634521484375, 0.36614862060546877, 0.3665377197265625, 0.3658229675292969, 0.36720025634765624, 0.3659571228027344, 0.36605746459960936, 0.36572467041015627, 0.36643429565429686, 0.3677470703125, 0.3671142272949219, 0.36663092041015627, 0.36711935424804687, 0.3663636474609375, 0.36681625366210935, 0.3660943298339844, 0.36676812744140624, 0.36628582763671874, 0.3674306640625, 0.3657963562011719, 0.36687359619140625, 0.36616293334960937, 0.3663595581054688, 0.3658577880859375, 0.367494140625, 0.3659980773925781, 0.3674224548339844, 0.3666606140136719, 0.3666483154296875, 0.36678964233398437, 0.367278076171875, 0.3672596435546875, 0.36691558837890625, 0.3659898986816406, 0.7581112060546875, 0.36696063232421877, 0.36686541748046875, 0.3666217041015625, 0.3667189636230469, 0.36658688354492186, 0.36612606811523435, 0.3663329162597656, 0.36610763549804687, 0.3663811340332031, 0.3662243347167969, 0.3669329833984375, 0.3673456726074219, 0.3678023681640625, 0.36764877319335937, 0.3677501525878906, 0.3669678039550781, 0.3668746337890625, 0.3683153991699219, 0.36921240234375, 0.368362548828125, 0.36775827026367186, 0.3663052673339844, 0.3672483825683594, 0.36686746215820315, 0.36620184326171873, 0.36589157104492187, 0.36683877563476563, 0.36737841796875, 0.36710400390625, 0.3661424560546875, 0.36658074951171876, 0.365517822265625, 0.36632781982421875, 0.36605645751953125, 0.366856201171875, 0.36679168701171877, 0.3667793884277344, 0.3662469177246094, 0.36760064697265626, 0.36884786987304685, 0.3667568664550781, 0.3658803100585937, 0.36786996459960936, 0.3664742431640625, 0.3666083984375, 0.36686541748046875, 0.367140869140625, 0.3666790466308594, 0.36664422607421876, 0.365802490234375, 0.36675277709960935, 0.365348876953125, 0.36615576171875, 0.3656755065917969, 0.36651724243164063, 0.3666483154296875, 0.3665623168945312, 0.3659253845214844, 0.36593869018554687, 0.3652372436523437, 0.36640359497070313, 0.3656540222167969, 0.7624765625, 0.36612710571289064, 0.3665633239746094, 0.36761907958984374, 0.36593048095703123, 0.36634112548828124, 0.3662571411132812, 0.3672842102050781, 0.3657646179199219, 0.368606201171875, 0.36768359375, 0.3676794738769531, 0.36858981323242185, 0.3683133544921875, 0.369396728515625, 0.36714599609375, 0.36760577392578125, 0.3668357238769531, 0.3677470703125, 0.3677235107421875, 0.36642098999023437, 0.36621209716796876, 0.3658362731933594, 0.36801739501953123, 0.3667783813476562, 0.3668715515136719, 0.3696394348144531, 0.369375244140625, 0.3691806640625, 0.3698810729980469, 0.37041253662109375, 0.36909466552734377, 0.3689021301269531, 0.3667169189453125, 0.36602264404296875, 0.3657953186035156, 0.3676252136230469, 0.36923904418945314, 0.3685191650390625, 0.3708651428222656, 0.3690188903808594, 0.370050048828125, 0.36955239868164064, 0.366551025390625, 0.3658076171875, 0.3684198303222656, 0.3685857238769531, 0.3687065734863281, 
0.36557516479492186, 0.3663943786621094, 0.3689195556640625, 0.3698083801269531, 0.3692380065917969, 0.3681546325683594, 0.3669350280761719, 0.36759039306640623, 0.3672842102050781, 0.3702108154296875, 0.3688243103027344, 0.36716030883789064, 0.36670465087890625, 0.36789248657226564, 0.3689768981933594, 0.7654307861328125, 0.367162353515625, 0.369691650390625, 0.3691734924316406, 0.370017333984375, 0.368702392578125, 0.36607794189453124, 0.36709375, 0.3667589111328125, 0.36747161865234373, 0.3679764404296875, 0.3669626770019531, 0.3658486022949219, 0.36790267944335936, 0.3670026245117187, 0.36758526611328124, 0.3668479919433594, 0.36762625122070314, 0.36738970947265626, 0.36922470092773435, 0.36768768310546873, 0.36928103637695314, 0.3660902404785156, 0.36697601318359374, 0.36616705322265625, 0.3675197448730469, 0.36707635498046876, 0.3673385009765625, 0.3668746337890625, 0.36747161865234373, 0.36656536865234374, 0.36796722412109373, 0.36610662841796876, 0.3665213317871094, 0.365781005859375, 0.36651007080078124, 0.36767025756835936, 0.3668213806152344, 0.36538470458984373, 0.3673118591308594, 0.365907958984375, 0.3671910400390625, 0.3668971557617188, 0.3673313293457031, 0.3669053344726563, 0.3664783935546875, 0.36691961669921874, 0.36783718872070315, 0.36801126098632814, 0.36738970947265626, 0.36602264404296875, 0.36634521484375, 0.3659909057617188, 0.36675787353515626, 0.3704688720703125, 0.37160037231445314, 0.370060302734375, 0.3701022644042969, 0.36780850219726563, 0.36857342529296877, 0.3668899841308594, 0.36739788818359376, 0.3669186706542969, 0.7624959716796875, 0.366529541015625, 0.367541259765625, 0.3666298828125, 0.367783935546875, 0.36849972534179687, 0.36857550048828125, 0.36766720581054685, 0.3665428466796875, 0.367025146484375, 0.3665684509277344, 0.3679231872558594, 0.3658014831542969, 0.3674449768066406, 0.36611276245117186, 0.367678466796875, 0.3667189636230469, 0.3687383117675781, 0.36729037475585935, 0.3665848388671875, 0.36641177368164063, 0.36767333984375, 0.3674972229003906, 0.36638516235351565, 0.3664025573730469, 0.36736306762695314, 0.3676334228515625, 0.3689072570800781, 0.3667712097167969, 0.3672862854003906, 0.3658874816894531, 0.36724429321289065, 0.3662274475097656, 0.3670732727050781, 0.3663523864746094, 0.3670425720214844, 0.3662489624023437, 0.36671282958984375, 0.3670978698730469, 0.3670241394042969, 0.3659970703125, 0.3673231506347656, 0.3663739013671875, 0.3662264404296875, 0.36703436279296875, 0.36681729125976564, 0.3661414489746094, 0.3668623352050781, 0.366129150390625, 0.367130615234375, 0.36584756469726565, 0.3665489807128906, 0.3668070373535156, 0.3673395080566406, 0.3674347534179688, 0.3669698486328125, 0.36617010498046876, 0.3672535095214844, 0.36757708740234374, 0.3671152648925781, 0.3667189636230469, 0.3670374450683594, 0.3659479064941406]",tokens/s,2.684002986166677,,,,,,,, -4bit-awq-gemm-eager,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,TencentARC/Mistral_Pro_8B_v0.1,TencentARC/Mistral_Pro_8B_v0.1,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA 
A10G'],1,24146608128,0.2.1,,4.41.0,,0.30.1,,,,1.19.2,,,,0.11.1,,,,,,,,,,,,,,,,,,,,,,,,,,,MB,2593.296384,7298.613248,0.0,6652.166144,6323.352576,s,10,7.731728576660157,0.7731728576660156,0.0036666872570226623,0.7712686767578125,0.7772910278320313,0.7798124450683593,0.7818295788574219,"[0.7823338623046875, 0.776730712890625, 0.7708938598632813, 0.7714732666015625, 0.7710640869140625, 0.770478515625, 0.770045166015625, 0.773399658203125, 0.7746173706054688, 0.7706920776367188]",tokens/s,331.10319052428935,kWh,9.090474362556752e-06,4.981150633102515e-06,4.3684586229685554e-05,5.775621122534482e-05,tokens/kWh,4432423.709394238,MB,2593.296384,7298.613248,0.0,6652.166144,6382.565888,s,10,458.08653125,45.808653125,0.015714942284677954,45.80371484375,45.83385546875,45.83744140625,45.84031015625,"[45.8103125, 45.8045234375, 45.84102734375, 45.79598828125, 45.78718359375, 45.80095703125, 45.79937890625, 45.80290625, 45.8111953125, 45.83305859375]",tokens/s,1.3752860148079282,kWh,0.0005408116745630392,0.00029641227384910964,0.0025237221621385317,0.0033609461105506802,tokens/kWh,18744.721851454397,,s,629,464.3046874389652,0.7381632550698964,0.09178720852369766,0.7269785766601562,0.7282177978515625,0.72855,1.49778814453125,"[0.728890380859375, 0.7286569213867188, 0.728468505859375, 0.727841796875, 0.7261204223632812, 0.7262678833007813, 0.7260436401367187, 0.7263180541992188, 0.7261552734375, 0.7261777954101563, 0.727593994140625, 0.7266488037109375, 0.726371337890625, 0.7267962646484375, 0.7268167724609375, 0.72713623046875, 0.7261091918945313, 0.7263775024414062, 0.726086669921875, 0.7265126953125, 0.7268792114257813, 0.7265433349609375, 0.7264358520507812, 0.7269488525390625, 0.7277793579101562, 0.7264532470703124, 0.7258746948242187, 0.7262843017578124, 0.726619140625, 0.726530029296875, 0.726930419921875, 0.7266365356445312, 0.7263467407226563, 0.7267451171875, 0.7262003173828125, 0.7265435180664063, 0.7259226684570312, 0.7268444213867188, 0.727319580078125, 0.72618798828125, 0.725907470703125, 0.7261306762695312, 0.7267153930664062, 0.7260835571289063, 0.7304304809570312, 0.7278233642578125, 0.7278356323242188, 0.7286978759765625, 0.7283251342773438, 0.7282206420898437, 0.7269744873046875, 0.7285032958984375, 0.728458251953125, 0.7281141967773438, 0.7277547607421875, 0.727568359375, 0.7287777099609375, 0.727846923828125, 0.7280547485351563, 0.72765234375, 0.7284254760742187, 0.7282565307617187, 1.504901123046875, 0.7284664306640625, 0.7290664672851562, 0.7276646118164063, 0.7266221923828124, 0.7263283081054688, 0.7270154418945313, 0.7276861572265625, 0.7269110717773437, 0.7259544677734375, 0.7266682739257813, 0.7265515747070312, 0.7271710815429687, 0.7266058349609374, 0.7279667358398437, 0.7284111328125, 0.7286005859375, 0.726703125, 0.7264102172851562, 0.7261973876953125, 0.7271340942382812, 0.72724072265625, 0.7264429931640625, 0.726245361328125, 0.7261767578125, 0.7261071166992188, 0.72650244140625, 0.726097900390625, 0.7263776245117187, 0.726574951171875, 0.728237060546875, 0.7274116821289063, 0.7262125854492187, 0.7267153930664062, 0.7282913208007813, 0.726614013671875, 0.7263006591796874, 0.7260282592773437, 0.726560791015625, 0.7266262817382813, 0.7263662109375, 0.7263231811523437, 0.7260938110351562, 0.727815185546875, 0.7281008911132812, 0.727647216796875, 0.7297269897460937, 0.726920166015625, 0.72753564453125, 0.7265023803710937, 0.726255615234375, 0.7262545776367187, 0.7261071166992188, 0.72612353515625, 0.726687744140625, 0.726403076171875, 0.7265781860351562, 0.7276553955078126, 
0.728158203125, 0.7277864990234375, 0.7276748657226563, 0.7281663818359375, 0.7285872802734376, 1.497443359375, 0.726253662109375, 0.7272754516601563, 0.7280455932617188, 0.7274752197265625, 0.7277711181640625, 0.728121337890625, 0.7287439575195312, 0.7278479614257812, 0.7272069091796876, 0.728195068359375, 0.7279093627929687, 0.7278653564453125, 0.7270317993164063, 0.7276226806640625, 0.7280025634765624, 0.7273707275390625, 0.7268731079101562, 0.7277240600585938, 0.7278991088867187, 0.728922119140625, 0.7292661743164063, 0.7271915283203125, 0.72674609375, 0.72665087890625, 0.7293204345703125, 0.7278919677734375, 0.7269775390625, 0.7271495971679688, 0.7265064697265625, 0.7276011352539062, 0.7278345947265625, 0.7281715087890624, 0.7285504150390625, 0.72888525390625, 0.7278458862304688, 0.7282175903320313, 0.7279083251953125, 0.7274700927734375, 0.72768408203125, 0.7280230102539063, 0.727125, 0.7273963623046875, 0.7267492065429687, 0.7279011840820313, 0.7279144897460937, 0.7274905395507812, 0.7290408935546875, 0.7272591552734375, 0.7271188354492187, 0.7262003173828125, 0.7260712890625, 0.7269284057617188, 0.7274291381835938, 0.7278981323242187, 0.7281622924804687, 0.727773193359375, 0.72789501953125, 0.7277240600585938, 0.727568359375, 0.727462890625, 0.72740966796875, 0.7279739379882812, 1.49783544921875, 0.7277803344726562, 0.7284193115234375, 0.7274598388671875, 0.727204833984375, 0.7272642822265625, 0.7271331787109375, 0.7269990234375, 0.72734619140625, 0.726697998046875, 0.7275581665039063, 0.7281787109375, 0.7283179321289063, 0.7270953369140625, 0.7263682250976562, 0.7266856689453125, 0.7272109985351562, 0.7262371826171875, 0.7263467407226563, 0.726561767578125, 0.7266099243164063, 0.7263775024414062, 0.7269427490234375, 0.7270051879882813, 0.7267901611328125, 0.7268003540039063, 0.7271588134765625, 0.7267839965820313, 0.7270287475585937, 0.7264204711914063, 0.7269273681640624, 0.7266437377929688, 0.7264727172851563, 0.7263098754882813, 0.7269898071289063, 0.72724072265625, 0.7265023803710937, 0.726582275390625, 0.727125, 0.7272601318359375, 0.7271116943359375, 0.7265310668945313, 0.7264296875, 0.7270942993164062, 0.72810498046875, 0.7268444213867188, 0.7269007568359375, 0.72627197265625, 0.7266754760742188, 0.7263273315429688, 0.726993896484375, 0.72631298828125, 0.727208984375, 0.7275560913085938, 0.7275110473632812, 0.7266375732421875, 0.7264818725585938, 0.7270154418945313, 0.726582275390625, 0.7271209106445312, 0.7262269287109375, 0.7262637939453125, 0.7265955810546875, 1.49766650390625, 0.7265485229492188, 0.7276533203125, 0.7271311645507812, 0.726666259765625, 0.7262494506835937, 0.7270901489257813, 0.7279677734375, 0.7269109497070313, 0.7260007934570313, 0.726369140625, 0.7265126342773438, 0.7262740478515625, 0.7264921875, 0.7263375244140625, 0.7265730590820313, 0.727419921875, 0.7267584228515624, 0.726582275390625, 0.7271137084960938, 0.7265505981445313, 0.726699951171875, 0.72631298828125, 0.7264921875, 0.7262166748046875, 0.7264942016601562, 0.727056396484375, 0.7262740478515625, 0.726329345703125, 0.72700927734375, 0.7266652221679688, 0.7271157836914063, 0.7266324462890625, 0.7262699584960938, 0.7262648315429687, 0.7263733520507812, 0.7264257202148438, 0.7263466796875, 0.7261910400390625, 0.726476806640625, 0.7270922241210938, 0.7264163818359375, 0.726287353515625, 0.726835205078125, 0.7277772827148438, 0.7272254638671874, 0.7271760864257812, 0.7273492431640625, 0.7281674194335938, 0.7272499389648438, 0.7273533325195313, 0.7268812866210937, 0.7270850830078125, 
0.7263784790039063, 0.7263406372070312, 0.7266806030273437, 0.7265914916992188, 0.7289251708984374, 0.7269837036132812, 0.7268054809570312, 0.7262802124023438, 0.7263119506835938, 0.727593994140625, 1.498050537109375, 0.7272069091796876, 0.7269785766601562, 0.7263672485351562, 0.7267686157226563, 0.7265392456054688, 0.7266918334960938, 0.7279226684570312, 0.7270952758789062, 0.7273717651367188, 0.7269324951171875, 0.7273584594726562, 0.7267799072265625, 0.7266355590820313, 0.7265770874023437, 0.7268864135742188, 0.7267072143554687, 0.7268515625, 0.72646875, 0.7266506958007812, 0.7269765014648437, 0.7271884765625, 0.7277824096679687, 0.7268259887695312, 0.7264942016601562, 0.7269846801757812, 0.7266631469726562, 0.7266221923828124, 0.7265433959960937, 0.7265709228515626, 0.7269048461914063, 0.726640625, 0.7268331298828125, 0.7268157348632812, 0.7275376586914063, 0.7284869384765625, 0.72732568359375, 0.72707275390625, 0.7283681030273438, 0.7273052368164062, 0.727035888671875, 0.726771728515625, 0.7267860717773438, 0.7267205200195312, 0.7269447631835938, 0.7271106567382812, 0.7269212036132813, 0.7263958740234375, 0.7277291259765625, 0.7267225341796875, 0.7280814208984375, 0.726513671875, 0.72703076171875, 0.7273072509765625, 0.7264839477539062, 0.72660888671875, 0.7266047973632812, 0.7268126831054688, 0.7263416137695312, 0.7265904541015625, 0.727103515625, 0.72654541015625, 0.7273564453125, 1.4995804443359375, 0.7264389038085938, 0.72692529296875, 0.726814697265625, 0.7266785278320312, 0.7263928833007812, 0.7267000122070313, 0.726929443359375, 0.7269508666992187, 0.7267072143554687, 0.7272652587890625, 0.727456787109375, 0.7268905029296875, 0.7272182006835938, 0.7271659545898438, 0.7272396850585937, 0.727488525390625, 0.727024658203125, 0.7267123413085937, 0.7267573852539062, 0.72673486328125, 0.7267174682617188, 0.7264542846679688, 0.7265056762695312, 0.7268145141601563, 0.7267010498046875, 0.7269017333984376, 0.7276697387695312, 0.7267901611328125, 0.7262535400390625, 0.7270850830078125, 0.7274118041992188, 0.7274280395507813, 0.7267235717773437, 0.727041015625, 0.7266365356445312, 0.7266611328125, 0.7265853271484375, 0.7269519653320312, 0.7284623413085938, 0.7270697021484375, 0.7270390014648438, 0.7264204711914063, 0.7266826171875, 0.7274915771484375, 0.726845458984375, 0.72681982421875, 0.72749365234375, 0.72808447265625, 0.727667724609375, 0.726656005859375, 0.7265833129882813, 0.726640625, 0.7277496337890625, 0.7269703979492188, 0.7271680297851563, 0.7267593994140625, 0.7272683715820313, 0.727320556640625, 0.7269324951171875, 0.7269560546875, 0.7263651733398437, 0.7267870483398438, 1.500390380859375, 0.7265679321289062, 0.7275233154296875, 0.7270062255859375, 0.7268106079101563, 0.7261430053710938, 0.7260712890625, 0.7270625, 0.7281285400390625, 0.728479736328125, 0.7286661376953125, 0.7285464477539062, 0.7271463623046875, 0.7275120849609376, 0.7265228881835938, 0.7265628051757812, 0.7272130737304687, 0.7273912353515625, 0.7264358520507812, 0.7261306762695312, 0.7267123413085937, 0.7264491577148438, 0.72669287109375, 0.726719482421875, 0.7263631591796875, 0.7265198364257812, 0.7262894287109375, 0.7263252563476562, 0.7271505737304688, 0.7277117309570312, 0.7283435668945313, 0.7273421020507812, 0.7282186279296875, 0.7270390014648438, 0.7294136352539062, 0.7282667236328125, 0.7285493774414062, 0.7274598388671875, 0.7267921752929688, 0.7266416625976563, 0.72656591796875, 0.72635595703125, 0.7266129760742187, 0.7267891235351562, 0.7269498901367187, 0.7263416137695312, 0.72686181640625, 
0.7261245727539063, 0.7262761840820312, 0.7264869995117188, 0.7264901123046875, 0.7260047607421874, 0.726487060546875, 0.7265208129882812, 0.7264603881835937, 0.7263908081054687, 0.7275448608398437, 0.727146484375, 0.72680859375, 0.7264603881835937, 0.7277178955078125, 0.72709326171875, 0.728501220703125, 1.5012095947265625, 0.72654541015625, 0.7268945922851563, 0.7272919311523437, 0.7272028198242187, 0.7270809326171875, 0.7276093139648437, 0.7282923583984375, 0.7271884765625, 0.7268485717773437, 0.726763427734375, 0.7262822265625, 0.7274660034179687, 0.7266007080078125, 0.72632421875, 0.7261010131835938, 0.726930419921875, 0.7262833251953125, 0.727236572265625, 0.7263109130859375, 0.727277587890625, 0.729017333984375, 0.7280557861328125, 0.7287122192382812, 0.726213623046875, 0.72635595703125, 0.7278028564453125, 0.7271577758789063, 0.7265413208007813, 0.7267686157226563, 0.7265740966796875, 0.7276277465820312, 0.727436279296875, 0.7270174560546875, 0.7273318481445312, 0.72755712890625, 0.72652392578125, 0.7278878784179688, 0.7272489013671875, 0.7272007446289063, 0.727320556640625, 0.7268229370117187, 0.7260794677734375, 0.7274660034179687, 0.726408203125, 0.726381591796875, 0.7260671997070313, 0.7263037719726563, 0.7268433837890625, 0.726957275390625, 0.7271473388671875, 0.7270225830078125, 0.7275509643554687, 0.7284777221679688, 0.7270584106445312, 0.7272191772460938, 0.727357421875, 0.7268218994140625, 0.7266734008789062, 0.726677490234375, 0.7279493408203125, 0.7279093627929687, 0.7280087280273437, 1.50259716796875, 0.7271526489257812, 0.727667724609375, 0.7279851684570312, 0.7274424438476562, 0.7262618408203125, 0.7266088256835938, 0.7269324951171875, 0.7273543701171875, 0.7273164672851562, 0.7271823120117188, 0.7268035278320313, 0.7278939208984375, 0.7264112548828126, 0.7268864135742188, 0.726513671875, 0.7265802001953126, 0.72677685546875, 0.7268116455078125, 0.7273564453125, 0.72827392578125, 0.72766259765625, 0.7272315063476562, 0.7262218017578125, 0.726414306640625, 0.7278561401367187, 0.728342529296875, 0.7276656494140625, 0.7273707275390625, 0.7264389038085938, 0.7276830444335938, 0.7273564453125, 0.7274208984375, 0.7277598876953125, 0.72755712890625, 0.7271044921875, 0.7269417114257812, 0.7284859008789063, 0.728322021484375, 0.727647216796875, 0.7293214721679687, 0.7274178466796875, 0.7273226318359375, 0.7271823120117188, 0.72783154296875, 0.7280087280273437, 0.7273554077148437, 0.7271423950195313, 0.7276287841796875, 0.7275499267578125, 0.7273082885742187, 0.7275765991210937, 0.7275704345703125, 0.7288955078125, 0.7285176391601562, 0.7280199584960938, 0.727841796875, 0.72796875, 0.7280548095703125, 0.7271168212890625, 0.7272642822265625, 0.7274669799804687, 0.7284869384765625]",tokens/s,1.3547138700439787,,,,,,,, -4bit-awq-gemm-eager,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,Salesforce/codegen-6B-nl,Salesforce/codegen-6B-nl,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA 
A10G'],1,24146608128,0.2.1,,4.41.0,,0.30.1,,,,1.19.2,,,,0.11.1,,,,,,,,,,,,,,,,,,,,,,,,,,,MB,1400.639488,6094.848,0.0,5448.400896,5215.942144,s,10,5.915999877929687,0.5915999877929687,0.0028828720813889874,0.5905971069335938,0.5921576904296875,0.5961826965332031,0.5994027014160156,"[0.6002077026367187, 0.5907606201171876, 0.5909608764648437, 0.5905933227539063, 0.5902119140625, 0.5906008911132813, 0.5904852294921875, 0.5912632446289062, 0.5904329223632813, 0.590483154296875]",tokens/s,432.7248229923689,kWh,6.983380516370138e-06,3.826209227811299e-06,3.1648620090131334e-05,4.2458209834312766e-05,tokens/kWh,6029458.166017933,MB,1400.967168,6094.848,0.0,5448.400896,5405.644288,s,10,341.33091015625,34.133091015625,0.0089451948085196,34.130179687500004,34.14430546875,34.149687890625,34.153993828125,"[34.13034765625, 34.1351484375, 34.13001171875, 34.128140625, 34.12757421875, 34.12398828125, 34.13193359375, 34.1255859375, 34.143109375, 34.1550703125]",tokens/s,1.8457162280193342,kWh,0.00040300285582741096,0.00022088092240917615,0.0018324814267682385,0.002456365205004826,tokens/kWh,25647.65201511484,,s,629,346.1052627563478,0.5502468406301234,0.07016038395822202,0.5417257080078125,0.5423982788085937,0.54271630859375,1.1323166894531251,"[0.5417594604492187, 0.5414471435546875, 0.5411215209960938, 0.5414297485351562, 0.5414563598632812, 0.541576171875, 0.5412239379882813, 0.5414502563476562, 0.5416427612304687, 0.54096484375, 0.54148095703125, 0.5411840209960938, 0.5414339599609375, 0.5409760131835938, 0.5409024047851563, 0.5412208862304687, 0.5413017578125, 0.5412515869140625, 0.5410549926757813, 0.5412177734375, 0.5411133422851563, 0.54131201171875, 0.5422049560546875, 0.5416530151367187, 0.54127001953125, 0.5410795288085938, 0.5415239868164062, 0.5422069702148438, 0.5421731567382813, 0.5426155395507812, 0.542755859375, 0.541576171875, 0.5415362548828125, 0.5411993408203125, 0.5416642456054688, 0.541591552734375, 0.541427734375, 0.5414830322265625, 0.5413529663085938, 0.5414666137695312, 0.5418700561523437, 0.5421414184570312, 0.5417953491210937, 0.5417728271484376, 0.5417267456054687, 0.5425919799804687, 0.5420779418945313, 0.5423953857421875, 0.542466064453125, 0.5427732543945313, 0.5427783813476562, 0.5423319091796875, 0.5419212646484375, 0.5422335815429687, 0.5419622192382813, 0.542166015625, 0.542202880859375, 0.5419612426757813, 0.541928466796875, 0.5428971557617187, 0.5427886352539063, 0.542540771484375, 1.132921875, 0.5414379272460937, 0.541675537109375, 0.5412976684570312, 0.5413980102539062, 0.541432861328125, 0.5411819458007813, 0.5411235961914063, 0.5415782470703125, 0.5412136840820313, 0.5412894897460937, 0.5412925415039063, 0.5418301391601562, 0.541549560546875, 0.5421035766601563, 0.5417698974609375, 0.5415985717773437, 0.5415782470703125, 0.5415618286132813, 0.5418792724609375, 0.5416151123046875, 0.5421527099609375, 0.5418721313476562, 0.5414307861328125, 0.5414574584960937, 0.5414255981445313, 0.5418076171875, 0.5416806640625, 0.5416058959960938, 0.5413232421875, 0.5415167846679687, 0.5413140869140625, 0.5417481689453125, 0.5427220458984375, 0.5421270751953124, 0.541823974609375, 0.54205029296875, 0.5420144653320312, 0.5415424194335937, 0.541971435546875, 0.5416202392578126, 0.541264892578125, 0.5415219116210938, 0.541216796875, 0.5416058959960938, 0.541259765625, 0.5414993896484375, 0.5412925415039063, 0.5416744995117188, 0.5412996826171875, 0.5416222534179688, 0.5422120971679687, 0.5429841918945313, 0.542635009765625, 0.5428388061523437, 0.542835693359375, 0.5432268676757812, 
0.5427363891601562, 0.5429586181640625, 0.5425602416992188, 0.5428838500976563, 0.5426534423828125, 0.5426380615234375, 1.1323843994140625, 0.5415637817382812, 0.5415362548828125, 0.541212646484375, 0.5413027954101562, 0.54141748046875, 0.54186083984375, 0.542003173828125, 0.5418147583007813, 0.5417257080078125, 0.5426544799804688, 0.5414871215820313, 0.5419520263671875, 0.5414297485351562, 0.5414932250976563, 0.5413765258789063, 0.5414635620117187, 0.5415690307617187, 0.5419008178710938, 0.5413099365234375, 0.5413130493164062, 0.5413457641601562, 0.5415126953125, 0.541191162109375, 0.541591552734375, 0.5423012084960938, 0.5414676513671876, 0.5414563598632812, 0.5417738037109375, 0.541896728515625, 0.5416048583984375, 0.5420687255859375, 0.5419673461914063, 0.5420953369140625, 0.541780029296875, 0.541761474609375, 0.5418792724609375, 0.5419478759765625, 0.54186083984375, 0.5423062744140625, 0.54225, 0.5419407348632812, 0.542171142578125, 0.5424015502929688, 0.5422223510742188, 0.5414348754882813, 0.5414635620117187, 0.54171337890625, 0.5415720825195313, 0.5418065795898438, 0.5415321655273437, 0.54148095703125, 0.5417645874023438, 0.54140625, 0.5415218505859375, 0.5416734619140625, 0.5417164916992188, 0.5417820434570313, 0.5417778930664062, 0.54166015625, 0.5416734619140625, 0.5428797607421875, 0.542571533203125, 1.1325224609375, 0.5419622192382813, 0.541770751953125, 0.5419089965820313, 0.5416908569335938, 0.5413734130859374, 0.5414747924804687, 0.5413980102539062, 0.5412987060546876, 0.5413621826171875, 0.5414256591796875, 0.5414717407226562, 0.5418414306640625, 0.5414390258789062, 0.5416283569335938, 0.5422162475585938, 0.5423297729492188, 0.5422459106445312, 0.5416038208007813, 0.5418384399414062, 0.5420675659179688, 0.5416325073242187, 0.5418936157226563, 0.54164892578125, 0.5416837158203125, 0.5414542846679687, 0.5416089477539062, 0.541623291015625, 0.541696044921875, 0.5414962768554688, 0.5420676879882812, 0.5414850463867188, 0.5416734619140625, 0.5414481811523437, 0.5424260864257813, 0.5415341796875, 0.5418352661132813, 0.5416427612304687, 0.5419017944335938, 0.5418506469726563, 0.5419898681640625, 0.541454345703125, 0.5417994384765625, 0.5414666137695312, 0.541697021484375, 0.5416581420898438, 0.5419612426757813, 0.5414061889648437, 0.5416857299804687, 0.5413396606445312, 0.5420892333984375, 0.5414430541992188, 0.5420545654296876, 0.5417859497070312, 0.5416704711914062, 0.5413395385742188, 0.5417564086914063, 0.5420462036132813, 0.5418311767578124, 0.5419110107421875, 0.5419161376953125, 0.5414850463867188, 0.541528076171875, 1.131968505859375, 0.5414400024414062, 0.5414932250976563, 0.5411287231445312, 0.541296630859375, 0.5412792358398437, 0.5417031860351562, 0.5415956420898438, 0.5414348754882813, 0.5413161010742188, 0.5413847045898438, 0.541760498046875, 0.5419417724609376, 0.5416417236328125, 0.5418035278320312, 0.54144921875, 0.54145947265625, 0.5413785400390625, 0.541380615234375, 0.5414000854492188, 0.5414727783203125, 0.5413887939453125, 0.5414727783203125, 0.5413324584960938, 0.5414451293945313, 0.54200830078125, 0.541663330078125, 0.5414224853515625, 0.5416151123046875, 0.5414993896484375, 0.54185986328125, 0.5417636108398437, 0.5419540405273438, 0.5416038208007813, 0.5418005981445313, 0.541447021484375, 0.541686767578125, 0.5417062377929688, 0.5416693725585937, 0.5417902221679688, 0.5416990966796875, 0.5416304931640625, 0.5417277221679687, 0.5417277221679687, 0.5418322143554688, 0.5419100341796875, 0.5417410888671875, 0.5418536987304687, 0.5424578247070313, 
0.5422274780273437, 0.5421045532226563, 0.5419489135742187, 0.5420206298828125, 0.5418895263671875, 0.541897705078125, 0.5418690795898438, 0.5419857788085938, 0.5421117553710938, 0.54192333984375, 0.5418588256835938, 0.5421434936523437, 0.5418905639648437, 0.5424384155273437, 1.1324405517578124, 0.5413406982421874, 0.5414522705078125, 0.5422376708984376, 0.5428193359375, 0.541686767578125, 0.5414686889648438, 0.5414215698242187, 0.541285400390625, 0.5416571044921875, 0.5415997314453125, 0.5419386596679687, 0.5416673583984375, 0.541638671875, 0.5414850463867188, 0.5416683349609375, 0.5423175659179688, 0.5418322143554688, 0.5414451904296875, 0.5412515258789062, 0.5415239868164062, 0.542023681640625, 0.541918212890625, 0.5417359619140625, 0.541549560546875, 0.5413662719726563, 0.5415792846679688, 0.5414010620117188, 0.5414441528320313, 0.5412525634765625, 0.54166015625, 0.5412556762695313, 0.541591552734375, 0.5411932373046875, 0.541432861328125, 0.5416161499023437, 0.5414912109375, 0.5413294067382812, 0.541365234375, 0.5413304443359375, 0.5423472900390625, 0.541759521484375, 0.5416038208007813, 0.5411983642578125, 0.5415557250976563, 0.5414194946289063, 0.5415966796875, 0.54135498046875, 0.5417625732421875, 0.5416345825195312, 0.5418465576171875, 0.541486083984375, 0.5419673461914063, 0.541765625, 0.542076904296875, 0.5416171264648437, 0.5418035278320312, 0.5415731201171875, 0.541892578125, 0.54164892578125, 0.5422459106445312, 0.5417850952148437, 0.5420390625, 1.132142578125, 0.5416161499023437, 0.541454345703125, 0.5413867797851563, 0.5415997314453125, 0.541613037109375, 0.5415372924804688, 0.5415844116210937, 0.5418772583007813, 0.5416714477539063, 0.5416253662109375, 0.5415557250976563, 0.5419458618164062, 0.5417195434570312, 0.5419008178710938, 0.541591552734375, 0.5416642456054688, 0.5415946044921875, 0.5415925903320312, 0.5416468505859375, 0.541644775390625, 0.5415782470703125, 0.5415966796875, 0.541538330078125, 0.541675537109375, 0.5414788818359375, 0.5420431518554687, 0.5420267333984375, 0.5419776000976563, 0.5416171264648437, 0.541822998046875, 0.54211376953125, 0.5417615356445312, 0.5421096801757812, 0.5417891845703126, 0.5419161376953125, 0.541697021484375, 0.5416591186523437, 0.5419724731445312, 0.5416560668945313, 0.54164892578125, 0.5418291015625, 0.5418055419921874, 0.541601806640625, 0.5418721313476562, 0.54198681640625, 0.5417297973632812, 0.5416796264648438, 0.5417349243164062, 0.5415823364257812, 0.5425029296875, 0.542382080078125, 0.5421414184570312, 0.5416663208007813, 0.5417011108398437, 0.54160302734375, 0.5418053588867188, 0.5416949462890625, 0.5418045654296875, 0.5421281127929688, 0.5423175659179688, 0.5419632568359375, 0.542244873046875, 1.1331278076171876, 0.5414716186523437, 0.54160791015625, 0.54152294921875, 0.5418578491210938, 0.5414696655273438, 0.5420236206054687, 0.5413458862304688, 0.5417337646484375, 0.5414185180664063, 0.5415403442382812, 0.5418291015625, 0.5415823974609375, 0.5414583740234375, 0.541802490234375, 0.541475830078125, 0.5415629272460938, 0.5419693603515625, 0.5415751953125, 0.5413673095703125, 0.5415239868164062, 0.5413990478515625, 0.5414297485351562, 0.5419171752929688, 0.542044189453125, 0.5415536499023438, 0.5416273803710937, 0.5416406860351562, 0.5414400024414062, 0.541470703125, 0.5419960327148438, 0.5414901733398437, 0.5415792846679688, 0.5415465087890625, 0.5416714477539063, 0.5413161010742188, 0.5425889282226563, 0.5413949584960938, 0.5415833740234375, 0.541106201171875, 0.5417615356445312, 0.5417666625976563, 0.541760498046875, 
0.5414307861328125, 0.5415894775390625, 0.5413898315429687, 0.5415833740234375, 0.5412874145507812, 0.5417349243164062, 0.5415741577148437, 0.5416611938476562, 0.5413416748046875, 0.5422161865234375, 0.5416673583984375, 0.5423308715820313, 0.5420431518554687, 0.5420534057617188, 0.5417984008789063, 0.5419888916015625, 0.5413898315429687, 0.5425121459960938, 0.541970458984375, 0.5419970092773437, 1.13246826171875, 0.5414359130859375, 0.5415659790039062, 0.54150244140625, 0.5414912719726562, 0.5414020385742188, 0.541581298828125, 0.5415567626953125, 0.5416243286132812, 0.5418076171875, 0.5416089477539062, 0.5414696655273438, 0.5415587768554687, 0.541931640625, 0.542152587890625, 0.541929443359375, 0.541939697265625, 0.54158642578125, 0.5415321655273437, 0.5415823364257812, 0.5417267456054687, 0.5416222534179688, 0.54167041015625, 0.54148095703125, 0.5416376342773438, 0.5415116577148438, 0.5416591186523437, 0.5424230346679687, 0.5425469360351562, 0.5417778930664062, 0.5417523193359375, 0.5418987426757812, 0.5419951171875, 0.5423696899414062, 0.5424568481445312, 0.5426206665039063, 0.542086181640625, 0.541823974609375, 0.54186083984375, 0.5420349731445312, 0.5424475708007812, 0.5419776000976563, 0.5422203369140625, 0.5420625, 0.542244873046875, 0.5426073608398437, 0.5429688110351563, 0.5421936645507812, 0.542075927734375, 0.5420676879882812, 0.5423175659179688, 0.5427466430664063, 0.5428531494140625, 0.542118896484375, 0.5421475830078125, 0.542002197265625, 0.5421066284179688, 0.5419857788085938, 0.5419765625, 0.5419990844726562, 0.5420123901367188, 0.5418936157226563, 0.5418700561523437, 1.1330999755859374, 0.5421752319335937, 0.5420472412109375, 0.5415352172851563, 0.541517822265625, 0.5416509399414062, 0.54187109375, 0.5422120971679687, 0.5417625732421875, 0.5419806518554687, 0.5415413818359375, 0.54137548828125, 0.5424609375, 0.5421240234375, 0.54167041015625, 0.5416581420898438, 0.5418803100585937, 0.5415997314453125, 0.5424701538085938, 0.5428551635742187, 0.5424916381835938, 0.542624755859375, 0.542044189453125, 0.54175537109375, 0.541865966796875, 0.54215576171875, 0.5419192504882813, 0.5422418212890625, 0.54240869140625, 0.5427630004882813, 0.543088623046875, 0.5421854858398437, 0.5420318603515625, 0.5419612426757813, 0.5423749389648438, 0.541897705078125, 0.5418147583007813, 0.5428131713867187, 0.542508056640625, 0.5420390625, 0.5422202758789062, 0.542045166015625, 0.542202880859375, 0.5423974609375, 0.5422069702148438, 0.541749267578125, 0.5420185546875, 0.5419468994140625, 0.5423267822265625, 0.5417062377929688, 0.5420277709960938, 0.5420349731445312, 0.5422673950195313, 0.5421270751953124, 0.5421915893554687, 0.5427077026367187, 0.5426472778320313, 0.542382080078125, 0.54236572265625, 0.542023681640625, 0.5423565063476562, 0.542160888671875, 0.5429483642578125]",tokens/s,1.8173661821571478,,,,,,,, -4bit-awq-gemm-eager,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,facebook/opt-125m,facebook/opt-125m,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA 
A10G'],1,24146608128,0.2.1,,4.41.0,,0.30.1,,,,1.19.2,,,,0.11.1,,,,,,,,,,,,,,,,,,,,,,,,,,,MB,1231.42144,879.230976,0.0,232.783872,169.719808,s,10,0.35891270065307623,0.035891270065307616,0.001166922312453486,0.03606051254272461,0.03654228057861328,0.037586180877685546,0.03842130111694336,"[0.03863008117675781, 0.03461849594116211, 0.03453500747680664, 0.034383518218994144, 0.03608425521850586, 0.03613267135620117, 0.036015392303466794, 0.036310302734375, 0.036166206359863284, 0.03603676986694336]",tokens/s,7132.653693619183,kWh,4.2057203039815654e-07,2.3039066429929688e-07,8.883686426833978e-07,1.5393313373808513e-06,tokens/kWh,166305975.7073354,MB,1231.42144,879.230976,0.0,232.783872,199.793152,s,10,21.796639404296876,2.179663940429687,0.03379298018028798,2.1997751464843747,2.2025270751953125,2.2029048950195316,2.2032071508789066,"[2.15886328125, 2.11872216796875, 2.1151240234375, 2.1993291015625, 2.20328271484375, 2.20231494140625, 2.20022119140625, 2.201608642578125, 2.194730224609375, 2.202443115234375]",tokens/s,28.903538215886858,kWh,2.6054930089725813e-05,1.4278879354016576e-05,5.1935423616310185e-05,9.22692330600526e-05,tokens/kWh,682784.4765870875,,s,629,22.081034248352047,0.035104982906760014,0.004313579122612795,0.03483135986328125,0.03513838195800781,0.035482418823242184,0.06834089904785158,"[0.035579902648925785, 0.0354150390625, 0.035542015075683595, 0.0358737907409668, 0.03517337417602539, 0.035922943115234376, 0.03607551956176758, 0.03511808013916016, 0.03547443389892578, 0.035765247344970705, 0.035937278747558594, 0.03575091171264649, 0.03546214294433594, 0.04169113540649414, 0.03495731353759766, 0.03391795349121094, 0.033565696716308595, 0.03336908721923828, 0.03351347351074219, 0.03344179153442383, 0.03361177444458008, 0.03342745590209961, 0.03343564987182617, 0.03342745590209961, 0.03340185546875, 0.03341926574707031, 0.03338649749755859, 0.03336294555664063, 0.033552383422851564, 0.03352883148193359, 0.03450368118286133, 0.03436032104492188, 0.034111488342285154, 0.03450265502929688, 0.03355136108398438, 0.034164737701416016, 0.03474431991577148, 0.034351104736328124, 0.03521331024169922, 0.035111934661865234, 0.03381760025024414, 0.034427902221679685, 0.033756160736083986, 0.0334510383605957, 0.0331734733581543, 0.03338854217529297, 0.03346944046020508, 0.03350425720214844, 0.03346636962890625, 0.03356671905517578, 0.03343769454956055, 0.03313971328735352, 0.03326259231567383, 0.03314790344238281, 0.03320729446411133, 0.03347967910766601, 0.03352678298950195, 0.033538047790527346, 0.03310182571411133, 0.03322163009643555, 0.033478656768798826, 0.03340800094604492, 0.06847795104980468, 0.03334348678588867, 0.03356576156616211, 0.03346118545532226, 0.033667072296142575, 0.03376025772094727, 0.033555454254150394, 0.03353497695922852, 0.03349913787841797, 0.03347455978393555, 0.03362713623046875, 0.033650688171386715, 0.03344998550415039, 0.03351244735717773, 0.033532928466796875, 0.033718273162841796, 0.033614849090576174, 0.03364659118652344, 0.033478656768798826, 0.03355136108398438, 0.03344998550415039, 0.034282497406005856, 0.03367833709716797, 0.033516544342041016, 0.03338854217529297, 0.033584129333496096, 0.033539070129394534, 0.033326080322265625, 0.03335987091064453, 0.03366604614257813, 0.03334143829345703, 0.033258495330810545, 0.03334041595458984, 0.03343155288696289, 0.03324620819091797, 0.03349401473999023, 0.03338547134399414, 0.03338137435913086, 0.0335175666809082, 0.03442892837524414, 0.03411558532714844, 0.034249729156494144, 0.03337011337280273, 
0.03331174468994141, 0.033413120269775394, 0.033301502227783206, 0.03349606323242187, 0.03334143829345703, 0.03332403182983398, 0.033527809143066405, 0.0335206413269043, 0.03582156753540039, 0.035253246307373046, 0.03405209732055664, 0.03366604614257813, 0.033827838897705076, 0.03394559860229492, 0.03346636962890625, 0.03348582458496094, 0.03372544097900391, 0.03336703872680664, 0.03363942337036133, 0.033410049438476565, 0.06798847961425782, 0.033957889556884766, 0.03349401473999023, 0.03372236633300781, 0.033659934997558594, 0.033262561798095704, 0.033339393615722655, 0.03347148895263672, 0.03481190490722656, 0.03399270248413086, 0.033448993682861326, 0.033379295349121096, 0.03344179153442383, 0.03343155288696289, 0.0330618896484375, 0.03346124649047852, 0.033410049438476565, 0.033430526733398434, 0.033495040893554685, 0.03334656143188477, 0.03343564987182617, 0.033446910858154294, 0.033513534545898435, 0.03330553436279297, 0.03338547134399414, 0.033484798431396484, 0.03334147262573242, 0.03339465713500977, 0.03335782241821289, 0.03341516876220703, 0.033355777740478515, 0.03362713623046875, 0.03338854217529297, 0.033360897064208986, 0.03363020706176758, 0.03340800094604492, 0.03336505508422852, 0.03361990356445312, 0.03336191940307617, 0.035092479705810545, 0.034958335876464845, 0.03446783828735352, 0.033719329833984374, 0.0333260498046875, 0.03360768127441406, 0.03371417617797851, 0.03365785598754883, 0.033653759002685545, 0.03359027099609375, 0.03361177444458008, 0.033465343475341795, 0.033562625885009766, 0.03357183837890625, 0.03344076919555664, 0.0335093765258789, 0.03359231948852539, 0.033463294982910154, 0.03351551818847656, 0.03339059066772461, 0.0335022087097168, 0.03354009628295898, 0.033432575225830076, 0.033476608276367184, 0.06785228729248047, 0.038763519287109374, 0.03499212646484375, 0.03476172637939453, 0.03466035079956055, 0.03442892837524414, 0.03445862579345703, 0.0349194221496582, 0.03481087875366211, 0.03482009506225586, 0.03474227142333984, 0.03470336151123047, 0.03492768096923828, 0.0348221435546875, 0.034755519866943356, 0.03479654312133789, 0.035053569793701174, 0.03477503967285156, 0.03513651275634765, 0.03483647918701172, 0.03467161560058594, 0.03479244613647461, 0.034738174438476564, 0.03485184097290039, 0.03481292724609375, 0.03513651275634765, 0.03499728012084961, 0.034848735809326174, 0.03482009506225586, 0.03604684829711914, 0.03580108642578125, 0.034900993347167966, 0.03499622344970703, 0.03482009506225586, 0.03486617660522461, 0.034716670989990234, 0.034871295928955076, 0.03480883026123047, 0.03496857452392578, 0.034961406707763674, 0.03484262466430664, 0.03478732681274414, 0.0348671989440918, 0.03487539291381836, 0.03496038436889649, 0.03478121566772461, 0.03482006454467773, 0.03547750473022461, 0.03485184097290039, 0.034953216552734374, 0.034854911804199216, 0.03493478393554687, 0.03474943923950195, 0.03476889419555664, 0.03471974563598633, 0.034825214385986326, 0.035095550537109374, 0.0350300178527832, 0.034802688598632815, 0.034781185150146485, 0.034677761077880856, 0.034490367889404294, 0.034372608184814454, 0.07120281219482422, 0.03474227142333984, 0.03492659378051758, 0.03486515045166016, 0.035027999877929684, 0.035214336395263675, 0.035108863830566404, 0.03492655944824219, 0.03491635131835937, 0.03499212646484375, 0.0349409294128418, 0.03500236892700195, 0.03487641525268555, 0.03486105728149414, 0.03492659378051758, 0.03494911956787109, 0.03502489471435547, 0.034909183502197266, 0.03505152130126953, 0.03499724960327148, 0.03479142379760742, 
0.03497983932495117, 0.03498291015625, 0.03495116806030273, 0.03492454528808594, 0.03508633422851563, 0.034934814453125, 0.03494499206542969, 0.034936832427978515, 0.034841598510742186, 0.03491839981079101, 0.035023872375488284, 0.03488665771484375, 0.03486617660522461, 0.03508428955078125, 0.03497881698608398, 0.03496448135375976, 0.03487334442138672, 0.034991104125976565, 0.035092479705810545, 0.03487641525268555, 0.0350013427734375, 0.035125247955322264, 0.03501567840576172, 0.035350528717041016, 0.03521843338012695, 0.03506687927246094, 0.03503104019165039, 0.034985984802246094, 0.03507308959960938, 0.034981822967529295, 0.03523174285888672, 0.0352174072265625, 0.034923519134521484, 0.034361343383789066, 0.03486822509765625, 0.03495731353759766, 0.03501875305175781, 0.034956287384033204, 0.03489484786987305, 0.034835456848144535, 0.035156993865966796, 0.03473408126831055, 0.07153561401367188, 0.034855934143066404, 0.03496345520019531, 0.034987071990966796, 0.035133377075195316, 0.03502592086791992, 0.035043327331542966, 0.035209217071533204, 0.03504844665527344, 0.03502592086791992, 0.034936832427978515, 0.03493478393554687, 0.0358205451965332, 0.035659774780273434, 0.03496038436889649, 0.03518668746948242, 0.03511296081542969, 0.03490304183959961, 0.03482624053955078, 0.03487539291381836, 0.03492761611938477, 0.03479142379760742, 0.035125247955322264, 0.03506585693359375, 0.03505254364013672, 0.034825214385986326, 0.03476995086669922, 0.03494089508056641, 0.03478732681274414, 0.03488256072998047, 0.035244033813476565, 0.035007488250732424, 0.03475046539306641, 0.03497062301635742, 0.0347770881652832, 0.03480166244506836, 0.03479964828491211, 0.03495734405517578, 0.03496441650390625, 0.03494911956787109, 0.034885631561279294, 0.034953216552734374, 0.034909183502197266, 0.03480575942993164, 0.03474943923950195, 0.034948097229003904, 0.03509862518310547, 0.03496857452392578, 0.03502284622192383, 0.0344719352722168, 0.03450271987915039, 0.03489888000488281, 0.034909183502197266, 0.03487846374511719, 0.03488460922241211, 0.03492147064208984, 0.03486515045166016, 0.03475251388549805, 0.03486310577392578, 0.03487027359008789, 0.03497369766235352, 0.03508230209350586, 0.034826175689697265, 0.07138098907470704, 0.035064830780029296, 0.035089408874511716, 0.03489894485473633, 0.03474537658691406, 0.034952159881591796, 0.034969600677490234, 0.03484985733032227, 0.0348732795715332, 0.03523481750488281, 0.034993152618408206, 0.03489593505859375, 0.03493983840942383, 0.03493580627441406, 0.034854911804199216, 0.03482316970825195, 0.03493273544311523, 0.034797569274902344, 0.03540787124633789, 0.035068992614746095, 0.03575494384765625, 0.03548672103881836, 0.03483955383300781, 0.03533107376098633, 0.03497062301635742, 0.034800640106201174, 0.034786304473876956, 0.03497574234008789, 0.0349306869506836, 0.03490508651733398, 0.034917377471923826, 0.03463065719604492, 0.034928638458251955, 0.03481292724609375, 0.03505254364013672, 0.03483955383300781, 0.03466854476928711, 0.03482112121582031, 0.03525734329223633, 0.03482726287841797, 0.034948097229003904, 0.034776065826416014, 0.03475558471679688, 0.034283519744873044, 0.03471974563598633, 0.034735103607177735, 0.03482931137084961, 0.03508224105834961, 0.034678783416748044, 0.03488870239257812, 0.034802688598632815, 0.03478015899658203, 0.03476070404052734, 0.03488051223754883, 0.034976768493652347, 0.0347586555480957, 0.03504435348510742, 0.034929695129394533, 0.03516617584228516, 0.034799617767333986, 0.034723838806152346, 0.03487334442138672, 
0.03482931137084961, 0.07187558746337891, 0.03538739013671875, 0.03526348876953125, 0.03513241577148438, 0.034925567626953126, 0.03487744140625, 0.0354856948852539, 0.035422206878662106, 0.0351723518371582, 0.034976768493652347, 0.03488460922241211, 0.034890750885009765, 0.0349409294128418, 0.03503206253051758, 0.035076095581054685, 0.03489279937744141, 0.035119102478027346, 0.034729984283447264, 0.03479244613647461, 0.034864158630371095, 0.034802654266357425, 0.0348221435546875, 0.034678783416748044, 0.03501055908203125, 0.03484467315673828, 0.0348487663269043, 0.03479859161376953, 0.03482931137084961, 0.03487539291381836, 0.034783233642578126, 0.03495116806030273, 0.03474534225463867, 0.03493580627441406, 0.03485388946533203, 0.03487948989868164, 0.034926624298095704, 0.034881504058837894, 0.03505254364013672, 0.03485081481933594, 0.03487846374511719, 0.03444224166870117, 0.03474431991577148, 0.03492761611938477, 0.034948097229003904, 0.03510067367553711, 0.0347770881652832, 0.03488972854614258, 0.03482112121582031, 0.034977790832519534, 0.034405376434326174, 0.03482422256469726, 0.034968544006347656, 0.03485081481933594, 0.0349409294128418, 0.034840576171875, 0.034907135009765625, 0.035122177124023435, 0.03496755218505859, 0.03485388946533203, 0.035350528717041016, 0.03549593734741211, 0.03502182388305664, 0.03480473709106445, 0.07150796508789062, 0.0350013427734375, 0.03506995010375977, 0.034994174957275394, 0.03483647918701172, 0.03483135986328125, 0.034991104125976565, 0.03488153457641602, 0.03486310577392578, 0.034756607055664065, 0.034781185150146485, 0.03496038436889649, 0.034710529327392575, 0.03470848083496094, 0.034977790832519534, 0.03480780792236328, 0.03479040145874023, 0.034776065826416014, 0.03490304183959961, 0.03465219116210937, 0.0350074577331543, 0.03491123199462891, 0.03498905563354492, 0.03513139343261719, 0.03480883026123047, 0.03476582336425781, 0.03472281646728516, 0.034713600158691404, 0.034830337524414064, 0.03484364700317383, 0.03500646209716797, 0.034683902740478514, 0.03486310577392578, 0.03489894485473633, 0.034664447784423826, 0.03480575942993164, 0.034835456848144535, 0.03486822509765625, 0.0347770881652832, 0.03489382553100586, 0.03503615951538086, 0.03473715209960938, 0.03480473709106445, 0.034802688598632815, 0.034677761077880856, 0.03475251388549805, 0.03466652679443359, 0.03473097610473633, 0.0347883529663086, 0.035141632080078124, 0.03476070404052734, 0.03492966461181641, 0.03492147064208984, 0.034854911804199216, 0.034776065826416014, 0.03474431991577148, 0.0348037109375, 0.034800640106201174, 0.0348037109375, 0.03454054260253906, 0.03448934555053711, 0.03489791870117188, 0.03465830230712891, 0.07124582672119141, 0.03520102310180664, 0.03487744140625, 0.035156993865966796, 0.03470848083496094, 0.03489996719360351, 0.03482726287841797, 0.03494297790527344, 0.03487744140625, 0.034769920349121096, 0.034885631561279294, 0.03489996719360351, 0.03513756942749023, 0.03485385513305664, 0.03532185745239258, 0.03501875305175781, 0.0368721923828125, 0.03486310577392578, 0.03483443069458008, 0.03476582336425781, 0.03483647918701172, 0.0348221435546875, 0.03477196884155274, 0.034669567108154296, 0.03483647918701172, 0.034947071075439456, 0.0348671989440918, 0.03482316970825195, 0.03477814483642578, 0.03476988983154297, 0.03473612976074219, 0.03470745468139649, 0.03570380783081055, 0.03584921646118164, 0.03489484786987305, 0.03489689636230469, 0.03458969497680664, 0.034830337524414064, 0.03489689636230469, 0.03477811050415039, 0.034705406188964845, 
0.03480985641479492, 0.03458662414550781, 0.03588614273071289, 0.03546003341674805, 0.035043327331542966, 0.034993152618408206, 0.03488051223754883, 0.03490611267089844, 0.035004417419433595, 0.03475763320922851, 0.034797569274902344, 0.03473715209960938, 0.03501772689819336, 0.034772991180419925, 0.034885631561279294, 0.03487539291381836, 0.03483443069458008, 0.03482316970825195, 0.0350013427734375, 0.03475763320922851, 0.03494604873657227, 0.0350300178527832]",tokens/s,28.485984529775525,,,,,,,, -4bit-awq-gemm-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,Qwen/Qwen1.5-110B,Qwen/Qwen1.5-110B,cuda,0,42,,,True,,,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run - report = scenario.run(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run - self.run_model_loading_tracking(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking - backend.load() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load - self.load_transformers_model() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model - self.load_transformers_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights - self.load_transformers_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained - self.pretrained_model = self.automodel_loader.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4034, in from_pretrained - dispatch_model(model, **device_map_kwargs) - File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 494, in dispatch_model - model.to(device) - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 2905, in to - return super().to(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1174, in to - return self._apply(convert) - File 
""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply - module._apply(fn) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply - module._apply(fn) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply - module._apply(fn) - [Previous line repeated 2 more times] - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 854, in _apply - self._buffers[key] = fn(buf) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1160, in convert - return t.to( -torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 32.00 MiB. GPU 0 has a total capacity of 22.18 GiB of which 8.50 MiB is free. Process 84307 has 22.17 GiB memory in use. Of the allocated memory 21.91 GiB is allocated by PyTorch, and 17.87 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) - -",qwen2,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemm-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,B,B,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 304, in hf_raise_for_status - response.raise_for_status() - File ""/usr/local/lib/python3.10/dist-packages/requests/models.py"", line 1024, in raise_for_status - raise HTTPError(http_error_msg, response=self) -requests.exceptions.HTTPError: 404 Client Error: Not Found for url: https://huggingface.co/B/resolve/main/config.json - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1221, in hf_hub_download - return _hf_hub_download_to_cache_dir( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1325, in _hf_hub_download_to_cache_dir - 
_raise_on_head_call_error(head_call_error, force_download, local_files_only) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1823, in _raise_on_head_call_error - raise head_call_error - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1722, in _get_metadata_or_catch_error - metadata = get_hf_file_metadata(url=url, proxies=proxies, timeout=etag_timeout, headers=headers) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1645, in get_hf_file_metadata - r = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 372, in _request_wrapper - response = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 396, in _request_wrapper - hf_raise_for_status(response) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status - raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-669493af-07a0696f7b37cf132d2642fe;02f94de0-9875-4055-96c5-25d6f155b65d) - -Repository Not Found for url: https://huggingface.co/B/resolve/main/config.json. -Please make sure you specified the correct `repo_id` and `repo_type`. -If you are trying to access a private or gated repo, make sure you are authenticated. - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 425, in cached_file - raise EnvironmentError( -OSError: B is not a local folder and is not a valid model identifier listed on 'https://huggingface.co/models' -If this is a private repository, make sure to pass a token having permission to this repo either by logging in with `huggingface-cli login` or by passing `token=` - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 
-4bit-awq-gemm-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,t,t,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 304, in hf_raise_for_status - response.raise_for_status() - File ""/usr/local/lib/python3.10/dist-packages/requests/models.py"", line 1024, in raise_for_status - raise HTTPError(http_error_msg, response=self) -requests.exceptions.HTTPError: 404 Client Error: Not Found for url: https://huggingface.co/t/resolve/main/config.json - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1221, in hf_hub_download - return _hf_hub_download_to_cache_dir( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1325, in _hf_hub_download_to_cache_dir - _raise_on_head_call_error(head_call_error, force_download, local_files_only) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1823, in _raise_on_head_call_error - raise head_call_error - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1722, in _get_metadata_or_catch_error - metadata = get_hf_file_metadata(url=url, proxies=proxies, timeout=etag_timeout, headers=headers) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1645, in get_hf_file_metadata - r = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 372, in _request_wrapper - response = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 396, in _request_wrapper - hf_raise_for_status(response) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status - raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. 
(Request ID: Root=1-669490a4-289526393e0256c03402fba1;52d95121-eb62-4f8b-acd8-276858c6fc61) - -Repository Not Found for url: https://huggingface.co/t/resolve/main/config.json. -Please make sure you specified the correct `repo_id` and `repo_type`. -If you are trying to access a private or gated repo, make sure you are authenticated. - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 425, in cached_file - raise EnvironmentError( -OSError: t is not a local folder and is not a valid model identifier listed on 'https://huggingface.co/models' -If this is a private repository, make sure to pass a token having permission to this repo either by logging in with `huggingface-cli login` or by passing `token=` - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemm-eager,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,stabilityai/stablelm-base-alpha-3b,stabilityai/stablelm-base-alpha-3b,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.1,,0.30.1,,,,1.19.2,,,,0.11.1,,,,,,,,,,,,,,,,,,,,,,,,,,,MB,1400.946688,4467.458048,0.0,3821.010944,3588.539904,s,10,2.8509368591308593,0.285093685913086,0.0018864014341163783,0.28445895385742187,0.2852971740722656,0.28801900939941405,0.29019647766113277,"[0.2907408447265625, 0.28459954833984374, 0.28446585083007814, 0.2842637329101563, 0.28442007446289064, 0.2843152465820312, 0.2845796203613281, 0.2844075622558594, 0.28469232177734377, 
0.2844520568847656]",tokens/s,897.9504375205436,kWh,3.3620131512482964e-06,1.8420311789971794e-06,1.613754377422258e-05,2.134158810446806e-05,tokens/kWh,11995358.487234792,MB,1400.946688,4467.458048,0.0,3821.010944,3698.94656,s,10,165.60105078125,16.560105078124998,0.002304538991507171,16.5600693359375,16.5629611328125,16.56372666015625,16.56433908203125,"[16.5644921875, 16.559681640625, 16.560998046875, 16.557119140625, 16.55881640625, 16.558265625, 16.56045703125, 16.562791015625, 16.556978515625, 16.561451171875]",tokens/s,3.804323686521747,kWh,0.00019549797135922647,0.00010714875620388434,0.0009433736405007801,0.0012460203680638908,tokens/kWh,50560.97124470891,,s,629,167.89605535888654,0.2669253662303445,0.03378333259823164,0.26283621215820313,0.2630875061035156,0.263170263671875,0.5471718627929687,"[0.26277273559570313, 0.26256280517578123, 0.2627635192871094, 0.2625884094238281, 0.262614013671875, 0.26269287109375, 0.26282803344726563, 0.26283111572265627, 0.2626437072753906, 0.26271026611328124, 0.2626826171875, 0.2628269958496094, 0.2629119873046875, 0.2628935546875, 0.26298162841796874, 0.26274612426757815, 0.2627860412597656, 0.26264779663085935, 0.26285055541992186, 0.26276556396484374, 0.26288433837890623, 0.2632120361328125, 0.2632437744140625, 0.262935546875, 0.26322021484375, 0.2628751220703125, 0.26288433837890623, 0.2628751220703125, 0.2629795837402344, 0.26279833984375, 0.263130126953125, 0.2629437561035156, 0.263046142578125, 0.26281777954101565, 0.26273895263671876, 0.26270721435546873, 0.26306353759765627, 0.2627993469238281, 0.263014404296875, 0.26284442138671876, 0.26303692626953123, 0.26287103271484374, 0.26279220581054685, 0.26292633056640624, 0.2630000610351563, 0.2632509460449219, 0.2630911865234375, 0.26301644897460935, 0.2629447631835938, 0.2627860412597656, 0.26292837524414064, 0.26297344970703124, 0.2631321716308594, 0.26289767456054686, 0.26301132202148436, 0.2628464660644531, 0.26280755615234375, 0.2627205200195312, 0.2628055114746094, 0.26311065673828127, 0.26308914184570314, 0.26280755615234375, 0.5474641723632813, 0.2627502136230469, 0.262677490234375, 0.26272564697265627, 0.26257305908203127, 0.2627666015625, 0.26281777954101565, 0.26266726684570313, 0.2627686462402344, 0.26266009521484374, 0.2626662292480469, 0.26324069213867185, 0.2628741149902344, 0.26273382568359377, 0.26285772705078125, 0.2628853759765625, 0.26265701293945315, 0.2628894653320312, 0.2630328369140625, 0.26302566528320315, 0.26280856323242185, 0.2628055114746094, 0.2626734008789062, 0.2630440979003906, 0.2628382568359375, 0.26280038452148435, 0.2630901794433594, 0.26281573486328125, 0.2629386291503906, 0.2631526489257813, 0.2629437561035156, 0.2629140625, 0.26282803344726563, 0.2627686462402344, 0.26325503540039064, 0.26316903686523435, 0.26300314331054686, 0.26308096313476564, 0.263041015625, 0.26282290649414064, 0.26275430297851565, 0.26275942993164064, 0.2629099426269531, 0.26284542846679687, 0.26273995971679687, 0.2628136901855469, 0.2626826171875, 0.26290689086914065, 0.26275225830078125, 0.2628515930175781, 0.26283416748046873, 0.2628751220703125, 0.2627901306152344, 0.26285772705078125, 0.26277273559570313, 0.26280142211914065, 0.26279833984375, 0.2628833312988281, 0.26294168090820313, 0.2629375915527344, 0.26277273559570313, 0.26275839233398435, 0.2627430419921875, 0.547251220703125, 0.2627635192871094, 0.26263143920898435, 0.2626652221679687, 0.2625669250488281, 0.2627276916503906, 0.2626590576171875, 0.26271026611328124, 0.2630768737792969, 0.26291915893554685, 
0.26271435546875, 0.2629437561035156, 0.2627635192871094, 0.2625976257324219, 0.262697998046875, 0.2629939270019531, 0.2626723937988281, 0.2627799072265625, 0.26286285400390624, 0.26268978881835936, 0.2628239440917969, 0.2629273681640625, 0.2628853759765625, 0.2630993957519531, 0.2629744567871094, 0.26282290649414064, 0.26280856323242185, 0.2628874206542969, 0.26281573486328125, 0.2628106384277344, 0.26278707885742186, 0.2631178283691406, 0.2629519348144531, 0.26300827026367185, 0.2628822937011719, 0.262793212890625, 0.2627358703613281, 0.262898681640625, 0.2627635192871094, 0.2630543212890625, 0.26283621215820313, 0.2628751220703125, 0.2629508972167969, 0.2629242858886719, 0.26298675537109373, 0.26301336669921876, 0.2632284240722656, 0.26328472900390626, 0.2629119873046875, 0.2631649169921875, 0.26297854614257815, 0.26286181640625, 0.26307379150390625, 0.26298880004882813, 0.26273995971679687, 0.26296728515625, 0.26279730224609377, 0.2629386291503906, 0.26305535888671877, 0.262835205078125, 0.2627409973144531, 0.26296728515625, 0.26286181640625, 0.5470679321289063, 0.26265496826171875, 0.2626447448730469, 0.26254437255859375, 0.2625976257324219, 0.26270513916015625, 0.2626662292480469, 0.2626078796386719, 0.2626385803222656, 0.2627911682128906, 0.26279525756835936, 0.26274713134765626, 0.26267544555664063, 0.2626795654296875, 0.26265396118164064, 0.2627348327636719, 0.2626273193359375, 0.26269287109375, 0.26267648315429687, 0.26258740234375, 0.26294168090820313, 0.2628188171386719, 0.2626805725097656, 0.26300927734375, 0.26291915893554685, 0.2627901306152344, 0.2627624816894531, 0.2626580505371094, 0.26270925903320314, 0.26276556396484374, 0.2627819519042969, 0.26282803344726563, 0.26264779663085935, 0.26278707885742186, 0.26265087890625, 0.26304510498046874, 0.26298162841796874, 0.26296728515625, 0.2629273681640625, 0.26303079223632814, 0.26278091430664063, 0.262898681640625, 0.2630758361816406, 0.2631127014160156, 0.2629908447265625, 0.2628485107421875, 0.2629222412109375, 0.2627747802734375, 0.26271026611328124, 0.2629294128417969, 0.26290277099609377, 0.2629468078613281, 0.2629150695800781, 0.2629375915527344, 0.262803466796875, 0.2627768249511719, 0.26270721435546873, 0.26287307739257815, 0.2632929382324219, 0.2630379638671875, 0.2627747802734375, 0.26309222412109373, 0.26279730224609377, 0.5472327880859374, 0.2629273681640625, 0.2627839965820313, 0.26275225830078125, 0.26269183349609376, 0.26259249877929686, 0.26274508666992186, 0.26279425048828126, 0.2629375915527344, 0.2630973510742188, 0.26277069091796873, 0.2626744384765625, 0.26267544555664063, 0.2627768249511719, 0.2626908264160156, 0.2628167724609375, 0.2626355285644531, 0.2627747802734375, 0.2627993469238281, 0.26275942993164064, 0.2628392944335938, 0.26277069091796873, 0.2627286987304687, 0.26279730224609377, 0.2626713562011719, 0.26264984130859376, 0.2627358703613281, 0.26286590576171875, 0.26278091430664063, 0.2628464660644531, 0.2626447448730469, 0.2627850341796875, 0.2631075744628906, 0.26286285400390624, 0.26297549438476564, 0.26297650146484375, 0.26297344970703124, 0.26278912353515627, 0.2626252746582031, 0.2630748291015625, 0.26306149291992187, 0.26298880004882813, 0.2631884765625, 0.26296218872070315, 0.2627358703613281, 0.26285055541992186, 0.26278912353515627, 0.26330316162109374, 0.26291302490234375, 0.26285055541992186, 0.2627799072265625, 0.2628055114746094, 0.2628536376953125, 0.2628884582519531, 0.26296319580078126, 0.26335232543945314, 0.262930419921875, 0.26275326538085936, 0.2627911682128906, 
0.262835205078125, 0.26279833984375, 0.2627778625488281, 0.2626990051269531, 0.547040283203125, 0.2626211853027344, 0.2626027526855469, 0.26274713134765626, 0.26255462646484373, 0.26267034912109377, 0.2627778625488281, 0.2628802490234375, 0.26282907104492187, 0.2628853759765625, 0.26311578369140626, 0.2628106384277344, 0.262761474609375, 0.26274200439453127, 0.262645751953125, 0.2627491760253906, 0.2628413391113281, 0.2627215270996094, 0.262719482421875, 0.26273690795898436, 0.2629324951171875, 0.26294168090820313, 0.26289767456054686, 0.2629847106933594, 0.26286590576171875, 0.262866943359375, 0.2628167724609375, 0.2628802490234375, 0.262972412109375, 0.2628741149902344, 0.26290072631835937, 0.2630010986328125, 0.26286489868164065, 0.2628536376953125, 0.26311883544921877, 0.26306561279296875, 0.26278912353515627, 0.2628884582519531, 0.2627799072265625, 0.2629795837402344, 0.26278912353515627, 0.2628812866210937, 0.26278912353515627, 0.2628106384277344, 0.26293145751953123, 0.26276760864257814, 0.2627758178710938, 0.2628536376953125, 0.26280755615234375, 0.26282086181640624, 0.26271026611328124, 0.26280242919921876, 0.26279730224609377, 0.2627082214355469, 0.26268365478515626, 0.2629601135253906, 0.26269287109375, 0.2627307434082031, 0.26279525756835936, 0.26303079223632814, 0.26300314331054686, 0.262866943359375, 0.2628792419433594, 0.5475604248046875, 0.2626938781738281, 0.262719482421875, 0.26291915893554685, 0.26270309448242185, 0.262719482421875, 0.26266827392578124, 0.2629447631835938, 0.26271743774414064, 0.2627205200195312, 0.262645751953125, 0.26268161010742186, 0.26261196899414063, 0.2626805725097656, 0.26280960083007815, 0.26294784545898436, 0.2627061767578125, 0.26282803344726563, 0.26302462768554685, 0.2627491760253906, 0.2626959228515625, 0.2627440490722656, 0.26291915893554685, 0.26292120361328125, 0.26270721435546873, 0.26285260009765626, 0.26279525756835936, 0.26292327880859373, 0.2627440490722656, 0.26276043701171875, 0.26281268310546874, 0.2629427185058594, 0.26270925903320314, 0.2629847106933594, 0.26284442138671876, 0.2630963134765625, 0.26291815185546874, 0.2629662780761719, 0.26276556396484374, 0.2630348815917969, 0.2629161071777344, 0.26322738647460936, 0.26300827026367185, 0.26284033203125, 0.26286798095703123, 0.2630676574707031, 0.2629591064453125, 0.26298062133789063, 0.2627184753417969, 0.2629345397949219, 0.2627491760253906, 0.26317108154296875, 0.2627276916503906, 0.26300619506835937, 0.26283621215820313, 0.26308709716796874, 0.26335028076171874, 0.26290585327148436, 0.262793212890625, 0.2628751220703125, 0.2630625305175781, 0.262866943359375, 0.26292120361328125, 0.5473106079101563, 0.262751220703125, 0.26283111572265627, 0.2626652221679687, 0.2627184753417969, 0.26272357177734373, 0.26292327880859373, 0.26301031494140625, 0.26290585327148436, 0.26286181640625, 0.26279220581054685, 0.2627553405761719, 0.2628884582519531, 0.26308914184570314, 0.26276556396484374, 0.2630502319335937, 0.26282803344726563, 0.26288641357421877, 0.2627993469238281, 0.26274200439453127, 0.26290277099609377, 0.2628853759765625, 0.2627716979980469, 0.2627716979980469, 0.2627010498046875, 0.26282803344726563, 0.2629017639160156, 0.26265087890625, 0.262935546875, 0.2629508972167969, 0.2630492248535156, 0.26289971923828126, 0.2630379638671875, 0.2628894653320312, 0.2629601135253906, 0.2628935546875, 0.2628833312988281, 0.26278707885742186, 0.26283724975585937, 0.2631280517578125, 0.2629242858886719, 0.26284237670898436, 0.2629294128417969, 0.262930419921875, 0.2628167724609375, 
0.2629847106933594, 0.26290585327148436, 0.26296832275390625, 0.2628269958496094, 0.2630911865234375, 0.2628587646484375, 0.26311578369140626, 0.2628044738769531, 0.2630000610351563, 0.26291915893554685, 0.26306866455078126, 0.26301644897460935, 0.2631628723144531, 0.2630225830078125, 0.26312396240234376, 0.26298367309570314, 0.26306561279296875, 0.2629017639160156, 0.547757080078125, 0.2626713562011719, 0.26260171508789065, 0.262729736328125, 0.26323355102539064, 0.2629171142578125, 0.2627225646972656, 0.2627696533203125, 0.262856689453125, 0.2626641845703125, 0.26276760864257814, 0.2626375732421875, 0.2626744384765625, 0.2626744384765625, 0.26258636474609376, 0.262724609375, 0.2627061767578125, 0.26262835693359377, 0.26265087890625, 0.26280856323242185, 0.2628044738769531, 0.26286181640625, 0.2627768249511719, 0.26277069091796873, 0.2627133483886719, 0.26289971923828126, 0.26298880004882813, 0.2631403503417969, 0.2628608093261719, 0.26283416748046873, 0.26289254760742187, 0.26295297241210935, 0.2626713562011719, 0.2626641845703125, 0.2626150512695313, 0.26276455688476563, 0.26270413208007815, 0.2628331604003906, 0.262793212890625, 0.26271539306640623, 0.2627286987304687, 0.2627010498046875, 0.26297854614257815, 0.26323251342773435, 0.26312295532226565, 0.2629427185058594, 0.2632806396484375, 0.2629427185058594, 0.26271435546875, 0.26275326538085936, 0.2626631774902344, 0.262866943359375, 0.2626744384765625, 0.26271539306640623, 0.26262631225585936, 0.26273895263671876, 0.26274713134765626, 0.2630215759277344, 0.26290380859375, 0.2628802490234375, 0.26276760864257814, 0.26292837524414064, 0.26273995971679687, 0.5472122802734375, 0.262940673828125, 0.2628536376953125, 0.2628188171386719, 0.26270513916015625, 0.26295501708984376, 0.2627082214355469, 0.26284954833984375, 0.26267852783203127, 0.2627666015625, 0.26269491577148435, 0.2628239440917969, 0.26267034912109377, 0.26264883422851565, 0.2627061767578125, 0.2629591064453125, 0.2631485290527344, 0.26324786376953124, 0.26280755615234375, 0.26280960083007815, 0.26319256591796875, 0.26267852783203127, 0.2628167724609375, 0.2628433837890625, 0.26269287109375, 0.26274713134765626, 0.2629744567871094, 0.26298367309570314, 0.26273178100585937, 0.26305227661132813, 0.26277886962890623, 0.26314138793945313, 0.26285260009765626, 0.26283621215820313, 0.2628055114746094, 0.26295297241210935, 0.2627491760253906, 0.2626693115234375, 0.26276043701171875, 0.26314752197265623, 0.26287820434570314, 0.26291302490234375, 0.26278912353515627, 0.2630154113769531, 0.2626723937988281, 0.2631383056640625, 0.2627901306152344, 0.26301644897460935, 0.2626805725097656, 0.2629847106933594, 0.262761474609375, 0.26287103271484374, 0.26281268310546874, 0.26283724975585937, 0.2633441162109375, 0.2631127014160156, 0.2628884582519531, 0.26285260009765626, 0.26326937866210937, 0.26319155883789064, 0.2628894653320312, 0.26291815185546874, 0.2629099426269531]",tokens/s,3.7463655632377972,,,,,,,, -4bit-awq-gemm-eager,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,Salesforce/codegen-16B-nl,Salesforce/codegen-16B-nl,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,,spawn, AMD EPYC 
7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.0,,0.30.1,,,,1.19.2,,,,0.11.1,,,,,,,,,,,,,,,,,,,,,,,,,,,MB,1736.183808,12352.749568,0.0,11706.302464,11035.465216,s,10,13.024343627929689,1.3024343627929689,0.002338564901261307,1.30185107421875,1.3038882202148436,1.3064520446777343,1.3085031042480468,"[1.309015869140625, 1.3033184814453125, 1.3003494873046875, 1.3011973876953125, 1.3011253662109374, 1.300955810546875, 1.301638427734375, 1.302063720703125, 1.3022261962890624, 1.302452880859375]",tokens/s,196.55501061184225,kWh,1.5360654360718197e-05,8.415124671664671e-06,7.215477994602094e-05,9.593055897840379e-05,tokens/kWh,2668596.9802139024,MB,1736.183808,12352.749568,0.0,11706.302464,11329.172992,s,10,763.3166562499999,76.331665625,0.01404827179512581,76.33404687500001,76.34705,76.34801328125,76.34878390625,"[76.3116171875, 76.3329609375, 76.3489765625, 76.3411171875, 76.3468359375, 76.331546875, 76.3441953125, 76.3351328125, 76.31521875, 76.3090546875]",tokens/s,0.8253455428249735,kWh,0.0009011750282347202,0.0004939259724664952,0.0042084087556131695,0.005603509756314386,tokens/kWh,11242.953566558468,,s,629,773.821041748047,1.2302401299650985,0.15462762199250651,1.21159375,1.2122087890625,1.2124819091796875,2.513270859375,"[1.2114217529296876, 1.2109813232421875, 1.2112762451171875, 1.2113695068359376, 1.2109967041015626, 1.21078369140625, 1.210820556640625, 1.2114083251953125, 1.211473876953125, 1.2109915771484374, 1.2110264892578124, 1.211177001953125, 1.2111329345703126, 1.2108482666015625, 1.210894287109375, 1.2108328857421875, 1.2114595947265625, 1.21109912109375, 1.2110377197265625, 1.2114605712890625, 1.21109814453125, 1.2109107666015626, 1.2113837890625, 1.211439208984375, 1.2113089599609375, 1.2114852294921874, 1.2108953857421876, 1.211968505859375, 1.2109854736328125, 1.2114114990234375, 1.211503662109375, 1.2115701904296876, 1.2111954345703124, 1.212129150390625, 1.211504638671875, 1.2116378173828124, 1.21143701171875, 1.211441162109375, 1.2110633544921876, 1.21100390625, 1.210900390625, 1.2119766845703126, 1.21135302734375, 1.211661376953125, 1.21140625, 1.211376708984375, 1.21117578125, 1.211125732421875, 1.2112117919921874, 1.2114166259765624, 1.2111329345703126, 1.2116644287109375, 1.2111021728515625, 1.211261962890625, 1.2113489990234374, 1.2112978515625, 1.211167724609375, 1.211284423828125, 1.2117083740234376, 1.2115958251953125, 1.2114329833984374, 1.2118804931640625, 2.514330810546875, 1.210652587890625, 1.2109178466796875, 1.21115234375, 1.210639404296875, 1.2111728515625, 1.2111964111328124, 1.2109495849609375, 1.210982421875, 1.210829833984375, 1.211577392578125, 1.2116376953125, 1.2123822021484374, 1.2119183349609375, 1.2111278076171874, 1.2120401611328124, 1.2116695556640624, 1.2114871826171876, 1.2117001953125, 1.2118865966796875, 1.211429931640625, 1.2113817138671874, 1.21173095703125, 1.2116612548828125, 1.211536376953125, 1.211513916015625, 1.2119930419921876, 1.2118671875, 1.2117279052734375, 1.211217041015625, 1.2116253662109375, 1.211358154296875, 1.211610107421875, 1.21132958984375, 1.2115968017578125, 1.2115753173828125, 1.212295166015625, 1.211431884765625, 1.211931640625, 1.21170947265625, 1.21185791015625, 1.2116807861328125, 1.211569091796875, 1.2115169677734374, 1.2117945556640626, 1.211475830078125, 1.2115762939453125, 1.2120863037109375, 1.211937744140625, 1.2119654541015625, 1.2116817626953125, 1.2118035888671874, 1.212337158203125, 
1.2117073974609376, 1.2118385009765624, 1.2118773193359376, 1.21191015625, 1.2120863037109375, 1.21194091796875, 1.212105712890625, 1.2121497802734376, 1.2118917236328124, 1.211799560546875, 2.513431640625, 1.2114862060546876, 1.2116961669921875, 1.2118785400390626, 1.2121865234375, 1.2122286376953124, 1.2120013427734375, 1.2121025390625, 1.21226953125, 1.211442138671875, 1.2113643798828124, 1.2111483154296876, 1.2115557861328126, 1.2120247802734374, 1.2112076416015625, 1.2115435791015625, 1.2116663818359374, 1.2110797119140626, 1.2113746337890625, 1.2114166259765624, 1.2112281494140624, 1.2118333740234375, 1.211620361328125, 1.2116275634765625, 1.211684814453125, 1.2115281982421875, 1.212507080078125, 1.2123299560546874, 1.211989990234375, 1.2121068115234375, 1.212232666015625, 1.2115784912109375, 1.21185888671875, 1.211282470703125, 1.211768798828125, 1.2113284912109374, 1.2114544677734376, 1.2116142578125, 1.2119736328125, 1.21160400390625, 1.2118660888671875, 1.211783203125, 1.211763671875, 1.21185888671875, 1.211821044921875, 1.211894775390625, 1.212494873046875, 1.2119080810546874, 1.2119388427734374, 1.2123709716796875, 1.211740234375, 1.2130374755859374, 1.212662841796875, 1.212389404296875, 1.21267919921875, 1.2124569091796875, 1.212505126953125, 1.212078125, 1.2119132080078125, 1.2119111328125, 1.2119111328125, 1.2122039794921875, 1.212859375, 2.513799072265625, 1.2114248046875, 1.211298828125, 1.2112598876953125, 1.2111002197265626, 1.211157470703125, 1.2119388427734374, 1.2117012939453125, 1.211177001953125, 1.21225927734375, 1.2121456298828126, 1.212099609375, 1.2122501220703126, 1.212095458984375, 1.2122705078125, 1.212464111328125, 1.21236474609375, 1.21200439453125, 1.211461669921875, 1.21135107421875, 1.2123013916015626, 1.2120933837890624, 1.2120391845703125, 1.2125921630859375, 1.2116397705078126, 1.21157421875, 1.2111861572265625, 1.21097216796875, 1.2113848876953126, 1.2114390869140625, 1.21153125, 1.211658203125, 1.2115947265625, 1.2115711669921876, 1.2116275634765625, 1.211242431640625, 1.21172998046875, 1.2113121337890624, 1.2114923095703125, 1.211609130859375, 1.2118292236328125, 1.2115927734375, 1.21164794921875, 1.2114503173828124, 1.2116920166015626, 1.2116182861328124, 1.2114801025390625, 1.211564208984375, 1.21261865234375, 1.2118896484375, 1.211937744140625, 1.2124937744140625, 1.2117872314453124, 1.2115753173828125, 1.2119429931640624, 1.211613037109375, 1.212316650390625, 1.211663330078125, 1.2121661376953126, 1.2119859619140625, 1.2116695556640624, 1.2118455810546875, 1.2117862548828124, 2.5132001953125, 1.21116162109375, 1.2113070068359375, 1.2113653564453124, 1.211356201171875, 1.2111585693359375, 1.21143505859375, 1.211683837890625, 1.2116121826171875, 1.2118189697265624, 1.2116448974609375, 1.21178515625, 1.2120279541015626, 1.21154052734375, 1.211658203125, 1.211451416015625, 1.21213134765625, 1.2125296630859375, 1.2122440185546874, 1.212705810546875, 1.2123677978515626, 1.2122071533203125, 1.2123084716796875, 1.2123751220703125, 1.2121220703125, 1.2126556396484376, 1.2121026611328125, 1.21187841796875, 1.212506103515625, 1.21231982421875, 1.212422119140625, 1.2116009521484374, 1.2117115478515625, 1.2120238037109374, 1.2117667236328125, 1.2115660400390624, 1.2122071533203125, 1.211494384765625, 1.211684814453125, 1.21158251953125, 1.211788330078125, 1.2116644287109375, 1.211957275390625, 1.2114073486328125, 1.211895751953125, 1.2115875244140626, 1.2115179443359374, 1.211845703125, 1.2115126953125, 1.2115343017578124, 1.2121968994140624, 
1.2115262451171875, 1.211950927734375, 1.2120555419921875, 1.211916259765625, 1.2119879150390624, 1.2116162109375, 1.2115435791015625, 1.212336181640625, 1.2118609619140626, 1.21176171875, 1.211826171875, 1.2119696044921875, 2.513016845703125, 1.2115302734375, 1.21221533203125, 1.211273193359375, 1.211552734375, 1.21154248046875, 1.211378662109375, 1.2118077392578126, 1.2109425048828124, 1.2108125, 1.211199462890625, 1.2112393798828125, 1.2109864501953125, 1.2110008544921875, 1.2113499755859376, 1.21132958984375, 1.211335693359375, 1.2113797607421875, 1.211218994140625, 1.21196240234375, 1.211410400390625, 1.2120770263671874, 1.2116243896484375, 1.2119141845703125, 1.2118671875, 1.211895751953125, 1.2114534912109376, 1.2118670654296875, 1.211398193359375, 1.2112353515625, 1.21159375, 1.2112230224609375, 1.211525146484375, 1.211252685546875, 1.2116796875, 1.21173095703125, 1.2115252685546876, 1.21129150390625, 1.211832275390625, 1.211515869140625, 1.21154052734375, 1.21152001953125, 1.2113541259765626, 1.211916259765625, 1.211999267578125, 1.211557861328125, 1.2116612548828125, 1.2116644287109375, 1.211683837890625, 1.2118814697265625, 1.2116859130859374, 1.21154248046875, 1.212590087890625, 1.211631591796875, 1.21187841796875, 1.2120863037109375, 1.211619384765625, 1.2120166015625, 1.2119869384765625, 1.2120177001953125, 1.2121640625, 1.211704345703125, 1.211842529296875, 2.51329833984375, 1.2113704833984376, 1.2111278076171874, 1.21133056640625, 1.2119141845703125, 1.2111728515625, 1.2112230224609375, 1.2113807373046874, 1.2113182373046876, 1.211325439453125, 1.2114288330078125, 1.211557861328125, 1.211810791015625, 1.2117626953125, 1.21128759765625, 1.2117484130859375, 1.21141650390625, 1.2113756103515625, 1.2116080322265625, 1.211427978515625, 1.2113477783203126, 1.2119552001953124, 1.211189208984375, 1.2114248046875, 1.211610107421875, 1.21135205078125, 1.2119481201171876, 1.2117523193359374, 1.211484130859375, 1.2121180419921875, 1.21152099609375, 1.2116326904296875, 1.211673583984375, 1.2113121337890624, 1.2119869384765625, 1.2122685546875, 1.2123853759765626, 1.2128603515625, 1.2124027099609376, 1.2125235595703125, 1.2127314453125, 1.2127191162109374, 1.212885986328125, 1.2127242431640626, 1.2118333740234375, 1.211953125, 1.2121129150390626, 1.2117801513671875, 1.212015625, 1.211763671875, 1.2117012939453125, 1.2120667724609375, 1.21187939453125, 1.2117197265625, 1.2119981689453125, 1.21179443359375, 1.2120340576171875, 1.212147705078125, 1.21187841796875, 1.2120791015625, 1.21209033203125, 1.211905029296875, 1.2124200439453126, 2.514125732421875, 1.2114227294921875, 1.211292724609375, 1.211658203125, 1.2115589599609375, 1.2113983154296875, 1.2119888916015624, 1.2113704833984376, 1.211610107421875, 1.211758544921875, 1.2114852294921874, 1.21145654296875, 1.211821044921875, 1.2112169189453126, 1.2116602783203125, 1.211989990234375, 1.2113223876953125, 1.2115517578125, 1.2113489990234374, 1.2112762451171875, 1.2118035888671874, 1.211737060546875, 1.2115977783203125, 1.2129249267578126, 1.21261767578125, 1.2122420654296875, 1.211623291015625, 1.2114923095703125, 1.212148681640625, 1.2119019775390625, 1.21175146484375, 1.2118385009765624, 1.2116920166015626, 1.2118294677734376, 1.21165185546875, 1.2111922607421874, 1.2113212890625, 1.2114442138671875, 1.211694091796875, 1.21236474609375, 1.212018798828125, 1.211768798828125, 1.211747314453125, 1.2114288330078125, 1.2113223876953125, 1.2116746826171876, 1.21150048828125, 1.21159375, 1.21177294921875, 1.21147802734375, 1.212080078125, 
1.21192138671875, 1.2113746337890625, 1.2114554443359375, 1.2115548095703126, 1.21162646484375, 1.211575439453125, 1.2113038330078125, 1.211806640625, 1.2116695556640624, 1.211400146484375, 1.2115343017578124, 1.2114677734375, 2.5139150390625, 1.2104947509765625, 1.2104796142578125, 1.2110601806640624, 1.210818603515625, 1.21080419921875, 1.2113817138671874, 1.2107110595703126, 1.2115599365234375, 1.210962890625, 1.21103466796875, 1.2109844970703125, 1.2109957275390626, 1.210639404296875, 1.210967041015625, 1.210735595703125, 1.211298828125, 1.2114892578125, 1.210882080078125, 1.2117001953125, 1.211292724609375, 1.2112998046875, 1.211736083984375, 1.2114698486328126, 1.2116868896484374, 1.212020751953125, 1.2116572265625, 1.2118538818359375, 1.211252685546875, 1.2111072998046875, 1.211619384765625, 1.2111688232421876, 1.211368408203125, 1.2118763427734376, 1.2112978515625, 1.211556884765625, 1.211440185546875, 1.2111011962890625, 1.21163671875, 1.2113223876953125, 1.2111298828125, 1.2116162109375, 1.2110653076171876, 1.2111217041015625, 1.211494384765625, 1.211451416015625, 1.2114759521484375, 1.21181689453125, 1.2115538330078126, 1.2116080322265625, 1.211831298828125, 1.211515869140625, 1.21173193359375, 1.211282470703125, 1.2113018798828126, 1.21145654296875, 1.2112281494140624, 1.2112547607421875, 1.2116612548828125, 1.21142578125, 1.21210986328125, 1.2116920166015626, 1.2119910888671874, 2.51358935546875, 1.21063427734375, 1.211430908203125, 1.210544189453125, 1.2111871337890625, 1.21103564453125, 1.2112589111328125, 1.2109700927734375, 1.2114503173828124, 1.2111072998046875, 1.21147802734375, 1.210945556640625, 1.2111922607421874, 1.211684814453125, 1.2114554443359375, 1.2112486572265626, 1.2119346923828125, 1.2112281494140624, 1.2114483642578124, 1.2110079345703124, 1.2108524169921875, 1.2109935302734376, 1.2109833984375, 1.2113858642578126, 1.2111492919921876, 1.210892333984375, 1.2111124267578126, 1.2112025146484375, 1.2109700927734375, 1.211509765625, 1.21159375, 1.2114063720703125, 1.211040771484375, 1.21109814453125, 1.210799072265625, 1.210892333984375, 1.210966064453125, 1.210841064453125, 1.2113079833984375, 1.21124560546875, 1.2112608642578124, 1.2112496337890626, 1.2110306396484376, 1.2115035400390626, 1.211241455078125, 1.2117279052734375, 1.2114945068359375, 1.2111419677734374, 1.2113212890625, 1.211431884765625, 1.2111728515625, 1.2113961181640625, 1.211167724609375, 1.210883056640625, 1.21160498046875, 1.211494384765625, 1.2115814208984375, 1.2117698974609374, 1.2115025634765626, 1.2116275634765625, 1.2118814697265625, 1.211230224609375, 1.21170947265625]",tokens/s,0.8128494394247809,,,,,,,, -4bit-awq-gemm-eager,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,facebook/opt-6.7b,facebook/opt-6.7b,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.0,,0.30.1,,,,1.19.2,,,,0.11.1,,,,,,,,,,,,,,,,,,,,,,,,,,,MB,2022.883328,5539.10272,0.0,4892.655616,4542.741504,s,10,5.694402282714844,0.5694402282714843,0.0022278964552512335,0.5688670349121093,0.5707117797851562,0.5731342102050782,0.5750721545410157,"[0.575556640625, 
0.5690338134765625, 0.5678692626953125, 0.5687002563476562, 0.5676268920898437, 0.5674249267578125, 0.5684844970703125, 0.5696422119140625, 0.5701734619140625, 0.5698903198242188]",tokens/s,449.56430418883286,kWh,6.703966662839608e-06,3.6734817678128214e-06,3.077389807590622e-05,4.115134650655865e-05,tokens/kWh,6220938.601831633,MB,2022.883328,5539.10272,0.0,4892.655616,4726.280192,s,10,334.554328125,33.4554328125,0.012952548332308819,33.45466796875,33.46486484375,33.476350390625,33.485538828125,"[33.4878359375, 33.44866796875, 33.443125, 33.44916015625, 33.43737890625, 33.4588671875, 33.4546875, 33.4623125, 33.4546484375, 33.45764453125]",tokens/s,1.8831022259697452,kWh,0.00039510093122168826,0.00021654957452975094,0.0018051211817500937,0.0024167716875015325,tokens/kWh,26067.832690116305,,s,629,339.1397302856444,0.5391728621393395,0.06755475317116652,0.53097265625,0.5316083862304688,0.5317822387695312,1.09872478515625,"[0.5310126342773438, 0.53113037109375, 0.5317058715820312, 0.530935791015625, 0.5307023315429688, 0.531651611328125, 0.5313556518554687, 0.53151025390625, 0.5312081909179688, 0.5308344116210938, 0.5306572875976563, 0.531704833984375, 0.5313720092773437, 0.5314744262695312, 0.5315502319335937, 0.5319423828125, 0.5314641723632813, 0.53194140625, 0.5313074951171874, 0.53119384765625, 0.5315758056640625, 0.5314529418945313, 0.5310607299804687, 0.5313535766601563, 0.5317161254882813, 0.531689453125, 0.53146826171875, 0.531725341796875, 0.5313976440429687, 0.53174169921875, 0.5316290283203124, 0.5319515991210938, 0.5317816162109374, 0.5313218383789062, 0.5316331787109375, 0.5317826538085938, 0.5314375610351563, 0.5311324462890625, 0.5311549682617187, 0.5317427368164063, 0.5311119384765625, 0.5312276611328125, 0.5314365234375, 0.5316792602539062, 0.5316904907226563, 0.5320693969726562, 0.531862548828125, 0.5321605224609375, 0.5318092651367188, 0.5320345458984375, 0.5320376586914063, 0.531989501953125, 0.5317529296875, 0.5317816162109374, 0.532279296875, 0.531968994140625, 0.531651611328125, 0.5318492431640625, 0.5322158203125, 0.5313269653320313, 0.5311262817382812, 0.5317109985351562, 1.100517333984375, 0.531336181640625, 0.5311201171875, 0.5306900634765624, 0.5306705932617187, 0.5307289428710937, 0.5306470336914062, 0.5307658081054687, 0.530572265625, 0.5306316528320313, 0.5310679321289062, 0.5309183959960937, 0.5307955322265625, 0.5309910888671875, 0.5315430297851562, 0.531473388671875, 0.5305477294921875, 0.5311221923828126, 0.5307709350585937, 0.530966552734375, 0.5305128784179688, 0.5308323974609375, 0.5309839477539062, 0.5310494995117188, 0.5307473754882812, 0.5309757690429687, 0.5309235229492187, 0.5309337768554687, 0.53066650390625, 0.5311743774414063, 0.53091943359375, 0.5312245483398438, 0.5310628051757813, 0.5319270629882813, 0.5310371704101563, 0.5307791137695312, 0.5309276123046875, 0.530777099609375, 0.5308047485351562, 0.5308313598632812, 0.530946044921875, 0.5307647705078125, 0.5308211059570312, 0.5306654663085938, 0.530524169921875, 0.5307432861328125, 0.5308251953125, 0.53072998046875, 0.530977783203125, 0.5311867065429687, 0.5309869995117188, 0.5312542724609375, 0.5314273071289063, 0.5308969116210938, 0.5310075073242188, 0.5313853149414063, 0.5306746826171875, 0.53089892578125, 0.5307893676757812, 0.5309020385742188, 0.5307484130859375, 0.5309389038085938, 0.5306982421875, 1.0987735595703125, 0.5308344116210938, 0.530502685546875, 0.5315389404296875, 0.5309573364257812, 0.5307207641601562, 0.530703369140625, 0.531072021484375, 0.5305855712890625, 
0.5308221435546875, 0.530577392578125, 0.530745361328125, 0.5306163330078125, 0.5306101684570312, 0.5306142578125, 0.5307074584960938, 0.5306920776367188, 0.530682861328125, 0.5304330444335937, 0.5306326904296875, 0.5307760620117188, 0.5308211059570312, 0.5306358032226562, 0.53072998046875, 0.5306920776367188, 0.5310648193359375, 0.5310341186523437, 0.5307627563476562, 0.530850830078125, 0.5307586669921875, 0.5305743408203125, 0.530819091796875, 0.5308047485351562, 0.5305784301757812, 0.5307218017578125, 0.5306890258789062, 0.5307053833007812, 0.5306900634765624, 0.5304514770507812, 0.5309265747070312, 0.5308579711914062, 0.5307044067382812, 0.5306583251953125, 0.530681884765625, 0.5314078979492187, 0.5308272705078125, 0.53083544921875, 0.5314437255859376, 0.5309910888671875, 0.5313760986328125, 0.5309757690429687, 0.5308323974609375, 0.5307709350585937, 0.5311876831054687, 0.5309788208007813, 0.5310750732421875, 0.5309757690429687, 0.5311590576171875, 0.530819091796875, 0.5316198120117187, 0.5310689086914062, 0.5308856201171875, 0.530956298828125, 1.099652099609375, 0.5306685180664062, 0.5309573364257812, 0.5311047973632812, 0.5308251953125, 0.530680908203125, 0.5306971435546874, 0.5309389038085938, 0.5305805053710938, 0.5308150024414062, 0.530714599609375, 0.5305753784179688, 0.530609130859375, 0.5306388549804687, 0.5310023803710937, 0.5311734008789063, 0.5306429443359375, 0.5309942016601562, 0.5309910888671875, 0.5308006591796876, 0.5307330322265625, 0.5312122802734375, 0.5308692626953125, 0.53094091796875, 0.5309368286132813, 0.5310699462890625, 0.531135498046875, 0.5310894165039063, 0.5310156860351563, 0.5310453491210938, 0.5309798583984375, 0.5312214965820312, 0.5310709838867187, 0.5311631469726562, 0.5312348022460938, 0.5308098754882813, 0.5313177490234375, 0.5314129638671875, 0.5311539306640625, 0.5311221923828126, 0.5308692626953125, 0.5309265747070312, 0.5310023803710937, 0.5306695556640625, 0.5307924194335938, 0.5308477172851562, 0.5312583618164063, 0.5308375244140625, 0.5306859741210938, 0.5308375244140625, 0.5310238647460938, 0.5310392456054688, 0.5313167114257813, 0.5309163818359375, 0.530735107421875, 0.5309808349609375, 0.5313843383789062, 0.53096240234375, 0.530924560546875, 0.5308661499023437, 0.5307893676757812, 0.5308641357421875, 0.5307678833007813, 1.098599365234375, 0.5306071166992188, 0.5305692138671875, 0.5305211181640626, 0.53089794921875, 0.5305497436523438, 0.5308221435546875, 0.5307658081054687, 0.5307996215820312, 0.530661376953125, 0.5306900634765624, 0.5311692504882812, 0.530609130859375, 0.5308006591796876, 0.5306644287109376, 0.5308375244140625, 0.5307730102539062, 0.5307525024414063, 0.5305477294921875, 0.5310003051757812, 0.53079345703125, 0.5306757202148438, 0.5304637451171875, 0.5307637939453125, 0.5306132202148437, 0.530956298828125, 0.530988037109375, 0.5308764038085938, 0.5306808471679687, 0.5308743896484375, 0.5307279663085938, 0.5307576293945313, 0.5310965576171875, 0.5306583251953125, 0.53056103515625, 0.5305681762695312, 0.530555908203125, 0.5308845825195313, 0.5308344116210938, 0.5308221435546875, 0.530787353515625, 0.5306757202148438, 0.5306941528320313, 0.5308262329101563, 0.5310812377929688, 0.530934814453125, 0.5305538330078124, 0.5308098754882813, 0.5308231811523437, 0.5310986328125, 0.5306429443359375, 0.530845703125, 0.5307422485351563, 0.5307197265625, 0.530492431640625, 0.5308323974609375, 0.5305548706054688, 0.53087744140625, 0.5306767578125, 0.5308753662109374, 0.5305855712890625, 0.5308129272460937, 0.5303736572265625, 
1.098567626953125, 0.5309030151367188, 0.5317017822265625, 0.5314866943359375, 0.531140625, 0.5313515625, 0.5313720092773437, 0.530951171875, 0.5308917846679687, 0.530724853515625, 0.5307863159179688, 0.5310587158203125, 0.530777099609375, 0.530819091796875, 0.53087744140625, 0.5309696044921876, 0.5308897094726562, 0.5310904541015625, 0.531515380859375, 0.5310259399414062, 0.5307739868164062, 0.5312061157226563, 0.5318901977539062, 0.5310761108398437, 0.5309962158203125, 0.5314529418945313, 0.5309951782226563, 0.5310013427734375, 0.5313760986328125, 0.5313331298828124, 0.5312849731445313, 0.5316055297851563, 0.5308313598632812, 0.5314816284179688, 0.531324951171875, 0.5315655517578125, 0.5319567260742187, 0.5317283935546875, 0.5308856201171875, 0.5309910888671875, 0.5312266235351563, 0.53136279296875, 0.5310289916992188, 0.5309757690429687, 0.530524169921875, 0.5308047485351562, 0.5308436279296875, 0.5307371215820312, 0.5306552124023437, 0.5307576293945313, 0.530914306640625, 0.530629638671875, 0.530746337890625, 0.5308231811523437, 0.5308712768554688, 0.5312450561523437, 0.5309327392578125, 0.53103515625, 0.5309214477539063, 0.5310279541015624, 0.5311580200195313, 0.5310894165039063, 0.5313126220703125, 1.0993970947265626, 0.5308150024414062, 0.53054052734375, 0.53062451171875, 0.5311447143554687, 0.5309869995117188, 0.5307893676757812, 0.5311334228515625, 0.5309542236328125, 0.5308313598632812, 0.5310842895507812, 0.5308108520507813, 0.5306603393554687, 0.531177490234375, 0.5307023315429688, 0.5314396362304687, 0.5306491088867188, 0.5312952270507812, 0.5308108520507813, 0.530840576171875, 0.5308078002929687, 0.5307863159179688, 0.5308047485351562, 0.5311262817382812, 0.53103515625, 0.5310740356445313, 0.5309634399414063, 0.531051513671875, 0.5307965698242187, 0.5314877319335938, 0.530746337890625, 0.5310259399414062, 0.5311201171875, 0.5307340698242188, 0.5309931640625, 0.5309593505859375, 0.5321942749023437, 0.5316433715820312, 0.5312388916015625, 0.530872314453125, 0.5307698974609375, 0.5307975463867187, 0.53082421875, 0.5306982421875, 0.5306019897460937, 0.5307197265625, 0.5305917358398438, 0.5307258911132813, 0.53085693359375, 0.531398681640625, 0.53166796875, 0.5317969970703125, 0.531862548828125, 0.5320745239257813, 0.5316608276367187, 0.5313966064453125, 0.5307791137695312, 0.53075048828125, 0.5309102172851563, 0.5311057739257813, 0.5310842895507812, 0.5312010498046875, 0.5308037109375, 1.1007191162109375, 0.53106689453125, 0.5309931640625, 0.5313853149414063, 0.5308897094726562, 0.5309972534179688, 0.5310842895507812, 0.531324951171875, 0.5306767578125, 0.5316229248046875, 0.5309573364257812, 0.530756591796875, 0.5307310180664062, 0.53082421875, 0.5305927734375, 0.5316853637695312, 0.5315819702148438, 0.5312542724609375, 0.5311344604492187, 0.531040283203125, 0.5308897094726562, 0.5313392944335937, 0.5316823120117188, 0.53125634765625, 0.5309573364257812, 0.5317652587890624, 0.531435546875, 0.531162109375, 0.5312890625, 0.5313095703125, 0.5313320922851562, 0.5313013916015625, 0.5311661987304688, 0.531324951171875, 0.5308743896484375, 0.53102490234375, 0.5308999633789062, 0.53098291015625, 0.5310023803710937, 0.5311641845703124, 0.5315082397460937, 0.5309317016601562, 0.5308917846679687, 0.5309010009765625, 0.5306695556640625, 0.531409912109375, 0.5308856201171875, 0.531504150390625, 0.5308948364257813, 0.5309337768554687, 0.5309696044921876, 0.5308682250976563, 0.5308712768554688, 0.5310637817382813, 0.5310596923828125, 0.530924560546875, 0.5309398803710937, 0.5311754150390625, 
0.5311918334960938, 0.5315389404296875, 0.5313822631835937, 0.5316004028320312, 0.5323612060546875, 1.101580322265625, 0.5309798583984375, 0.5314283447265625, 0.53097265625, 0.5306849365234375, 0.53102490234375, 0.5308344116210938, 0.5312901000976562, 0.5309429931640625, 0.5312133178710937, 0.53045556640625, 0.5307914428710937, 0.53096142578125, 0.5311590576171875, 0.531051513671875, 0.5311539306640625, 0.5308795166015625, 0.5314437255859376, 0.5311702880859375, 0.5311856689453125, 0.5311795043945312, 0.5310105590820312, 0.5306644287109376, 0.530904052734375, 0.5307525024414063, 0.530788330078125, 0.5305599975585937, 0.5308651733398437, 0.5308682250976563, 0.5307955322265625, 0.5306275634765625, 0.5308323974609375, 0.53082421875, 0.5314150390625, 0.5311631469726562, 0.53131982421875, 0.5312214965820312, 0.5309327392578125, 0.5311282958984375, 0.5312542724609375, 0.530845703125, 0.5310156860351563, 0.53068798828125, 0.5308897094726562, 0.5308231811523437, 0.5310628051757813, 0.531072021484375, 0.5317027587890625, 0.5310494995117188, 0.5310187377929687, 0.5309296875, 0.531330078125, 0.5311447143554687, 0.53096044921875, 0.5310719604492188, 0.531030029296875, 0.5310156860351563, 0.53125634765625, 0.5312440185546875, 0.5312337646484375, 0.5311641845703124, 0.5312235717773437, 0.530788330078125, 1.10097412109375, 0.5311385498046876, 0.5308712768554688, 0.5309481201171875, 0.5307709350585937, 0.5306480712890626, 0.5311713256835937, 0.5311160278320313, 0.5307095336914063, 0.5308487548828125, 0.53075048828125, 0.5309020385742188, 0.5306849365234375, 0.5309808349609375, 0.53065625, 0.5309685668945312, 0.5306695556640625, 0.5307422485351563, 0.5307944946289063, 0.53087744140625, 0.53075048828125, 0.5307781372070313, 0.5308733520507812, 0.5309255981445312, 0.5307822265625, 0.531041259765625, 0.5312808837890625, 0.5311181030273437, 0.531198974609375, 0.5309603881835937, 0.5314375610351563, 0.5310955810546875, 0.5312604370117188, 0.5309583129882812, 0.5311651611328125, 0.530998291015625, 0.53083544921875, 0.5311876831054687, 0.531009521484375, 0.531167236328125, 0.5306757202148438, 0.531293212890625, 0.5311754150390625, 0.53102490234375, 0.5311047973632812, 0.5315072021484375, 0.531167236328125, 0.5316218872070313, 0.5310310668945313, 0.531662841796875, 0.5310525512695312, 0.5312481079101562, 0.5310341186523437, 0.5309962158203125, 0.5309798583984375, 0.5312184448242188, 0.5310965576171875, 0.5310013427734375, 0.5311498413085938, 0.53165771484375, 0.5311181030273437, 0.531272705078125, 0.5313116455078125]",tokens/s,1.8546927529553001,,,,,,,, -4bit-awq-gemv-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,.,.,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File 
""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 373, in cached_file - raise EnvironmentError( -OSError: . does not appear to have a file named config.json. Checkout 'https://huggingface.co/./tree/None' for available files. - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemv-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,l,l,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 304, in hf_raise_for_status - response.raise_for_status() - File ""/usr/local/lib/python3.10/dist-packages/requests/models.py"", line 1024, in raise_for_status - raise HTTPError(http_error_msg, response=self) -requests.exceptions.HTTPError: 404 Client Error: Not Found for url: https://huggingface.co/l/resolve/main/config.json - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - 
File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1221, in hf_hub_download - return _hf_hub_download_to_cache_dir( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1325, in _hf_hub_download_to_cache_dir - _raise_on_head_call_error(head_call_error, force_download, local_files_only) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1823, in _raise_on_head_call_error - raise head_call_error - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1722, in _get_metadata_or_catch_error - metadata = get_hf_file_metadata(url=url, proxies=proxies, timeout=etag_timeout, headers=headers) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1645, in get_hf_file_metadata - r = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 372, in _request_wrapper - response = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 396, in _request_wrapper - hf_raise_for_status(response) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status - raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-669491be-6be489d15777cf0d431fe6cc;22eeef10-eb0a-48d7-8393-55415d6f0ed8) - -Repository Not Found for url: https://huggingface.co/l/resolve/main/config.json. -Please make sure you specified the correct `repo_id` and `repo_type`. -If you are trying to access a private or gated repo, make sure you are authenticated. 
- -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 425, in cached_file - raise EnvironmentError( -OSError: l is not a local folder and is not a valid model identifier listed on 'https://huggingface.co/models' -If this is a private repository, make sure to pass a token having permission to this repo either by logging in with `huggingface-cli login` or by passing `token=` - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemv-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,Qwen/Qwen1.5-32B,Qwen/Qwen1.5-32B,cuda,0,42,,,True,,,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,qwen2,MB,17837.924352,21357.920256,0.0,20772.29056,20328.900608,s,1,14.204771484375,14.204771484375,0.0,14.204771484375,14.204771484375,14.204771484375,14.204771484375,[14.204771484375],,kWh,8.725302875901536e-05,4.780623887562563e-05,0.00012586565624805868,0.00026092492388269967,,MB,4418.400256,21892.694016,0.0,21246.246912,20640.73728,s,10,4.921095489501953,0.4921095489501953,0.00013921068114227224,0.492099853515625,0.49230864868164065,0.4923201568603516,0.49232936340332034,"[0.49230609130859376, 0.49197897338867186, 0.4918567810058594, 0.4921710510253906, 0.4920533447265625, 0.4921463623046875, 0.492180908203125, 0.4923316650390625, 0.49204986572265624, 
0.49202044677734375]",tokens/s,520.2093731896043,kWh,5.817891577710066e-06,3.187953166257042e-06,3.4684551557142985e-05,4.3690396301110096e-05,tokens/kWh,5859411.259071035,MB,4422.418432,21919.956992,0.0,21273.509888,20640.73984,s,10,36.81683374023437,3.681683374023437,0.00996697201341147,3.683453491210938,3.691398681640625,3.6931982421875,3.6946378906249997,"[3.694997802734375, 3.671735595703125, 3.68397802734375, 3.667395751953125, 3.682928955078125, 3.68205810546875, 3.66432421875, 3.6876728515625, 3.690998779296875, 3.69074365234375]",tokens/s,17.111737648192165,kWh,4.357231084020513e-05,2.3880529054676535e-05,0.0001908820455626543,0.000258334885457536,tokens/kWh,243869.50251964974,,s,630,36.8130302886963,0.058433381410629016,0.0010797931258523165,0.05806950378417969,0.0599544849395752,0.06043391876220703,0.06142006324768067,"[0.05873455810546875, 0.058243072509765625, 0.057491455078125, 0.058213375091552735, 0.058665985107421874, 0.05854412841796875, 0.05910015869140625, 0.058055679321289064, 0.05793484878540039, 0.05799116897583008, 0.06191923141479492, 0.05906022262573242, 0.05794713592529297, 0.05788876724243164, 0.05832089614868164, 0.0579788818359375, 0.057796607971191405, 0.05897727966308594, 0.05847859191894531, 0.05758771133422851, 0.05917900848388672, 0.05808947372436524, 0.057452545166015626, 0.058848255157470705, 0.05858611297607422, 0.05914112091064453, 0.058211326599121094, 0.057447425842285155, 0.05823078536987305, 0.057761791229248044, 0.059292671203613284, 0.059888641357421876, 0.060524543762207034, 0.057853950500488284, 0.0576255989074707, 0.05706547164916992, 0.05710028839111328, 0.057778175354003904, 0.058180606842041016, 0.05801369476318359, 0.05889843368530273, 0.05875404739379883, 0.05905408096313477, 0.05799731063842774, 0.058788864135742185, 0.058439678192138675, 0.06001971054077149, 0.05916876983642578, 0.059807743072509766, 0.05961318588256836, 0.05978009414672852, 0.05978316879272461, 0.057990142822265625, 0.05989785766601562, 0.05929369735717773, 0.0591912956237793, 0.05901824188232422, 0.059865089416503904, 0.059478015899658204, 0.06076620864868164, 0.05804646301269531, 0.0578600959777832, 0.05801062393188477, 0.059948001861572266, 0.058493953704833984, 0.05760921478271484, 0.05774950408935547, 0.058196990966796876, 0.06236671829223633, 0.05815091323852539, 0.05959372711181641, 0.0587960319519043, 0.05747817611694336, 0.056727519989013674, 0.05689139175415039, 0.05765529632568359, 0.05766451263427735, 0.058022911071777344, 0.057576446533203124, 0.05794815826416016, 0.05803724670410156, 0.05830348968505859, 0.05992550277709961, 0.057885696411132816, 0.05792563247680664, 0.060061695098876954, 0.058502143859863284, 0.057777153015136716, 0.05824204635620117, 0.05750681686401367, 0.05802905654907226, 0.05775462341308594, 0.05727436828613281, 0.05750374221801758, 0.05894246292114258, 0.05875302505493164, 0.057690113067626954, 0.0580208625793457, 0.05822873687744141, 0.05893427276611328, 0.05796147155761719, 0.05766144180297852, 0.059053054809570314, 0.058984447479248046, 0.05874995040893555, 0.05799219131469727, 0.05845708847045898, 0.05845196914672852, 0.061462528228759764, 0.05867827224731445, 0.05773721694946289, 0.0577259521484375, 0.05700812911987305, 0.057570304870605465, 0.05778636932373047, 0.05807206344604492, 0.0578785285949707, 0.05784678268432617, 0.05847244644165039, 0.05904383850097656, 0.05790617752075195, 0.05837926483154297, 0.05872844696044922, 0.057875457763671874, 0.05777203369140625, 0.05779558563232422, 0.05817446517944336, 
0.05782425689697265, 0.060217342376708984, 0.0581130256652832, 0.05833011245727539, 0.059049983978271485, 0.058011646270751956, 0.05702656173706055, 0.05775667190551758, 0.057616382598876956, 0.0579420166015625, 0.05799731063842774, 0.05713407897949219, 0.05759590530395508, 0.058858497619628906, 0.060477439880371096, 0.057852928161621096, 0.05805363082885742, 0.057245697021484375, 0.05914419174194336, 0.06059417724609375, 0.05965414428710938, 0.06008729553222656, 0.05818982315063476, 0.05747609710693359, 0.05790412902832031, 0.05783039855957031, 0.057622528076171874, 0.05806182479858398, 0.05816115188598633, 0.057791488647460934, 0.05789081573486328, 0.05857382583618164, 0.05852364730834961, 0.058281982421875, 0.05740339279174805, 0.057280513763427736, 0.05998591995239258, 0.0580208625793457, 0.06127718353271484, 0.059390975952148435, 0.05824512100219727, 0.05822873687744141, 0.05949747085571289, 0.05963673782348633, 0.05794611358642578, 0.059512832641601565, 0.05797683334350586, 0.05784371185302734, 0.05820006561279297, 0.058815486907958986, 0.0591646728515625, 0.05722521591186523, 0.05833011245727539, 0.05941145706176758, 0.05787443161010742, 0.05768499374389648, 0.058436607360839846, 0.059154430389404294, 0.05810892868041992, 0.060349441528320315, 0.05827276611328125, 0.05898649597167969, 0.05906534576416016, 0.060197887420654295, 0.057847808837890625, 0.058256385803222656, 0.05854207992553711, 0.058630142211914066, 0.05764505767822266, 0.0576808967590332, 0.05765631866455078, 0.05781196975708008, 0.05720985412597656, 0.05782220840454102, 0.0585799674987793, 0.05724979019165039, 0.057729022979736325, 0.05925273513793945, 0.05904383850097656, 0.05891891098022461, 0.05740339279174805, 0.059566078186035154, 0.059930622100830076, 0.06043852615356445, 0.060114944458007816, 0.05917798233032227, 0.057434112548828124, 0.05684428787231445, 0.05752115249633789, 0.057839614868164066, 0.05796044921875, 0.058891265869140626, 0.05776588821411133, 0.057734142303466796, 0.05759078216552734, 0.0576286735534668, 0.0577894401550293, 0.05761843109130859, 0.057643009185791017, 0.05777407836914063, 0.05696921539306641, 0.05857689666748047, 0.05897216033935547, 0.05871104049682617, 0.057311233520507814, 0.05730815887451172, 0.057041919708251954, 0.05794815826416016, 0.05755804824829101, 0.05875811386108398, 0.05830144119262695, 0.0577894401550293, 0.05731737518310547, 0.06015795135498047, 0.05730508804321289, 0.05744947052001953, 0.06042828750610352, 0.05798912048339844, 0.05770956802368164, 0.05936844635009766, 0.05962035369873047, 0.05755596923828125, 0.05801062393188477, 0.058047489166259764, 0.057112575531005856, 0.058518527984619144, 0.05960294342041016, 0.059799552917480465, 0.06101708984375, 0.05909401702880859, 0.058249214172363284, 0.05804851150512695, 0.05797273635864258, 0.057611263275146485, 0.05783347320556641, 0.058477569580078125, 0.05791231918334961, 0.05772288131713867, 0.05814374542236328, 0.06040371322631836, 0.05811097717285156, 0.057867263793945314, 0.05868032073974609, 0.05968896102905273, 0.05871308898925781, 0.05785804748535156, 0.05892607879638672, 0.05757747268676758, 0.0577710075378418, 0.057248767852783204, 0.05877043151855469, 0.060283905029296876, 0.058347518920898435, 0.05777612686157227, 0.0578416633605957, 0.058055679321289064, 0.058461185455322265, 0.05811814498901367, 0.05755801773071289, 0.060985343933105465, 0.06243123245239258, 0.05842534255981445, 0.05765529632568359, 0.05894553756713867, 0.058499073028564455, 0.05731327819824219, 0.057783294677734375, 
0.05754163360595703, 0.05892095947265625, 0.05749862289428711, 0.05719039916992188, 0.057265151977539064, 0.05743513488769531, 0.06082559967041016, 0.06052249526977539, 0.05794918441772461, 0.05850316619873047, 0.05816831970214844, 0.057891841888427734, 0.05735628890991211, 0.05676031875610352, 0.05843865585327149, 0.05953126525878906, 0.05770444869995117, 0.05768396759033203, 0.05831987380981445, 0.05861478424072265, 0.05844070434570312, 0.0587325439453125, 0.058248191833496096, 0.060988414764404295, 0.058211326599121094, 0.05761843109130859, 0.058464256286621094, 0.05787443161010742, 0.05854105758666992, 0.05965926361083984, 0.05871615982055664, 0.058102783203125, 0.058194942474365234, 0.057652225494384764, 0.058264575958251956, 0.059681793212890626, 0.05989273452758789, 0.05856051254272461, 0.06101708984375, 0.05857894515991211, 0.060096511840820314, 0.059302913665771485, 0.05867212677001953, 0.059200511932373044, 0.0599736328125, 0.057852928161621096, 0.05761228942871094, 0.057621505737304686, 0.05814169692993164, 0.05799219131469727, 0.05673164749145508, 0.05701017761230469, 0.057450496673583984, 0.0584796142578125, 0.057804798126220705, 0.057662464141845705, 0.05768499374389648, 0.0580208625793457, 0.05756415939331055, 0.05891993713378906, 0.05886771011352539, 0.058759166717529294, 0.05823283386230469, 0.057355262756347655, 0.057368576049804686, 0.057218048095703126, 0.05997875213623047, 0.05888819122314453, 0.05934796905517578, 0.05944729614257813, 0.057973758697509765, 0.058265598297119144, 0.058159137725830076, 0.059719646453857425, 0.058156032562255856, 0.057957374572753906, 0.05851955032348633, 0.05913292694091797, 0.058142719268798826, 0.05827276611328125, 0.0577525749206543, 0.05744435119628906, 0.05776588821411133, 0.058278911590576174, 0.059138046264648435, 0.057450496673583984, 0.060744705200195315, 0.05909196853637695, 0.05992959976196289, 0.05805363082885742, 0.05900492858886719, 0.059246593475341794, 0.058157054901123044, 0.05751910400390625, 0.05711769485473633, 0.05897420883178711, 0.057899009704589846, 0.057665534973144535, 0.057055233001708984, 0.05676748657226562, 0.057047039031982424, 0.056834049224853515, 0.056785919189453124, 0.05732352066040039, 0.06020710372924805, 0.05836492919921875, 0.05741056060791016, 0.05815193557739258, 0.057417728424072265, 0.058006526947021485, 0.05709107208251953, 0.05725798416137695, 0.05799628829956055, 0.05936742401123047, 0.05860454559326172, 0.0611256332397461, 0.05804339218139649, 0.05769728088378906, 0.05771878433227539, 0.05781094360351562, 0.05771571350097656, 0.06009446334838867, 0.05925580978393555, 0.05789286422729492, 0.05758054351806641, 0.05773004913330078, 0.05921279907226563, 0.05799219131469727, 0.058211326599121094, 0.05709721755981445, 0.05797990417480469, 0.05832294464111328, 0.05765324783325195, 0.05805158233642578, 0.05795430374145508, 0.05711052703857422, 0.057396224975585934, 0.0577259521484375, 0.05813043212890625, 0.059202560424804686, 0.05817958450317383, 0.05796659088134765, 0.057622528076171874, 0.059894783020019535, 0.05797580718994141, 0.05814476776123047, 0.05790105438232422, 0.058090496063232425, 0.05870284652709961, 0.05861785507202148, 0.05758156967163086, 0.05737472152709961, 0.061093887329101565, 0.061126655578613284, 0.057578495025634766, 0.057919486999511716, 0.058103809356689455, 0.059112449645996094, 0.06006579208374024, 0.057783294677734375, 0.057793537139892576, 0.058014720916748044, 0.057507839202880856, 0.05786524963378906, 0.05831676864624023, 0.059025409698486325, 0.059286529541015626, 
0.05788467025756836, 0.05996543884277344, 0.05794303894042969, 0.05789491271972656, 0.058054656982421876, 0.05805055999755859, 0.061080577850341794, 0.06081740951538086, 0.05911859130859375, 0.05771263885498047, 0.05774950408935547, 0.05826355361938477, 0.06028595352172852, 0.05929676818847656, 0.0581847038269043, 0.05826150512695313, 0.057366527557373044, 0.0592097282409668, 0.058024959564208986, 0.05978214263916016, 0.05793484878540039, 0.05847552108764648, 0.05789286422729492, 0.05786521530151367, 0.05790412902832031, 0.06222848129272461, 0.060365825653076174, 0.05792256164550781, 0.057425918579101565, 0.057434112548828124, 0.05717606353759765, 0.059873279571533204, 0.058671104431152345, 0.05797478485107422, 0.0585615348815918, 0.05874687957763672, 0.057785343170166016, 0.05756927871704102, 0.057815040588378906, 0.057447425842285155, 0.05786111831665039, 0.05810073471069336, 0.05797683334350586, 0.058331134796142575, 0.05882470321655273, 0.06035968017578125, 0.058172416687011716, 0.05744537734985351, 0.060037120819091794, 0.06105190277099609, 0.05787443161010742, 0.05735116958618164, 0.05793075180053711, 0.05786624145507813, 0.057518081665039064, 0.05753036880493164, 0.057373695373535157, 0.05792153549194336, 0.05845503997802735, 0.05743308639526367, 0.05736038589477539, 0.05806796646118164, 0.05897011184692383, 0.057823230743408206, 0.05795532989501953, 0.05778227233886719, 0.05788159942626953, 0.05785599899291992, 0.05709209442138672, 0.05803417587280273, 0.060284927368164064, 0.05866188812255859, 0.05735116958618164, 0.05753855895996094, 0.05950566482543945, 0.05865881729125977, 0.060007423400878904, 0.05805363082885742, 0.05789798355102539, 0.05992652893066406, 0.060478462219238284, 0.057458686828613284, 0.05745663833618164, 0.058243072509765625, 0.057717758178710936, 0.05776998519897461, 0.05848883056640625, 0.057875457763671874, 0.05795532989501953, 0.05804032135009766, 0.05774131011962891, 0.057659423828125, 0.05784880065917969, 0.05813555145263672, 0.05919539260864258, 0.060128257751464846, 0.06013747024536133, 0.06131609725952149, 0.059954177856445315, 0.059921409606933596, 0.059117568969726565, 0.059947006225585936, 0.06161203384399414, 0.059957248687744144, 0.05970739364624023, 0.05866291046142578, 0.06074771118164062, 0.058071041107177736, 0.05765324783325195, 0.058636287689208984, 0.057586688995361325, 0.05767987060546875, 0.05935721588134766, 0.057923553466796875, 0.05761740875244141, 0.05762355041503906, 0.05762355041503906, 0.05824204635620117, 0.05820211029052735, 0.05771059036254883, 0.05695283126831055, 0.05744025421142578, 0.061088768005371094, 0.058254337310791014, 0.059865089416503904, 0.05831167984008789, 0.05775360107421875, 0.05784985733032227, 0.05752627182006836, 0.05775462341308594, 0.059061248779296874, 0.05775769424438477, 0.057675777435302736, 0.05766451263427735, 0.05773721694946289, 0.05834854507446289, 0.0578682861328125, 0.05742489624023438, 0.05792768096923828, 0.05982617568969727, 0.06113382339477539, 0.05857177734375, 0.059566078186035154, 0.057995262145996096, 0.05847142410278321, 0.059028480529785154, 0.0578600959777832, 0.057608192443847656, 0.057734142303466796, 0.05773721694946289, 0.05718527984619141, 0.057812992095947265, 0.058521598815917966, 0.05935103988647461, 0.060012542724609375, 0.058613761901855466, 0.059999359130859374, 0.05886246490478515, 0.0579317741394043, 0.05967257690429688, 0.05933055877685547, 0.06785536193847656, 0.06007398223876953, 0.05782835388183594, 0.05769318389892578, 0.06128742218017578, 0.05857484817504883, 
0.05757132720947265, 0.05781708908081055]",tokens/s,17.113505600038753,,,,,,,, -4bit-awq-gemv-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,openai-community/gpt2,openai-community/gpt2,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.42.3,,0.31.0,,,,1.20.0,,,,0.11.1,,,,,,,,,,,,,,,,,,,,,,,,,,,MB,1217.482752,1002.962944,0.0,356.51584,319.013888,s,25,0.17377513599395752,0.006951005439758301,0.00021285069119682677,0.006859776020050049,0.007271795177459717,0.007306099033355713,0.0075682175064086905,"[0.007650239944458008, 0.007234623908996582, 0.00704527997970581, 0.00680460786819458, 0.0068609600067138675, 0.00677836799621582, 0.00682473611831665, 0.0068039679527282714, 0.00680291223526001, 0.00730847978591919, 0.006837952136993408, 0.006873439788818359, 0.006928832054138184, 0.006819744110107422, 0.00691267204284668, 0.006827583789825439, 0.006859776020050049, 0.0067794880867004395, 0.007181856155395508, 0.006854944229125977, 0.006890560150146484, 0.007296576023101807, 0.006809375762939453, 0.0068458237648010255, 0.006942336082458496]",tokens/s,36829.20438181985,kWh,8.181268208831889e-08,4.482922321235986e-08,1.7226448838091112e-07,2.9890639368158985e-07,tokens/kWh,856455416.8510163,MB,1217.482752,1002.962944,0.0,356.51584,319.016448,s,25,10.10318005371094,0.4041272021484375,0.009041377880648377,0.4006143493652344,0.4117974182128906,0.42338720092773435,0.4344844934082031,"[0.4373000183105469, 0.40733187866210935, 0.398733154296875, 0.40750152587890626, 0.39825311279296877, 0.39877890014648437, 0.3988628234863281, 0.3992034606933594, 0.3991992492675781, 0.4000351257324219, 0.40367324829101564, 0.4066448059082031, 0.3984886474609375, 0.40342138671875, 0.398855224609375, 0.40221279907226565, 0.39863790893554685, 0.40269754028320315, 0.40168536376953123, 0.400095458984375, 0.41466134643554686, 0.42556866455078124, 0.4007449951171875, 0.4006143493652344, 0.3999790649414062]",tokens/s,155.89151055676734,kWh,4.884634116620386e-06,2.676530817879685e-06,8.197834800282627e-06,1.57589997347827e-05,tokens/kWh,3997715.6583706676,,s,1574,10.262640651226052,0.006520102065581985,0.0009050603008066671,0.006338560104370118,0.00674856972694397,0.006865920066833496,0.013484024801254272,"[0.007161856174468994, 0.007085055828094483, 0.007062528133392334, 0.007070720195770264, 0.007012351989746094, 0.007053311824798584, 0.007062528133392334, 0.0070563840866088865, 0.006840320110321045, 0.006719488143920899, 0.006717440128326416, 0.006799359798431396, 0.006890495777130127, 0.00695091199874878, 0.007017471790313721, 0.006897664070129395, 0.006813695907592773, 0.006865920066833496, 0.006972415924072266, 0.006919167995452881, 0.007156735897064209, 0.007051263809204102, 0.007244800090789795, 0.007097343921661377, 0.007085055828094483, 0.006964223861694336, 0.007008255958557129, 0.006976511955261231, 0.006994944095611572, 0.007102464199066162, 0.007038976192474365, 0.007131135940551757, 0.007008255958557129, 0.006998015880584717, 0.007203839778900147, 0.007073791980743408, 0.006860799789428711, 0.006910975933074951, 0.00684441614151001, 
0.006865920066833496, 0.007029759883880615, 0.006725632190704346, 0.006808576107025147, 0.006915071964263916, 0.006789120197296142, 0.006917119979858399, 0.0069027838706970214, 0.006814720153808594, 0.006814720153808594, 0.006810624122619629, 0.006862847805023193, 0.006988800048828125, 0.006908927917480469, 0.006994944095611572, 0.006842368125915528, 0.006897664070129395, 0.006791168212890625, 0.006790143966674805, 0.00674508810043335, 0.006684703826904297, 0.006531040191650391, 0.006618112087249756, 0.014370880126953124, 0.0063681921958923336, 0.006546432018280029, 0.006509568214416504, 0.006533120155334473, 0.006860799789428711, 0.007010303974151612, 0.007018496036529541, 0.007047167778015137, 0.007074816226959229, 0.007166975975036621, 0.00690176010131836, 0.006707200050354004, 0.0066344962120056155, 0.006568960189819336, 0.006534143924713135, 0.006561791896820069, 0.006558720111846924, 0.006511616230010986, 0.0067010560035705566, 0.006788095951080322, 0.006545407772064209, 0.006509568214416504, 0.006351871967315674, 0.006299647808074951, 0.006319104194641113, 0.00628223991394043, 0.006306816101074219, 0.006300672054290772, 0.006300672054290772, 0.006296576023101807, 0.00628223991394043, 0.006319104194641113, 0.006301695823669433, 0.006296576023101807, 0.006310912132263183, 0.0062975997924804685, 0.006304768085479737, 0.006388735771179199, 0.006313983917236328, 0.006319104194641113, 0.006331391811370849, 0.0063211522102355954, 0.0063170561790466305, 0.0063170561790466305, 0.006303743839263916, 0.00638259220123291, 0.00636518383026123, 0.006313983917236328, 0.006325247764587402, 0.00628326416015625, 0.006310912132263183, 0.006277120113372803, 0.0063272957801818845, 0.006428671836853027, 0.006278143882751465, 0.0062791681289672855, 0.006275072097778321, 0.006331391811370849, 0.006308864116668702, 0.006355967998504639, 0.0063498239517211915, 0.006298624038696289, 0.013892607688903809, 0.006338560104370118, 0.00633241605758667, 0.006295551776885986, 0.006328320026397705, 0.006322175979614258, 0.0063272957801818845, 0.0063211522102355954, 0.006300672054290772, 0.006311935901641846, 0.006307839870452881, 0.006341631889343262, 0.006340608119964599, 0.006306816101074219, 0.006341631889343262, 0.006344704151153564, 0.006358016014099121, 0.006377471923828125, 0.006352896213531494, 0.006426623821258545, 0.0063201279640197755, 0.006329343795776367, 0.006344704151153564, 0.006325247764587402, 0.006337535858154297, 0.006296576023101807, 0.006323200225830078, 0.006301695823669433, 0.006308864116668702, 0.006310912132263183, 0.006325247764587402, 0.006304768085479737, 0.0062975997924804685, 0.006305791854858398, 0.006301695823669433, 0.006342656135559082, 0.006316031932830811, 0.006303743839263916, 0.006358016014099121, 0.006293504238128662, 0.006315008163452148, 0.006322175979614258, 0.006298624038696289, 0.00632422399520874, 0.0062975997924804685, 0.006325247764587402, 0.006288383960723877, 0.006312960147857666, 0.006315008163452148, 0.006295551776885986, 0.006311935901641846, 0.006301695823669433, 0.006325247764587402, 0.006293504238128662, 0.0063211522102355954, 0.006302720069885254, 0.0062904319763183595, 0.006305791854858398, 0.006285312175750732, 0.006319104194641113, 0.006338560104370118, 0.00633241605758667, 0.006375423908233643, 0.013460479736328124, 0.006305791854858398, 0.006343679904937744, 0.0064174079895019534, 0.006359039783477783, 0.006333439826965332, 0.0063170561790466305, 0.006281216144561768, 0.006622208118438721, 0.007672832012176513, 0.006605823993682861, 0.006567935943603515, 
0.006624256134033203, 0.006543360233306885, 0.00667955207824707, 0.006597631931304931, 0.006343679904937744, 0.00688640022277832, 0.006688767910003662, 0.006556672096252441, 0.0065474557876586915, 0.0065372161865234375, 0.006567935943603515, 0.006726655960083008, 0.0066406397819519045, 0.0066109437942504885, 0.0065484800338745115, 0.0065413122177124024, 0.00652185583114624, 0.0065372161865234375, 0.006788095951080322, 0.006560768127441406, 0.006540287971496582, 0.0065136637687683106, 0.006331391811370849, 0.006393856048583985, 0.006346752166748047, 0.006338560104370118, 0.006342656135559082, 0.006304768085479737, 0.0063272957801818845, 0.006326272010803223, 0.006331391811370849, 0.006339583873748779, 0.0063569917678833006, 0.006328320026397705, 0.006371327877044678, 0.006300672054290772, 0.0063539199829101565, 0.006316031932830811, 0.006328320026397705, 0.006304768085479737, 0.006306816101074219, 0.006319104194641113, 0.006328320026397705, 0.006313983917236328, 0.006358016014099121, 0.006299647808074951, 0.006330368041992188, 0.006501376152038574, 0.006432767868041992, 0.006360064029693604, 0.006348832130432129, 0.013516799926757812, 0.006293471813201904, 0.006299647808074951, 0.006329343795776367, 0.006289408206939697, 0.00633241605758667, 0.006273024082183838, 0.0063201279640197755, 0.006305791854858398, 0.00638259220123291, 0.006294528007507324, 0.0063211522102355954, 0.006328320026397705, 0.0063272957801818845, 0.006346752166748047, 0.0063211522102355954, 0.006312960147857666, 0.006315008163452148, 0.006305791854858398, 0.0063272957801818845, 0.006289408206939697, 0.0063211522102355954, 0.00632422399520874, 0.006295551776885986, 0.006330368041992188, 0.0063498239517211915, 0.006306816101074219, 0.006306816101074219, 0.006326272010803223, 0.006323200225830078, 0.006312960147857666, 0.006309887886047363, 0.006310912132263183, 0.006360064029693604, 0.006258687973022461, 0.006284287929534912, 0.006301695823669433, 0.006298624038696289, 0.006306816101074219, 0.006318079948425293, 0.006322175979614258, 0.006351871967315674, 0.0062873601913452145, 0.006339583873748779, 0.006326272010803223, 0.006333439826965332, 0.006313983917236328, 0.006310912132263183, 0.006318079948425293, 0.006341631889343262, 0.006322175979614258, 0.006299647808074951, 0.00632422399520874, 0.00632422399520874, 0.006331391811370849, 0.0063170561790466305, 0.006294528007507324, 0.0063539199829101565, 0.006312960147857666, 0.006299647808074951, 0.0062975997924804685, 0.006338560104370118, 0.006319168090820312, 0.01340614414215088, 0.006311935901641846, 0.006323200225830078, 0.006355999946594238, 0.00633135986328125, 0.006343679904937744, 0.0063498239517211915, 0.006369279861450195, 0.006363135814666748, 0.006346752166748047, 0.00633241605758667, 0.006344704151153564, 0.006348800182342529, 0.006411263942718506, 0.006337535858154297, 0.006418432235717773, 0.006323200225830078, 0.006343679904937744, 0.006329343795776367, 0.006344704151153564, 0.006339583873748779, 0.006340608119964599, 0.006330368041992188, 0.006295551776885986, 0.006338560104370118, 0.006329343795776367, 0.0063201279640197755, 0.0062904319763183595, 0.006266880035400391, 0.006289408206939697, 0.006308864116668702, 0.006259712219238281, 0.006337535858154297, 0.006352896213531494, 0.006319104194641113, 0.0063211522102355954, 0.006328320026397705, 0.006312960147857666, 0.00636518383026123, 0.006388735771179199, 0.0063272957801818845, 0.00628326416015625, 0.006223872184753418, 0.006244351863861084, 0.006242303848266601, 0.006238207817077636, 
0.006235136032104492, 0.0062566399574279785, 0.006309887886047363, 0.006315008163452148, 0.006336512088775635, 0.00639081621170044, 0.006297567844390869, 0.006319104194641113, 0.006313983917236328, 0.006340608119964599, 0.006328320026397705, 0.006330368041992188, 0.006351903915405273, 0.006374368190765381, 0.0063498239517211915, 0.006360064029693604, 0.00633241605758667, 0.013469696044921875, 0.006311935901641846, 0.006293504238128662, 0.006331391811370849, 0.006334464073181153, 0.006310912132263183, 0.006326272010803223, 0.0063272957801818845, 0.00638156795501709, 0.006342656135559082, 0.006334464073181153, 0.006344704151153564, 0.00636518383026123, 0.006340608119964599, 0.006334464073181153, 0.006351871967315674, 0.006343679904937744, 0.006312960147857666, 0.006351871967315674, 0.006352896213531494, 0.0063201279640197755, 0.006334464073181153, 0.006289408206939697, 0.006362112045288086, 0.006307839870452881, 0.006327328205108643, 0.006322144031524658, 0.006310912132263183, 0.0063508481979370115, 0.006309887886047363, 0.006326272010803223, 0.006333439826965332, 0.006322175979614258, 0.006336512088775635, 0.006384640216827392, 0.006313983917236328, 0.006315008163452148, 0.006278143882751465, 0.0062761597633361815, 0.006254528045654297, 0.006288383960723877, 0.006246399879455566, 0.006261760234832763, 0.006240287780761719, 0.006336480140686035, 0.006230016231536865, 0.006334464073181153, 0.006394879817962646, 0.006505472183227539, 0.00638976001739502, 0.006295551776885986, 0.0063201279640197755, 0.006348800182342529, 0.006309887886047363, 0.006375423908233643, 0.006319104194641113, 0.0063170561790466305, 0.006329343795776367, 0.006340608119964599, 0.006336512088775635, 0.006313983917236328, 0.006338592052459717, 0.006318048000335693, 0.013455360412597657, 0.006302720069885254, 0.006360064029693604, 0.006343679904937744, 0.006346752166748047, 0.006370304107666015, 0.006325247764587402, 0.006345727920532227, 0.006362112045288086, 0.006364160060882569, 0.0063569917678833006, 0.006355967998504639, 0.006345727920532227, 0.006354944229125976, 0.006330368041992188, 0.0063805441856384275, 0.0063836159706115725, 0.006315008163452148, 0.006299647808074951, 0.006347775936126709, 0.006311935901641846, 0.006330368041992188, 0.006341631889343262, 0.006298624038696289, 0.006344704151153564, 0.006330368041992188, 0.006343679904937744, 0.006323200225830078, 0.006336512088775635, 0.006328320026397705, 0.0063170561790466305, 0.0063170561790466305, 0.006330368041992188, 0.006330368041992188, 0.006394879817962646, 0.006308864116668702, 0.006305791854858398, 0.006295551776885986, 0.006309887886047363, 0.0063539199829101565, 0.006307839870452881, 0.006311935901641846, 0.006302720069885254, 0.006334464073181153, 0.006329343795776367, 0.006315008163452148, 0.006329343795776367, 0.006288383960723877, 0.006344704151153564, 0.006310912132263183, 0.00632422399520874, 0.006366208076477051, 0.00632422399520874, 0.006312960147857666, 0.006316031932830811, 0.006348800182342529, 0.006302752017974853, 0.006309855937957764, 0.006347775936126709, 0.0063591041564941405, 0.006312895774841309, 0.006313983917236328, 0.006325247764587402, 0.013421567916870117, 0.006292511940002441, 0.006318048000335693, 0.006284287929534912, 0.006250495910644531, 0.006252543926239014, 0.006280191898345947, 0.006240255832672119, 0.006270976066589356, 0.006243328094482422, 0.0062904319763183595, 0.006310912132263183, 0.006348800182342529, 0.006377471923828125, 0.006347775936126709, 0.00642252779006958, 0.006299647808074951, 0.006330368041992188, 
0.006294528007507324, 0.006319104194641113, 0.006306816101074219, 0.006306816101074219, 0.006318079948425293, 0.006300672054290772, 0.0063621759414672855, 0.0063303041458129886, 0.006376448154449463, 0.006406144142150879, 0.0063170561790466305, 0.006346752166748047, 0.006323200225830078, 0.006330368041992188, 0.006340608119964599, 0.006315008163452148, 0.0063498239517211915, 0.006342656135559082, 0.0063211841583251955, 0.0063119039535522465, 0.006316031932830811, 0.00633241605758667, 0.006310912132263183, 0.006460415840148926, 0.006318079948425293, 0.006337535858154297, 0.006360064029693604, 0.0064430079460144046, 0.006536191940307618, 0.006398975849151611, 0.006340608119964599, 0.006355967998504639, 0.0063170561790466305, 0.006344704151153564, 0.00636518383026123, 0.006319104194641113, 0.006340608119964599, 0.006302752017974853, 0.006353888034820556, 0.006312960147857666, 0.006333439826965332, 0.006303743839263916, 0.006307839870452881, 0.0063211522102355954, 0.006323200225830078, 0.01428889560699463, 0.006546432018280029, 0.006489088058471679, 0.0063498239517211915, 0.006318079948425293, 0.006313983917236328, 0.0063805441856384275, 0.006336512088775635, 0.006339583873748779, 0.0063201279640197755, 0.006329376220703125, 0.006326240062713623, 0.006334464073181153, 0.006346752166748047, 0.006326272010803223, 0.0063201279640197755, 0.006313983917236328, 0.006366208076477051, 0.0063498239517211915, 0.006326272010803223, 0.006313983917236328, 0.006326272010803223, 0.00643891191482544, 0.006305791854858398, 0.0063498239517211915, 0.006306816101074219, 0.006340608119964599, 0.006342656135559082, 0.006311935901641846, 0.006330368041992188, 0.006341631889343262, 0.006312960147857666, 0.00637337589263916, 0.006372352123260498, 0.006325247764587402, 0.0063211522102355954, 0.006334464073181153, 0.006326272010803223, 0.006309887886047363, 0.006345727920532227, 0.006329343795776367, 0.0063201279640197755, 0.006338560104370118, 0.006288383960723877, 0.00633241605758667, 0.0062975997924804685, 0.00638976001739502, 0.006305791854858398, 0.006336512088775635, 0.006323232173919678, 0.006334432125091553, 0.006340608119964599, 0.006363135814666748, 0.006351871967315674, 0.0063569917678833006, 0.006334464073181153, 0.006330368041992188, 0.006323200225830078, 0.006329343795776367, 0.006364160060882569, 0.006340608119964599, 0.006334464073181153, 0.0063272957801818845, 0.013492223739624023, 0.006341631889343262, 0.006325247764587402, 0.006338560104370118, 0.006329343795776367, 0.006358016014099121, 0.006339583873748779, 0.006348800182342529, 0.006317088127136231, 0.0063385281562805176, 0.006312960147857666, 0.006460415840148926, 0.006345727920532227, 0.006306816101074219, 0.0063569917678833006, 0.006334464073181153, 0.006325247764587402, 0.006328320026397705, 0.006423552036285401, 0.0063211522102355954, 0.0062975997924804685, 0.0063211522102355954, 0.006293504238128662, 0.006333439826965332, 0.006340608119964599, 0.006329343795776367, 0.006336512088775635, 0.006751232147216797, 0.006744063854217529, 0.006584320068359375, 0.006546432018280029, 0.006568960189819336, 0.006556672096252441, 0.006502399921417237, 0.0065372161865234375, 0.0065484800338745115, 0.006604800224304199, 0.006407167911529541, 0.006342656135559082, 0.006358016014099121, 0.006330368041992188, 0.006316031932830811, 0.006340608119964599, 0.006311935901641846, 0.00633241605758667, 0.006843391895294189, 0.006594560146331787, 0.006560768127441406, 0.006575136184692383, 0.00657097578048706, 0.006406144142150879, 0.006315008163452148, 
0.006339583873748779, 0.006425600051879882, 0.00631606388092041, 0.006355936050415039, 0.006302720069885254, 0.006333439826965332, 0.006330368041992188, 0.006310912132263183, 0.006344704151153564, 0.006304768085479737, 0.00632422399520874, 0.013517824172973633, 0.006319104194641113, 0.0063272957801818845, 0.006360064029693604, 0.006333439826965332, 0.006336512088775635, 0.006340608119964599, 0.006329376220703125, 0.0063190717697143555, 0.006325247764587402, 0.006354944229125976, 0.006652927875518798, 0.006550528049468994, 0.006894591808319092, 0.007411712169647216, 0.00676966381072998, 0.006558720111846924, 0.006564864158630371, 0.00657203197479248, 0.006564864158630371, 0.006553599834442139, 0.006526976108551025, 0.0065771517753601075, 0.006519807815551758, 0.006375487804412842, 0.0063640961647033695, 0.006352896213531494, 0.006599679946899414, 0.007201824188232422, 0.006801375865936279, 0.006573056221008301, 0.006560768127441406, 0.00657203197479248, 0.0065443840026855465, 0.006584320068359375, 0.006487040042877197, 0.00633241605758667, 0.006322175979614258, 0.006316031932830811, 0.006342656135559082, 0.006313983917236328, 0.006331391811370849, 0.006385663986206055, 0.006342656135559082, 0.0063272957801818845, 0.0063272957801818845, 0.006329343795776367, 0.006335487842559814, 0.006326272010803223, 0.006342656135559082, 0.006322175979614258, 0.006335487842559814, 0.006309887886047363, 0.006319104194641113, 0.0063498239517211915, 0.006487040042877197, 0.006425600051879882, 0.006259712219238281, 0.006260735988616943, 0.006341631889343262, 0.006240255832672119, 0.006289408206939697, 0.006339583873748779, 0.013595647811889648, 0.006301695823669433, 0.006333439826965332, 0.006332448005676269, 0.006330336093902588, 0.006393856048583985, 0.006316031932830811, 0.006339615821838379, 0.006336480140686035, 0.006328320026397705, 0.00632422399520874, 0.006351871967315674, 0.0063498239517211915, 0.00633241605758667, 0.006335487842559814, 0.0063539199829101565, 0.006325247764587402, 0.006341631889343262, 0.006333439826965332, 0.006296576023101807, 0.006244351863861084, 0.006275072097778321, 0.006264832019805908, 0.006278175830841065, 0.006272992134094238, 0.006284287929534912, 0.006363135814666748, 0.006260799884796143, 0.006269887924194336, 0.006291456222534179, 0.0062638077735900875, 0.006261760234832763, 0.006247424125671387, 0.006252543926239014, 0.006237184047698975, 0.006264832019805908, 0.006304768085479737, 0.006328320026397705, 0.006347775936126709, 0.0063170561790466305, 0.006343679904937744, 0.006329343795776367, 0.006323200225830078, 0.006340608119964599, 0.006342656135559082, 0.006339583873748779, 0.006310912132263183, 0.006434815883636475, 0.006296576023101807, 0.006354944229125976, 0.006343679904937744, 0.006335487842559814, 0.006327360153198242, 0.0063303041458129886, 0.006334464073181153, 0.006331391811370849, 0.0063569917678833006, 0.006370304107666015, 0.006347775936126709, 0.006333439826965332, 0.006337535858154297, 0.00637337589263916, 0.006334464073181153, 0.013480992317199706, 0.006344672203063965, 0.006305791854858398, 0.006325247764587402, 0.006315008163452148, 0.006780928134918213, 0.006604800224304199, 0.0064880638122558594, 0.006434815883636475, 0.00674508810043335, 0.006551551818847656, 0.006560768127441406, 0.006512639999389648, 0.006592512130737305, 0.0065484800338745115, 0.006518784046173095, 0.006519807815551758, 0.006523903846740723, 0.0066119680404663084, 0.006687744140625, 0.006440959930419922, 0.006462463855743408, 0.00636518383026123, 0.00632422399520874, 
0.006336512088775635, 0.006351935863494873, 0.0063190398216247555, 0.006347775936126709, 0.006333439826965332, 0.006401023864746094, 0.00632422399520874, 0.0063539199829101565, 0.006335487842559814, 0.006344704151153564, 0.006339583873748779, 0.006370304107666015, 0.006326272010803223, 0.006358016014099121, 0.006305791854858398, 0.0063201279640197755, 0.006310912132263183, 0.006348800182342529, 0.006328320026397705, 0.006316031932830811, 0.00633241605758667, 0.006330368041992188, 0.006334464073181153, 0.006315008163452148, 0.006323200225830078, 0.006348800182342529, 0.006326272010803223, 0.006346752166748047, 0.006348800182342529, 0.006311935901641846, 0.006339583873748779, 0.006360064029693604, 0.006325247764587402, 0.0063211522102355954, 0.00636518383026123, 0.006392831802368164, 0.006339583873748779, 0.0063610877990722655, 0.00632422399520874, 0.013540351867675781, 0.006294528007507324, 0.006346784114837647, 0.0063190717697143555, 0.006322175979614258, 0.006318143844604492, 0.006336448192596436, 0.006330368041992188, 0.006304800033569336, 0.006304736137390137, 0.0063201279640197755, 0.0063211522102355954, 0.006341631889343262, 0.006293504238128662, 0.00632531213760376, 0.006310848236083985, 0.006334464073181153, 0.006269951820373535, 0.006328320026397705, 0.00633241605758667, 0.00628223991394043, 0.006334464073181153, 0.006303743839263916, 0.006250495910644531, 0.006364160060882569, 0.006275072097778321, 0.0062576642036437985, 0.006276095867156982, 0.006346752166748047, 0.006319104194641113, 0.006334464073181153, 0.0063170561790466305, 0.006335487842559814, 0.006313983917236328, 0.006299647808074951, 0.0063498239517211915, 0.006301695823669433, 0.006285312175750732, 0.006333439826965332, 0.0062863359451293946, 0.006316031932830811, 0.006321216106414795, 0.006377408027648926, 0.006296576023101807, 0.006331391811370849, 0.006308864116668702, 0.006268928050994873, 0.006316031932830811, 0.006401023864746094, 0.006326272010803223, 0.006296576023101807, 0.006296607971191406, 0.006377439975738525, 0.006331391811370849, 0.006369279861450195, 0.006347775936126709, 0.006293504238128662, 0.006319104194641113, 0.006311935901641846, 0.006312992095947266, 0.006487008094787598, 0.006515711784362793, 0.006510591983795166, 0.013452287673950195, 0.006305791854858398, 0.006318079948425293, 0.006391808032989502, 0.006322175979614258, 0.00636518383026123, 0.006310912132263183, 0.0063569917678833006, 0.0063201279640197755, 0.006331391811370849, 0.006322175979614258, 0.006305791854858398, 0.006565887928009034, 0.0065484800338745115, 0.006464511871337891, 0.006369279861450195, 0.006304768085479737, 0.006328320026397705, 0.006302720069885254, 0.00652288007736206, 0.007012351989746094, 0.0067010560035705566, 0.006529024124145508, 0.006529024124145508, 0.0065669121742248536, 0.0065146880149841305, 0.00653926420211792, 0.0065075201988220215, 0.006550591945648193, 0.006313920021057129, 0.006303743839263916, 0.006319104194641113, 0.006298624038696289, 0.006364160060882569, 0.006334464073181153, 0.006319104194641113, 0.00632422399520874, 0.0062975997924804685, 0.006300672054290772, 0.006318079948425293, 0.0063201279640197755, 0.0063201279640197755, 0.006302720069885254, 0.006309887886047363, 0.006307839870452881, 0.006312960147857666, 0.006323200225830078, 0.006401023864746094, 0.006326272010803223, 0.006338560104370118, 0.006336512088775635, 0.006387712001800537, 0.006303743839263916, 0.006336512088775635, 0.006313983917236328, 0.006333439826965332, 0.006329343795776367, 0.006337535858154297, 0.006342656135559082, 
0.00638156795501709, 0.00633241605758667, 0.006315040111541748, 0.006309855937957764, 0.013497344017028809, 0.006302720069885254, 0.006315008163452148, 0.006342656135559082, 0.006315008163452148, 0.006334464073181153, 0.006326335906982422, 0.006322112083435059, 0.006367231845855713, 0.006358016014099121, 0.006346752166748047, 0.006345727920532227, 0.006331391811370849, 0.006331391811370849, 0.006337535858154297, 0.006345727920532227, 0.006319104194641113, 0.006386688232421875, 0.006334464073181153, 0.006318079948425293, 0.006345727920532227, 0.006341631889343262, 0.006328320026397705, 0.006334464073181153, 0.006312960147857666, 0.006343679904937744, 0.006319104194641113, 0.0063610877990722655, 0.0063508481979370115, 0.006362112045288086, 0.0063201279640197755, 0.006326272010803223, 0.0063211522102355954, 0.006331391811370849, 0.006364160060882569, 0.006342656135559082, 0.006310912132263183, 0.006311935901641846, 0.006280191898345947, 0.006360064029693604, 0.006337535858154297, 0.006312960147857666, 0.0063201279640197755, 0.006312960147857666, 0.006343679904937744, 0.006327328205108643, 0.006301663875579834, 0.006334464073181153, 0.006305791854858398, 0.0063539199829101565, 0.0064143362045288085, 0.006309887886047363, 0.006245376110076905, 0.006255616188049316, 0.006254591941833496, 0.006255616188049316, 0.006240255832672119, 0.0062638077735900875, 0.006262784004211426, 0.006278143882751465, 0.006312992095947266, 0.0063426241874694825, 0.006305856227874756, 0.01340614414215088, 0.006302720069885254, 0.006305791854858398, 0.0063211522102355954, 0.0063170561790466305, 0.006343679904937744, 0.0063508801460266115, 0.006350815773010254, 0.0063201279640197755, 0.006344704151153564, 0.00632422399520874, 0.0063508481979370115, 0.006339583873748779, 0.006406144142150879, 0.006337567806243897, 0.006387712001800537, 0.006362080097198486, 0.006352896213531494, 0.006338560104370118, 0.006344704151153564, 0.00632422399520874, 0.0063610877990722655, 0.006364160060882569, 0.006329376220703125, 0.006345695972442627, 0.006303743839263916, 0.006372352123260498, 0.006337567806243897, 0.006364128112792969, 0.006333439826965332, 0.006366208076477051, 0.006344704151153564, 0.006319104194641113, 0.006370304107666015, 0.006338560104370118, 0.0063201279640197755, 0.006329343795776367, 0.006341631889343262, 0.006319104194641113, 0.006330368041992188, 0.006354944229125976, 0.0063272957801818845, 0.006315008163452148, 0.0066007041931152345, 0.006546432018280029, 0.006738944053649902, 0.006557695865631104, 0.006524928092956543, 0.006536191940307618, 0.006527999877929688, 0.006543360233306885, 0.006398975849151611, 0.006448128223419189, 0.0064174079895019534, 0.0064440321922302245, 0.00642252779006958, 0.00674508810043335, 0.0064245758056640625, 0.006312960147857666, 0.00659660816192627, 0.006308864116668702, 0.006351871967315674, 0.006323200225830078, 0.01407487964630127, 0.006498303890228272, 0.006504447937011719, 0.006511616230010986, 0.006335487842559814, 0.006337535858154297, 0.00633241605758667, 0.006330368041992188, 0.006345727920532227, 0.006343679904937744, 0.006303743839263916, 0.006342656135559082, 0.006318079948425293, 0.006326272010803223, 0.0065372161865234375, 0.006330368041992188, 0.006326272010803223, 0.006296576023101807, 0.006329343795776367, 0.0063201279640197755, 0.00642252779006958, 0.006347775936126709, 0.0063170561790466305, 0.006331391811370849, 0.006313983917236328, 0.006509568214416504, 0.006408192157745361, 0.006396927833557129, 0.006461440086364746, 0.006453279972076416, 0.006560736179351807, 
0.00658022403717041, 0.006363135814666748, 0.006343679904937744, 0.006319104194641113, 0.006352896213531494, 0.0063508481979370115, 0.006307839870452881, 0.006355967998504639, 0.006301695823669433, 0.006461440086364746, 0.006372352123260498, 0.0063272957801818845, 0.006330368041992188, 0.006311935901641846, 0.006353983879089356, 0.006327231884002685, 0.006333439826965332, 0.006345727920532227, 0.006343711853027343, 0.006314976215362549, 0.006315008163452148, 0.006340608119964599, 0.006330368041992188, 0.006329343795776367, 0.00633241605758667, 0.006293504238128662, 0.0064778242111206055, 0.006379519939422608, 0.0064143362045288085, 0.006352896213531494, 0.006323200225830078, 0.006345727920532227, 0.013510656356811524, 0.006329343795776367, 0.0063498239517211915, 0.006369279861450195, 0.006328320026397705, 0.006329343795776367, 0.006338560104370118, 0.006334464073181153, 0.00633241605758667, 0.006352896213531494, 0.006333439826965332, 0.006334464073181153, 0.006310912132263183, 0.006340608119964599, 0.006319104194641113, 0.006363135814666748, 0.006328320026397705, 0.006396927833557129, 0.006310976028442382, 0.006320064067840576, 0.006322175979614258, 0.006333439826965332, 0.0063569917678833006, 0.006348800182342529, 0.006336512088775635, 0.00636518383026123, 0.0063170561790466305, 0.006370304107666015, 0.006337535858154297, 0.006319104194641113, 0.006343679904937744, 0.006306816101074219, 0.006325247764587402, 0.006348800182342529, 0.006403071880340576, 0.006317088127136231, 0.0063088321685791015, 0.006296576023101807, 0.0062863359451293946, 0.006355967998504639, 0.006301695823669433, 0.006302720069885254, 0.006884352207183838, 0.006543360233306885, 0.006334464073181153, 0.006312960147857666, 0.006244351863861084, 0.006259712219238281, 0.006464511871337891, 0.006301695823669433, 0.006336512088775635, 0.006325247764587402, 0.0063170561790466305, 0.006323200225830078, 0.0063211522102355954, 0.006377471923828125, 0.006358016014099121, 0.0063201279640197755, 0.006338560104370118, 0.006419456005096436, 0.006295551776885986, 0.0063498239517211915, 0.006305791854858398, 0.01360588836669922, 0.006304768085479737, 0.006354944229125976, 0.006362112045288086, 0.006316031932830811, 0.006354944229125976, 0.006359039783477783, 0.006298624038696289, 0.006371327877044678, 0.0062975997924804685, 0.006362112045288086, 0.006355967998504639, 0.0063211522102355954, 0.0063539199829101565, 0.006401023864746094, 0.0063610877990722655, 0.006323200225830078, 0.006273024082183838, 0.00628223991394043, 0.006284287929534912, 0.006296576023101807, 0.0062638077735900875, 0.006296576023101807, 0.006262784004211426, 0.006427648067474365, 0.006280191898345947, 0.0066744318008422855, 0.006758399963378906, 0.006755328178405762, 0.006774784088134766, 0.006759424209594727, 0.006770688056945801, 0.00675328016281128, 0.006772736072540283, 0.006731776237487793, 0.006763519763946534, 0.006755328178405762, 0.006714367866516113, 0.006740992069244385, 0.006760447978973389, 0.006731776237487793, 0.006770688056945801, 0.0067573761940002445, 0.006760447978973389, 0.00674508810043335, 0.006729728221893311, 0.006708223819732666, 0.006865920066833496, 0.0067338237762451176, 0.006766592025756836, 0.006737919807434082, 0.006740992069244385, 0.00676255989074707, 0.006774720191955567, 0.00673689603805542, 0.006719520092010498, 0.0067563199996948245, 0.006774784088134766, 0.0067348480224609375, 0.006752255916595459, 0.006765567779541016, 0.006732800006866455, 0.006765567779541016, 0.014397439956665039, 0.006789120197296142, 0.006740992069244385, 
0.0067276802062988285, 0.006724607944488525, 0.0067420158386230465, 0.006754303932189941, 0.006752255916595459, 0.006779903888702392, 0.006766592025756836, 0.006743040084838867, 0.0067573761940002445, 0.0067348480224609375, 0.006758399963378906, 0.006743040084838867, 0.0067123198509216305, 0.006767615795135498, 0.006750207901000976, 0.006730751991271973, 0.006774784088134766, 0.006812672138214112, 0.006665215969085693, 0.006649856090545654, 0.006617087841033936, 0.006638591766357422, 0.006654975891113281, 0.006780928134918213, 0.006782976150512696, 0.006779903888702392, 0.006785024166107178, 0.006751232147216797, 0.006788095951080322, 0.006807551860809326, 0.0067573761940002445, 0.006766592025756836, 0.006796319961547851, 0.00674505615234375, 0.006772736072540283, 0.0067645440101623535, 0.006721536159515381, 0.006851583957672119, 0.006762527942657471, 0.006697951793670654, 0.006729728221893311, 0.0067420158386230465, 0.006713344097137451, 0.006731776237487793, 0.006715392112731934, 0.006747136116027832, 0.006749184131622315, 0.006752255916595459, 0.006717440128326416, 0.006750207901000976, 0.006791168212890625, 0.006750207901000976, 0.006779903888702392, 0.0067645440101623535, 0.006738944053649902, 0.006758399963378906, 0.006912000179290771, 0.006724607944488525, 0.006773759841918945, 0.006763519763946534, 0.013530112266540528, 0.006345727920532227, 0.0063170561790466305, 0.006359039783477783, 0.006333439826965332, 0.006370304107666015, 0.006370304107666015, 0.006379519939422608, 0.006368256092071533, 0.0063805441856384275, 0.006341631889343262, 0.006358016014099121, 0.006344704151153564, 0.0063211522102355954, 0.00638156795501709, 0.006335487842559814, 0.006391808032989502, 0.00642252779006958, 0.006322175979614258, 0.006392831802368164, 0.006301695823669433, 0.006359039783477783, 0.006363135814666748, 0.006309887886047363, 0.006370304107666015, 0.00636518383026123, 0.006360064029693604, 0.006386688232421875, 0.006343679904937744, 0.006367231845855713, 0.006369279861450195, 0.006313983917236328, 0.006338560104370118, 0.006354944229125976, 0.00632422399520874, 0.0063508481979370115, 0.006384640216827392, 0.0063211522102355954, 0.006386688232421875, 0.006351871967315674, 0.006351871967315674, 0.006378496170043945, 0.006369279861450195, 0.006348800182342529, 0.00637337589263916, 0.006308864116668702, 0.0064839677810668945, 0.006396927833557129, 0.006362112045288086, 0.006402048110961914, 0.006303743839263916, 0.0063610877990722655, 0.0063508481979370115, 0.006330368041992188, 0.006325247764587402, 0.006348800182342529, 0.006334464073181153, 0.0063539199829101565, 0.006338560104370118, 0.006326272010803223, 0.006387712001800537, 0.006339583873748779, 0.006343679904937744, 0.013538304328918458, 0.006334496021270752, 0.006407135963439941, 0.006395904064178467, 0.006368256092071533, 0.0063498239517211915, 0.00637440013885498, 0.006375423908233643, 0.006309887886047363, 0.006363135814666748, 0.006335487842559814, 0.006386720180511475, 0.00638153600692749, 0.006355967998504639, 0.006352896213531494, 0.0063610877990722655, 0.0063498239517211915, 0.0063836159706115725, 0.006298624038696289, 0.006358016014099121, 0.006367231845855713, 0.006318079948425293, 0.006386688232421875, 0.006346752166748047, 0.006466559886932373, 0.006360064029693604, 0.006313983917236328, 0.0063211522102355954, 0.006377503871917724, 0.006327263832092285, 0.006360064029693604, 0.006348800182342529, 0.006319104194641113, 0.006355967998504639, 0.006342656135559082, 0.006358016014099121, 0.006360064029693604, 
0.006295551776885986, 0.006362112045288086, 0.00637337589263916, 0.006303743839263916, 0.0063508481979370115, 0.006303743839263916, 0.006331391811370849, 0.006330368041992188, 0.006351871967315674, 0.006379519939422608, 0.006376448154449463, 0.006451200008392334, 0.006346752166748047, 0.0063170561790466305, 0.006354944229125976, 0.006319104194641113, 0.006318079948425293, 0.0063539199829101565, 0.006364160060882569, 0.006310912132263183, 0.00638259220123291, 0.006332448005676269, 0.006410208225250244, 0.006378496170043945, 0.006313983917236328, 0.006360064029693604, 0.01355673599243164, 0.00633241605758667, 0.006386688232421875, 0.006322175979614258, 0.0063569917678833006, 0.006319104194641113, 0.006343679904937744, 0.006360064029693604, 0.0063272957801818845, 0.006340608119964599, 0.006322175979614258, 0.00632422399520874, 0.006339583873748779, 0.006323200225830078, 0.006388735771179199, 0.006362112045288086, 0.0063170561790466305, 0.0063170561790466305, 0.006334464073181153, 0.006347775936126709, 0.006336512088775635, 0.006436863899230957, 0.006338592052459717, 0.006329311847686768, 0.006319104194641113, 0.006368319988250732, 0.006369215965270996, 0.006333439826965332, 0.0063805441856384275, 0.006359039783477783, 0.006359039783477783, 0.006351903915405273, 0.006307871818542481, 0.006354879856109619, 0.006315072059631347, 0.006333375930786133, 0.006370304107666015, 0.006306816101074219, 0.006340640068054199, 0.006353888034820556, 0.006315008163452148, 0.006436863899230957, 0.006376448154449463, 0.006310912132263183, 0.006344704151153564, 0.006329343795776367, 0.006336512088775635, 0.0063591041564941405, 0.006338496208190918, 0.006379519939422608, 0.006348800182342529, 0.006337535858154297, 0.006375423908233643, 0.006312960147857666, 0.006331391811370849, 0.006339583873748779, 0.0062975997924804685, 0.0063539199829101565, 0.006330368041992188, 0.006308864116668702, 0.006453248023986816, 0.00632422399520874, 0.006330368041992188]",tokens/s,153.3718322108413,,,,,,,, -4bit-awq-gemv-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,Qwen/Qwen1.5-4B,Qwen/Qwen1.5-4B,cuda,0,42,,,True,,,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run - report = scenario.run(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run - self.run_text_generation_memory_tracking(backend) - File 
""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 205, in run_text_generation_memory_tracking - _ = backend.generate(self.inputs, self.config.generate_kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context - return func(*args, **kwargs) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 454, in generate - return self.pretrained_model.generate(**inputs, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context - return func(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate - result = self._sample( - File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample - outputs = self(**model_inputs, return_dict=True) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 1104, in forward - outputs = self.model( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 915, in forward - layer_outputs = decoder_layer( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 655, in forward - hidden_states, self_attn_weights, present_key_value = self.self_attn( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 555, in forward - query_states, key_states = apply_rotary_pos_emb(query_states, key_states, cos, sin, position_ids) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 206, in apply_rotary_pos_emb - q_embed = (q * cos) + (rotate_half(q) * sin) -RuntimeError: CUDA error: an illegal memory access was encountered -CUDA kernel errors might be asynchronously reported at some other API call, so the stacktrace below might be incorrect. -For debugging consider passing CUDA_LAUNCH_BLOCKING=1 -Compile with `TORCH_USE_CUDA_DSA` to enable device-side assertions. 
- - -",qwen2,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemv-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,Qwen/Qwen1.5-0.5B,Qwen/Qwen1.5-0.5B,cuda,0,42,,,True,,,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,qwen2,MB,1329.754112,1292.36992,0.0,706.740224,675.13344,s,1,7.7310791015625,7.7310791015625,0.0,7.7310791015625,7.7310791015625,7.7310791015625,7.7310791015625,[7.7310791015625],,kWh,9.868621443764723e-06,5.393015924709197e-06,1.3032510425947663e-05,2.8294147794421582e-05,,MB,1537.257472,1642.594304,0.0,996.1472,942.733312,s,10,0.23997216033935548,0.023997216033935547,0.0004245042522345351,0.023872864723205565,0.024518070983886718,0.02452162780761719,0.024524473266601562,"[0.02451728057861328, 0.023638111114501953, 0.023716384887695313, 0.023650751113891602, 0.024525184631347657, 0.024507328033447264, 0.023493375778198242, 0.02402934455871582, 0.024397119522094727, 0.02349728012084961]",tokens/s,10667.904128461352,kWh,2.808697459619662e-07,1.5384534889371828e-07,8.723119937057518e-07,1.3070270885614363e-06,tokens/kWh,195864341.4818306,MB,1562.017792,1650.982912,0.0,1004.535808,942.735872,s,10,12.860463623046876,1.2860463623046876,0.009932329215395655,1.2883671264648437,1.2960470581054686,1.2983215026855468,1.3001410583496094,"[1.2851380615234376, 1.2894481201171875, 1.289771240234375, 1.295167724609375, 1.2955416259765624, 1.2761458740234375, 1.2872861328125, 1.300595947265625, 1.27203662109375, 1.269332275390625]",tokens/s,48.98734746008648,kWh,1.5274351828964877e-05,8.370167860181873e-06,2.589629275448945e-05,4.95408124436362e-05,tokens/kWh,1271678.7814426066,,s,630,12.858562528610232,0.02041041671207973,0.0005224410026079848,0.02035302448272705,0.020953600692749024,0.021238682174682614,0.021861293926239013,"[0.020888576507568358, 0.020993024826049804, 0.021005311965942384, 0.020771839141845702, 0.020708351135253905, 0.020733951568603515, 0.020737024307250978, 0.020644863128662108, 0.020682752609252928, 0.02064793586730957, 0.02044825553894043, 0.020590591430664062, 0.021020671844482423, 0.020692991256713866, 0.020363264083862305, 0.020305919647216796, 0.02007347106933594, 0.020057088851928712, 0.020050943374633787, 0.02004582405090332, 0.02005299186706543, 0.02227507209777832, 0.02084147262573242, 0.02065920066833496, 0.021120000839233398, 0.020926464080810548, 0.020922367095947265, 0.020768768310546876, 0.0208855037689209, 0.020840448379516603, 0.02085273551940918, 0.020808704376220705, 0.02082815933227539, 0.020832256317138673, 0.02086195182800293, 0.02045644760131836, 0.01987174415588379, 0.020109312057495117, 0.020118528366088868, 0.019941375732421874, 0.02006220817565918, 0.02002943992614746, 0.019778560638427735, 0.02000383949279785, 0.020509695053100584, 0.02044313621520996, 0.02004991912841797, 0.01980620765686035, 0.019614719390869142, 0.019877887725830077, 0.020092927932739257, 0.020133888244628906, 0.020065280914306642, 0.020063232421875, 0.019726335525512697, 0.019937280654907227, 0.019977216720581056, 0.020002815246582033, 
0.01979903984069824, 0.020074495315551756, 0.019960832595825196, 0.01982771110534668, 0.019697664260864257, 0.019729408264160156, 0.019992576599121094, 0.019900415420532228, 0.0196628475189209, 0.019927040100097656, 0.019735551834106444, 0.019927040100097656, 0.019727359771728514, 0.019775487899780272, 0.01988915252685547, 0.02126950454711914, 0.020768768310546876, 0.02061311912536621, 0.02062745666503906, 0.02084659194946289, 0.020930559158325195, 0.020556800842285155, 0.02050764846801758, 0.020126720428466797, 0.020068351745605468, 0.019756032943725587, 0.019766271591186522, 0.02004275131225586, 0.019945472717285157, 0.019784704208374023, 0.019934207916259765, 0.019946495056152345, 0.020970495223999023, 0.0236943359375, 0.021138431549072266, 0.021130239486694336, 0.02128486442565918, 0.02126848030090332, 0.020916223526000977, 0.020752384185791017, 0.0208855037689209, 0.02104319953918457, 0.02084966468811035, 0.020766719818115235, 0.0206878719329834, 0.020793344497680662, 0.021078016281127928, 0.020832256317138673, 0.020366336822509764, 0.02065920066833496, 0.020205568313598633, 0.020768768310546876, 0.020716543197631835, 0.020188159942626953, 0.020404224395751954, 0.020364288330078126, 0.020068351745605468, 0.02001919937133789, 0.019817472457885742, 0.01979903984069824, 0.019763200759887696, 0.020368383407592772, 0.020371456146240235, 0.02046976089477539, 0.020853759765625, 0.020676607131958007, 0.020790271759033203, 0.020760576248168947, 0.020891647338867187, 0.0208721923828125, 0.020761600494384767, 0.020748287200927733, 0.020742143630981445, 0.019870719909667968, 0.019952640533447266, 0.019992576599121094, 0.02085273551940918, 0.019941375732421874, 0.01990656089782715, 0.019985408782958985, 0.019961856842041017, 0.020382720947265624, 0.02027519989013672, 0.020495359420776366, 0.020669439315795898, 0.021183488845825195, 0.02029465675354004, 0.01980620765686035, 0.02001203155517578, 0.020725759506225586, 0.020808704376220705, 0.020543487548828124, 0.020732927322387695, 0.02005606460571289, 0.020023296356201172, 0.019957759857177734, 0.019993600845336915, 0.019940351486206053, 0.01992192077636719, 0.020785152435302736, 0.020685823440551757, 0.020557823181152343, 0.020093952178955078, 0.019933183670043944, 0.019985408782958985, 0.01983590316772461, 0.020115455627441405, 0.020718591690063477, 0.020641792297363282, 0.01999564743041992, 0.019943424224853516, 0.02031718444824219, 0.020788223266601562, 0.020410367965698242, 0.020813823699951172, 0.020575231552124023, 0.0204083194732666, 0.02067148780822754, 0.020707328796386718, 0.020396032333374024, 0.02084454345703125, 0.023540735244750977, 0.021725183486938478, 0.020933631896972657, 0.020880384445190428, 0.020767744064331056, 0.020706304550170897, 0.020790271759033203, 0.02049843215942383, 0.02048307228088379, 0.019747840881347657, 0.019853311538696287, 0.019785728454589844, 0.020444160461425782, 0.020959232330322267, 0.02127052879333496, 0.020742143630981445, 0.021167104721069335, 0.02083020782470703, 0.020768768310546876, 0.02072166442871094, 0.020770816802978515, 0.020807680130004884, 0.020816896438598635, 0.020356096267700196, 0.019825664520263672, 0.019775487899780272, 0.020281343460083007, 0.02027827262878418, 0.02005606460571289, 0.020102144241333008, 0.019894271850585937, 0.020125696182250977, 0.02016972732543945, 0.020185087203979494, 0.019810304641723633, 0.019962879180908204, 0.020189184188842774, 0.02064384078979492, 0.021612543106079102, 0.021876735687255858, 0.020896768569946288, 0.020904960632324218, 0.020561920166015626, 
0.020632575988769532, 0.020523008346557618, 0.021113855361938477, 0.020858879089355468, 0.02050662422180176, 0.02036735916137695, 0.020063232421875, 0.019999744415283204, 0.020115455627441405, 0.019813375473022463, 0.020707328796386718, 0.02084249687194824, 0.020792320251464845, 0.02081177520751953, 0.019956735610961913, 0.020125696182250977, 0.02040934371948242, 0.02046463966369629, 0.021622783660888673, 0.02087014389038086, 0.020763647079467772, 0.021607423782348634, 0.02130636787414551, 0.020576255798339844, 0.02022707176208496, 0.02102681541442871, 0.020779008865356444, 0.020320255279541014, 0.020542463302612304, 0.020797439575195312, 0.020800512313842775, 0.02105855941772461, 0.0208353271484375, 0.020875263214111327, 0.020750335693359375, 0.020761600494384767, 0.020748287200927733, 0.020810752868652343, 0.020762624740600585, 0.020907007217407226, 0.020754432678222655, 0.02086604881286621, 0.02129305648803711, 0.021020671844482423, 0.020824064254760744, 0.020787200927734374, 0.020743167877197266, 0.020311040878295897, 0.02069708824157715, 0.020787200927734374, 0.020336639404296874, 0.019994623184204103, 0.02000486373901367, 0.02008678436279297, 0.020377599716186523, 0.019950592041015625, 0.0200447998046875, 0.020065280914306642, 0.020123647689819335, 0.020137983322143553, 0.02004275131225586, 0.020248575210571287, 0.020922367095947265, 0.020958208084106447, 0.02082508850097656, 0.020810752868652343, 0.020144128799438478, 0.020741119384765624, 0.02084556770324707, 0.02122854423522949, 0.021382144927978516, 0.020929536819458007, 0.021029888153076173, 0.020976640701293944, 0.020978687286376953, 0.020935680389404295, 0.020864000320434572, 0.02086092758178711, 0.020728832244873048, 0.02027724838256836, 0.019911680221557617, 0.019780607223510743, 0.019769344329833984, 0.01985843276977539, 0.02004172706604004, 0.02082815933227539, 0.020361215591430663, 0.020230144500732423, 0.019668991088867188, 0.019737600326538086, 0.020291584014892578, 0.020741119384765624, 0.02090188789367676, 0.02082508850097656, 0.020602880477905275, 0.019922943115234376, 0.01978265571594238, 0.01999667167663574, 0.020136959075927736, 0.020373504638671876, 0.02028339195251465, 0.02065510368347168, 0.020495359420776366, 0.021155839920043946, 0.02128179168701172, 0.02147737693786621, 0.021944351196289062, 0.021271520614624024, 0.02103091239929199, 0.020794368743896483, 0.020370431900024414, 0.019740671157836915, 0.019869695663452147, 0.020979711532592774, 0.02027212715148926, 0.020709375381469726, 0.02106265640258789, 0.020331520080566406, 0.020372480392456056, 0.02068070411682129, 0.02005606460571289, 0.019804159164428712, 0.019984384536743165, 0.019941375732421874, 0.02001408004760742, 0.02003046417236328, 0.019896320343017578, 0.01998847961425781, 0.02002943992614746, 0.02000486373901367, 0.019942399978637695, 0.020447231292724608, 0.020363264083862305, 0.019947519302368166, 0.019950592041015625, 0.01985945510864258, 0.019745792388916016, 0.019935232162475586, 0.02003046417236328, 0.020883455276489257, 0.020922367095947265, 0.01979801559448242, 0.020067327499389647, 0.02003455924987793, 0.019952640533447266, 0.019914751052856446, 0.019994623184204103, 0.019948543548583983, 0.019962879180908204, 0.019743743896484374, 0.019943424224853516, 0.019566591262817384, 0.01966592025756836, 0.019701759338378907, 0.019800064086914062, 0.019679231643676756, 0.020011007308959963, 0.020115455627441405, 0.019957759857177734, 0.02008576011657715, 0.020082687377929686, 0.02009600067138672, 0.019721216201782226, 0.019689472198486328, 
0.019750911712646483, 0.020533248901367186, 0.020904960632324218, 0.020876287460327148, 0.020582399368286132, 0.02074732780456543, 0.020282304763793946, 0.020115455627441405, 0.020578304290771485, 0.020912128448486327, 0.020822015762329102, 0.02033459281921387, 0.02066534423828125, 0.020783103942871094, 0.02081177520751953, 0.020880384445190428, 0.02082713508605957, 0.020741119384765624, 0.020411392211914063, 0.020595712661743162, 0.02084351921081543, 0.020445184707641603, 0.020625408172607423, 0.021542911529541017, 0.020938751220703124, 0.020540416717529295, 0.019987455368041994, 0.020024320602416993, 0.020009983062744142, 0.019893247604370116, 0.01982975959777832, 0.020380672454833985, 0.02128179168701172, 0.02059878349304199, 0.021823488235473632, 0.02107187271118164, 0.02089472007751465, 0.0208035831451416, 0.020213760375976563, 0.020084735870361328, 0.019907583236694337, 0.019935232162475586, 0.019997695922851562, 0.0198604793548584, 0.020101119995117187, 0.01984921646118164, 0.019944448471069336, 0.020189184188842774, 0.019919872283935547, 0.020287488937377928, 0.020545536041259766, 0.02088960075378418, 0.020801536560058592, 0.02049126434326172, 0.02046566390991211, 0.02025984001159668, 0.020098047256469728, 0.02003046417236328, 0.020274175643920898, 0.020395008087158203, 0.020772863388061523, 0.02081279945373535, 0.02062745666503906, 0.020102144241333008, 0.01985638427734375, 0.019965951919555663, 0.01981644821166992, 0.019903488159179687, 0.020118528366088868, 0.02011238479614258, 0.02022400093078613, 0.021651456832885742, 0.022832128524780275, 0.021383167266845703, 0.020904960632324218, 0.020913152694702147, 0.021246976852416992, 0.02110873603820801, 0.02086195182800293, 0.020790271759033203, 0.02079641532897949, 0.020763647079467772, 0.020254720687866212, 0.019941375732421874, 0.020435968399047853, 0.0208035831451416, 0.02031001663208008, 0.020445184707641603, 0.0206878719329834, 0.02090291213989258, 0.020948991775512696, 0.020953088760375976, 0.020896768569946288, 0.02087424087524414, 0.02086195182800293, 0.020584447860717774, 0.020402175903320312, 0.021106687545776368, 0.02104115104675293, 0.020808704376220705, 0.020610048294067384, 0.021349376678466796, 0.02087731170654297, 0.021326847076416015, 0.0212490234375, 0.019895296096801757, 0.02025062370300293, 0.020166656494140626, 0.020393983840942383, 0.020798463821411133, 0.020914176940917968, 0.020840448379516603, 0.02051481628417969, 0.020033536911010744, 0.02002022361755371, 0.02047590446472168, 0.02105446434020996, 0.020772863388061523, 0.021171167373657228, 0.02086604881286621, 0.02065920066833496, 0.02046566390991211, 0.020289535522460937, 0.0198287353515625, 0.02000486373901367, 0.02006118392944336, 0.020007936477661133, 0.019957759857177734, 0.019809280395507813, 0.019768320083618163, 0.019982336044311523, 0.019969024658203126, 0.01985638427734375, 0.020039680480957032, 0.019961856842041017, 0.02002124786376953, 0.019740671157836915, 0.01981439971923828, 0.019993600845336915, 0.020075519561767577, 0.020011007308959963, 0.02008064079284668, 0.0198604793548584, 0.01985843276977539, 0.019696640014648437, 0.020315135955810547, 0.0209039363861084, 0.020771839141845702, 0.020808704376220705, 0.020732927322387695, 0.020792320251464845, 0.02081177520751953, 0.020736000061035157, 0.021539840698242187, 0.02084147262573242, 0.020007936477661133, 0.019826688766479493, 0.019717119216918946, 0.019706880569458008, 0.019931135177612306, 0.019969024658203126, 0.0200130558013916, 0.019997695922851562, 0.020009983062744142, 
0.02002739143371582, 0.0200949764251709, 0.02007756805419922, 0.020937728881835937, 0.02087936019897461, 0.020349952697753908, 0.019917823791503905, 0.02002227210998535, 0.01992192077636719, 0.02012876892089844, 0.020031488418579102, 0.02006630325317383, 0.020024320602416993, 0.020068351745605468, 0.020002815246582033, 0.02007859230041504, 0.019952640533447266, 0.020065280914306642, 0.020118528366088868, 0.02044927978515625, 0.022108160018920898, 0.02129305648803711, 0.02070425605773926, 0.02017791938781738, 0.020011007308959963, 0.0200949764251709, 0.020517887115478514, 0.020504575729370117, 0.020607999801635742, 0.020773887634277344, 0.020738048553466795, 0.020766719818115235, 0.020753408432006838, 0.02033459281921387, 0.019991552352905274, 0.019770368576049805, 0.02004582405090332, 0.02008678436279297, 0.020485120773315428, 0.020497407913208008, 0.019992576599121094, 0.020091903686523437, 0.020106239318847655, 0.01979084777832031, 0.02008064079284668, 0.020084735870361328, 0.019767295837402343, 0.019959808349609375, 0.01997209548950195, 0.02003660774230957, 0.02003455924987793, 0.019984384536743165, 0.020016128540039063, 0.01984614372253418, 0.02000383949279785, 0.01990553665161133, 0.019887104034423828, 0.019896320343017578, 0.020166656494140626, 0.019911680221557617, 0.019961856842041017, 0.01965363121032715, 0.02024550437927246, 0.020732927322387695, 0.020694015502929687, 0.0198922233581543, 0.019813375473022463, 0.019951616287231445, 0.020031488418579102, 0.020001792907714845, 0.01998028755187988, 0.019900415420532228, 0.01967001533508301, 0.01967616081237793, 0.019779584884643556, 0.019706880569458008, 0.019607551574707033, 0.01988403129577637, 0.019567615509033204, 0.019963903427124022]",tokens/s,48.99459007165486,,,,,,,, -4bit-awq-gemv-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,EleutherAI/pythia-1.3b,EleutherAI/pythia-1.3b,cuda,0,42,,,True,,,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,gpt_neox,MB,1574.158336,1753.74336,0.0,1168.113664,1154.613248,s,1,7.95439208984375,7.95439208984375,0.0,7.95439208984375,7.95439208984375,7.95439208984375,7.95439208984375,[7.95439208984375],,kWh,1.385489540278968e-05,7.577944454828769e-06,1.8006125516056848e-05,3.94389653736753e-05,,MB,1566.019584,2015.88736,0.0,1369.440256,1323.44832,s,10,0.2845082550048828,0.028450825500488282,0.0001313781295232444,0.028426735877990723,0.028515711784362793,0.02866662359237671,0.028787353038787843,"[0.028817535400390625, 0.028432928085327148, 0.028482175827026366, 0.028436960220336913, 0.028413503646850587, 0.028379968643188477, 0.02831398391723633, 0.028348608016967772, 0.028462047576904296, 0.028420543670654295]",tokens/s,8997.981446816242,kWh,3.3508187776009924e-07,1.8360198852513617e-07,1.6218064595467326e-06,2.1404903258319677e-06,tokens/kWh,119598765.25043283,MB,1575.333888,2036.85888,0.0,1390.411776,1377.250816,s,10,11.30560498046875,1.130560498046875,0.01000876192316815,1.1263247680664064,1.1447047973632811,1.1461443908691407,1.147296065673828,"[1.1387353515625, 1.124177490234375, 1.147583984375, 1.1443848876953124, 
1.1207965087890626, 1.1279305419921875, 1.1179615478515625, 1.122024658203125, 1.137291015625, 1.124718994140625]",tokens/s,55.724572111653515,kWh,1.3194532455573004e-05,7.230206820341134e-06,2.5571881962053906e-05,4.599662123796804e-05,tokens/kWh,1369665.821192894,,s,630,11.30166783905028,0.01793915530007983,0.0004079512123769252,0.018107903480529784,0.018286694526672365,0.01846778860092163,0.018971545753479008,"[0.018306047439575195, 0.0184268798828125, 0.018174976348876954, 0.01819443130493164, 0.01808076858520508, 0.018273279190063475, 0.018114559173583983, 0.018122751235961913, 0.01805721664428711, 0.018190336227416993, 0.018121728897094725, 0.01818828773498535, 0.018137088775634767, 0.01784217643737793, 0.0180316162109375, 0.017903615951538086, 0.017967103958129883, 0.01821183967590332, 0.01804902458190918, 0.018273279190063475, 0.018301952362060548, 0.018091007232666014, 0.018151424407958985, 0.018134016036987305, 0.01836953544616699, 0.01820979118347168, 0.01810534477233887, 0.018058240890502928, 0.01822822380065918, 0.018185216903686522, 0.01821900749206543, 0.018207744598388673, 0.018129919052124025, 0.018231296539306642, 0.017768447875976562, 0.01740595245361328, 0.017532928466796875, 0.01740390396118164, 0.01742131233215332, 0.01743155288696289, 0.0174202880859375, 0.017466367721557616, 0.017648639678955077, 0.018355199813842774, 0.01822412872314453, 0.01823641586303711, 0.01827123260498047, 0.018207744598388673, 0.018264064788818358, 0.01814630317687988, 0.018137088775634767, 0.018155519485473632, 0.018268159866333008, 0.018177024841308592, 0.01810534477233887, 0.01817804718017578, 0.01821696090698242, 0.018127872467041017, 0.01819340705871582, 0.018104320526123048, 0.018053119659423827, 0.01806540870666504, 0.018077695846557617, 0.017164287567138673, 0.017375232696533204, 0.01741823959350586, 0.01741721534729004, 0.01745510482788086, 0.01740492820739746, 0.017426431655883787, 0.017483776092529296, 0.017622016906738282, 0.017829887390136717, 0.0182794246673584, 0.018265087127685545, 0.017786880493164063, 0.017580032348632812, 0.01757900810241699, 0.017479679107666016, 0.017539072036743163, 0.01742438316345215, 0.017540096282958984, 0.017477632522583008, 0.017467391967773437, 0.017497087478637697, 0.01745715141296387, 0.0174653434753418, 0.017467424392700194, 0.01746121597290039, 0.017540096282958984, 0.017484800338745117, 0.01743155288696289, 0.017534975051879884, 0.01744179153442383, 0.017542144775390626, 0.017588224411010742, 0.017480703353881837, 0.018093055725097656, 0.018167808532714845, 0.018174976348876954, 0.018610176086425782, 0.018119680404663087, 0.018242559432983398, 0.018100223541259765, 0.017881088256835938, 0.018134016036987305, 0.018106367111206053, 0.018176063537597657, 0.018201536178588867, 0.018141183853149414, 0.018539520263671876, 0.01843097686767578, 0.018112512588500978, 0.018166784286499024, 0.018084896087646483, 0.018145248413085936, 0.018074623107910158, 0.018114559173583983, 0.018104320526123048, 0.01819340705871582, 0.018126848220825196, 0.018199552536010744, 0.018177024841308592, 0.018310176849365235, 0.018208736419677733, 0.018239488601684572, 0.017531904220581054, 0.018111488342285157, 0.018131967544555663, 0.01810534477233887, 0.01819647979736328, 0.01813811111450195, 0.017745920181274414, 0.018265087127685545, 0.018190336227416993, 0.018142208099365235, 0.01824665641784668, 0.018181119918823242, 0.01820159912109375, 0.01821388816833496, 0.018115583419799804, 0.018106367111206053, 0.018125823974609375, 0.01816166305541992, 0.01803468894958496, 
0.017829887390136717, 0.017923072814941408, 0.01822105598449707, 0.018092031478881835, 0.018126848220825196, 0.018110464096069336, 0.01810534477233887, 0.01822105598449707, 0.01820876884460449, 0.018374656677246092, 0.01813916778564453, 0.018233312606811523, 0.018234367370605468, 0.01816985511779785, 0.018157567977905274, 0.018155519485473632, 0.018141183853149414, 0.01861631965637207, 0.01902592086791992, 0.018431999206542968, 0.018206720352172853, 0.018207744598388673, 0.01818009567260742, 0.01822003173828125, 0.01819443130493164, 0.018284543991088868, 0.018136064529418947, 0.018139135360717772, 0.018192384719848635, 0.018251775741577148, 0.0182108154296875, 0.01819443130493164, 0.019138559341430664, 0.018906112670898437, 0.018497535705566406, 0.01824870491027832, 0.01820979118347168, 0.01819443130493164, 0.018249727249145507, 0.018153472900390624, 0.018123775482177733, 0.01813811111450195, 0.01823539161682129, 0.01821696090698242, 0.017537023544311522, 0.018058240890502928, 0.018123775482177733, 0.018033664703369142, 0.018157567977905274, 0.018084863662719726, 0.018152448654174806, 0.018124799728393554, 0.01816985511779785, 0.018127872467041017, 0.018159616470336915, 0.018177024841308592, 0.018109439849853515, 0.01816268730163574, 0.018142208099365235, 0.018255872726440428, 0.018157567977905274, 0.01815449523925781, 0.018082815170288084, 0.01819443130493164, 0.017911808013916015, 0.017550336837768556, 0.018141183853149414, 0.018233343124389647, 0.01819545555114746, 0.018128896713256838, 0.018185216903686522, 0.01816268730163574, 0.01879859161376953, 0.01947238349914551, 0.018569215774536133, 0.018249727249145507, 0.018322431564331054, 0.01828556823730469, 0.018132991790771484, 0.017935359954833984, 0.01744179153442383, 0.0177838077545166, 0.018156543731689453, 0.018206720352172853, 0.018153472900390624, 0.018198528289794923, 0.018121728897094725, 0.01822719955444336, 0.018191360473632814, 0.018192384719848635, 0.01922662353515625, 0.01845145606994629, 0.01826201629638672, 0.01816166305541992, 0.01807257652282715, 0.018106367111206053, 0.01815449523925781, 0.01816166305541992, 0.017539072036743163, 0.017502208709716797, 0.01814630317687988, 0.01817804718017578, 0.018112543106079102, 0.018208736419677733, 0.018115583419799804, 0.018335744857788085, 0.018107391357421874, 0.017265663146972657, 0.01749504089355469, 0.017555456161499023, 0.01801523208618164, 0.018094079971313477, 0.018086912155151368, 0.018296831130981444, 0.018120704650878908, 0.01824051284790039, 0.017886207580566405, 0.017514495849609374, 0.017582080841064454, 0.017480703353881837, 0.01746124839782715, 0.017531904220581054, 0.017575935363769533, 0.017467391967773437, 0.017505279541015627, 0.017549312591552735, 0.01810534477233887, 0.018440191268920898, 0.018095104217529297, 0.018157567977905274, 0.018078720092773438, 0.017885183334350584, 0.017649663925170898, 0.01800601577758789, 0.01801318359375, 0.017802240371704102, 0.01742950439453125, 0.017474559783935546, 0.017673215866088866, 0.018172927856445312, 0.018140159606933593, 0.018112512588500978, 0.018139135360717772, 0.018163711547851562, 0.017732608795166017, 0.017238016128540038, 0.017369087219238282, 0.017398784637451172, 0.0172728328704834, 0.01717862319946289, 0.017138687133789063, 0.017305599212646485, 0.017427488327026366, 0.017405920028686524, 0.017434623718261717, 0.017483776092529296, 0.01742233657836914, 0.01823232078552246, 0.018108415603637695, 0.018131967544555663, 0.018082815170288084, 0.017939456939697264, 0.01768550491333008, 0.01738035202026367, 
0.01816268730163574, 0.018115583419799804, 0.018078720092773438, 0.018102272033691406, 0.018199552536010744, 0.018122751235961913, 0.017187839508056642, 0.01741414451599121, 0.017464319229125978, 0.017350656509399414, 0.017415168762207032, 0.017350656509399414, 0.017142784118652343, 0.01722675132751465, 0.017496063232421876, 0.017477632522583008, 0.01820364761352539, 0.01840947151184082, 0.018192384719848635, 0.018118656158447266, 0.018147327423095702, 0.01740287971496582, 0.017460224151611328, 0.01738751983642578, 0.01742950439453125, 0.01723391914367676, 0.01763532829284668, 0.01823641586303711, 0.018181119918823242, 0.01808076858520508, 0.01802956771850586, 0.018150400161743165, 0.01805721664428711, 0.018174976348876954, 0.018113536834716795, 0.01816166305541992, 0.018998271942138673, 0.01989017677307129, 0.018536447525024414, 0.01823846435546875, 0.017872896194458008, 0.01809715270996094, 0.018117631912231445, 0.018134016036987305, 0.018126848220825196, 0.01820876884460449, 0.01767628860473633, 0.01758720016479492, 0.017558528900146485, 0.017479679107666016, 0.017452032089233398, 0.01744588851928711, 0.01740492820739746, 0.017434623718261717, 0.017464319229125978, 0.018335744857788085, 0.018251775741577148, 0.018134016036987305, 0.01823744010925293, 0.017935359954833984, 0.01823744010925293, 0.018490367889404297, 0.01860095977783203, 0.018199552536010744, 0.018199552536010744, 0.018331647872924805, 0.01762611198425293, 0.017449983596801756, 0.017490943908691405, 0.018771968841552734, 0.01861427116394043, 0.018242559432983398, 0.01807974433898926, 0.018044927597045898, 0.01818828773498535, 0.018190336227416993, 0.018110464096069336, 0.018100223541259765, 0.018067455291748045, 0.01809715270996094, 0.018041856765747072, 0.018084863662719726, 0.017978368759155275, 0.01798246383666992, 0.01808896064758301, 0.018028543472290038, 0.01744691276550293, 0.017498111724853514, 0.017360895156860352, 0.01744691276550293, 0.017681407928466796, 0.018165760040283203, 0.018156543731689453, 0.018147327423095702, 0.01801523208618164, 0.01809715270996094, 0.017490943908691405, 0.017314815521240236, 0.01742233657836914, 0.017348608016967772, 0.017338367462158204, 0.017386495590209963, 0.017319936752319336, 0.01739571189880371, 0.017415168762207032, 0.017341440200805663, 0.017326080322265625, 0.017374208450317383, 0.017364992141723632, 0.017468416213989257, 0.017474559783935546, 0.017321983337402345, 0.017344512939453126, 0.017485824584960938, 0.017557504653930665, 0.01743257522583008, 0.01732918357849121, 0.017347551345825194, 0.01744691276550293, 0.01737215995788574, 0.017514495849609374, 0.01741926383972168, 0.017427455902099608, 0.01744179153442383, 0.017475584030151366, 0.017701887130737306, 0.017558528900146485, 0.018670591354370117, 0.018485248565673826, 0.018225151062011717, 0.01796403121948242, 0.017520639419555666, 0.017175552368164062, 0.017399839401245117, 0.017363935470581054, 0.017490943908691405, 0.01738956832885742, 0.01742540740966797, 0.017382400512695313, 0.0174202880859375, 0.0174704647064209, 0.01741926383972168, 0.017426431655883787, 0.0174704647064209, 0.01744691276550293, 0.017613824844360353, 0.018100223541259765, 0.018094079971313477, 0.018066432952880858, 0.0181790714263916, 0.018131967544555663, 0.01819545555114746, 0.018092031478881835, 0.018127872467041017, 0.018060287475585936, 0.018091007232666014, 0.018142208099365235, 0.018092031478881835, 0.018134016036987305, 0.018160640716552736, 0.018092031478881835, 0.017839103698730468, 0.01737113571166992, 0.0178288631439209, 
0.017458175659179686, 0.017464319229125978, 0.01741721534729004, 0.01736396789550781, 0.017426431655883787, 0.017571840286254883, 0.01746329689025879, 0.01745305633544922, 0.017374208450317383, 0.017351680755615235, 0.017504255294799806, 0.017521663665771483, 0.01743974494934082, 0.017525760650634766, 0.01746124839782715, 0.01740083122253418, 0.018347007751464844, 0.018692096710205077, 0.018280448913574218, 0.018215936660766603, 0.018159616470336915, 0.01821696090698242, 0.018226175308227538, 0.01820467185974121, 0.018124799728393554, 0.018233343124389647, 0.01821183967590332, 0.018167808532714845, 0.01822719955444336, 0.018150400161743165, 0.0182609920501709, 0.017293312072753905, 0.017500160217285156, 0.017449983596801756, 0.017473535537719728, 0.017447935104370118, 0.01741004753112793, 0.017492992401123047, 0.01741619110107422, 0.01745408058166504, 0.017525760650634766, 0.01744076728820801, 0.017906688690185548, 0.01838591957092285, 0.01818623924255371, 0.01814630317687988, 0.01819340705871582, 0.018158592224121094, 0.017868799209594728, 0.01748684883117676, 0.0174704647064209, 0.01741414451599121, 0.017503231048583985, 0.017453088760375976, 0.01749398422241211, 0.018077695846557617, 0.018132991790771484, 0.018258943557739257, 0.01820057678222656, 0.01840640068054199, 0.018164735794067383, 0.01857535934448242, 0.018387968063354493, 0.018512895584106445, 0.01842278480529785, 0.01816166305541992, 0.018183168411254884, 0.01824563217163086, 0.018190336227416993, 0.018181119918823242, 0.018542591094970702, 0.01861222457885742, 0.018481151580810547, 0.018226175308227538, 0.018362367630004883, 0.01827020835876465, 0.018372608184814454, 0.018251775741577148, 0.01827840042114258, 0.018215936660766603, 0.018275327682495117, 0.01816268730163574, 0.01817804718017578, 0.01820364761352539, 0.01818828773498535, 0.01830297660827637, 0.01824358367919922, 0.01821696090698242, 0.018125823974609375, 0.01820364761352539, 0.01822105598449707, 0.018266111373901366, 0.018779136657714843, 0.01822208023071289, 0.01720832061767578, 0.017356800079345702, 0.017346559524536134, 0.01737215995788574, 0.01735577583312988, 0.017765375137329103, 0.01739366340637207, 0.017374208450317383, 0.017435647964477538, 0.017317888259887695, 0.017408000946044923, 0.01746227264404297, 0.01740902328491211, 0.017435647964477538, 0.01743155288696289, 0.01745715141296387, 0.0174335994720459, 0.017367040634155274, 0.017350656509399414, 0.017481727600097655, 0.017533952713012696, 0.01739673614501953, 0.01747865676879883, 0.017328128814697266, 0.017333248138427734, 0.0178155517578125, 0.017485824584960938, 0.017469440460205078, 0.017382400512695313, 0.01742131233215332, 0.017466367721557616, 0.018568191528320312, 0.018787328720092773, 0.018408447265625, 0.018185216903686522, 0.018181119918823242, 0.018215936660766603, 0.018266111373901366, 0.01824051284790039, 0.01820979118347168, 0.01819340705871582, 0.01822105598449707, 0.018257919311523436, 0.018307071685791015, 0.01824563217163086, 0.01816166305541992, 0.01785753631591797, 0.01745408058166504, 0.01741004753112793, 0.017846271514892577, 0.017739776611328126, 0.019405824661254883, 0.0186296329498291, 0.0182609920501709, 0.01841766357421875, 0.018544639587402344, 0.018284576416015625, 0.01831011199951172, 0.018282495498657226, 0.018317312240600587, 0.018325504302978517, 0.018272256851196288, 0.018257919311523436]",tokens/s,55.74398477923595,,,,,,,, 
-4bit-awq-gemv-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,Qwen/Qwen1.5-72B,Qwen/Qwen1.5-72B,cuda,0,42,,,True,,,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run - report = scenario.run(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run - self.run_model_loading_tracking(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking - backend.load() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load - self.load_transformers_model() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model - self.load_transformers_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights - self.load_transformers_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained - self.pretrained_model = self.automodel_loader.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4034, in from_pretrained - dispatch_model(model, **device_map_kwargs) - File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 494, in dispatch_model - model.to(device) - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 2905, in to - return super().to(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1174, in to - return self._apply(convert) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply - module._apply(fn) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply - module._apply(fn) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply - module._apply(fn) - [Previous line repeated 2 more times] - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 854, in _apply - self._buffers[key] = fn(buf) - File 
""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1160, in convert - return t.to( -torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 96.00 MiB. GPU 0 has a total capacity of 22.18 GiB of which 68.50 MiB is free. Process 68301 has 22.11 GiB memory in use. Of the allocated memory 21.86 GiB is allocated by PyTorch, and 10.74 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) - -",qwen2,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemv-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,EleutherAI/pythia-1.4b,EleutherAI/pythia-1.4b,cuda,0,42,,,True,,,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,gpt_neox,MB,1574.146048,1753.74336,0.0,1168.113664,1154.613248,s,1,8.0856572265625,8.0856572265625,0.0,8.0856572265625,8.0856572265625,8.0856572265625,8.0856572265625,[8.0856572265625],,kWh,1.4016864449312531e-05,7.65477175005661e-06,1.770695861003091e-05,3.937859480940005e-05,,MB,1719.558144,2015.88736,0.0,1369.440256,1323.44832,s,10,0.28449708557128905,0.028449708557128906,4.510506718818219e-05,0.028452783584594726,0.028506343078613283,0.0285119873046875,0.028516502685546875,"[0.028409631729125976, 0.028375423431396485, 0.028472063064575195, 0.028411008834838866, 0.02841219139099121, 0.02847884750366211, 0.028517631530761717, 0.0284816951751709, 0.028505088806152344, 0.028433504104614257]",tokens/s,8998.334710035253,kWh,3.354726978675281e-07,1.8382157064220195e-07,1.597690135563918e-06,2.1169844040736482e-06,tokens/kWh,120926729.31713009,MB,1726.89408,2036.85888,0.0,1390.411776,1377.250816,s,10,10.956912353515623,1.0956912353515624,0.008465661206525355,1.0935032958984374,1.104293603515625,1.108720654296875,1.112262294921875,"[1.090718994140625, 1.113147705078125, 1.089500732421875, 1.0905718994140625, 1.09120263671875, 1.102773681640625, 1.095803955078125, 1.097947998046875, 1.0819349365234374, 1.103309814453125]",tokens/s,57.49795012258712,kWh,1.290681013824314e-05,7.0725040657772504e-06,2.506244230363515e-05,4.504175650765554e-05,tokens/kWh,1398702.1129891365,,s,630,10.953259010314936,0.017386125413198318,0.0004094871227497082,0.017179647445678712,0.017893477821350097,0.01802137565612793,0.018536427898406985,"[0.016937984466552734, 0.01720832061767578, 0.01719193649291992, 0.017113088607788086, 0.01706598472595215, 0.017163263320922852, 0.017094655990600584, 0.017076223373413087, 0.017104896545410156, 0.017133567810058595, 0.017137664794921875, 0.017101823806762697, 0.017177600860595704, 0.017228799819946287, 0.017161216735839844, 0.017131519317626954, 0.01724825668334961, 0.016954368591308593, 0.016889856338500975, 0.01698406410217285, 0.017172479629516603, 0.017135616302490234, 0.017126399993896483, 0.017179647445678712, 0.01719705581665039, 0.017124351501464845, 0.017135616302490234, 
0.017138687133789063, 0.017498111724853514, 0.017367040634155274, 0.01763532829284668, 0.017265663146972657, 0.017154048919677735, 0.017252351760864256, 0.017118207931518553, 0.017141759872436522, 0.01726361656188965, 0.01719500732421875, 0.017148927688598634, 0.017185792922973633, 0.017163263320922852, 0.017183744430541992, 0.017168384552001953, 0.01720217514038086, 0.017238016128540038, 0.01770086479187012, 0.018189311981201172, 0.018066432952880858, 0.017884159088134767, 0.017885183334350584, 0.017914880752563478, 0.017902591705322265, 0.01804287910461426, 0.017987583160400392, 0.017288192749023438, 0.017697792053222656, 0.01786675262451172, 0.017362943649291994, 0.017314815521240236, 0.017242111206054688, 0.017155071258544922, 0.017229824066162108, 0.017129472732543945, 0.01779302406311035, 0.02085478401184082, 0.01928294372558594, 0.018142208099365235, 0.017847295761108398, 0.01797324752807617, 0.017869823455810546, 0.017970176696777345, 0.01784320068359375, 0.01783296012878418, 0.017878015518188475, 0.017959936141967774, 0.017689599990844726, 0.017167360305786132, 0.01721446418762207, 0.017164287567138673, 0.017152000427246093, 0.017129472732543945, 0.017164287567138673, 0.017146879196166993, 0.018354175567626953, 0.017893375396728514, 0.018997247695922852, 0.01799782371520996, 0.017913856506347657, 0.017846271514892577, 0.017903615951538086, 0.01780121612548828, 0.01784320068359375, 0.017830911636352538, 0.01785651206970215, 0.01802137565612793, 0.01785036849975586, 0.01787494468688965, 0.01782579231262207, 0.017861631393432616, 0.01785241508483887, 0.01800499153137207, 0.017854463577270507, 0.017886207580566405, 0.0172677116394043, 0.01721139144897461, 0.01723084831237793, 0.01760870361328125, 0.017672191619873046, 0.01720012855529785, 0.01716633605957031, 0.01716531181335449, 0.017129472732543945, 0.017720319747924804, 0.01759436798095703, 0.017082368850708008, 0.017140735626220704, 0.017079296112060546, 0.017128448486328125, 0.017076223373413087, 0.017152000427246093, 0.017094655990600584, 0.01716633605957031, 0.017143808364868163, 0.017126399993896483, 0.017131519317626954, 0.01715814399719238, 0.016912416458129884, 0.01711408042907715, 0.017062911987304686, 0.017140735626220704, 0.017141759872436522, 0.01704550361633301, 0.017092607498168946, 0.017077247619628907, 0.017144832611083984, 0.017093631744384767, 0.017074176788330078, 0.017022975921630858, 0.017796096801757814, 0.01724313545227051, 0.017083391189575196, 0.017106943130493164, 0.017110015869140623, 0.017082368850708008, 0.017079296112060546, 0.017177600860595704, 0.017076223373413087, 0.017076223373413087, 0.017119232177734374, 0.017055744171142577, 0.01705369567871094, 0.01703014373779297, 0.017135616302490234, 0.017121280670166016, 0.017107967376708985, 0.017123327255249024, 0.017696767807006835, 0.017968128204345703, 0.01788313674926758, 0.017729536056518554, 0.017779712677001954, 0.017712127685546874, 0.017887231826782226, 0.01784012794494629, 0.01786675262451172, 0.017810432434082032, 0.0178155517578125, 0.017488895416259767, 0.01721241569519043, 0.017040384292602538, 0.017864704132080078, 0.017672191619873046, 0.01723494338989258, 0.017167360305786132, 0.017187839508056642, 0.01719193649291992, 0.017121280670166016, 0.017126399993896483, 0.017134592056274413, 0.01721651268005371, 0.017152000427246093, 0.01721241569519043, 0.017119232177734374, 0.017168415069580077, 0.017400800704956056, 0.017296384811401368, 0.017115135192871094, 0.017309696197509765, 0.01719603157043457, 0.01698099136352539, 0.01717043113708496, 
0.017091583251953125, 0.017090560913085938, 0.01702809524536133, 0.017273855209350587, 0.017120256423950195, 0.01705369567871094, 0.017765375137329103, 0.017869823455810546, 0.017727487564086913, 0.01780940818786621, 0.017748992919921876, 0.017778688430786133, 0.017795072555541993, 0.01779302406311035, 0.017888256072998047, 0.017755136489868165, 0.01781452751159668, 0.01777663993835449, 0.01777561569213867, 0.017746944427490235, 0.017811456680297853, 0.01744588851928711, 0.017115135192871094, 0.01715814399719238, 0.01722368049621582, 0.017108991622924806, 0.017115135192871094, 0.017129472732543945, 0.017138687133789063, 0.017114112854003907, 0.017119232177734374, 0.017131519317626954, 0.01703424072265625, 0.017094655990600584, 0.017077247619628907, 0.017144832611083984, 0.017100799560546876, 0.017713151931762695, 0.017530879974365234, 0.017084415435791016, 0.01718169593811035, 0.017111040115356444, 0.017146879196166993, 0.017059839248657227, 0.017102848052978514, 0.017083391189575196, 0.017081344604492187, 0.017100799560546876, 0.017141759872436522, 0.01723289680480957, 0.017156095504760743, 0.017138687133789063, 0.01718681526184082, 0.01720012855529785, 0.017169408798217774, 0.01719910430908203, 0.017129472732543945, 0.01715814399719238, 0.017169408798217774, 0.017103872299194335, 0.017131519317626954, 0.016974847793579103, 0.017507328033447265, 0.01721855926513672, 0.017171455383300782, 0.017101823806762697, 0.01722470474243164, 0.01723904037475586, 0.017110015869140623, 0.017100799560546876, 0.017137664794921875, 0.017159168243408202, 0.017385471343994142, 0.01715814399719238, 0.017146879196166993, 0.01720832061767578, 0.017120256423950195, 0.01780019187927246, 0.018082815170288084, 0.01784012794494629, 0.017861631393432616, 0.01778278350830078, 0.017770496368408203, 0.01715814399719238, 0.017201152801513672, 0.017161216735839844, 0.017128448486328125, 0.01704243278503418, 0.017116159439086915, 0.017099775314331055, 0.0170700798034668, 0.01721855926513672, 0.017140735626220704, 0.017117183685302736, 0.01714995193481445, 0.01755340766906738, 0.017675264358520508, 0.01724825668334961, 0.01722060775756836, 0.017177600860595704, 0.017144832611083984, 0.017153024673461914, 0.017111040115356444, 0.017139711380004884, 0.017128448486328125, 0.01719910430908203, 0.01714995193481445, 0.017111040115356444, 0.017140735626220704, 0.017188863754272463, 0.017144832611083984, 0.017134592056274413, 0.017122304916381836, 0.01718169593811035, 0.017063936233520507, 0.017159168243408202, 0.017140735626220704, 0.017082368850708008, 0.01804287910461426, 0.017884159088134767, 0.018341888427734376, 0.017886207580566405, 0.017838079452514647, 0.01775103950500488, 0.01701785659790039, 0.01719603157043457, 0.017780736923217775, 0.0178155517578125, 0.01778483200073242, 0.017844224929809572, 0.01781350326538086, 0.017838079452514647, 0.017781759262084963, 0.01776639938354492, 0.01780940818786621, 0.017542144775390626, 0.017124351501464845, 0.017134592056274413, 0.017120256423950195, 0.017122304916381836, 0.01720319938659668, 0.017126399993896483, 0.017184768676757813, 0.017229824066162108, 0.017142784118652343, 0.017104896545410156, 0.017157119750976564, 0.017147903442382813, 0.017112064361572265, 0.017085439682006837, 0.01710905647277832, 0.01791584014892578, 0.0173885440826416, 0.018951168060302736, 0.018412544250488282, 0.01741107177734375, 0.01719910430908203, 0.017349632263183593, 0.017236991882324217, 0.01717350387573242, 0.01723494338989258, 0.017119232177734374, 0.016944128036499022, 0.016930816650390625, 
0.01802137565612793, 0.01743052864074707, 0.01719910430908203, 0.017118207931518553, 0.017152000427246093, 0.01717043113708496, 0.017160192489624023, 0.017154048919677735, 0.017167360305786132, 0.01762099266052246, 0.017923072814941408, 0.01784524726867676, 0.018214912414550782, 0.01789030456542969, 0.01780838394165039, 0.017950719833374023, 0.0180316162109375, 0.0180633602142334, 0.01779916763305664, 0.01789030456542969, 0.0177838077545166, 0.01780940818786621, 0.017833984375, 0.01699839973449707, 0.017167360305786132, 0.01806540870666504, 0.018018304824829103, 0.01783296012878418, 0.017894399642944335, 0.01784217643737793, 0.01784115219116211, 0.0178288631439209, 0.017863679885864257, 0.017534975051879884, 0.017127424240112304, 0.017145856857299805, 0.017168384552001953, 0.017110015869140623, 0.017123327255249024, 0.017147903442382813, 0.017118207931518553, 0.017116159439086915, 0.017108991622924806, 0.017253376007080077, 0.017118207931518553, 0.01719500732421875, 0.01746329689025879, 0.018547712326049806, 0.018050048828125, 0.017795072555541993, 0.017939456939697264, 0.017838079452514647, 0.017930240631103517, 0.017821695327758787, 0.017358848571777344, 0.0172042236328125, 0.017121280670166016, 0.017167360305786132, 0.017309696197509765, 0.017107967376708985, 0.017176576614379883, 0.01722572708129883, 0.017119232177734374, 0.017139711380004884, 0.017732608795166017, 0.017298431396484376, 0.017132543563842775, 0.01716633605957031, 0.01723289680480957, 0.017114112854003907, 0.017126399993896483, 0.01717862319946289, 0.017105920791625977, 0.01717862319946289, 0.017135616302490234, 0.017194015502929687, 0.01708233642578125, 0.017135616302490234, 0.017228799819946287, 0.01719500732421875, 0.017147903442382813, 0.017155071258544922, 0.01783296012878418, 0.017802240371704102, 0.01716531181335449, 0.01717350387573242, 0.016923648834228516, 0.01721241569519043, 0.017183744430541992, 0.017180671691894533, 0.017169408798217774, 0.01714995193481445, 0.017143808364868163, 0.01722060775756836, 0.017171455383300782, 0.017101823806762697, 0.017128448486328125, 0.017179647445678712, 0.01721958351135254, 0.01713052749633789, 0.0172523193359375, 0.017188863754272463, 0.017228799819946287, 0.017283071517944337, 0.017254400253295898, 0.017868799209594728, 0.017596416473388672, 0.01721446418762207, 0.017135616302490234, 0.017326080322265625, 0.016959487915039064, 0.01686425590515137, 0.017145856857299805, 0.017185792922973633, 0.017110015869140623, 0.017123327255249024, 0.017077247619628907, 0.01724415969848633, 0.01741209602355957, 0.017295360565185547, 0.017104896545410156, 0.01782374382019043, 0.01784012794494629, 0.018036735534667968, 0.017915903091430666, 0.017912832260131836, 0.017846271514892577, 0.017777664184570312, 0.01779302406311035, 0.01781350326538086, 0.017140735626220704, 0.017145856857299805, 0.01722163200378418, 0.017090560913085938, 0.017079296112060546, 0.01721548843383789, 0.017201152801513672, 0.017141759872436522, 0.018158592224121094, 0.017728511810302734, 0.01819647979736328, 0.01822105598449707, 0.01794047927856445, 0.01785856056213379, 0.017876991271972655, 0.017917951583862304, 0.017925119400024413, 0.017876991271972655, 0.017888256072998047, 0.01696870422363281, 0.017107967376708985, 0.017118207931518553, 0.017118207931518553, 0.0171909122467041, 0.017056768417358398, 0.017141759872436522, 0.017101823806762697, 0.017075199127197266, 0.017121280670166016, 0.017113088607788086, 0.017082368850708008, 0.017459199905395507, 0.018508800506591795, 0.017622016906738282, 
0.01724825668334961, 0.017117183685302736, 0.017155071258544922, 0.0172410888671875, 0.017131519317626954, 0.017127424240112304, 0.0172359676361084, 0.017113088607788086, 0.01715814399719238, 0.017168384552001953, 0.017228799819946287, 0.01719193649291992, 0.017177600860595704, 0.017168384552001953, 0.017111040115356444, 0.017111040115356444, 0.017055744171142577, 0.017092607498168946, 0.0171059513092041, 0.01711408042907715, 0.01714995193481445, 0.017508352279663086, 0.017154048919677735, 0.017107967376708985, 0.017116159439086915, 0.017084415435791016, 0.017105920791625977, 0.017140735626220704, 0.017122304916381836, 0.017123327255249024, 0.017161216735839844, 0.017266687393188478, 0.017060863494873048, 0.017069055557250978, 0.017154048919677735, 0.017086463928222655, 0.017108991622924806, 0.01706598472595215, 0.01718681526184082, 0.017133567810058595, 0.017112064361572265, 0.017090560913085938, 0.017110015869140623, 0.017086463928222655, 0.017084415435791016, 0.01718988800048828, 0.017107967376708985, 0.017067007064819336, 0.016918527603149415, 0.017128448486328125, 0.017082368850708008, 0.017091583251953125, 0.017098751068115235, 0.017103872299194335, 0.017069055557250978, 0.017140735626220704, 0.016921600341796874, 0.01759334373474121, 0.017521663665771483, 0.017086463928222655, 0.017088512420654296, 0.01718681526184082, 0.017183744430541992, 0.017071104049682616, 0.017069055557250978, 0.017176576614379883, 0.017075199127197266, 0.01719705581665039, 0.01718988800048828, 0.01719808006286621, 0.01719910430908203, 0.017111040115356444, 0.017520639419555666, 0.017217536926269532, 0.017148927688598634, 0.017110015869140623, 0.017131519317626954, 0.017113088607788086, 0.017098751068115235, 0.017217536926269532, 0.017311744689941407, 0.017120256423950195, 0.017153024673461914, 0.017141759872436522, 0.017107967376708985, 0.017087488174438475, 0.0172042236328125, 0.018050048828125, 0.018174976348876954, 0.017836032867431642, 0.017917951583862304, 0.017844224929809572, 0.017855487823486327, 0.017847295761108398, 0.01787392044067383, 0.0178155517578125, 0.017876991271972655, 0.01782067108154297, 0.017912832260131836, 0.01804083251953125, 0.017957887649536132, 0.01790771293640137, 0.01784934425354004, 0.017911808013916015, 0.01783705520629883, 0.017979391098022462, 0.017926143646240233, 0.018299936294555664, 0.01923580741882324, 0.019681280136108398, 0.018274303436279296]",tokens/s,57.51712795312466,,,,,,,, -4bit-awq-gemv-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,facebook/xglm-7.5B,facebook/xglm-7.5B,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise 
ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - self.load_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3704, in from_pretrained - config = cls._autoset_attn_implementation( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1490, in _autoset_attn_implementation - config = cls._check_and_enable_sdpa( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1656, in _check_and_enable_sdpa - raise ValueError( -ValueError: XGLMForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. 
Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemv-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,huggyllama/llama-30b,huggyllama/llama-30b,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run - report = scenario.run(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 105, in run - _ = backend.generate(self.inputs, self.config.generate_kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context - return func(*args, **kwargs) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 400, in generate - return self.pretrained_model.generate(**inputs, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context - return func(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 1914, in generate - result = self._sample( - File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2651, in _sample - outputs = self( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1174, in forward - outputs = self.model( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 978, in forward - layer_outputs = decoder_layer( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File 
""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 718, in forward - hidden_states, self_attn_weights, present_key_value = self.self_attn( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 614, in forward - query_states = self.q_proj(hidden_states) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context - return func(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/gemv.py"", line 162, in forward - assert AWQ_INSTALLED, ( -AssertionError: AWQ kernels could not be loaded. Please install them from https://github.com/casper-hansen/AutoAWQ_kernels - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemv-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,openai-community/gpt2-large,openai-community/gpt2-large,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.42.3,,0.31.0,,,,1.20.0,,,,0.11.1,,,,,,,,,,,,,,,,,,,,,,,,,,,MB,1249.247232,2645.03296,0.0,1998.585856,1692.386816,s,10,0.18110473632812502,0.018110473632812502,0.0005832169414279196,0.017981696128845214,0.01848877696990967,0.019090020084381102,0.01957101457595825,"[0.01969126319885254, 0.017800607681274415, 0.017747167587280274, 0.017933311462402343, 0.017479488372802734, 0.018030080795288086, 0.01773776054382324, 0.018034944534301756, 0.018294944763183593, 0.018355167388916016]",tokens/s,14135.466867977431,kWh,2.0449506301505892e-07,1.12053370853919e-07,6.15953972193862e-07,9.3250240606284e-07,tokens/kWh,274530122.74881846,MB,1250.115584,2645.03296,0.0,1998.585856,1714.454528,s,10,10.794084228515624,1.0794084228515626,0.016644320820693784,1.0746611938476562,1.0933045532226562,1.1081316467285156,1.1199933215332032,"[1.122958740234375, 1.0686610107421874, 1.0693388671875, 1.0852027587890625, 1.0651156005859375, 1.0635029296875, 1.0766837158203124, 1.072638671875, 1.0900096435546875, 
1.0799722900390625]",tokens/s,58.365303314539354,kWh,1.2562686432810393e-05,6.8838655438497616e-06,2.551703915640865e-05,4.496359113306879e-05,tokens/kWh,1401133.6375146916,,s,629,10.936251413345326,0.01738672720722629,0.0022197024292994623,0.016885791778564453,0.017659904098510742,0.018658509063720703,0.034891817016601566,"[0.01904435157775879, 0.0187064323425293, 0.01863270378112793, 0.018726911544799805, 0.018726911544799805, 0.018873344421386717, 0.018765823364257812, 0.018811904907226562, 0.018739200592041014, 0.018676736831665038, 0.018762752532958983, 0.018803712844848632, 0.018678783416748047, 0.01875660705566406, 0.018675712585449217, 0.018868223190307617, 0.018815999984741212, 0.018736127853393555, 0.018751487731933594, 0.018682880401611326, 0.018691072463989256, 0.01845964813232422, 0.019186687469482423, 0.018593791961669923, 0.018990079879760743, 0.0188723201751709, 0.01862553596496582, 0.01846067237854004, 0.017209344863891602, 0.016954368591308593, 0.017811456680297853, 0.016908287048339844, 0.01696767997741699, 0.01758720016479492, 0.017490943908691405, 0.017522687911987304, 0.017477632522583008, 0.017419296264648436, 0.01743459129333496, 0.017391616821289063, 0.01677107238769531, 0.01680998420715332, 0.01681920051574707, 0.016915456771850586, 0.01683865547180176, 0.01681100845336914, 0.01679155158996582, 0.016780288696289062, 0.01678745651245117, 0.016950271606445313, 0.016879615783691407, 0.016874496459960937, 0.01683558464050293, 0.016773120880126953, 0.016753664016723634, 0.016665599822998048, 0.01686016082763672, 0.016930816650390625, 0.016816160202026368, 0.016822240829467774, 0.01683558464050293, 0.01679667282104492, 0.03492966461181641, 0.01683558464050293, 0.016866304397583007, 0.01678335952758789, 0.01679871940612793, 0.016550912857055664, 0.016587776184082033, 0.016528383255004882, 0.016562175750732423, 0.01659395217895508, 0.016603103637695314, 0.01661849594116211, 0.017155071258544922, 0.016927743911743166, 0.01680281639099121, 0.016898048400878905, 0.01679974365234375, 0.01720832061767578, 0.01759231948852539, 0.01741312026977539, 0.01738751983642578, 0.01700556755065918, 0.016825344085693358, 0.017289247512817383, 0.01739465522766113, 0.01723187255859375, 0.016772096633911132, 0.01683558464050293, 0.01681407928466797, 0.01676288032531738, 0.016736255645751954, 0.016784383773803712, 0.016827392578125, 0.016879615783691407, 0.01670867156982422, 0.01733932876586914, 0.01680793571472168, 0.01677414321899414, 0.016729087829589845, 0.016703487396240235, 0.016753664016723634, 0.016742399215698242, 0.01681100845336914, 0.01680486488342285, 0.016742399215698242, 0.017876991271972655, 0.017705984115600586, 0.017125375747680666, 0.016781312942504883, 0.0167956485748291, 0.016955392837524414, 0.01680281639099121, 0.016741376876831054, 0.016845823287963867, 0.01765888023376465, 0.017520639419555666, 0.017494016647338868, 0.017333248138427734, 0.01734758377075195, 0.01744588851928711, 0.016930816650390625, 0.01677926445007324, 0.01683660888671875, 0.03493273544311523, 0.016877567291259766, 0.016845823287963867, 0.016837631225585938, 0.0167956485748291, 0.01678233528137207, 0.01701068878173828, 0.018579456329345705, 0.017508352279663086, 0.01745305633544922, 0.017385471343994142, 0.017447935104370118, 0.017209344863891602, 0.01686835289001465, 0.016921600341796874, 0.016829439163208008, 0.01679871940612793, 0.017161216735839844, 0.017475584030151366, 0.017442815780639647, 0.016941055297851563, 0.016772096633911132, 0.016741376876831054, 0.017126399993896483, 
0.01683251190185547, 0.016769023895263673, 0.0167587833404541, 0.016749568939208984, 0.01679871940612793, 0.016845823287963867, 0.01680281639099121, 0.016793600082397463, 0.016722944259643553, 0.01680691146850586, 0.016722944259643553, 0.016822303771972656, 0.016772096633911132, 0.016756704330444336, 0.016680959701538087, 0.0167956485748291, 0.016973823547363282, 0.01684787178039551, 0.0165928955078125, 0.016727039337158203, 0.016833536148071288, 0.016866304397583007, 0.016881664276123046, 0.01721651268005371, 0.016885759353637696, 0.016885759353637696, 0.016877567291259766, 0.01679974365234375, 0.01678643226623535, 0.016821247100830078, 0.01676288032531738, 0.01681510353088379, 0.016925695419311524, 0.016898080825805663, 0.01683247947692871, 0.016909311294555664, 0.017939456939697264, 0.017320959091186524, 0.017529855728149413, 0.035783679962158206, 0.017524736404418945, 0.017922048568725587, 0.017533952713012696, 0.017596416473388672, 0.017516544342041016, 0.01758415985107422, 0.017504224777221678, 0.01761587142944336, 0.01772035217285156, 0.01703318405151367, 0.01683967971801758, 0.017302528381347656, 0.017532928466796875, 0.017551359176635743, 0.017521663665771483, 0.0174335994720459, 0.017333248138427734, 0.017681407928466796, 0.01702809524536133, 0.016926719665527345, 0.016827392578125, 0.016739328384399413, 0.01679667282104492, 0.01683967971801758, 0.016821247100830078, 0.016755712509155272, 0.01680588722229004, 0.016742399215698242, 0.01681100845336914, 0.01683865547180176, 0.01679462432861328, 0.01680691146850586, 0.01680588722229004, 0.017383424758911133, 0.01756876754760742, 0.017504255294799806, 0.017168384552001953, 0.017187839508056642, 0.01743155288696289, 0.017506303787231444, 0.017521696090698244, 0.01765782356262207, 0.01742540740966797, 0.01803878402709961, 0.01743974494934082, 0.017356800079345702, 0.01720217514038086, 0.0174202880859375, 0.017399808883666993, 0.01712950325012207, 0.017301471710205078, 0.01706598472595215, 0.016917503356933594, 0.016837631225585938, 0.01700454330444336, 0.01720319938659668, 0.016863231658935548, 0.017084415435791016, 0.016883712768554687, 0.016911359786987306, 0.016948223114013672, 0.016947200775146484, 0.03465932846069336, 0.016855039596557618, 0.016886783599853517, 0.01677926445007324, 0.016875520706176757, 0.017054719924926756, 0.018347007751464844, 0.017502208709716797, 0.016885759353637696, 0.01677827262878418, 0.01684169578552246, 0.01680281639099121, 0.016855039596557618, 0.016882688522338866, 0.016919551849365236, 0.016873472213745116, 0.016904191970825197, 0.01702400016784668, 0.01683456039428711, 0.0168089599609375, 0.01681817626953125, 0.01678950309753418, 0.016825344085693358, 0.0168724479675293, 0.01741926383972168, 0.01754521560668945, 0.01702911949157715, 0.01677107238769531, 0.01682841682434082, 0.016863231658935548, 0.016890880584716796, 0.01681100845336914, 0.01683456039428711, 0.016755712509155272, 0.01681510353088379, 0.01676697540283203, 0.01680076789855957, 0.01677824020385742, 0.016638975143432617, 0.016635904312133788, 0.01660518455505371, 0.01659187126159668, 0.016564224243164064, 0.01677107238769531, 0.016738304138183592, 0.01681407928466797, 0.016753664016723634, 0.016854015350341797, 0.016908287048339844, 0.016905216217041014, 0.016857088088989256, 0.016907264709472656, 0.016872512817382813, 0.01691846466064453, 0.016712703704833985, 0.016891904830932617, 0.01698406410217285, 0.017449983596801756, 0.016718847274780273, 0.0166297607421875, 0.017092607498168946, 0.017095680236816405, 0.017131519317626954, 
0.03580416107177734, 0.01741823959350586, 0.017531904220581054, 0.017286144256591796, 0.016875520706176757, 0.016792575836181642, 0.01678745651245117, 0.01683251190185547, 0.016887807846069337, 0.016873472213745116, 0.01686425590515137, 0.016894975662231446, 0.016882688522338866, 0.016870399475097657, 0.016746496200561522, 0.01676595115661621, 0.016723968505859374, 0.01664204788208008, 0.016663551330566406, 0.016582656860351562, 0.01677107238769531, 0.016749568939208984, 0.01679871940612793, 0.01680281639099121, 0.016901151657104492, 0.016837600708007813, 0.0168407039642334, 0.016712703704833985, 0.016760831832885743, 0.01677516746520996, 0.016748544692993163, 0.016825344085693358, 0.01679667282104492, 0.01682329559326172, 0.01675775909423828, 0.01678950309753418, 0.01677926445007324, 0.0168407039642334, 0.0173885440826416, 0.016639999389648438, 0.017270784378051757, 0.01745408058166504, 0.01702707290649414, 0.016867328643798828, 0.01680281639099121, 0.01678643226623535, 0.01683865547180176, 0.01683660888671875, 0.01681305694580078, 0.01685196876525879, 0.01686425590515137, 0.016905216217041014, 0.016862207412719727, 0.01678745651245117, 0.016662527084350585, 0.01660825538635254, 0.016852991104125976, 0.016821247100830078, 0.01678950309753418, 0.016772096633911132, 0.016773120880126953, 0.017385471343994142, 0.016871423721313478, 0.03479449462890625, 0.016877567291259766, 0.016845823287963867, 0.01698918342590332, 0.018563072204589845, 0.018008064270019532, 0.017552383422851564, 0.017528831481933595, 0.017434623718261717, 0.017492992401123047, 0.017391616821289063, 0.016911359786987306, 0.0168089599609375, 0.016854047775268555, 0.016837600708007813, 0.016928768157958983, 0.016761856079101564, 0.01681203269958496, 0.0168407039642334, 0.017082368850708008, 0.016893951416015626, 0.01683865547180176, 0.01681612777709961, 0.017269760131835937, 0.017571840286254883, 0.017605632781982423, 0.017517568588256836, 0.017464319229125978, 0.01724723243713379, 0.01680179214477539, 0.016780288696289062, 0.01679052734375, 0.017031167984008787, 0.017063936233520507, 0.01683558464050293, 0.016869375228881836, 0.016865280151367186, 0.016833536148071288, 0.017081344604492187, 0.01686016082763672, 0.016842752456665038, 0.017504255294799806, 0.017483776092529296, 0.017307647705078123, 0.01740595245361328, 0.01787596893310547, 0.017582080841064454, 0.017482751846313475, 0.016935935974121095, 0.016882688522338866, 0.016845823287963867, 0.0168724479675293, 0.01681715202331543, 0.016829439163208008, 0.01680793571472168, 0.016883712768554687, 0.01684787178039551, 0.01682841682434082, 0.01678233528137207, 0.01678745651245117, 0.01681510353088379, 0.016867328643798828, 0.01681612777709961, 0.03634995269775391, 0.017678335189819337, 0.01745305633544922, 0.01757900810241699, 0.01740287971496582, 0.01696767997741699, 0.01686425590515137, 0.016873472213745116, 0.01681203269958496, 0.016969728469848632, 0.01683558464050293, 0.01681100845336914, 0.01684889602661133, 0.01699942398071289, 0.016881664276123046, 0.017523712158203125, 0.017688575744628905, 0.01743052864074707, 0.01702604866027832, 0.01676697540283203, 0.016785408020019533, 0.01679871940612793, 0.016767999649047852, 0.01678643226623535, 0.016793600082397463, 0.016772096633911132, 0.016752639770507814, 0.01679871940612793, 0.016849920272827147, 0.0167956485748291, 0.01680793571472168, 0.01679462432861328, 0.016788480758666992, 0.01682636833190918, 0.016620576858520506, 0.01664201545715332, 0.016541696548461913, 0.01660620880126953, 0.01659699249267578, 
0.016553983688354493, 0.016726015090942382, 0.01682022476196289, 0.016731136322021483, 0.016721920013427736, 0.016743423461914063, 0.01677926445007324, 0.016788480758666992, 0.01682636833190918, 0.016746496200561522, 0.016876544952392578, 0.01685606384277344, 0.017342464447021484, 0.018585599899291993, 0.017912832260131836, 0.017761280059814453, 0.017554431915283202, 0.01761894416809082, 0.017217536926269532, 0.016668672561645507, 0.016897024154663084, 0.01738444709777832, 0.017338367462158204, 0.017370111465454103, 0.03539251327514648, 0.016876544952392578, 0.017663999557495116, 0.017537023544311522, 0.017534975051879884, 0.017489952087402345, 0.01749603271484375, 0.017464319229125978, 0.017743871688842772, 0.017655807495117186, 0.017180671691894533, 0.016876544952392578, 0.016903167724609376, 0.016885791778564453, 0.016744415283203126, 0.017467391967773437, 0.017570816040039062, 0.017704959869384765, 0.017072128295898437, 0.016825344085693358, 0.016929792404174804, 0.01744895935058594, 0.017524736404418945, 0.017520639419555666, 0.017458175659179686, 0.017447935104370118, 0.01744076728820801, 0.017460224151611328, 0.017291263580322267, 0.01724415969848633, 0.01681817626953125, 0.016566272735595702, 0.016907264709472656, 0.01762611198425293, 0.01757798385620117, 0.01747865676879883, 0.0174653434753418, 0.017352703094482422, 0.017552383422851564, 0.017476608276367187, 0.017583103179931642, 0.017557504653930665, 0.017567743301391603, 0.01742848014831543, 0.017554431915283202, 0.01745715141296387, 0.017464319229125978, 0.017435647964477538, 0.017383424758911133, 0.01677004814147949, 0.01717350387573242, 0.01722163200378418, 0.017273855209350587, 0.017217536926269532, 0.01686016082763672, 0.017150976181030272, 0.01758720016479492, 0.017476608276367187, 0.01745510482788086, 0.01721036720275879, 0.01666662406921387, 0.016963584899902344, 0.01720012855529785, 0.03548262405395508, 0.016893951416015626, 0.017361919403076173, 0.017487871170043946, 0.016788480758666992, 0.016672767639160157, 0.01684377670288086, 0.01679462432861328, 0.016883712768554687, 0.017301504135131835, 0.017128448486328125, 0.016857088088989256, 0.01679871940612793, 0.01764659118652344, 0.017615936279296876, 0.017562559127807617, 0.017510400772094727, 0.017022975921630858, 0.01685606384277344, 0.016910335540771485, 0.0174653434753418, 0.017408000946044923, 0.01719808006286621, 0.01721651268005371, 0.01722777557373047, 0.01698508834838867, 0.017738752365112305, 0.01719808006286621, 0.016927743911743166, 0.016893951416015626, 0.01681920051574707, 0.01686425590515137, 0.017348608016967772, 0.01757900810241699, 0.017496063232421876, 0.017507328033447265, 0.0174704647064209, 0.017138687133789063, 0.01746227264404297, 0.017505279541015627, 0.01741721534729004, 0.017737728118896484, 0.017764352798461915, 0.017746944427490235, 0.018143232345581056, 0.017331199645996095, 0.016846847534179688, 0.01680384063720703, 0.01679155158996582, 0.01681612777709961, 0.01681715202331543, 0.01684377670288086, 0.01679462432861328, 0.01679769515991211, 0.01679769515991211, 0.016767999649047852, 0.01686016082763672, 0.01681100845336914, 0.017131519317626954, 0.01700249671936035, 0.01682022476196289, 0.016827392578125, 0.01680998420715332]",tokens/s,57.51513715498906,,,,,,,, 
-4bit-awq-gemv-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,tiiuae/falcon-180B,tiiuae/falcon-180B,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 304, in hf_raise_for_status - response.raise_for_status() - File ""/usr/local/lib/python3.10/dist-packages/requests/models.py"", line 1024, in raise_for_status - raise HTTPError(http_error_msg, response=self) -requests.exceptions.HTTPError: 403 Client Error: Forbidden for url: https://huggingface.co/tiiuae/falcon-180B/resolve/main/config.json - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1722, in _get_metadata_or_catch_error - metadata = get_hf_file_metadata(url=url, proxies=proxies, timeout=etag_timeout, headers=headers) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1645, in get_hf_file_metadata - r = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 372, in _request_wrapper - response = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 396, in _request_wrapper - hf_raise_for_status(response) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 367, in hf_raise_for_status - raise HfHubHTTPError(message, response=response) from e -huggingface_hub.utils._errors.HfHubHTTPError: (Request ID: Root=1-6694992c-36991b736febcbd50013b9de;5294acfb-7118-4eae-870a-baae2b1481b2) - -403 Forbidden: Please enable access to public gated repositories in your fine-grained token settings to view this repository.. -Cannot access content at: https://huggingface.co/tiiuae/falcon-180B/resolve/main/config.json. -If you are trying to create or update content,make sure you have a token with the `write` role. 
- -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1221, in hf_hub_download - return _hf_hub_download_to_cache_dir( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1325, in _hf_hub_download_to_cache_dir - _raise_on_head_call_error(head_call_error, force_download, local_files_only) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1826, in _raise_on_head_call_error - raise LocalEntryNotFoundError( -huggingface_hub.utils._errors.LocalEntryNotFoundError: An error happened while trying to locate the file on the Hub and we cannot find the requested files in the local cache. Please check your connection and try again or make sure your Internet connection is on. - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 445, in cached_file - raise EnvironmentError( -OSError: We couldn't connect to 'https://huggingface.co' to load this file, couldn't find it in the cached files and it looks like tiiuae/falcon-180B is not the path to a directory containing a file named config.json. -Checkout your internet connection or see how to run the library in offline mode at 'https://huggingface.co/docs/transformers/installation#offline-mode'. 
- -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemv-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,Deci/DeciLM-7B,Deci/DeciLM-7B,cuda,0,42,,,True,,,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run - report = scenario.run(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run - self.run_model_loading_tracking(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking - backend.load() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load - self.load_transformers_model() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model - self.load_transformers_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights - self.load_transformers_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained - self.pretrained_model = self.automodel_loader.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 559, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3826, in from_pretrained - config = cls._autoset_attn_implementation( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1565, in _autoset_attn_implementation - config = cls._check_and_enable_sdpa( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1731, in _check_and_enable_sdpa - raise ValueError( -ValueError: DeciLMForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. 
Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` - -",deci,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemv-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,facebook/opt-2.7b,facebook/opt-2.7b,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - self.load_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3704, in from_pretrained - config = cls._autoset_attn_implementation( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1490, in _autoset_attn_implementation - config = cls._check_and_enable_sdpa( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1656, in _check_and_enable_sdpa - raise ValueError( -ValueError: OPTForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. 
Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemv-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,huggyllama/llama-7b,huggyllama/llama-7b,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run - report = scenario.run(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 105, in run - _ = backend.generate(self.inputs, self.config.generate_kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context - return func(*args, **kwargs) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 400, in generate - return self.pretrained_model.generate(**inputs, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context - return func(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 1914, in generate - result = self._sample( - File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2651, in _sample - outputs = self( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1174, in forward - outputs = self.model( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 978, in forward - layer_outputs = decoder_layer( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File 
""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 718, in forward - hidden_states, self_attn_weights, present_key_value = self.self_attn( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 614, in forward - query_states = self.q_proj(hidden_states) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context - return func(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/gemv.py"", line 162, in forward - assert AWQ_INSTALLED, ( -AssertionError: AWQ kernels could not be loaded. Please install them from https://github.com/casper-hansen/AutoAWQ_kernels - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemv-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,facebook/opt-13b,facebook/opt-13b,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - self.load_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, 
in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3704, in from_pretrained - config = cls._autoset_attn_implementation( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1490, in _autoset_attn_implementation - config = cls._check_and_enable_sdpa( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1656, in _check_and_enable_sdpa - raise ValueError( -ValueError: OPTForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemv-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,EleutherAI/pythia-70m,EleutherAI/pythia-70m,cuda,0,42,,,True,,,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,gpt_neox,MB,838.680576,745.013248,0.0,159.383552,141.760512,s,1,7.0956220703125,7.0956220703125,0.0,7.0956220703125,7.0956220703125,7.0956220703125,7.0956220703125,[7.0956220703125],,kWh,4.097781209024258e-06,2.223222328164558e-06,4.856392773977536e-06,1.1177396311166352e-05,,MB,1374.5152,845.676544,0.0,199.22944,184.771584,s,31,0.18778019237518315,0.006057425560489779,9.690934086281205e-05,0.006030432224273682,0.0062000322341918945,0.006218928098678589,0.00625078387260437,"[0.0061430401802062985, 0.006202976226806641, 0.006079455852508545, 0.006047423839569092, 0.006181312084197998, 0.0059203200340270995, 0.005938943862915039, 0.005989888191223145, 0.005965983867645264, 0.006085184097290039, 0.0062000322341918945, 0.0058783040046691895, 0.006257599830627442, 0.006159423828125, 0.005990240097045898, 0.006015423774719238, 0.00615718412399292, 0.006008831977844238, 0.006040703773498535, 0.006045023918151856, 0.005971776008605957, 0.006234879970550537, 0.006006048202514648, 0.006045695781707764, 0.006192800045013428, 0.005995744228363037, 0.006003071784973144, 0.005996064186096191, 0.006002560138702393, 0.006030432224273682, 0.005993824005126953]",tokens/s,42262.178452474596,kWh,7.244874462295442e-08,3.969828129617949e-08,1.4903607696565178e-07,2.6118310288478567e-07,tokens/kWh,980155290.1870836,MB,1399.816192,847.773696,0.0,201.326592,184.774144,s,31,9.924355773925782,0.3201405088363155,0.0030044752465266072,0.3192297973632813,0.3245202941894531,0.3251853637695312,0.3263435150146485,"[0.32678726196289065, 0.3195428161621094, 0.3212463989257813, 0.3163550720214844, 0.3180973815917969, 0.3155436706542969, 0.3187124938964844, 0.31425302124023435, 0.32254959106445313, 0.3250626220703125, 0.3227999572753906, 0.32294488525390624, 
0.32530810546875, 0.3245202941894531, 0.3183255615234375, 0.31907302856445313, 0.3215595703125, 0.3172673950195313, 0.3190076904296875, 0.3177928771972656, 0.31804025268554686, 0.32080825805664065, 0.3200824279785156, 0.3232915954589844, 0.31842132568359377, 0.31822052001953127, 0.3220506286621094, 0.3179142761230469, 0.3171041870117188, 0.32244281005859377, 0.3192297973632813]",tokens/s,196.78859207477313,kWh,3.692210575125243e-06,2.023152572772719e-06,5.777487761472101e-06,1.1492850909370062e-05,tokens/kWh,5481668.603969832,,s,1953,9.91209462881088,0.005075317270256469,0.0001224653569874891,0.005040128231048584,0.00520908145904541,0.005258240222930908,0.005524439144134522,"[0.0050360321998596195, 0.005203968048095703, 0.005319680213928223, 0.005230591773986816, 0.005248000144958496, 0.005247007846832275, 0.005215199947357177, 0.005206016063690186, 0.005257247924804687, 0.005238751888275147, 0.00520908784866333, 0.005100543975830078, 0.0049797120094299315, 0.00496230411529541, 0.005057536125183106, 0.005064703941345215, 0.005081151962280274, 0.005092288017272949, 0.005069888114929199, 0.0050349440574645994, 0.00506060791015625, 0.005054463863372802, 0.005135359764099121, 0.005048319816589355, 0.0050063362121582035, 0.00536575984954834, 0.005425151824951172, 0.005455872058868408, 0.005506048202514649, 0.005583871841430664, 0.00551526403427124, 0.005601280212402344, 0.005525504112243652, 0.0057487359046936035, 0.0052705278396606445, 0.0052008957862854, 0.005258240222930908, 0.005155839920043945, 0.005087232112884522, 0.0051066880226135255, 0.005181439876556396, 0.00510975980758667, 0.005034016132354736, 0.004979680061340332, 0.005044223785400391, 0.0050462718009948735, 0.004994048118591309, 0.005148672103881836, 0.005137407779693603, 0.00511078405380249, 0.0051333122253417966, 0.005102591991424561, 0.005045248031616211, 0.00505241584777832, 0.00505241584777832, 0.005066751956939697, 0.005048319816589355, 0.005064703941345215, 0.005189631938934326, 0.005245952129364013, 0.005194752216339111, 0.005191679954528809, 0.005249023914337158, 0.005261312007904053, 0.005203968048095703, 0.0052930560111999515, 0.005033984184265137, 0.004951039791107178, 0.005026815891265869, 0.005030911922454834, 0.005066751956939697, 0.005323775768280029, 0.005224448204040527, 0.005243904113769531, 0.00521727991104126, 0.005188608169555664, 0.005190656185150146, 0.005198847770690918, 0.005268479824066162, 0.005179391860961914, 0.005222400188446045, 0.005203968048095703, 0.005206016063690186, 0.005216256141662597, 0.005189631938934326, 0.005174272060394287, 0.005180480003356934, 0.005227456092834473, 0.00511078405380249, 0.005112832069396973, 0.005030911922454834, 0.004941823959350586, 0.0049510722160339355, 0.004969503879547119, 0.005005248069763183, 0.00496230411529541, 0.004973567962646484, 0.004951039791107178, 0.005038080215454102, 0.004956160068511963, 0.004946944236755371, 0.004960256099700928, 0.004984831809997559, 0.005024767875671386, 0.004968448162078858, 0.00495411205291748, 0.005000351905822754, 0.004957119941711426, 0.0049222722053527835, 0.004941823959350586, 0.0049489917755126955, 0.005004288196563721, 0.004958208084106445, 0.004964352130889893, 0.004934656143188477, 0.0049992318153381345, 0.004965312004089355, 0.004943871974945068, 0.005028863906860351, 0.00505241584777832, 0.0050135040283203125, 0.005026815891265869, 0.004963327884674072, 0.0049909758567810054, 0.00496127986907959, 0.005212160110473632, 0.0052336640357971195, 0.005195775985717774, 0.005201920032501221, 0.005181439876556396, 
0.005352447986602784, 0.0052715520858764645, 0.005192704200744629, 0.005192704200744629, 0.005221375942230225, 0.0051998720169067385, 0.005154816150665284, 0.005082111835479736, 0.005153791904449463, 0.005125120162963868, 0.005183487892150879, 0.005231647968292236, 0.005196767807006836, 0.005183487892150879, 0.0051968002319335935, 0.005197824001312256, 0.005066751956939697, 0.00499507188796997, 0.005014527797698975, 0.004984831809997559, 0.005071872234344482, 0.005063680171966553, 0.005081088066101074, 0.005161983966827393, 0.005193727970123291, 0.005165056228637695, 0.005135359764099121, 0.00511078405380249, 0.005141503810882568, 0.005161983966827393, 0.005088255882263183, 0.005135359764099121, 0.005049344062805176, 0.005008384227752686, 0.004960256099700928, 0.004971519947052002, 0.004973567962646484, 0.005028863906860351, 0.004967423915863037, 0.004968448162078858, 0.004971519947052002, 0.004988927841186524, 0.00502784013748169, 0.0049909758567810054, 0.004965375900268554, 0.005019680023193359, 0.005050335884094238, 0.005053440093994141, 0.005051392078399658, 0.005055488109588623, 0.005023744106292725, 0.0050135040283203125, 0.005185535907745361, 0.005035007953643799, 0.005041152000427246, 0.005008384227752686, 0.004987904071807861, 0.004967423915863037, 0.004976640224456787, 0.0050124797821044925, 0.0050063362121582035, 0.0050135040283203125, 0.005058559894561767, 0.0049387521743774416, 0.005031936168670655, 0.005074944019317627, 0.004994048118591309, 0.004944928169250488, 0.004955103874206543, 0.00497049617767334, 0.004975615978240967, 0.005030911922454834, 0.004966400146484375, 0.004928512096405029, 0.004897791862487793, 0.004946944236755371, 0.004971519947052002, 0.00496230411529541, 0.005000192165374756, 0.004960256099700928, 0.0049459199905395506, 0.00501148796081543, 0.005063648223876953, 0.004975615978240967, 0.00499507188796997, 0.004965375900268554, 0.0049797120094299315, 0.00497049617767334, 0.004958208084106445, 0.004958208084106445, 0.00497049617767334, 0.0050841598510742185, 0.0049725441932678225, 0.004952064037322998, 0.005006368160247803, 0.00497046422958374, 0.005105663776397705, 0.005037055969238281, 0.00501145601272583, 0.005056511878967285, 0.00505241584777832, 0.00502784013748169, 0.005064703941345215, 0.005040128231048584, 0.005090303897857666, 0.005028863906860351, 0.005059584140777588, 0.005029888153076172, 0.0050462718009948735, 0.005029888153076172, 0.005096447944641113, 0.0049459199905395506, 0.0049500160217285155, 0.004943871974945068, 0.00501145601272583, 0.005443583965301513, 0.005149695873260498, 0.005214208126068115, 0.005138432025909424, 0.0049725441932678225, 0.004996096134185791, 0.005007359981536865, 0.005144576072692871, 0.005237760066986084, 0.005186560153961181, 0.0051660799980163576, 0.005224448204040527, 0.0051998720169067385, 0.005183487892150879, 0.005228544235229492, 0.00526643180847168, 0.005206016063690186, 0.005213183879852295, 0.005090303897857666, 0.004975615978240967, 0.004978687763214112, 0.0051528000831604, 0.005043168067932129, 0.005048319816589355, 0.005037055969238281, 0.005097472190856934, 0.005047359943389893, 0.005096384048461914, 0.005088255882263183, 0.005026815891265869, 0.005028863906860351, 0.005040128231048584, 0.005081088066101074, 0.0050442562103271485, 0.004984799861907959, 0.00497049617767334, 0.0049909758567810054, 0.004980735778808594, 0.004981760025024414, 0.005078080177307129, 0.005021632194519043, 0.004975615978240967, 0.004957183837890625, 0.004998144149780274, 0.004978687763214112, 0.004978687763214112, 
0.004973567962646484, 0.004986879825592041, 0.004976640224456787, 0.004965375900268554, 0.004983808040618896, 0.004971519947052002, 0.004980735778808594, 0.004974592208862305, 0.004986879825592041, 0.0049797120094299315, 0.004997119903564453, 0.004894847869873047, 0.004945792198181152, 0.004987904071807861, 0.0049827837944030765, 0.0050022401809692385, 0.004957183837890625, 0.004994048118591309, 0.00502784013748169, 0.005088255882263183, 0.004977663993835449, 0.005023744106292725, 0.004964352130889893, 0.005059584140777588, 0.0050432000160217285, 0.0050206718444824215, 0.004976640224456787, 0.004940800189971924, 0.00497049617767334, 0.0050022401809692385, 0.004973567962646484, 0.004967423915863037, 0.005193727970123291, 0.004996096134185791, 0.005033984184265137, 0.005131264209747314, 0.005081088066101074, 0.005032959938049316, 0.005051392078399658, 0.005007359981536865, 0.0052193598747253414, 0.005141471862792969, 0.005031936168670655, 0.005063680171966553, 0.005038080215454102, 0.005037055969238281, 0.0049797439575195316, 0.004987872123718261, 0.00497049617767334, 0.004959231853485108, 0.004963327884674072, 0.004999167919158935, 0.004960256099700928, 0.00497049617767334, 0.004992000102996826, 0.00499612808227539, 0.0050083518028259276, 0.005078015804290771, 0.0049725441932678225, 0.004985856056213379, 0.004944896221160889, 0.004896768093109131, 0.00502784013748169, 0.005090303897857666, 0.005062655925750732, 0.00502784013748169, 0.004978687763214112, 0.004971519947052002, 0.0049797120094299315, 0.004946944236755371, 0.004955135822296143, 0.004923391819000244, 0.0050022401809692385, 0.004937727928161621, 0.004974592208862305, 0.004933695793151855, 0.00497760009765625, 0.004936704158782959, 0.00496127986907959, 0.0049500160217285155, 0.005055520057678222, 0.004929503917694092, 0.004940800189971924, 0.004969471931457519, 0.00496230411529541, 0.0049827837944030765, 0.005063680171966553, 0.004987904071807861, 0.005021696090698242, 0.005005311965942383, 0.005041152000427246, 0.00501145601272583, 0.0050135040283203125, 0.005014527797698975, 0.0053637118339538575, 0.0051701760292053225, 0.005097472190856934, 0.0051363840103149415, 0.0050862717628479, 0.00510969591140747, 0.004969471931457519, 0.005112832069396973, 0.004956160068511963, 0.004956160068511963, 0.004939775943756103, 0.005213183879852295, 0.00511078405380249, 0.004952064037322998, 0.004926464080810547, 0.004896768093109131, 0.004926464080810547, 0.004952064037322998, 0.004956160068511963, 0.0049725441932678225, 0.004935679912567138, 0.0049500160217285155, 0.004941823959350586, 0.005000192165374756, 0.005086207866668701, 0.00552345609664917, 0.005249023914337158, 0.005216256141662597, 0.005216256141662597, 0.005105663776397705, 0.005141568183898926, 0.005107679843902588, 0.00512611198425293, 0.00517632007598877, 0.005125120162963868, 0.005255167961120606, 0.005326848030090332, 0.005186560153961181, 0.00501043176651001, 0.0050135040283203125, 0.005049344062805176, 0.004973567962646484, 0.004947968006134033, 0.00496230411529541, 0.0050503678321838375, 0.0050657281875610355, 0.004940800189971924, 0.0049489917755126955, 0.004969471931457519, 0.0049387521743774416, 0.004992000102996826, 0.004955135822296143, 0.004987904071807861, 0.004944896221160889, 0.005061632156372071, 0.0049500160217285155, 0.0050206718444824215, 0.005031936168670655, 0.0050728960037231445, 0.005059584140777588, 0.005170207977294922, 0.004996064186096191, 0.0049827837944030765, 0.004944896221160889, 0.004900864124298096, 0.004960256099700928, 0.00501964807510376, 
0.004940800189971924, 0.004946944236755371, 0.004931583881378174, 0.005005311965942383, 0.005048319816589355, 0.005137407779693603, 0.0050124797821044925, 0.00495411205291748, 0.005017600059509277, 0.005018623828887939, 0.004964352130889893, 0.0050094079971313476, 0.0049387521743774416, 0.004941823959350586, 0.004929535865783692, 0.0049909758567810054, 0.005004288196563721, 0.00496230411529541, 0.004951039791107178, 0.00502784013748169, 0.004975615978240967, 0.004966400146484375, 0.004983808040618896, 0.004993023872375488, 0.004952064037322998, 0.004946944236755371, 0.004952064037322998, 0.004967423915863037, 0.004977663993835449, 0.0049530878067016604, 0.00496230411529541, 0.004951039791107178, 0.004998144149780274, 0.004977663993835449, 0.00496230411529541, 0.004989952087402344, 0.005099520206451416, 0.0049489917755126955, 0.0049428482055664065, 0.004958208084106445, 0.004989952087402344, 0.004947968006134033, 0.004975615978240967, 0.00496127986907959, 0.0050094079971313476, 0.004975615978240967, 0.00497049617767334, 0.004866047859191895, 0.00496230411529541, 0.004956160068511963, 0.004960256099700928, 0.004944896221160889, 0.004998144149780274, 0.00501043176651001, 0.005116928100585938, 0.005181439876556396, 0.005104640007019043, 0.005165056228637695, 0.00510975980758667, 0.0051036162376403805, 0.005094399929046631, 0.005026815891265869, 0.004976640224456787, 0.005000192165374756, 0.005053440093994141, 0.005114880084991455, 0.0050432000160217285, 0.005105663776397705, 0.005030911922454834, 0.005292031764984131, 0.005214208126068115, 0.005269504070281982, 0.0051476478576660155, 0.005124095916748047, 0.0050841598510742185, 0.005381120204925537, 0.005379072189331055, 0.005668863773345947, 0.005153791904449463, 0.005048319816589355, 0.005038080215454102, 0.005039103984832764, 0.005078015804290771, 0.005035007953643799, 0.005048319816589355, 0.005082111835479736, 0.0051066880226135255, 0.005067776203155518, 0.0050421757698059086, 0.0050800638198852536, 0.005028863906860351, 0.005100543975830078, 0.005262335777282715, 0.005261312007904053, 0.005194752216339111, 0.0051404800415039064, 0.005215231895446777, 0.005186560153961181, 0.005149695873260498, 0.004966432094573974, 0.004988895893096924, 0.00496230411529541, 0.0049459199905395506, 0.004934656143188477, 0.0049797120094299315, 0.005018623828887939, 0.005111807823181152, 0.005127168178558349, 0.005112832069396973, 0.005184512138366699, 0.005085184097290039, 0.0051569280624389644, 0.005137343883514404, 0.005096447944641113, 0.005167103767395019, 0.005193727970123291, 0.005178368091583252, 0.005181439876556396, 0.005302271842956543, 0.00522547197341919, 0.005210112094879151, 0.005185535907745361, 0.005206016063690186, 0.005197824001312256, 0.005189631938934326, 0.005079040050506592, 0.005108736038208008, 0.005119999885559082, 0.005114880084991455, 0.005149695873260498, 0.005092351913452148, 0.0051221117973327636, 0.005201856136322022, 0.005111807823181152, 0.005138432025909424, 0.005082111835479736, 0.005111807823181152, 0.005230591773986816, 0.005157887935638428, 0.005092351913452148, 0.00511078405380249, 0.005140511989593506, 0.00502780818939209, 0.0050503678321838375, 0.005132287979125977, 0.005086207866668701, 0.005161983966827393, 0.005117951869964599, 0.005173247814178467, 0.0050769920349121095, 0.005132287979125977, 0.0051435518264770505, 0.005116928100585938, 0.005188608169555664, 0.005091328144073487, 0.005138432025909424, 0.005056511878967285, 0.005093376159667969, 0.005122047901153564, 0.0050657281875610355, 0.0052295680046081545, 
0.00506982421875, 0.005232639789581299, 0.005287936210632324, 0.005204991817474365, 0.005256192207336426, 0.00516812801361084, 0.005368832111358642, 0.005210112094879151, 0.0054282240867614745, 0.005088255882263183, 0.00520908784866333, 0.005104640007019043, 0.005089280128479004, 0.00517632007598877, 0.005123072147369385, 0.005116928100585938, 0.005102591991424561, 0.005179391860961914, 0.005178368091583252, 0.005153791904449463, 0.005086207866668701, 0.005132287979125977, 0.005150720119476319, 0.005104640007019043, 0.0050657281875610355, 0.005112832069396973, 0.0052070398330688475, 0.00510975980758667, 0.00515174388885498, 0.00506982421875, 0.005097472190856934, 0.005107711791992187, 0.0051138558387756345, 0.005167103767395019, 0.005073919773101807, 0.00496230411529541, 0.004971519947052002, 0.005067776203155518, 0.0050503678321838375, 0.004934656143188477, 0.005224448204040527, 0.005396480083465576, 0.005215231895446777, 0.005125120162963868, 0.005198847770690918, 0.005145599842071533, 0.005095456123352051, 0.005154784202575684, 0.0051476478576660155, 0.005102591991424561, 0.005107711791992187, 0.005169151782989502, 0.005122047901153564, 0.005130239963531494, 0.00515993595123291, 0.0051138558387756345, 0.0051066880226135255, 0.005099520206451416, 0.005138432025909424, 0.0051066880226135255, 0.005155839920043945, 0.005234687805175781, 0.0052070398330688475, 0.005130239963531494, 0.005100543975830078, 0.005220352172851562, 0.005090303897857666, 0.0051701760292053225, 0.005141503810882568, 0.005194752216339111, 0.0052111358642578124, 0.00516812801361084, 0.005115903854370117, 0.0049827837944030765, 0.004966400146484375, 0.005089280128479004, 0.005132287979125977, 0.004956160068511963, 0.004880383968353271, 0.004952064037322998, 0.0049162240028381345, 0.00497049617767334, 0.004937727928161621, 0.005025792121887207, 0.00501043176651001, 0.005037055969238281, 0.005123072147369385, 0.0050954241752624516, 0.005085184097290039, 0.00499507188796997, 0.004952064037322998, 0.004940800189971924, 0.004910079956054687, 0.004955135822296143, 0.004933631896972656, 0.0050022401809692385, 0.005044223785400391, 0.005339136123657226, 0.005220352172851562, 0.005468160152435303, 0.005271615982055664, 0.005143487930297851, 0.005127168178558349, 0.005284863948822022, 0.005212160110473632, 0.005284863948822022, 0.005180416107177735, 0.005141503810882568, 0.005127168178558349, 0.005114880084991455, 0.005155839920043945, 0.00510975980758667, 0.005115903854370117, 0.00516812801361084, 0.00521833610534668, 0.005218272209167481, 0.00516812801361084, 0.005155839920043945, 0.005078015804290771, 0.005126143932342529, 0.005096447944641113, 0.005152768135070801, 0.005232639789581299, 0.005099520206451416, 0.00515174388885498, 0.005093376159667969, 0.005045248031616211, 0.00521830415725708, 0.005180416107177735, 0.0051701760292053225, 0.0051404800415039064, 0.005154816150665284, 0.005146624088287354, 0.005115903854370117, 0.005163008213043213, 0.0051435518264770505, 0.005134335994720459, 0.005111807823181152, 0.0051404800415039064, 0.0051363840103149415, 0.005088255882263183, 0.005142528057098389, 0.005129216194152832, 0.0053012480735778805, 0.005248000144958496, 0.005181439876556396, 0.005198847770690918, 0.005231616020202637, 0.005264383792877197, 0.005275648117065429, 0.005197824001312256, 0.005195775985717774, 0.005232639789581299, 0.005198847770690918, 0.005180416107177735, 0.005088255882263183, 0.005182464122772217, 0.005061632156372071, 0.005112832069396973, 0.005131264209747314, 0.005125120162963868, 
0.005145599842071533, 0.005165056228637695, 0.00520908784866333, 0.005090303897857666, 0.005079040050506592, 0.005134335994720459, 0.005239808082580567, 0.005158912181854248, 0.0050432000160217285, 0.005173247814178467, 0.0051701760292053225, 0.005201920032501221, 0.005181439876556396, 0.005073919773101807, 0.005154816150665284, 0.005075967788696289, 0.005197824001312256, 0.005142528057098389, 0.00511897611618042, 0.0051476478576660155, 0.005116928100585938, 0.005186560153961181, 0.005094399929046631, 0.005131264209747314, 0.005098495960235596, 0.005090303897857666, 0.005125120162963868, 0.005262335777282715, 0.005258240222930908, 0.005123072147369385, 0.005123072147369385, 0.005206016063690186, 0.005100543975830078, 0.005183487892150879, 0.005124095916748047, 0.005115903854370117, 0.005114880084991455, 0.0051435518264770505, 0.005127168178558349, 0.005105663776397705, 0.005213183879852295, 0.005134335994720459, 0.005111807823181152, 0.005175295829772949, 0.005141503810882568, 0.0051875200271606445, 0.005141503810882568, 0.0051066880226135255, 0.005125120162963868, 0.005508096218109131, 0.005467135906219483, 0.0052408318519592285, 0.005215231895446777, 0.005285888195037842, 0.005122047901153564, 0.0052715520858764645, 0.005087232112884522, 0.005252096176147461, 0.005331967830657959, 0.005030911922454834, 0.0052070398330688475, 0.0051036162376403805, 0.005402624130249023, 0.005212160110473632, 0.0052408318519592285, 0.0053043198585510255, 0.005194752216339111, 0.005258272171020508, 0.005851103782653809, 0.005751808166503906, 0.005288959980010987, 0.0050728960037231445, 0.005126143932342529, 0.005107711791992187, 0.005062655925750732, 0.00505241584777832, 0.0050462718009948735, 0.005053440093994141, 0.00506879997253418, 0.005061632156372071, 0.00505241584777832, 0.0050462718009948735, 0.005053440093994141, 0.005048319816589355, 0.005049344062805176, 0.005125120162963868, 0.005061632156372071, 0.005034048080444336, 0.005020607948303222, 0.0050432000160217285, 0.005021696090698242, 0.0050432000160217285, 0.0050769920349121095, 0.005064703941345215, 0.005023744106292725, 0.005028863906860351, 0.005032959938049316, 0.005063680171966553, 0.0050462718009948735, 0.005026815891265869, 0.0050165758132934574, 0.005063680171966553, 0.005038080215454102, 0.005022719860076904, 0.005048319816589355, 0.005017600059509277, 0.00506060791015625, 0.005054463863372802, 0.004969471931457519, 0.005012544155120849, 0.005043136119842529, 0.00501964807510376, 0.0050206718444824215, 0.005018623828887939, 0.005070847988128662, 0.00501964807510376, 0.005039103984832764, 0.00505241584777832, 0.005067776203155518, 0.0050206718444824215, 0.0050841598510742185, 0.005070847988128662, 0.005044223785400391, 0.0050135040283203125, 0.005031936168670655, 0.00505350399017334, 0.005049280166625977, 0.005056511878967285, 0.005029888153076172, 0.005066751956939697, 0.00505241584777832, 0.005071872234344482, 0.005023744106292725, 0.005071872234344482, 0.0050206718444824215, 0.0053002238273620605, 0.005426176071166992, 0.005221375942230225, 0.005049344062805176, 0.005037055969238281, 0.005051392078399658, 0.005047296047210693, 0.0050206718444824215, 0.0050206718444824215, 0.005045248031616211, 0.005039103984832764, 0.005045248031616211, 0.00506060791015625, 0.005026815891265869, 0.0050206718444824215, 0.005078015804290771, 0.005055488109588623, 0.005040128231048584, 0.00501043176651001, 0.005017600059509277, 0.00506060791015625, 0.005010591983795166, 0.004936543941497803, 0.004983808040618896, 0.00502784013748169, 
0.005018623828887939, 0.005033984184265137, 0.00505241584777832, 0.005048319816589355, 0.004978687763214112, 0.004958208084106445, 0.005022719860076904, 0.005000192165374756, 0.0050360321998596195, 0.005017600059509277, 0.00501964807510376, 0.0050022401809692385, 0.005039167881011963, 0.005040063858032227, 0.005047296047210693, 0.005030911922454834, 0.005054463863372802, 0.005001215934753418, 0.005017600059509277, 0.0050124797821044925, 0.005051392078399658, 0.00502784013748169, 0.005015552043914795, 0.00501964807510376, 0.005038112163543701, 0.005016543865203857, 0.0050206718444824215, 0.0050124797821044925, 0.005059584140777588, 0.00501964807510376, 0.005028863906860351, 0.005023744106292725, 0.0050503678321838375, 0.005025792121887207, 0.005023744106292725, 0.005022719860076904, 0.0050462718009948735, 0.0050124797821044925, 0.0050135040283203125, 0.005048319816589355, 0.005067808151245117, 0.005023712158203125, 0.005033984184265137, 0.005026815891265869, 0.005045248031616211, 0.0050044159889221194, 0.004982656002044678, 0.005032959938049316, 0.0050206718444824215, 0.005091328144073487, 0.005039103984832764, 0.005038080215454102, 0.005126143932342529, 0.005022719860076904, 0.005017600059509277, 0.005038080215454102, 0.005017600059509277, 0.005022719860076904, 0.005023744106292725, 0.005032959938049316, 0.004994048118591309, 0.005048319816589355, 0.0050135040283203125, 0.0053309440612792965, 0.005134335994720459, 0.005074944019317627, 0.0051404800415039064, 0.005096447944641113, 0.00516812801361084, 0.005215231895446777, 0.005256192207336426, 0.005221375942230225, 0.005186560153961181, 0.0052633600234985355, 0.005194784164428711, 0.005189599990844726, 0.0050728960037231445, 0.0050032639503479, 0.0050462718009948735, 0.0050063362121582035, 0.005018623828887939, 0.005001215934753418, 0.0050514240264892575, 0.005006303787231445, 0.0050206718444824215, 0.005015552043914795, 0.00502784013748169, 0.0050135040283203125, 0.004992000102996826, 0.0050330238342285154, 0.005011392116546631, 0.005035007953643799, 0.005015552043914795, 0.005031936168670655, 0.004935679912567138, 0.0049909758567810054, 0.005031936168670655, 0.005037055969238281, 0.005070847988128662, 0.0050206718444824215, 0.00502784013748169, 0.005056511878967285, 0.005040128231048584, 0.004998144149780274, 0.005587967872619629, 0.005288959980010987, 0.0052111358642578124, 0.006341631889343262, 0.005277696132659912, 0.0054579200744628905, 0.0052930560111999515, 0.005285888195037842, 0.00521830415725708, 0.005129216194152832, 0.005021696090698242, 0.005062655925750732, 0.005035007953643799, 0.005094399929046631, 0.005025792121887207, 0.005021696090698242, 0.005017600059509277, 0.005054463863372802, 0.005023744106292725, 0.00501964807510376, 0.005039103984832764, 0.005051392078399658, 0.005032959938049316, 0.00515174388885498, 0.005105663776397705, 0.005051392078399658, 0.005030911922454834, 0.005031936168670655, 0.005044223785400391, 0.0050421757698059086, 0.005024767875671386, 0.0050421757698059086, 0.005057536125183106, 0.004986879825592041, 0.005024767875671386, 0.005007359981536865, 0.00501145601272583, 0.004986879825592041, 0.004928671836853028, 0.004981599807739258, 0.0050063362121582035, 0.005004288196563721, 0.005029888153076172, 0.0050032639503479, 0.0050022401809692385, 0.0050063362121582035, 0.005028863906860351, 0.005008416175842285, 0.0050011839866638185, 0.0050022401809692385, 0.0050421757698059086, 0.005001215934753418, 0.005007359981536865, 0.005023744106292725, 0.005035007953643799, 0.005022719860076904, 
0.00501043176651001, 0.005063680171966553, 0.005031936168670655, 0.00501145601272583, 0.00501145601272583, 0.00506063985824585, 0.0050083518028259276, 0.0050063362121582035, 0.005008384227752686, 0.0050206718444824215, 0.004998144149780274, 0.005041152000427246, 0.005009439945220948, 0.005048287868499756, 0.0050145602226257325, 0.005013472080230713, 0.00531660795211792, 0.005349376201629639, 0.00515993595123291, 0.005014527797698975, 0.005061632156372071, 0.005008384227752686, 0.005004288196563721, 0.0050094079971313476, 0.005044223785400391, 0.005081088066101074, 0.005028863906860351, 0.005007359981536865, 0.005048319816589355, 0.005018623828887939, 0.005005311965942383, 0.004999167919158935, 0.004946944236755371, 0.0050063362121582035, 0.005030911922454834, 0.005047296047210693, 0.005066751956939697, 0.0050360321998596195, 0.005025792121887207, 0.005025792121887207, 0.005033984184265137, 0.005035007953643799, 0.005238815784454346, 0.005220320224761963, 0.0051773438453674315, 0.005222400188446045, 0.005163008213043213, 0.005180416107177735, 0.005231616020202637, 0.005167103767395019, 0.005187615871429443, 0.005192671775817871, 0.005157887935638428, 0.0050165758132934574, 0.004997119903564453, 0.0050657281875610355, 0.005007359981536865, 0.005030911922454834, 0.0050165758132934574, 0.005048319816589355, 0.00501145601272583, 0.00501145601272583, 0.005001215934753418, 0.00506060791015625, 0.0050206718444824215, 0.005005311965942383, 0.005026815891265869, 0.005035039901733399, 0.005020639896392822, 0.005017600059509277, 0.005008384227752686, 0.005093376159667969, 0.005045248031616211, 0.005014527797698975, 0.005032959938049316, 0.005029888153076172, 0.005008384227752686, 0.005008384227752686, 0.0050503678321838375, 0.005063871860504151, 0.004932415962219238, 0.004997119903564453, 0.005041152000427246, 0.005094399929046631, 0.0050135040283203125, 0.0050135040283203125, 0.005037055969238281, 0.0050135040283203125, 0.005090303897857666, 0.0050421757698059086, 0.005062655925750732, 0.005026815891265869, 0.005000192165374756, 0.005014527797698975, 0.0050165758132934574, 0.005007359981536865, 0.005049407958984375, 0.0050308480262756345, 0.005038080215454102, 0.0050135040283203125, 0.005049344062805176, 0.005033984184265137, 0.005044223785400391, 0.0050657281875610355, 0.005025792121887207, 0.005018623828887939, 0.00501043176651001, 0.005047296047210693, 0.005041152000427246, 0.0050022401809692385, 0.0050165758132934574, 0.005037055969238281, 0.0050165758132934574, 0.005024767875671386, 0.0050135040283203125, 0.0050432000160217285, 0.005031936168670655, 0.004993023872375488, 0.005005311965942383, 0.005038080215454102, 0.0050094079971313476, 0.005032959938049316, 0.0050032639503479, 0.005070879936218262, 0.005047264099121093, 0.005024767875671386, 0.0050022401809692385, 0.005063680171966553, 0.005001215934753418, 0.004926464080810547, 0.005004288196563721, 0.005062655925750732, 0.0050360321998596195, 0.005018623828887939, 0.005037055969238281, 0.005057536125183106, 0.0050360321998596195, 0.005078015804290771, 0.005073919773101807, 0.005061632156372071, 0.005037055969238281, 0.005041152000427246, 0.005222400188446045, 0.005250048160552978, 0.0050954241752624516, 0.005035007953643799, 0.005054463863372802, 0.005024767875671386, 0.005029888153076172, 0.0050503678321838375, 0.0050207037925720215, 0.005032959938049316, 0.005048287868499756, 0.005038080215454102, 0.0050094079971313476, 0.0050421757698059086, 0.005033984184265137, 0.0050462718009948735, 0.005032991886138916, 0.005037024021148682, 
0.0050206718444824215, 0.005041152000427246, 0.00501145601272583, 0.005022719860076904, 0.0050206718444824215, 0.005024767875671386, 0.005021696090698242, 0.005018623828887939, 0.004955135822296143, 0.0049530878067016604, 0.004963327884674072, 0.004941823959350586, 0.004934656143188477, 0.0049469761848449706, 0.004927455902099609, 0.004976640224456787, 0.004983808040618896, 0.0050135040283203125, 0.005636127948760986, 0.005152736186981201, 0.0050094079971313476, 0.005031936168670655, 0.005063680171966553, 0.005024767875671386, 0.005040128231048584, 0.0050124797821044925, 0.005037055969238281, 0.005015552043914795, 0.005024767875671386, 0.005067776203155518, 0.005044223785400391, 0.005008384227752686, 0.005021696090698242, 0.005047296047210693, 0.0050094079971313476, 0.005022719860076904, 0.0050094079971313476, 0.0050432000160217285, 0.005030911922454834, 0.005026815891265869, 0.005023744106292725, 0.005059584140777588, 0.00501043176651001, 0.005008384227752686, 0.0050094079971313476, 0.005066751956939697, 0.0050135040283203125, 0.00501964807510376, 0.005008384227752686, 0.0050421757698059086, 0.005033984184265137, 0.005008384227752686, 0.0050462718009948735, 0.00505241584777832, 0.005033984184265137, 0.005048319816589355, 0.005188608169555664, 0.005277696132659912, 0.005234687805175781, 0.005144576072692871, 0.0051333122253417966, 0.005041152000427246, 0.005018623828887939, 0.0050360321998596195, 0.005058559894561767, 0.005026815891265869, 0.005028863906860351, 0.004955135822296143, 0.005004288196563721, 0.005044223785400391, 0.00502784013748169, 0.0050094079971313476, 0.005037055969238281, 0.0050462718009948735, 0.0050206718444824215, 0.0050094079971313476, 0.005021696090698242, 0.005048319816589355, 0.004997119903564453, 0.005025792121887207, 0.005026815891265869, 0.005044223785400391, 0.005018623828887939, 0.0050135040283203125, 0.00506879997253418, 0.005248000144958496, 0.00517632007598877, 0.005164031982421875, 0.00521830415725708, 0.005184512138366699, 0.005174335956573487, 0.005229504108428955, 0.005182464122772217, 0.00516099214553833, 0.00517628812789917, 0.005187583923339844, 0.00516812801361084, 0.005325823783874512, 0.005233695983886719, 0.005190624237060547, 0.005202943801879883, 0.005214240074157715, 0.005164000034332275, 0.005037055969238281, 0.0050462718009948735, 0.005115903854370117, 0.0050360321998596195, 0.005037055969238281, 0.005018623828887939, 0.005054463863372802, 0.005005311965942383, 0.0050165758132934574, 0.005089280128479004, 0.005116928100585938, 0.005041183948516846, 0.004951007843017578, 0.004951039791107178, 0.005112832069396973, 0.005178368091583252, 0.005035007953643799, 0.00506060791015625, 0.005022719860076904, 0.005029888153076172, 0.005037055969238281, 0.005083136081695557, 0.005035007953643799, 0.005035007953643799, 0.005040128231048584, 0.0050739521980285645, 0.005016543865203857, 0.005030911922454834, 0.005023744106292725, 0.0050094079971313476, 0.005035007953643799, 0.005030911922454834, 0.005024767875671386, 0.005142528057098389, 0.005224512100219726, 0.005201888084411621, 0.0051844801902771, 0.005194752216339111, 0.005185535907745361, 0.0051968002319335935, 0.005186560153961181, 0.005223423957824707, 0.005126143932342529, 0.005049344062805176, 0.005045248031616211, 0.005031936168670655, 0.0050022401809692385, 0.005032000064849854, 0.005066751956939697, 0.005018559932708741, 0.005023744106292725, 0.00501145601272583, 0.005040128231048584, 0.005250048160552978, 0.005152768135070801, 0.00506060791015625, 0.005022719860076904, 0.00501964807510376, 
0.005015552043914795, 0.005015552043914795, 0.004925439834594727, 0.005031936168670655, 0.00501043176651001, 0.005057536125183106, 0.005037055969238281, 0.004978687763214112, 0.00495411205291748, 0.004977663993835449, 0.005393407821655274, 0.005264383792877197, 0.005194752216339111, 0.005203968048095703, 0.005194752216339111, 0.004968448162078858, 0.005044223785400391, 0.005017600059509277, 0.005021696090698242, 0.005041152000427246, 0.005063680171966553, 0.005026815891265869, 0.00501145601272583, 0.005040128231048584, 0.005051392078399658, 0.005018623828887939, 0.005033984184265137, 0.005054463863372802, 0.005053440093994141, 0.005028863906860351, 0.005056511878967285, 0.005033984184265137, 0.00505241584777832, 0.005024767875671386, 0.005023712158203125, 0.004999167919158935, 0.005007359981536865, 0.0050432000160217285, 0.00501964807510376, 0.005018623828887939, 0.005028863906860351, 0.005040128231048584, 0.005025792121887207, 0.0050165758132934574, 0.00501043176651001, 0.005061632156372071, 0.005038080215454102, 0.005031936168670655, 0.005026815891265869, 0.005059584140777588, 0.0049459199905395506, 0.004960256099700928, 0.004947968006134033, 0.00501145601272583, 0.004997119903564453, 0.005028863906860351, 0.0050360321998596195, 0.005075967788696289, 0.00501964807510376, 0.005030911922454834, 0.005057536125183106, 0.005038080215454102, 0.00502784013748169, 0.005054463863372802, 0.00506982421875, 0.005038080215454102, 0.0050094079971313476, 0.005007359981536865, 0.005073919773101807, 0.005089375972747803, 0.005028768062591553, 0.0050503678321838375, 0.005093376159667969, 0.005094399929046631, 0.0050135040283203125, 0.005031936168670655, 0.0050657281875610355, 0.005030911922454834, 0.005049344062805176, 0.005045248031616211, 0.005180416107177735, 0.005868544101715088, 0.006189055919647217, 0.005633024215698243, 0.0052674560546875, 0.005222400188446045, 0.0052091522216796875, 0.0051987838745117185, 0.005244927883148193, 0.005188608169555664, 0.005180416107177735, 0.005081088066101074, 0.005265408039093018, 0.005260287761688232, 0.005508096218109131, 0.005398528099060058, 0.005449728012084961, 0.004997151851654053, 0.004960224151611328, 0.005064703941345215, 0.0050124797821044925, 0.005017600059509277, 0.00505241584777832, 0.005038112163543701, 0.005017568111419677, 0.00502784013748169, 0.005079040050506592, 0.005031936168670655, 0.0050094079971313476, 0.0050135040283203125, 0.005054463863372802, 0.005038080215454102, 0.00501964807510376, 0.0050206718444824215, 0.005074944019317627, 0.005018623828887939, 0.005047296047210693, 0.0050360321998596195, 0.00506060791015625, 0.005014527797698975, 0.00501043176651001, 0.005023744106292725, 0.0050657281875610355, 0.005023744106292725, 0.005033984184265137, 0.005024767875671386, 0.0050432000160217285, 0.0050135040283203125, 0.0050094079971313476, 0.005061632156372071, 0.005061632156372071, 0.005031936168670655, 0.005024767875671386, 0.005066751956939697, 0.005047359943389893, 0.00502675199508667, 0.005038080215454102, 0.005085184097290039, 0.005015552043914795, 0.005022719860076904, 0.005023776054382324, 0.005054431915283203, 0.005033984184265137, 0.005047296047210693, 0.005171199798583984, 0.005257215976715088, 0.0052070398330688475, 0.005105663776397705, 0.005129216194152832, 0.005078015804290771, 0.0050657281875610355, 0.005070847988128662, 0.005085184097290039, 0.005032959938049316, 0.00501145601272583, 0.005029888153076172, 0.005062655925750732, 0.005031936168670655, 0.005045248031616211, 0.005038080215454102, 0.005029888153076172, 
0.005064703941345215, 0.005014527797698975, 0.005024767875671386, 0.00503711986541748, 0.005062592029571533, 0.005026815891265869, 0.00502784013748169, 0.00503711986541748, 0.005060544013977051, 0.005047296047210693, 0.005061632156372071, 0.005070847988128662, 0.005030911922454834, 0.005032959938049316, 0.005054463863372802, 0.0050657281875610355, 0.005132287979125977, 0.005198847770690918, 0.005094399929046631, 0.005070847988128662, 0.005045248031616211, 0.005028863906860351, 0.005037055969238281, 0.0050657281875610355, 0.005031936168670655, 0.0050360321998596195, 0.005040128231048584, 0.005105663776397705, 0.0050841598510742185, 0.0050462718009948735, 0.005074944019317627, 0.00501145601272583, 0.005028863906860351, 0.005004288196563721, 0.004957183837890625, 0.004985856056213379, 0.005022751808166504, 0.005017568111419677, 0.005045248031616211, 0.005035039901733399, 0.0050124478340148925, 0.005097472190856934, 0.005079040050506592, 0.005041152000427246, 0.005047296047210693, 0.004968448162078858, 0.005063680171966553, 0.005049344062805176, 0.005018623828887939, 0.005041152000427246, 0.00506879997253418, 0.005071872234344482, 0.0050421757698059086, 0.005026815891265869, 0.0050360321998596195, 0.005008384227752686, 0.00502784013748169, 0.005039103984832764, 0.005051392078399658, 0.005035007953643799, 0.0050135040283203125, 0.005041152000427246, 0.005017600059509277, 0.005026815891265869, 0.005014527797698975, 0.0050094079971313476, 0.005022719860076904, 0.005047296047210693, 0.005015552043914795, 0.005035007953643799, 0.005017600059509277, 0.005082111835479736, 0.005029888153076172, 0.005046304225921631, 0.005058591842651367, 0.005056447982788086, 0.00502784013748169, 0.005029888153076172, 0.005047327995300293, 0.00502780818939209, 0.005002272129058838, 0.00501961612701416, 0.004962431907653809, 0.004987808227539062, 0.00501961612701416, 0.005025792121887207, 0.005105663776397705, 0.005024767875671386, 0.00501145601272583, 0.0050360321998596195, 0.0050503678321838375, 0.00502784013748169, 0.005048319816589355, 0.005879807949066162, 0.0054271998405456545, 0.006360064029693604, 0.005638144016265869, 0.005275648117065429, 0.005288991928100586, 0.005209055900573731, 0.005212160110473632, 0.005252096176147461, 0.005216256141662597, 0.005188608169555664, 0.00516812801361084, 0.005203968048095703, 0.0050503678321838375, 0.005028863906860351, 0.004987904071807861, 0.00499507188796997, 0.004959231853485108, 0.004966432094573974, 0.004973536014556885, 0.004981760025024414, 0.0049459199905395506, 0.0050728960037231445, 0.00501964807510376, 0.0051701760292053225, 0.0050360321998596195, 0.005038080215454102, 0.005032959938049316, 0.005026815891265869, 0.005047296047210693, 0.005014527797698975, 0.005058559894561767, 0.0050032639503479, 0.005063680171966553, 0.0050421757698059086, 0.0050022401809692385, 0.004872191905975342, 0.0049797120094299315, 0.0049428482055664065, 0.005000192165374756, 0.00542310380935669, 0.0052674560546875, 0.005179391860961914, 0.005031936168670655, 0.0050800638198852536, 0.005018623828887939, 0.0050503678321838375, 0.005022719860076904, 0.005075967788696289, 0.0050360321998596195, 0.005075967788696289, 0.005059584140777588, 0.0050135040283203125, 0.005004288196563721, 0.005031936168670655, 0.005047296047210693, 0.0050206718444824215, 0.005015552043914795, 0.005041152000427246, 0.005048319816589355, 0.0050421757698059086, 0.005038080215454102, 0.005023744106292725, 0.005067808151245117, 0.005025760173797607, 0.00502784013748169, 0.0050503678321838375, 0.005067776203155518, 
0.005029888153076172, 0.005026815891265869, 0.005035007953643799, 0.005038080215454102, 0.0050135040283203125, 0.00501043176651001, 0.005018623828887939, 0.005030911922454834, 0.005078015804290771, 0.005018623828887939, 0.0050124797821044925, 0.005066751956939697, 0.0050032639503479, 0.0050165758132934574, 0.005007359981536865, 0.005089280128479004, 0.005181439876556396, 0.004937727928161621, 0.0050063362121582035, 0.005023744106292725, 0.005022719860076904, 0.004964352130889893, 0.004973567962646484, 0.00501043176651001, 0.005026815891265869, 0.0050165758132934574, 0.005071872234344482, 0.005026815891265869, 0.0050514559745788576, 0.005018559932708741, 0.0050032639503479, 0.004999167919158935, 0.005047296047210693, 0.004999167919158935, 0.005029888153076172, 0.005000192165374756, 0.0050657281875610355, 0.005037055969238281, 0.005031936168670655, 0.0050728960037231445, 0.005051392078399658, 0.00506879997253418, 0.005041183948516846, 0.00506774377822876, 0.005008384227752686, 0.0050124797821044925, 0.0050360321998596195, 0.0050800638198852536, 0.0050360321998596195, 0.005038080215454102, 0.0050165758132934574, 0.005075967788696289, 0.005017600059509277, 0.005015552043914795, 0.005007359981536865, 0.005048319816589355, 0.005028863906860351, 0.005017600059509277, 0.005018623828887939, 0.0051138558387756345, 0.0050800638198852536, 0.005083136081695557, 0.005062655925750732, 0.004966400146484375, 0.004957183837890625, 0.004848639965057373, 0.004903935909271241, 0.004971519947052002, 0.0049459199905395506, 0.00495411205291748, 0.004952064037322998, 0.00502784013748169, 0.005022719860076904, 0.005015552043914795, 0.00501043176651001, 0.005241856098175048, 0.005037055969238281, 0.005014527797698975, 0.0050206718444824215, 0.0050462718009948735, 0.0050289278030395505, 0.005025728225708008, 0.005058559894561767, 0.0050432000160217285, 0.0050206718444824215, 0.005062655925750732, 0.005053440093994141, 0.005033984184265137, 0.00501964807510376, 0.005024767875671386, 0.005017600059509277, 0.004977663993835449, 0.0050063362121582035, 0.004959231853485108, 0.004946944236755371, 0.004958208084106445, 0.004986879825592041, 0.005154816150665284, 0.00516812801361084, 0.0052295680046081545, 0.005173247814178467, 0.005187583923339844, 0.00515174388885498, 0.00522547197341919, 0.005165088176727295, 0.005040095806121826, 0.0050135040283203125, 0.004967423915863037, 0.00497049617767334, 0.004975615978240967, 0.005210112094879151, 0.005777408123016357, 0.00588595199584961, 0.005496831893920898, 0.005932032108306885, 0.005400576114654541, 0.005191679954528809, 0.005213183879852295, 0.00521727991104126, 0.00517632007598877, 0.005048319816589355, 0.005021696090698242, 0.005079040050506592, 0.005011519908905029, 0.005012415885925293, 0.0050462718009948735, 0.0050503678321838375, 0.005023744106292725, 0.005008384227752686, 0.005047296047210693, 0.0050421757698059086, 0.005045248031616211, 0.005028863906860351, 0.005048319816589355, 0.005045248031616211, 0.005000192165374756, 0.005028863906860351, 0.005055488109588623, 0.0050135040283203125, 0.005017600059509277, 0.005029888153076172, 0.0050657281875610355, 0.005039103984832764, 0.005026815891265869, 0.005037055969238281, 0.005035007953643799, 0.005008384227752686, 0.005028863906860351, 0.00502784013748169, 0.005137407779693603, 0.005099520206451416, 0.005021696090698242, 0.005014527797698975, 0.005020736217498779, 0.005072832107543945, 0.005029888153076172, 0.0050360321998596195, 0.005044223785400391, 0.005051392078399658, 0.0050094079971313476, 0.004934656143188477, 
0.004988927841186524, 0.0050657281875610355, 0.005028863906860351, 0.005024767875671386, 0.0050503678321838375, 0.005041152000427246, 0.005066751956939697, 0.005144576072692871, 0.004978687763214112, 0.005223423957824707, 0.0049725441932678225, 0.005096447944641113, 0.00532480001449585, 0.005260287761688232, 0.005134335994720459, 0.005153791904449463, 0.005083136081695557, 0.005039103984832764, 0.00501964807510376, 0.0050503678321838375, 0.005014527797698975, 0.0050094079971313476, 0.005026815891265869, 0.005059584140777588, 0.0050165758132934574, 0.0050165758132934574, 0.00502784013748169, 0.0050657281875610355, 0.0050206718444824215, 0.005021696090698242, 0.005033984184265137, 0.005047296047210693, 0.005024767875671386, 0.00499612808227539, 0.00501961612701416, 0.005053440093994141, 0.005038080215454102, 0.00502784013748169, 0.0050063362121582035, 0.005049344062805176, 0.005104640007019043, 0.0050124797821044925, 0.005031936168670655, 0.005054463863372802, 0.0050165758132934574, 0.0050022401809692385, 0.00504527997970581, 0.005014495849609375, 0.004946944236755371, 0.005024767875671386, 0.005895167827606201, 0.0051066880226135255, 0.005015552043914795, 0.005015552043914795, 0.005030911922454834]",tokens/s,197.03201726134992,,,,,,,, -4bit-awq-gemv-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,meta-llama/Llama-2-13b-hf,meta-llama/Llama-2-13b-hf,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run - report = scenario.run(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 105, in run - _ = backend.generate(self.inputs, self.config.generate_kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context - return func(*args, **kwargs) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 400, in generate - return self.pretrained_model.generate(**inputs, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context - return func(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 1914, in generate - result = self._sample( - File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2651, in _sample - outputs = self( - File 
""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1174, in forward - outputs = self.model( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 978, in forward - layer_outputs = decoder_layer( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 718, in forward - hidden_states, self_attn_weights, present_key_value = self.self_attn( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 614, in forward - query_states = self.q_proj(hidden_states) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context - return func(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/gemv.py"", line 162, in forward - assert AWQ_INSTALLED, ( -AssertionError: AWQ kernels could not be loaded. 
Please install them from https://github.com/casper-hansen/AutoAWQ_kernels - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemv-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,x,x,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 304, in hf_raise_for_status - response.raise_for_status() - File ""/usr/local/lib/python3.10/dist-packages/requests/models.py"", line 1024, in raise_for_status - raise HTTPError(http_error_msg, response=self) -requests.exceptions.HTTPError: 404 Client Error: Not Found for url: https://huggingface.co/x/resolve/main/config.json - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1221, in hf_hub_download - return _hf_hub_download_to_cache_dir( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1325, in _hf_hub_download_to_cache_dir - _raise_on_head_call_error(head_call_error, force_download, local_files_only) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1823, in _raise_on_head_call_error - raise head_call_error - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1722, in _get_metadata_or_catch_error - metadata = get_hf_file_metadata(url=url, proxies=proxies, timeout=etag_timeout, headers=headers) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1645, in get_hf_file_metadata - r = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 372, in _request_wrapper - response = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 396, in _request_wrapper - hf_raise_for_status(response) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in 
hf_raise_for_status - raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-669492c7-4cbb4c04048d833714ff54fd;c39c59af-d66a-4757-8f72-a5dde1a8815d) - -Repository Not Found for url: https://huggingface.co/x/resolve/main/config.json. -Please make sure you specified the correct `repo_id` and `repo_type`. -If you are trying to access a private or gated repo, make sure you are authenticated. - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 425, in cached_file - raise EnvironmentError( -OSError: x is not a local folder and is not a valid model identifier listed on 'https://huggingface.co/models' -If this is a private repository, make sure to pass a token having permission to this repo either by logging in with `huggingface-cli login` or by passing `token=` - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemv-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,/,/,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback 
(most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 373, in cached_file - raise EnvironmentError( -OSError: / does not appear to have a file named config.json. Checkout 'https://huggingface.co///tree/None' for available files. - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemv-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,togethercomputer/RedPajama-INCITE-Base-7B-v0.1,togethercomputer/RedPajama-INCITE-Base-7B-v0.1,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run - report = scenario.run(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 105, in run - _ = backend.generate(self.inputs, self.config.generate_kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context - return func(*args, **kwargs) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 400, 
in generate - return self.pretrained_model.generate(**inputs, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context - return func(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 1914, in generate - result = self._sample( - File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2651, in _sample - outputs = self( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1097, in forward - outputs = self.gpt_neox( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 988, in forward - outputs = layer( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 753, in forward - attention_layer_outputs = self.attention( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 545, in forward - query, key, value, present = self._attn_projections_and_rope( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 224, in _attn_projections_and_rope - qkv = self.query_key_value(hidden_states) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context - return func(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/gemv.py"", line 162, in forward - assert AWQ_INSTALLED, ( -AssertionError: AWQ kernels could not be loaded. 
Please install them from https://github.com/casper-hansen/AutoAWQ_kernels - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemv-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,facebook/xglm-564M,facebook/xglm-564M,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - self.load_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3704, in from_pretrained - config = cls._autoset_attn_implementation( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1490, in _autoset_attn_implementation - config = cls._check_and_enable_sdpa( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1656, in _check_and_enable_sdpa - raise ValueError( -ValueError: XGLMForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. 
Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemv-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,EleutherAI/gpt-neo-125m,EleutherAI/gpt-neo-125m,cuda,0,42,,,True,,,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run - report = scenario.run(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run - self.run_model_loading_tracking(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking - backend.load() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load - self.load_transformers_model() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model - self.load_transformers_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights - self.load_transformers_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained - self.pretrained_model = self.automodel_loader.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3826, in from_pretrained - config = cls._autoset_attn_implementation( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1565, in _autoset_attn_implementation - config = cls._check_and_enable_sdpa( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1731, in _check_and_enable_sdpa - raise ValueError( -ValueError: GPTNeoForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. 
If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` - -",gpt_neo,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemv-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,EleutherAI/gpt-j-6b,EleutherAI/gpt-j-6b,cuda,0,42,,,True,,,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run - report = scenario.run(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run - self.run_model_loading_tracking(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking - backend.load() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load - self.load_transformers_model() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model - self.load_transformers_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights - self.load_transformers_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained - self.pretrained_model = self.automodel_loader.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3826, in from_pretrained - config = cls._autoset_attn_implementation( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1565, in _autoset_attn_implementation - config = cls._check_and_enable_sdpa( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1731, in _check_and_enable_sdpa - raise ValueError( -ValueError: GPTJForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. 
If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` - -",gptj,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemv-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,stabilityai/stablelm-2-12b,stabilityai/stablelm-2-12b,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run - report = scenario.run(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 105, in run - _ = backend.generate(self.inputs, self.config.generate_kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context - return func(*args, **kwargs) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 400, in generate - return self.pretrained_model.generate(**inputs, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context - return func(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 1914, in generate - result = self._sample( - File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2651, in _sample - outputs = self( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/stablelm/modeling_stablelm.py"", line 1268, in forward - outputs = self.model( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/stablelm/modeling_stablelm.py"", line 1062, in forward - layer_outputs = 
decoder_layer( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/stablelm/modeling_stablelm.py"", line 792, in forward - self_attn_output, self_attn_weights, present_key_value = self.self_attn( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/stablelm/modeling_stablelm.py"", line 441, in forward - query_states = self.q_proj(hidden_states) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context - return func(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/gemv.py"", line 162, in forward - assert AWQ_INSTALLED, ( -AssertionError: AWQ kernels could not be loaded. Please install them from https://github.com/casper-hansen/AutoAWQ_kernels - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemv-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,google/gemma-2b,google/gemma-2b,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 304, in hf_raise_for_status - response.raise_for_status() - File ""/usr/local/lib/python3.10/dist-packages/requests/models.py"", line 1024, in raise_for_status - raise HTTPError(http_error_msg, response=self) -requests.exceptions.HTTPError: 403 Client Error: Forbidden for url: https://huggingface.co/google/gemma-2b/resolve/main/config.json - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File 
""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1722, in _get_metadata_or_catch_error - metadata = get_hf_file_metadata(url=url, proxies=proxies, timeout=etag_timeout, headers=headers) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1645, in get_hf_file_metadata - r = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 372, in _request_wrapper - response = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 396, in _request_wrapper - hf_raise_for_status(response) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 367, in hf_raise_for_status - raise HfHubHTTPError(message, response=response) from e -huggingface_hub.utils._errors.HfHubHTTPError: (Request ID: Root=1-66948114-7878a1aa3f47292c3e4ba3d6;46cb3dc6-cd4f-4876-afbd-0570eb64fd52) - -403 Forbidden: Please enable access to public gated repositories in your fine-grained token settings to view this repository.. -Cannot access content at: https://huggingface.co/google/gemma-2b/resolve/main/config.json. -If you are trying to create or update content,make sure you have a token with the `write` role. - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1221, in hf_hub_download - return _hf_hub_download_to_cache_dir( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1325, in _hf_hub_download_to_cache_dir - _raise_on_head_call_error(head_call_error, force_download, local_files_only) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1826, in _raise_on_head_call_error - raise LocalEntryNotFoundError( -huggingface_hub.utils._errors.LocalEntryNotFoundError: An error happened while trying to locate the file on the Hub and we cannot find the requested files in the local cache. Please check your connection and try again or make sure your Internet connection is on. 
- -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 445, in cached_file - raise EnvironmentError( -OSError: We couldn't connect to 'https://huggingface.co' to load this file, couldn't find it in the cached files and it looks like google/gemma-2b is not the path to a directory containing a file named config.json. -Checkout your internet connection or see how to run the library in offline mode at 'https://huggingface.co/docs/transformers/installation#offline-mode'. 
- -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemv-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,EleutherAI/polyglot-ko-12.8b,EleutherAI/polyglot-ko-12.8b,cuda,0,42,,,True,,,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,gpt_neox,MB,7421.50144,8015.839232,0.0,7430.209536,7414.23104,s,1,10.536458984375,10.536458984375,0.0,10.536458984375,10.536458984375,10.536458984375,10.536458984375,[10.536458984375],,kWh,4.4030338854843704e-05,2.411323634256297e-05,6.447421824590238e-05,0.00013261779344330907,,MB,1777.430528,8749.842432,0.0,8103.395328,8044.111872,s,10,1.9894268646240234,0.19894268646240235,0.0001083075076375795,0.19897935485839846,0.1990382888793945,0.1990553382873535,0.19906897781372068,"[0.19869818115234375, 0.19880522155761718, 0.19899740600585938, 0.1989475860595703, 0.19898133850097657, 0.1990345001220703, 0.1990723876953125, 0.19889610290527343, 0.19897737121582032, 0.1990167694091797]",tokens/s,1286.802769944402,kWh,2.3510521574074256e-06,1.2882491584327178e-06,1.3458077215254848e-05,1.709737853109499e-05,tokens/kWh,14973055.637413243,MB,1777.430528,8959.557632,0.0,8313.110528,8265.708032,s,10,18.601238525390627,1.8601238525390626,0.012096883272204165,1.8650569458007813,1.871866748046875,1.8732858520507814,1.8744211352539064,"[1.835210693359375, 1.850995849609375, 1.8677020263671875, 1.865929931640625, 1.8747049560546876, 1.8715513916015625, 1.867650390625, 1.8641839599609376, 1.8590179443359376, 1.8442913818359374]",tokens/s,33.868712512881984,kWh,2.1117825937939608e-05,1.1573148020307187e-05,8.443752887894005e-05,0.0001171285028371868,tokens/kWh,537870.7869900161,,s,630,18.598745090484616,0.02952181760394384,0.0005181365284956107,0.029600255966186522,0.029863220024108888,0.030075955677032466,0.030895309410095217,"[0.028448768615722656, 0.02838937568664551, 0.02815180778503418, 0.028293119430541993, 0.028177408218383788, 0.028068864822387695, 0.02813030433654785, 0.028269567489624024, 0.02811494445800781, 0.02834124755859375, 0.02837196731567383, 0.02832793617248535, 0.028236799240112305, 0.02833919906616211, 0.028041215896606447, 0.028263423919677736, 0.02817024040222168, 0.028276735305786133, 0.028280832290649413, 0.028285951614379884, 0.028256256103515624, 0.02838528060913086, 0.029396991729736328, 0.02978713607788086, 0.029533184051513672, 0.029602815628051758, 0.029542400360107423, 0.029624319076538085, 0.02956492805480957, 0.029616128921508788, 0.029461503982543946, 0.029458431243896483, 0.02943180847167969, 0.02960588836669922, 0.02952191925048828, 0.02956287956237793, 0.029493247985839844, 0.02958438491821289, 0.029643775939941407, 0.029627391815185547, 0.02936627197265625, 0.029591552734375, 0.029542400360107423, 0.02957107162475586, 0.030090240478515624, 0.030120960235595705, 0.029682687759399414, 0.02956492805480957, 0.0295546875, 0.029645824432373048, 0.02952396774291992, 0.029592575073242186, 0.02954751968383789, 0.029636608123779298, 0.029560831069946288, 0.02958950424194336, 0.02960383987426758, 0.02952191925048828, 
0.02951475143432617, 0.029655040740966795, 0.02958028793334961, 0.029679616928100585, 0.02955571174621582, 0.02834124755859375, 0.02834022331237793, 0.028110847473144532, 0.028197887420654297, 0.028009471893310548, 0.028231679916381838, 0.028120063781738282, 0.028225536346435546, 0.02812211227416992, 0.029896703720092774, 0.02978713607788086, 0.029755392074584962, 0.02960588836669922, 0.029488128662109377, 0.0295546875, 0.03016703987121582, 0.029886463165283202, 0.02972774314880371, 0.029527040481567384, 0.029665279388427734, 0.029470720291137696, 0.02960383987426758, 0.029442047119140623, 0.029657087326049804, 0.029466623306274413, 0.02958847999572754, 0.029493247985839844, 0.02956390380859375, 0.02978816032409668, 0.029826047897338868, 0.029494272232055665, 0.029718528747558592, 0.02992742347717285, 0.029756416320800783, 0.02959667205810547, 0.029703168869018554, 0.02953625679016113, 0.029707263946533204, 0.029509632110595704, 0.028450815200805665, 0.028216320037841795, 0.02838630485534668, 0.028217344284057616, 0.03076812744140625, 0.030641151428222657, 0.029840383529663086, 0.02957414436340332, 0.028727296829223634, 0.029510656356811524, 0.029602815628051758, 0.029446144104003907, 0.02961305618286133, 0.02993561553955078, 0.029846527099609374, 0.029517824172973633, 0.029557760238647462, 0.02940108871459961, 0.02970419120788574, 0.029504512786865233, 0.02978508758544922, 0.029641727447509765, 0.029615104675292967, 0.029628416061401368, 0.028272640228271483, 0.029784063339233398, 0.02960691261291504, 0.02949017524719238, 0.029503488540649415, 0.029542400360107423, 0.02959974479675293, 0.030044160842895507, 0.0295598087310791, 0.029740032196044923, 0.029491199493408202, 0.029438976287841798, 0.0294021110534668, 0.029543424606323244, 0.029534208297729493, 0.029485055923461914, 0.02976665687561035, 0.029684736251831056, 0.02938572883605957, 0.029838336944580077, 0.029386751174926756, 0.02978611183166504, 0.02957619285583496, 0.029495296478271486, 0.02955264091491699, 0.029609983444213867, 0.02950553512573242, 0.029518848419189454, 0.029344768524169923, 0.029623296737670897, 0.02977177619934082, 0.029608959197998046, 0.029732864379882814, 0.0295598087310791, 0.0295546875, 0.029716480255126954, 0.029624319076538085, 0.029502464294433595, 0.029551616668701174, 0.02962944030761719, 0.02958438491821289, 0.030259199142456054, 0.02963046455383301, 0.029651968002319336, 0.029550592422485353, 0.02954854393005371, 0.029624319076538085, 0.02958847999572754, 0.02958438491821289, 0.029663232803344725, 0.029920255661010742, 0.029817855834960938, 0.030901248931884766, 0.030880767822265624, 0.030053375244140625, 0.02976870346069336, 0.029658111572265625, 0.02960588836669922, 0.029557760238647462, 0.029575168609619142, 0.02953113555908203, 0.029756416320800783, 0.029388799667358398, 0.028241920471191406, 0.02812518310546875, 0.028673023223876954, 0.029626367568969726, 0.02981171226501465, 0.0299233283996582, 0.029453311920166016, 0.02975846481323242, 0.02952191925048828, 0.029641727447509765, 0.029410303115844725, 0.029533184051513672, 0.029485055923461914, 0.02963763236999512, 0.029799423217773437, 0.02975129508972168, 0.029642751693725586, 0.029692928314208986, 0.029566976547241212, 0.029921279907226563, 0.02958028793334961, 0.029507583618164062, 0.029459455490112304, 0.029667327880859375, 0.02956492805480957, 0.029551616668701174, 0.029412351608276367, 0.02942361640930176, 0.029861888885498046, 0.029722623825073242, 0.029453311920166016, 0.02951372718811035, 0.02963046455383301, 0.029578239440917968, 
0.029609983444213867, 0.02938982391357422, 0.029472768783569334, 0.0295546875, 0.029504512786865233, 0.0297523193359375, 0.02971955108642578, 0.02959667205810547, 0.02959974479675293, 0.029732864379882814, 0.030991359710693358, 0.030497791290283204, 0.029833215713500977, 0.029718528747558592, 0.02991001510620117, 0.029723648071289063, 0.02959667205810547, 0.029682687759399414, 0.029479936599731447, 0.029815807342529296, 0.029594623565673828, 0.029916160583496092, 0.029611007690429687, 0.029730815887451172, 0.029644800186157227, 0.029683712005615235, 0.029688831329345702, 0.029633535385131835, 0.029855743408203125, 0.028695552825927735, 0.03139481544494629, 0.03162112045288086, 0.030731264114379882, 0.02973695945739746, 0.029682687759399414, 0.029756416320800783, 0.029716480255126954, 0.029476863861083984, 0.029724672317504884, 0.0295280647277832, 0.02974617576599121, 0.0298024959564209, 0.029641727447509765, 0.03014553642272949, 0.029914112091064454, 0.02958745574951172, 0.02978816032409668, 0.030316543579101563, 0.02979635238647461, 0.029656063079833983, 0.029660160064697266, 0.029661184310913087, 0.029578239440917968, 0.029660160064697266, 0.02958028793334961, 0.029820928573608397, 0.02980454444885254, 0.029747200012207032, 0.02978713607788086, 0.029557760238647462, 0.029643775939941407, 0.02959769630432129, 0.02958438491821289, 0.029830144882202148, 0.029722623825073242, 0.029710336685180663, 0.029628416061401368, 0.029692928314208986, 0.029662208557128908, 0.029612031936645508, 0.029667327880859375, 0.029475839614868164, 0.02974412727355957, 0.029675519943237305, 0.029875200271606447, 0.02955571174621582, 0.029634559631347656, 0.029717504501342775, 0.029705215454101562, 0.029715456008911133, 0.029723648071289063, 0.02957619285583496, 0.029692928314208986, 0.029652992248535157, 0.029592575073242186, 0.029549568176269532, 0.029658111572265625, 0.02949836730957031, 0.029894655227661132, 0.029698047637939453, 0.02958028793334961, 0.02958131217956543, 0.030402559280395508, 0.029837312698364257, 0.029634559631347656, 0.029665279388427734, 0.029480960845947264, 0.029841407775878907, 0.029427711486816405, 0.0295546875, 0.029473791122436522, 0.02979635238647461, 0.030831615447998048, 0.03100364875793457, 0.03016192054748535, 0.02969599914550781, 0.02962227249145508, 0.029718528747558592, 0.029660160064697266, 0.029651968002319336, 0.029623296737670897, 0.029672447204589843, 0.029462528228759766, 0.0295598087310791, 0.029473791122436522, 0.029645824432373048, 0.0295731201171875, 0.029443071365356444, 0.02958745574951172, 0.029609983444213867, 0.029652992248535157, 0.029665279388427734, 0.02957107162475586, 0.029620223999023438, 0.029463552474975587, 0.029612031936645508, 0.029839359283447265, 0.02953830337524414, 0.029611007690429687, 0.0295598087310791, 0.029519872665405275, 0.029611007690429687, 0.029404159545898437, 0.02957619285583496, 0.029651968002319336, 0.02993459129333496, 0.029703168869018554, 0.02962124824523926, 0.029671424865722655, 0.02979840087890625, 0.03061862373352051, 0.029749248504638674, 0.02957619285583496, 0.02963763236999512, 0.029648895263671874, 0.029654016494750978, 0.02958847999572754, 0.029626367568969726, 0.029615104675292967, 0.029707263946533204, 0.029655040740966795, 0.029660160064697266, 0.029551616668701174, 0.02969599914550781, 0.02957619285583496, 0.02876006317138672, 0.028444671630859376, 0.028084224700927734, 0.02834943962097168, 0.029265920639038087, 0.030039039611816407, 0.029643775939941407, 0.030007295608520508, 0.029848575592041016, 
0.029635583877563477, 0.029636608123779298, 0.029701120376586915, 0.030026752471923827, 0.02957209587097168, 0.029511680603027345, 0.029642751693725586, 0.029569023132324217, 0.02960691261291504, 0.029570047378540038, 0.029726720809936522, 0.029569023132324217, 0.029697023391723632, 0.02971238327026367, 0.029646848678588866, 0.02960383987426758, 0.02978611183166504, 0.029651968002319336, 0.029575168609619142, 0.029697023391723632, 0.029639680862426757, 0.030509056091308592, 0.03035852813720703, 0.030735359191894532, 0.03014963150024414, 0.02991001510620117, 0.02979840087890625, 0.029706239700317383, 0.029645824432373048, 0.029469696044921875, 0.02960588836669922, 0.029616128921508788, 0.030003200531005858, 0.029661184310913087, 0.029845504760742186, 0.02956185531616211, 0.029650943756103516, 0.02957926368713379, 0.029691904067993165, 0.02976153564453125, 0.02969599914550781, 0.029519872665405275, 0.029749248504638674, 0.029516799926757813, 0.029726720809936522, 0.029675519943237305, 0.02959974479675293, 0.029651968002319336, 0.029631488800048827, 0.02956492805480957, 0.029732864379882814, 0.029640703201293944, 0.0296048641204834, 0.029616128921508788, 0.028071935653686524, 0.028253183364868165, 0.02798899269104004, 0.029713407516479492, 0.029657087326049804, 0.02977689552307129, 0.02957107162475586, 0.029648895263671874, 0.029469696044921875, 0.029618175506591796, 0.02960588836669922, 0.029470720291137696, 0.029441024780273436, 0.029654016494750978, 0.02952908706665039, 0.029639680862426757, 0.029474815368652343, 0.02960588836669922, 0.029489152908325194, 0.02915635108947754, 0.029459455490112304, 0.029593599319458007, 0.029541376113891602, 0.029634559631347656, 0.029452287673950195, 0.02953932762145996, 0.029468671798706055, 0.029662208557128908, 0.02937446403503418, 0.02975027275085449, 0.029772800445556642, 0.02975129508972168, 0.029623296737670897, 0.029732864379882814, 0.029551616668701174, 0.029620223999023438, 0.02951475143432617, 0.029648895263671874, 0.029441024780273436, 0.030253055572509766, 0.03037696075439453, 0.030196735382080078, 0.031169536590576172, 0.030151679992675783, 0.02958950424194336, 0.029711360931396483, 0.029516799926757813, 0.029654016494750978, 0.029593599319458007, 0.029661184310913087, 0.029634559631347656, 0.02976665687561035, 0.0297256965637207, 0.029115392684936525, 0.02961408042907715, 0.029749248504638674, 0.02958950424194336, 0.029682687759399414, 0.029459455490112304, 0.029643775939941407, 0.02958950424194336, 0.02958438491821289, 0.029905920028686524, 0.028225536346435546, 0.028297216415405273, 0.02817433547973633, 0.02897203254699707, 0.02960588836669922, 0.029658111572265625, 0.02956390380859375, 0.029607936859130858, 0.029451263427734374, 0.02955571174621582, 0.029611007690429687, 0.029526016235351563, 0.029413375854492187, 0.029577215194702147, 0.029517824172973633, 0.029654016494750978, 0.029461503982543946, 0.029558784484863283, 0.02956595230102539, 0.029625343322753905, 0.0295731201171875, 0.02953625679016113, 0.029518848419189454, 0.02957619285583496, 0.02954035186767578, 0.029481983184814452, 0.029488128662109377, 0.029709312438964845, 0.029825023651123047, 0.028925952911376954, 0.029499391555786132, 0.02956185531616211, 0.02953625679016113, 0.029627391815185547, 0.02940006446838379, 0.02938265609741211, 0.029238271713256835, 0.029949951171875, 0.029550592422485353, 0.02958336067199707, 0.029920255661010742, 0.029671424865722655, 0.02942361640930176, 0.029601791381835937, 0.029691904067993165, 0.029591552734375, 0.029535232543945314, 
0.029570047378540038, 0.029489152908325194, 0.02958438491821289, 0.0286167049407959, 0.02949017524719238, 0.029478912353515626, 0.02953011131286621, 0.02954035186767578, 0.02959769630432129, 0.029480960845947264, 0.029570047378540038, 0.02936627197265625, 0.028847103118896485, 0.03021004867553711, 0.03155251121520996, 0.030265344619750976, 0.028012544631958007, 0.028471296310424804, 0.028185600280761718, 0.028184576034545897, 0.02812620735168457, 0.0279685115814209, 0.028015615463256836, 0.028062719345092774, 0.02813337516784668, 0.02816307258605957, 0.028080127716064454, 0.02812723159790039, 0.028091392517089843, 0.028246015548706056, 0.028281856536865234, 0.028670976638793946, 0.029460479736328125, 0.02960691261291504, 0.029473791122436522, 0.02952396774291992, 0.02949017524719238, 0.029487104415893556, 0.029491199493408202, 0.029499391555786132, 0.02935603141784668, 0.02953727912902832, 0.02955673599243164, 0.029626367568969726, 0.029455360412597657, 0.02959052848815918, 0.029543424606323244, 0.029625343322753905, 0.029512704849243163, 0.02954547119140625, 0.02949836730957031, 0.02959667205810547, 0.029882368087768556, 0.029647872924804686, 0.02954035186767578, 0.029652992248535157, 0.02993459129333496, 0.02957926368713379, 0.02958233642578125, 0.029649919509887695, 0.02954547119140625, 0.029600767135620116, 0.029542400360107423, 0.029664255142211913, 0.029620223999023438, 0.029770751953125, 0.029460479736328125, 0.029815807342529296, 0.030006271362304687, 0.029914112091064454, 0.029791231155395507, 0.03000115203857422, 0.02958745574951172, 0.029825023651123047, 0.029462528228759766, 0.030058496475219725, 0.02999603271484375, 0.029937664031982423, 0.029675519943237305]",tokens/s,33.87325311116377,,,,,,,, -4bit-awq-gemv-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,Qwen/Qwen-72B,Qwen/Qwen-72B,cuda,0,42,,,True,,,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run - report = scenario.run(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run - self.run_model_loading_tracking(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking - backend.load() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load - self.load_transformers_model() - File 
""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model - self.load_transformers_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights - self.load_transformers_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained - self.pretrained_model = self.automodel_loader.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 551, in from_pretrained - model_class = get_class_from_dynamic_module( - File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 502, in get_class_from_dynamic_module - final_module = get_cached_module_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 327, in get_cached_module_file - modules_needed = check_imports(resolved_module_file) - File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 182, in check_imports - raise ImportError( -ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. Run `pip install transformers_stream_generator` - -",qwen,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemv-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,i,i,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 304, in hf_raise_for_status - response.raise_for_status() - File ""/usr/local/lib/python3.10/dist-packages/requests/models.py"", line 1024, in raise_for_status - raise HTTPError(http_error_msg, response=self) -requests.exceptions.HTTPError: 404 Client Error: Not Found for url: https://huggingface.co/i/resolve/main/config.json - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1221, in hf_hub_download - return 
_hf_hub_download_to_cache_dir( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1325, in _hf_hub_download_to_cache_dir - _raise_on_head_call_error(head_call_error, force_download, local_files_only) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1823, in _raise_on_head_call_error - raise head_call_error - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1722, in _get_metadata_or_catch_error - metadata = get_hf_file_metadata(url=url, proxies=proxies, timeout=etag_timeout, headers=headers) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1645, in get_hf_file_metadata - r = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 372, in _request_wrapper - response = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 396, in _request_wrapper - hf_raise_for_status(response) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status - raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-66949019-36803f161ef2193064e775cd;9ffb7bff-aa8e-4217-ae14-7197afb3fb12) - -Repository Not Found for url: https://huggingface.co/i/resolve/main/config.json. -Please make sure you specified the correct `repo_id` and `repo_type`. -If you are trying to access a private or gated repo, make sure you are authenticated. - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 425, in cached_file - raise EnvironmentError( -OSError: i is not a local folder and is not a valid model identifier listed on 'https://huggingface.co/models' -If this is a private repository, make sure to pass a token having permission to this repo either by 
logging in with `huggingface-cli login` or by passing `token=` - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemv-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,stabilityai/stablelm-3b-4e1t,stabilityai/stablelm-3b-4e1t,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run - report = scenario.run(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 105, in run - _ = backend.generate(self.inputs, self.config.generate_kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context - return func(*args, **kwargs) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 400, in generate - return self.pretrained_model.generate(**inputs, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context - return func(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 1914, in generate - result = self._sample( - File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2651, in _sample - outputs = self( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/stablelm/modeling_stablelm.py"", line 1268, in forward - outputs = self.model( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/stablelm/modeling_stablelm.py"", line 1062, in forward - layer_outputs = decoder_layer( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File 
""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/stablelm/modeling_stablelm.py"", line 792, in forward - self_attn_output, self_attn_weights, present_key_value = self.self_attn( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/stablelm/modeling_stablelm.py"", line 441, in forward - query_states = self.q_proj(hidden_states) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context - return func(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/gemv.py"", line 162, in forward - assert AWQ_INSTALLED, ( -AssertionError: AWQ kernels could not be loaded. Please install them from https://github.com/casper-hansen/AutoAWQ_kernels - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemv-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,tiiuae/falcon-rw-1b,tiiuae/falcon-rw-1b,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - self.load_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 559, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3704, in from_pretrained - config = cls._autoset_attn_implementation( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1490, in _autoset_attn_implementation - config = cls._check_and_enable_sdpa( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1656, in _check_and_enable_sdpa - raise ValueError( -ValueError: FalconForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemv-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,Qwen/Qwen-14B,Qwen/Qwen-14B,cuda,0,42,,,True,,,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run - report = scenario.run(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run - self.run_model_loading_tracking(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking - backend.load() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load - self.load_transformers_model() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model - self.load_transformers_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights - self.load_transformers_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained - self.pretrained_model = self.automodel_loader.from_pretrained( - File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 551, in from_pretrained - model_class = get_class_from_dynamic_module( - File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 502, in get_class_from_dynamic_module - final_module = get_cached_module_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 327, in get_cached_module_file - modules_needed = check_imports(resolved_module_file) - File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 182, in check_imports - raise ImportError( -ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. Run `pip install transformers_stream_generator` - -",qwen,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemv-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,stabilityai/stablelm-2-1_6b,stabilityai/stablelm-2-1_6b,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run - report = scenario.run(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 105, in run - _ = backend.generate(self.inputs, self.config.generate_kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context - return func(*args, **kwargs) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 400, in generate - return self.pretrained_model.generate(**inputs, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context - return func(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 1914, in generate - result = self._sample( - File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2651, in _sample - outputs = self( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/stablelm/modeling_stablelm.py"", line 1268, in forward - outputs = self.model( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/stablelm/modeling_stablelm.py"", line 1062, in forward - layer_outputs = decoder_layer( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/stablelm/modeling_stablelm.py"", line 792, in forward - self_attn_output, self_attn_weights, present_key_value = self.self_attn( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/stablelm/modeling_stablelm.py"", line 441, in forward - query_states = self.q_proj(hidden_states) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context - return func(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/gemv.py"", line 162, in forward - assert AWQ_INSTALLED, ( -AssertionError: AWQ kernels could not be loaded. 
Please install them from https://github.com/casper-hansen/AutoAWQ_kernels - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemv-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,EleutherAI/pythia-6.7b,EleutherAI/pythia-6.7b,cuda,0,42,,,True,,,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,gpt_neox,MB,4188.852224,4933.025792,0.0,4347.396096,4328.833024,s,1,9.5195185546875,9.5195185546875,0.0,9.5195185546875,9.5195185546875,9.5195185546875,9.5195185546875,[9.5195185546875],,kWh,3.045317082292943e-05,1.667440060583619e-05,4.1975589136045155e-05,8.910316056481077e-05,,MB,1492.86912,5451.022336,0.0,4804.575232,4748.27776,s,10,0.9930765075683593,0.09930765075683592,6.277907176583042e-05,0.09932608032226563,0.09936351776123047,0.09938164520263672,0.09939614715576171,"[0.09916633605957031, 0.09932492828369141, 0.09933920288085937, 0.09926201629638672, 0.09939977264404297, 0.09935948944091796, 0.09932723236083985, 0.09928546905517578, 0.09926124572753907, 0.09935081481933594]",tokens/s,2577.8477091039035,kWh,1.1741179474216171e-06,6.433567101154941e-07,6.707340349366587e-06,8.524815006903699e-06,tokens/kWh,30029977.165801495,MB,1511.251968,5560.07424,0.0,4913.627136,4878.069248,s,10,14.67107763671875,1.467107763671875,0.014681863859447758,1.470994140625,1.4860807861328125,1.4861784545898438,1.486256589355469,"[1.477436767578125, 1.4514793701171875, 1.486276123046875, 1.4772763671875, 1.48605908203125, 1.4730701904296875, 1.4530767822265624, 1.4689180908203125, 1.45441650390625, 1.443068359375]",tokens/s,42.941630846750954,kWh,1.8169751677510622e-05,9.955980501308989e-06,5.5914070858830285e-05,8.40398030376499e-05,tokens/kWh,749644.7840528131,,s,630,14.668818431854243,0.02328383878072103,0.0005742889225078857,0.02305484867095947,0.024071066093444823,0.024271973991394042,0.02490523630142212,"[0.0236810245513916, 0.023330816268920897, 0.023350271224975586, 0.02305536079406738, 0.023200767517089844, 0.023076864242553712, 0.02286899185180664, 0.023142400741577147, 0.023191551208496093, 0.023165952682495116, 0.0230328311920166, 0.023206911087036132, 0.02305843162536621, 0.023021568298339845, 0.02266316795349121, 0.023343103408813477, 0.0234967041015625, 0.024270847320556642, 0.024152063369750978, 0.024062976837158204, 0.024187904357910156, 0.02385305595397949, 0.02352230453491211, 0.023018495559692383, 0.023052288055419923, 0.023028736114501954, 0.023036928176879884, 0.02253107261657715, 0.022987775802612305, 0.024204288482666016, 0.0265482234954834, 0.02448691177368164, 0.023942144393920898, 0.024011775970458983, 0.023833599090576172, 0.02391756820678711, 0.02391961669921875, 0.02328371238708496, 0.02327552032470703, 0.023762943267822266, 0.024573951721191405, 0.02448588752746582, 0.023867391586303712, 0.02333695983886719, 0.023111679077148437, 0.022996992111206056, 0.02349772834777832, 0.02287615966796875, 0.022616064071655274, 0.023021568298339845, 0.022692863464355468, 0.022964223861694336, 0.023130111694335938, 0.022948863983154297, 0.023013376235961915, 
0.023537664413452147, 0.02309119987487793, 0.022956031799316406, 0.022592512130737305, 0.022970367431640625, 0.023983104705810547, 0.023790592193603514, 0.024382463455200197, 0.023177215576171875, 0.023774208068847655, 0.02292736053466797, 0.02290176010131836, 0.022958080291748048, 0.02292531204223633, 0.02288128089904785, 0.022890495300292968, 0.02328166389465332, 0.022580223083496095, 0.022632448196411133, 0.022571008682250978, 0.022553600311279298, 0.022957056045532227, 0.023221248626708983, 0.0230328311920166, 0.023035903930664063, 0.023220224380493162, 0.023582719802856447, 0.023044095993041993, 0.022923263549804687, 0.02330521583557129, 0.023840768814086914, 0.023334911346435547, 0.022929407119750975, 0.022625280380249024, 0.022832128524780275, 0.02287820816040039, 0.02263039970397949, 0.022575103759765625, 0.022788095474243163, 0.02305023956298828, 0.02305023956298828, 0.022999040603637694, 0.0227061767578125, 0.02293452835083008, 0.023809024810791016, 0.023385087966918947, 0.02305536079406738, 0.02313113594055176, 0.022980607986450196, 0.023192575454711914, 0.02390118408203125, 0.02295091247558594, 0.023241727828979493, 0.022970367431640625, 0.022549503326416014, 0.022845439910888672, 0.02308710479736328, 0.02351923179626465, 0.02307072067260742, 0.023009279251098632, 0.02310246467590332, 0.023000064849853515, 0.022723583221435546, 0.023040000915527343, 0.023778303146362305, 0.023111679077148437, 0.022666240692138673, 0.02264371109008789, 0.022806528091430665, 0.02267955207824707, 0.023408639907836915, 0.023022592544555662, 0.022816768646240236, 0.022958080291748048, 0.02305433654785156, 0.02267033576965332, 0.022930431365966796, 0.02394726371765137, 0.024046592712402344, 0.02410495948791504, 0.02407935905456543, 0.02392064094543457, 0.02430463981628418, 0.024165376663208008, 0.023992319107055664, 0.024526847839355468, 0.024417280197143554, 0.024053760528564453, 0.023633920669555664, 0.023044095993041993, 0.023232511520385742, 0.02395238494873047, 0.02367487907409668, 0.023225343704223633, 0.023000064849853515, 0.023017471313476562, 0.023028736114501954, 0.022872064590454103, 0.024557567596435546, 0.024613887786865234, 0.02484121513366699, 0.023908351898193358, 0.02311577606201172, 0.023566335678100587, 0.02307174491882324, 0.02266009521484375, 0.02385408020019531, 0.023488512039184572, 0.023117824554443358, 0.022952959060668944, 0.023566335678100587, 0.02349158477783203, 0.023990272521972656, 0.023980031967163085, 0.02283622360229492, 0.023044095993041993, 0.023093248367309572, 0.0230328311920166, 0.0230830078125, 0.02350796890258789, 0.024379392623901368, 0.023809024810791016, 0.023980031967163085, 0.02372812843322754, 0.02272972869873047, 0.02391961669921875, 0.024432640075683593, 0.024219648361206055, 0.023230464935302734, 0.02305536079406738, 0.023826431274414063, 0.02350694465637207, 0.024070144653320313, 0.02410086441040039, 0.02304819107055664, 0.02386944007873535, 0.024038400650024414, 0.023455743789672853, 0.023820287704467775, 0.02386227226257324, 0.02388582420349121, 0.02467532730102539, 0.02394726371765137, 0.02267750358581543, 0.022633472442626954, 0.022734848022460938, 0.02301644706726074, 0.02290073585510254, 0.023555072784423828, 0.023942144393920898, 0.024482816696166993, 0.024203264236450195, 0.024017919540405275, 0.023838720321655273, 0.023957504272460937, 0.023176191329956054, 0.022936576843261718, 0.022922239303588866, 0.022912000656127928, 0.023117824554443358, 0.022889471054077147, 0.02365644836425781, 0.02353152084350586, 0.023319551467895508, 
0.02408857536315918, 0.023635967254638672, 0.02427289581298828, 0.023961599349975587, 0.024130559921264647, 0.023981056213378905, 0.023836671829223634, 0.024060928344726562, 0.023623680114746092, 0.022642688751220705, 0.02253107261657715, 0.022930431365966796, 0.022898687362670898, 0.022956031799316406, 0.02306559944152832, 0.022982656478881838, 0.022983680725097655, 0.023299072265625, 0.022961151123046874, 0.023002111434936523, 0.02308403205871582, 0.02290176010131836, 0.02295910453796387, 0.023181312561035155, 0.02365542411804199, 0.02411315155029297, 0.02366361618041992, 0.02264678382873535, 0.022734848022460938, 0.023385087966918947, 0.02351206398010254, 0.024410112380981445, 0.023933952331542968, 0.025047040939331053, 0.023839744567871093, 0.024384511947631835, 0.024011775970458983, 0.02391756820678711, 0.024359935760498046, 0.023682048797607422, 0.025314304351806642, 0.025590784072875978, 0.024386560440063477, 0.024044544219970702, 0.024015871047973633, 0.024143871307373048, 0.024258560180664062, 0.024138751983642577, 0.024006656646728516, 0.024187904357910156, 0.024018943786621092, 0.024146944046020507, 0.02425651168823242, 0.02414489555358887, 0.02490880012512207, 0.02411929512023926, 0.024046592712402344, 0.024345600128173828, 0.02413670349121094, 0.02326937675476074, 0.02308915138244629, 0.02308608055114746, 0.022759424209594727, 0.023053312301635744, 0.022763519287109374, 0.022845439910888672, 0.023334911346435547, 0.024246271133422852, 0.024896511077880858, 0.02391859245300293, 0.023464960098266603, 0.022924287796020508, 0.02268569564819336, 0.022977535247802734, 0.023274496078491212, 0.022800384521484376, 0.022635520935058592, 0.02274508857727051, 0.022647808074951172, 0.022847488403320314, 0.024057855606079103, 0.023355392456054686, 0.02393497657775879, 0.023183359146118163, 0.02269491195678711, 0.02267852783203125, 0.022675455093383787, 0.022622207641601562, 0.022556671142578123, 0.02301644706726074, 0.02290790367126465, 0.022687744140625, 0.02304102325439453, 0.022716415405273437, 0.023002111434936523, 0.022986751556396484, 0.02288844871520996, 0.02304921531677246, 0.022866943359375, 0.022847488403320314, 0.022609920501708985, 0.02260479927062988, 0.02270310401916504, 0.022938623428344726, 0.02305023956298828, 0.02290995216369629, 0.022928384780883788, 0.023031808853149413, 0.02289459228515625, 0.0228351993560791, 0.023568384170532225, 0.023441408157348635, 0.022998016357421876, 0.023031808853149413, 0.023038976669311522, 0.023008256912231444, 0.02308403205871582, 0.022990848541259764, 0.022930431365966796, 0.023198720932006835, 0.023456768035888673, 0.02394726371765137, 0.02313932800292969, 0.02328371238708496, 0.023981056213378905, 0.023867391586303712, 0.02413363265991211, 0.024607744216918945, 0.02429849624633789, 0.023976959228515626, 0.022965248107910157, 0.023179264068603517, 0.02374143981933594, 0.023657472610473632, 0.023925760269165038, 0.023549951553344727, 0.024704000473022462, 0.02510745620727539, 0.024204288482666016, 0.024008703231811524, 0.024010751724243166, 0.023998464584350586, 0.02350592041015625, 0.023059455871582032, 0.023638015747070314, 0.023946239471435548, 0.02391449546813965, 0.023961599349975587, 0.022804479598999023, 0.023029760360717775, 0.023120895385742187, 0.023096319198608398, 0.023010303497314453, 0.023120895385742187, 0.023544832229614256, 0.023053312301635744, 0.0228351993560791, 0.02301644706726074, 0.022998016357421876, 0.023282688140869142, 0.02294272041320801, 0.023043071746826172, 0.023044095993041993, 0.023036928176879884, 
0.022985727310180663, 0.023061504364013673, 0.022978559494018554, 0.023017471313476562, 0.023015424728393553, 0.022664192199707032, 0.02286899185180664, 0.023017471313476562, 0.02304102325439453, 0.023006208419799806, 0.023009279251098632, 0.022939647674560547, 0.022887424468994142, 0.02265190315246582, 0.02266009521484375, 0.022982656478881838, 0.02247475242614746, 0.022598655700683593, 0.022707199096679686, 0.02268057632446289, 0.02289356803894043, 0.02267955207824707, 0.02266316795349121, 0.02263039970397949, 0.02272972869873047, 0.022581247329711913, 0.023145471572875977, 0.023021568298339845, 0.022998016357421876, 0.02310041618347168, 0.02311884880065918, 0.02349158477783203, 0.02307583999633789, 0.0229171199798584, 0.022995967864990235, 0.023013376235961915, 0.022897663116455077, 0.023352319717407227, 0.022985727310180663, 0.0237127685546875, 0.024031232833862305, 0.023625728607177734, 0.022684671401977538, 0.023748607635498048, 0.022931455612182617, 0.022806528091430665, 0.02391551971435547, 0.02271129608154297, 0.022714368820190428, 0.022585344314575196, 0.023933952331542968, 0.02389299201965332, 0.023133184432983397, 0.022999040603637694, 0.023390207290649414, 0.02351820755004883, 0.024027135848999022, 0.023605247497558594, 0.023385087966918947, 0.023967744827270508, 0.023880704879760743, 0.024056831359863282, 0.024060928344726562, 0.02392064094543457, 0.023993343353271485, 0.024138751983642577, 0.023793664932250977, 0.023961599349975587, 0.023813119888305666, 0.023831552505493164, 0.02410905647277832, 0.024762367248535155, 0.023035903930664063, 0.023783424377441405, 0.02313932800292969, 0.02369126319885254, 0.02394419288635254, 0.023971839904785155, 0.02388787269592285, 0.023799808502197265, 0.023751680374145507, 0.02386944007873535, 0.023816192626953125, 0.023823360443115234, 0.02333286476135254, 0.02285670471191406, 0.023031808853149413, 0.022845439910888672, 0.022979583740234375, 0.023043071746826172, 0.022974464416503908, 0.022965248107910157, 0.022617088317871094, 0.022640640258789063, 0.022780927658081054, 0.02293452835083008, 0.023011327743530274, 0.023022592544555662, 0.022805503845214844, 0.02288435173034668, 0.02270207977294922, 0.022616064071655274, 0.02285260772705078, 0.022952959060668944, 0.022965248107910157, 0.02291916847229004, 0.022928384780883788, 0.02308915138244629, 0.023030784606933592, 0.022822912216186524, 0.023015424728393553, 0.02283417510986328, 0.022962175369262695, 0.023008256912231444, 0.022981632232666017, 0.023605247497558594, 0.023254016876220703, 0.022964223861694336, 0.022737920761108397, 0.022811647415161132, 0.022725631713867187, 0.022862848281860353, 0.022951936721801756, 0.02290892791748047, 0.022575103759765625, 0.02265190315246582, 0.022562816619873048, 0.022955007553100586, 0.02268262481689453, 0.022790143966674805, 0.02309836769104004, 0.022956031799316406, 0.02307788848876953, 0.02313216018676758, 0.022597631454467772, 0.022635520935058592, 0.02268569564819336, 0.022529024124145508, 0.022759424209594727, 0.022770687103271483, 0.02269491195678711, 0.02271232032775879, 0.022687744140625, 0.022724607467651366, 0.022768640518188478, 0.022975488662719725, 0.02307379150390625, 0.022897663116455077, 0.022640640258789063, 0.02312499237060547, 0.023214080810546874, 0.023111679077148437, 0.02306559944152832, 0.02288844871520996, 0.023002111434936523, 0.02307174491882324, 0.023625728607177734, 0.02327654457092285, 0.023014400482177736, 0.023027711868286133, 0.023219200134277345, 0.02310553550720215, 0.022814720153808594, 0.02306355285644531, 
0.023111679077148437, 0.02308403205871582, 0.023161855697631836, 0.02328985595703125, 0.0232806396484375, 0.023240703582763672, 0.022986751556396484, 0.02301644706726074, 0.02287615966796875, 0.022906879425048828, 0.023366655349731445, 0.02512998390197754, 0.024199167251586915, 0.024218624114990234, 0.024193023681640623, 0.024046592712402344, 0.023587839126586914, 0.023822336196899413, 0.023038976669311522, 0.02265497589111328, 0.022781951904296875, 0.02289254379272461, 0.022962175369262695, 0.022700031280517577, 0.022544384002685547, 0.022805503845214844, 0.022675455093383787, 0.02288844871520996, 0.022937599182128905, 0.02288435173034668, 0.0226693115234375, 0.02307788848876953, 0.02352025604248047, 0.023224319458007812, 0.02291097640991211, 0.022701055526733398, 0.02302566337585449, 0.023021568298339845, 0.023001087188720702, 0.02303385543823242, 0.023008256912231444, 0.022675455093383787, 0.023017471313476562, 0.022765567779541016, 0.022989824295043947, 0.0226693115234375, 0.022748159408569335, 0.02264678382873535, 0.022649856567382814, 0.022634496688842775, 0.022577152252197266, 0.022715391159057616, 0.02290995216369629, 0.022602752685546876, 0.02294272041320801, 0.023059455871582032, 0.022855680465698244, 0.02288332748413086, 0.02269900894165039, 0.02282803153991699, 0.023020544052124024, 0.022723583221435546, 0.022673408508300782, 0.02267852783203125, 0.02263859176635742, 0.022855680465698244, 0.02310348892211914, 0.022557695388793944, 0.022935552597045897, 0.022986751556396484, 0.022684671401977538, 0.022640640258789063, 0.02267750358581543, 0.02270207977294922, 0.02307276725769043, 0.02309017562866211, 0.023134208679199218, 0.023430143356323242, 0.023628799438476563, 0.024221696853637696, 0.023174144744873046, 0.023085056304931642, 0.022990848541259764]",tokens/s,42.94824446336563,,,,,,,, -4bit-awq-gemv-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,01-ai/Yi-34B,01-ai/Yi-34B,cuda,0,42,,,True,,,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,llama,MB,17656.070144,20059.783168,0.0,19474.153472,19254.604288,s,1,15.3148095703125,15.3148095703125,0.0,15.3148095703125,15.3148095703125,15.3148095703125,15.3148095703125,[15.3148095703125],,kWh,9.934747421597036e-05,5.4435047905591125e-05,0.00013838427737401793,0.0002921667994955794,,MB,1726.169088,20407.9104,0.0,19761.463296,19447.034368,s,10,5.203118713378907,0.5203118713378907,0.00010707609549308164,0.5203154602050781,0.5204810607910156,0.520488394165039,0.5204942608642578,"[0.5204957275390625, 0.5203200073242188, 0.5202623901367187, 0.520327392578125, 0.5203109130859375, 0.5203474731445312, 0.5202471923828125, 0.5201851806640625, 0.5201430053710937, 0.5204794311523437]",tokens/s,492.0126064810724,kWh,6.153290742291439e-06,3.3707434866032265e-06,3.5728153582500336e-05,4.5252187811395e-05,tokens/kWh,5657185.0419028,MB,1741.733888,20433.076224,0.0,19786.62912,19447.036928,s,10,32.318438232421876,3.2318438232421878,0.0010879095164221821,3.2321892089843747,3.232906396484375,3.2329817626953123,3.2330420556640624,"[3.23280322265625, 3.232281494140625, 
3.232096923828125, 3.23305712890625, 3.23070361328125, 3.2328896484375, 3.2320888671875, 3.229580322265625, 3.232283447265625, 3.230653564453125]",tokens/s,19.493516223441254,kWh,3.815126353958361e-05,2.0910438082688504e-05,0.0001842913557662994,0.0002433530573885715,tokens/kWh,258883.12510249417,,s,630,32.31592948913574,0.051295126173231335,0.000234252289479068,0.05131315231323242,0.05149009895324707,0.051536895751953124,0.05194745704650879,"[0.05155737686157227, 0.051023872375488284, 0.05108838272094727, 0.05121535873413086, 0.051113983154296876, 0.05109657669067383, 0.05107814407348633, 0.051074047088623044, 0.05108428955078125, 0.051095550537109374, 0.05106790542602539, 0.05113958358764648, 0.051108863830566405, 0.051122177124023435, 0.05123583984375, 0.05114371109008789, 0.051085281372070315, 0.05113651275634765, 0.051133438110351564, 0.05110681533813476, 0.051120128631591794, 0.05122048187255859, 0.051138561248779295, 0.0511723518371582, 0.05115289688110351, 0.05117440032958984, 0.05115596771240234, 0.05134643173217773, 0.051176448822021485, 0.05118873596191406, 0.05118361663818359, 0.051187713623046874, 0.05137715148925781, 0.05241241455078125, 0.05152665710449219, 0.05145907211303711, 0.05136281585693359, 0.05134950256347656, 0.05137715148925781, 0.05138534545898438, 0.05135257720947266, 0.051422206878662106, 0.05134131240844726, 0.05144166564941406, 0.05138022232055664, 0.051397632598876954, 0.051337215423583986, 0.051506175994873046, 0.051388416290283206, 0.05138534545898438, 0.051383296966552736, 0.05144985580444336, 0.05139148712158203, 0.05157068634033203, 0.05143142318725586, 0.05161881637573242, 0.05152972793579102, 0.05158092880249023, 0.05142835235595703, 0.051620864868164064, 0.051432449340820315, 0.051507198333740234, 0.0514703369140625, 0.0518317756652832, 0.05117030334472656, 0.05101875305175781, 0.051056640625, 0.05099929428100586, 0.05106892776489258, 0.05109452819824219, 0.05120307159423828, 0.05101055908203125, 0.0510750732421875, 0.05104742431640625, 0.05111296081542969, 0.05108224105834961, 0.05125222396850586, 0.05106175994873047, 0.051187713623046874, 0.05104127883911133, 0.051128318786621094, 0.05109964752197266, 0.0512911376953125, 0.05110067367553711, 0.05118361663818359, 0.051105792999267576, 0.05125017547607422, 0.05114265441894531, 0.051158016204833984, 0.0511539192199707, 0.05130137634277344, 0.05118873596191406, 0.05117542266845703, 0.051146751403808595, 0.05122969436645508, 0.05135257720947266, 0.051422206878662106, 0.05134438323974609, 0.051504127502441405, 0.051345409393310545, 0.05138534545898438, 0.05131161499023437, 0.05145600128173828, 0.05134233474731445, 0.05144268798828125, 0.051345409393310545, 0.0514785270690918, 0.051310592651367185, 0.051399681091308595, 0.05133824157714844, 0.05150003051757813, 0.051937278747558595, 0.05145087814331055, 0.05135564804077149, 0.05152460861206055, 0.05138022232055664, 0.05144678497314453, 0.05136383819580078, 0.051553279876708984, 0.05138947296142578, 0.05148976135253906, 0.05191372680664062, 0.05164236831665039, 0.05140582275390625, 0.051506175994873046, 0.051418113708496097, 0.05150207901000976, 0.05104742431640625, 0.05105561447143555, 0.05103615951538086, 0.051037185668945315, 0.05111705780029297, 0.05112527847290039, 0.05106070327758789, 0.05104435348510742, 0.05103615951538086, 0.051113983154296876, 0.051110912322998046, 0.051095550537109374, 0.05108428955078125, 0.051097599029541016, 0.051166206359863284, 0.051097599029541016, 0.05108224105834961, 0.051119102478027346, 0.051181568145751956, 
0.05110988616943359, 0.05291929626464844, 0.051251232147216795, 0.051153888702392576, 0.05122150421142578, 0.05116825485229492, 0.051187713623046874, 0.05123072052001953, 0.051158016204833984, 0.05108428955078125, 0.05116416168212891, 0.05125529479980469, 0.05141196823120117, 0.05138022232055664, 0.05134441757202148, 0.05137916946411133, 0.05134745788574219, 0.05142937469482422, 0.05135769653320312, 0.0514068489074707, 0.05135974502563476, 0.051343360900878904, 0.05134438323974609, 0.05146112060546875, 0.05143756866455078, 0.051364864349365234, 0.05132287979125977, 0.05145804977416992, 0.051399681091308595, 0.05134233474731445, 0.051438591003417966, 0.051506175994873046, 0.0514068489074707, 0.05142015838623047, 0.05136383819580078, 0.05147443389892578, 0.05142732620239258, 0.05148672103881836, 0.05146726226806641, 0.0515491828918457, 0.05142118453979492, 0.05146214294433594, 0.05141401672363281, 0.051467201232910154, 0.05106175994873047, 0.05102489471435547, 0.051141632080078124, 0.05101670455932617, 0.0510648307800293, 0.05108428955078125, 0.05112319946289062, 0.05097881698608398, 0.05400883102416992, 0.051209217071533204, 0.05114777755737305, 0.0510648307800293, 0.05108841705322266, 0.05107913589477539, 0.051181568145751956, 0.051089408874511716, 0.05113753509521484, 0.05104947280883789, 0.05118668746948242, 0.05107199859619141, 0.05115903854370117, 0.051095550537109374, 0.05119692611694336, 0.05106995010375977, 0.05117337417602539, 0.05110476684570313, 0.0512624626159668, 0.0511723518371582, 0.051143680572509766, 0.051128318786621094, 0.05118054580688477, 0.051297279357910154, 0.0514252815246582, 0.051313663482666014, 0.05137203216552735, 0.05136383819580078, 0.05140172958374024, 0.05127782440185547, 0.05146214294433594, 0.05132287979125977, 0.05137612915039062, 0.0513177604675293, 0.051452926635742184, 0.051297279357910154, 0.05181443023681641, 0.051401695251464846, 0.05144575881958008, 0.05131468963623047, 0.05139148712158203, 0.051313663482666014, 0.05147443389892578, 0.0513546257019043, 0.05144371032714844, 0.051366912841796876, 0.05153279876708984, 0.0513361930847168, 0.05146214294433594, 0.051367935180664064, 0.05157068634033203, 0.05139353561401367, 0.051681278228759765, 0.05150003051757813, 0.05140172958374024, 0.05102796936035156, 0.05105561447143555, 0.05142937469482422, 0.05113651275634765, 0.0511016960144043, 0.05105459213256836, 0.05117440032958984, 0.051111934661865234, 0.05106073760986328, 0.05104230499267578, 0.0510832633972168, 0.05107712173461914, 0.05109145736694336, 0.05106585693359375, 0.051135486602783206, 0.05110476684570313, 0.05110988616943359, 0.05105868911743164, 0.05115084838867188, 0.05105152130126953, 0.051165184020996096, 0.051151870727539066, 0.05116108703613281, 0.05111500930786133, 0.05113446426391602, 0.05116108703613281, 0.051231742858886715, 0.05114265441894531, 0.05110374450683594, 0.05115289688110351, 0.051211265563964846, 0.05132492828369141, 0.05135769653320312, 0.051620864868164064, 0.05143961715698242, 0.051366912841796876, 0.05136383819580078, 0.051399681091308595, 0.0514150390625, 0.0513331184387207, 0.05138431930541992, 0.05135974502563476, 0.05146112060546875, 0.051364864349365234, 0.05137510299682617, 0.05140480041503906, 0.05147340774536133, 0.0513331184387207, 0.05133824157714844, 0.051386367797851565, 0.05150003051757813, 0.05135871887207031, 0.051397632598876954, 0.05140377426147461, 0.051487743377685545, 0.051402751922607424, 0.051487743377685545, 0.05146316909790039, 0.05154611206054688, 0.05143142318725586, 0.051765247344970705, 
0.051451904296875, 0.05150515365600586, 0.0510832633972168, 0.05112319946289062, 0.05114470291137695, 0.05101465606689453, 0.05106995010375977, 0.052016128540039064, 0.05118873596191406, 0.051057662963867184, 0.051090431213378903, 0.0510382080078125, 0.05113241577148438, 0.051165184020996096, 0.05116723251342774, 0.05102284622192383, 0.051156993865966796, 0.05109452819824219, 0.05114265441894531, 0.05105049514770508, 0.05125222396850586, 0.05110784149169922, 0.05120409774780273, 0.05117542266845703, 0.05122355270385742, 0.05111603164672852, 0.05120000076293945, 0.0511723518371582, 0.051299327850341796, 0.05120000076293945, 0.051135486602783206, 0.05113651275634765, 0.05125632095336914, 0.05134438323974609, 0.05146112060546875, 0.051312641143798826, 0.05140377426147461, 0.051329025268554686, 0.051383296966552736, 0.05132799911499023, 0.05151027297973633, 0.051345409393310545, 0.0514703369140625, 0.05132492828369141, 0.05146623992919922, 0.05129011154174805, 0.05144575881958008, 0.05134438323974609, 0.0514785270690918, 0.051337215423583986, 0.05137408065795898, 0.051351551055908204, 0.051664894104003906, 0.05151027297973633, 0.05144166564941406, 0.05140889739990234, 0.051536895751953124, 0.051348480224609375, 0.05164543914794922, 0.05195161437988281, 0.051740673065185545, 0.05141401672363281, 0.051517440795898435, 0.05140991973876953, 0.05149900817871094, 0.051053569793701174, 0.051056640625, 0.05106995010375977, 0.05119385528564453, 0.051171329498291014, 0.05110784149169922, 0.051251232147216795, 0.0511416015625, 0.05111705780029297, 0.05118259048461914, 0.051160064697265625, 0.05117030334472656, 0.051165184020996096, 0.051108863830566405, 0.051141632080078124, 0.05110374450683594, 0.05108838272094727, 0.05114777755737305, 0.05122252655029297, 0.051130367279052735, 0.05120307159423828, 0.05208883285522461, 0.05122252655029297, 0.05128704071044922, 0.05122252655029297, 0.051146751403808595, 0.05121331024169922, 0.05113958358764648, 0.05113446426391602, 0.051154945373535154, 0.05121535873413086, 0.051492862701416016, 0.051348480224609375, 0.05137612915039062, 0.05142732620239258, 0.05134643173217773, 0.05152153778076172, 0.05140480041503906, 0.05140889739990234, 0.05135564804077149, 0.051361793518066405, 0.05133004760742187, 0.0514600944519043, 0.0513177604675293, 0.05133414459228516, 0.05131161499023437, 0.05143654251098633, 0.05143961715698242, 0.05133414459228516, 0.05137408065795898, 0.05144268798828125, 0.05139558410644531, 0.05142015838623047, 0.05139558410644531, 0.05145702362060547, 0.051367935180664064, 0.05144575881958008, 0.051402751922607424, 0.051536895751953124, 0.0513966064453125, 0.05145087814331055, 0.05143654251098633, 0.05134438323974609, 0.05105254364013672, 0.05098086547851562, 0.05109145736694336, 0.05105561447143555, 0.05107814407348633, 0.05101465606689453, 0.05107712173461914, 0.0510115852355957, 0.05173350524902344, 0.05120307159423828, 0.05110476684570313, 0.05104947280883789, 0.051143680572509766, 0.05106687927246094, 0.05117542266845703, 0.05108736038208008, 0.05110476684570313, 0.0510648307800293, 0.05117440032958984, 0.051059711456298826, 0.051171329498291014, 0.05107712173461914, 0.051181568145751956, 0.05115289688110351, 0.05117747116088867, 0.05107199859619141, 0.051282943725585936, 0.051133438110351564, 0.05112627029418945, 0.05113446426391602, 0.0512174072265625, 0.05131673431396484, 0.05136281585693359, 0.05133107376098633, 0.05137408065795898, 0.051378177642822265, 0.05139558410644531, 0.05137408065795898, 0.051452926635742184, 0.05129523086547851, 
0.05137919998168945, 0.05129523086547851, 0.0514252815246582, 0.05132799911499023, 0.051383296966552736, 0.05130752182006836, 0.05144678497314453, 0.051320831298828126, 0.05136588668823242, 0.0513259506225586, 0.05151948928833008, 0.05132799911499023, 0.051402751922607424, 0.05137100982666016, 0.05149491119384766, 0.05137100982666016, 0.05150207901000976, 0.051383296966552736, 0.051506175994873046, 0.05133414459228516, 0.051492862701416016, 0.05137203216552735, 0.051402751922607424, 0.05102796936035156, 0.051092479705810545, 0.05104435348510742, 0.051119102478027346, 0.05106585693359375, 0.05110784149169922, 0.05110271835327149, 0.05106073760986328, 0.05104742431640625, 0.05110067367553711, 0.05113651275634765, 0.05109145736694336, 0.05109657669067383, 0.051074047088623044, 0.05119180679321289, 0.05110271835327149, 0.05105561447143555, 0.05108838272094727, 0.05117542266845703, 0.05112934494018555, 0.051158016204833984, 0.0511907844543457, 0.051143680572509766, 0.05120716857910156, 0.051151870727539066, 0.05113241577148438, 0.051269630432128906, 0.05113241577148438, 0.051130367279052735, 0.05114777755737305, 0.05117849731445313, 0.05133107376098633, 0.05160550308227539, 0.05144268798828125, 0.053282817840576174, 0.05145395278930664, 0.05137919998168945, 0.05137100982666016, 0.05135871887207031, 0.05134643173217773, 0.051389438629150394, 0.05135974502563476, 0.05144473648071289, 0.05139148712158203, 0.05139148712158203, 0.05132287979125977, 0.05143961715698242, 0.05137203216552735, 0.051340286254882815, 0.05138227081298828, 0.05148979187011719, 0.051386367797851565, 0.05139353561401367, 0.05133824157714844, 0.051469310760498044, 0.05137919998168945, 0.051448833465576174, 0.05145395278930664, 0.05150515365600586, 0.05145907211303711, 0.05170483016967774, 0.05143756866455078, 0.05146316909790039, 0.05109964752197266, 0.05101260757446289, 0.051171329498291014, 0.051059711456298826, 0.05115596771240234, 0.05107814407348633, 0.051133438110351564, 0.05101055908203125, 0.0511016960144043, 0.05103513717651367, 0.05116108703613281, 0.05140172958374024, 0.05129011154174805, 0.05103308868408203, 0.05120512008666992, 0.05109145736694336, 0.05113753509521484, 0.05104435348510742, 0.05117337417602539, 0.05107199859619141, 0.05120204925537109, 0.051127296447753906, 0.05118873596191406, 0.05109452819824219, 0.05120000076293945, 0.0511723518371582, 0.051253246307373046, 0.05116723251342774, 0.05114265441894531, 0.051158016204833984, 0.05126348876953125, 0.051367935180664064, 0.05140889739990234, 0.051329025268554686, 0.051386367797851565, 0.05130547332763672, 0.05139558410644531, 0.05130342483520508, 0.051451904296875, 0.05133926391601563, 0.051367935180664064, 0.05131468963623047, 0.05146214294433594, 0.051351551055908204, 0.05142425537109375, 0.051315711975097655, 0.051444766998291015, 0.05133718490600586, 0.05137919998168945, 0.05138022232055664, 0.051492862701416016, 0.05134233474731445, 0.0514334716796875, 0.05138022232055664, 0.05149900817871094, 0.05135769653320312, 0.051517440795898435, 0.051481601715087894, 0.051661823272705076, 0.051361793518066405, 0.05149593734741211, 0.05141299057006836]",tokens/s,19.49502953989917,,,,,,,, 
-4bit-awq-gemv-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,EleutherAI/gpt-neo-1.3B,EleutherAI/gpt-neo-1.3B,cuda,0,42,,,True,,,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run - report = scenario.run(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run - self.run_model_loading_tracking(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking - backend.load() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load - self.load_transformers_model() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model - self.load_transformers_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights - self.load_transformers_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained - self.pretrained_model = self.automodel_loader.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3826, in from_pretrained - config = cls._autoset_attn_implementation( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1565, in _autoset_attn_implementation - config = cls._check_and_enable_sdpa( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1731, in _check_and_enable_sdpa - raise ValueError( -ValueError: GPTNeoForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. 
Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` - -",gpt_neo,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemv-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,tiiuae/falcon-7b,tiiuae/falcon-7b,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - self.load_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 559, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3704, in from_pretrained - config = cls._autoset_attn_implementation( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1490, in _autoset_attn_implementation - config = cls._check_and_enable_sdpa( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1656, in _check_and_enable_sdpa - raise ValueError( -ValueError: FalconForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. 
Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemv-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,huggyllama/llama-65b,huggyllama/llama-65b,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - self.load_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3904, in from_pretrained - dispatch_model(model, **device_map_kwargs) - File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 489, in dispatch_model - model.to(device) - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 2796, in to - return super().to(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1173, in to - return self._apply(convert) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 779, in _apply - module._apply(fn) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 779, in _apply - module._apply(fn) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 779, in _apply - module._apply(fn) - [Previous line repeated 2 more times] - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 853, in _apply - self._buffers[key] = fn(buf) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1159, in convert - return t.to( -torch.cuda.OutOfMemoryError: CUDA out of memory. Tried to allocate 86.00 MiB. 
GPU - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemv-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,1,1,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 304, in hf_raise_for_status - response.raise_for_status() - File ""/usr/local/lib/python3.10/dist-packages/requests/models.py"", line 1024, in raise_for_status - raise HTTPError(http_error_msg, response=self) -requests.exceptions.HTTPError: 404 Client Error: Not Found for url: https://huggingface.co/1/resolve/main/config.json - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1221, in hf_hub_download - return _hf_hub_download_to_cache_dir( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1325, in _hf_hub_download_to_cache_dir - _raise_on_head_call_error(head_call_error, force_download, local_files_only) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1823, in _raise_on_head_call_error - raise head_call_error - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1722, in _get_metadata_or_catch_error - metadata = get_hf_file_metadata(url=url, proxies=proxies, timeout=etag_timeout, headers=headers) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1645, in get_hf_file_metadata - r = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 372, in _request_wrapper - response = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 396, in _request_wrapper - hf_raise_for_status(response) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status - raise RepositoryNotFoundError(message, response) 
from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-669495ba-46bf15af50f6da881454533c;f514b970-f019-47a3-95ec-ce168f4a3757) - -Repository Not Found for url: https://huggingface.co/1/resolve/main/config.json. -Please make sure you specified the correct `repo_id` and `repo_type`. -If you are trying to access a private or gated repo, make sure you are authenticated. - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 425, in cached_file - raise EnvironmentError( -OSError: 1 is not a local folder and is not a valid model identifier listed on 'https://huggingface.co/models' -If this is a private repository, make sure to pass a token having permission to this repo either by logging in with `huggingface-cli login` or by passing `token=` - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemv-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,internlm/internlm-20b,internlm/internlm-20b,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File 
""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - self.load_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 559, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3710, in from_pretrained - model = cls(config, *model_args, **model_kwargs) - File ""/root/.cache/huggingface/modules/transformers_modules/internlm/internlm-20b/80729bcf52fbc4553d965926b27304ac5e156d98/modeling_internlm.py"", line 906, in __init__ - self.model = InternLMModel(config) - File ""/root/.cache/huggingface/modules/transformers_modules/internlm/internlm-20b/80729bcf52fbc4553d965926b27304ac5e156d98/modeling_internlm.py"", line 729, in __init__ - self.layers = nn.ModuleList([InternLMDecoderLayer(config) for _ in range(config.num_hidden_layers)]) - File ""/root/.cache/huggingface/modules/transformers_modules/internlm/internlm-20b/80729bcf52fbc4553d965926b27304ac5e156d98/modeling_internlm.py"", line 729, in - self.layers = nn.ModuleList([InternLMDecoderLayer(config) for _ in range(config.num_hidden_layers)]) - File ""/root/.cache/huggingface/modules/transformers_modules/internlm/internlm-20b/80729bcf52fbc4553d965926b27304ac5e156d98/modeling_internlm.py"", line 545, in __init__ - self.self_attn = INTERNLM_ATTENTION_CLASSES[config.attn_implementation](config=config) -KeyError: 'sdpa' - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemv-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,internlm/internlm2-20b,internlm/internlm2-20b,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run - report = 
scenario.run(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 105, in run - _ = backend.generate(self.inputs, self.config.generate_kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context - return func(*args, **kwargs) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 400, in generate - return self.pretrained_model.generate(**inputs, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context - return func(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 1914, in generate - result = self._sample( - File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2651, in _sample - outputs = self( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/root/.cache/huggingface/modules/transformers_modules/internlm/internlm2-20b/dc0130882132de7cb2eb1fa54ba5294b8c922076/modeling_internlm2.py"", line 1204, in forward - outputs = self.model( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/root/.cache/huggingface/modules/transformers_modules/internlm/internlm2-20b/dc0130882132de7cb2eb1fa54ba5294b8c922076/modeling_internlm2.py"", line 1004, in forward - layer_outputs = decoder_layer( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/root/.cache/huggingface/modules/transformers_modules/internlm/internlm2-20b/dc0130882132de7cb2eb1fa54ba5294b8c922076/modeling_internlm2.py"", line 738, in forward - hidden_states, self_attn_weights, present_key_value = self.attention( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/root/.cache/huggingface/modules/transformers_modules/internlm/internlm2-20b/dc0130882132de7cb2eb1fa54ba5294b8c922076/modeling_internlm2.py"", line 625, in forward - qkv_states = self.wqkv(hidden_states) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context - return func(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/gemv.py"", line 162, in forward - assert AWQ_INSTALLED, ( -AssertionError: AWQ kernels could not be loaded. 
Please install them from https://github.com/casper-hansen/AutoAWQ_kernels - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemv-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,facebook/opt-30b,facebook/opt-30b,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - self.load_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3704, in from_pretrained - config = cls._autoset_attn_implementation( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1490, in _autoset_attn_implementation - config = cls._check_and_enable_sdpa( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1656, in _check_and_enable_sdpa - raise ValueError( -ValueError: OPTForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. 
Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemv-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,Qwen/Qwen1.5-MoE-A2.7B,Qwen/Qwen1.5-MoE-A2.7B,cuda,0,42,,,True,,,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run - report = scenario.run(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run - self.run_model_loading_tracking(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking - backend.load() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load - self.load_transformers_model() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model - self.load_transformers_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights - self.load_transformers_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained - self.pretrained_model = self.automodel_loader.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3846, in from_pretrained - hf_quantizer.preprocess_model( - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/base.py"", line 182, in preprocess_model - return self._process_model_before_weight_loading(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/quantizer_awq.py"", line 85, in _process_model_before_weight_loading - model, has_been_replaced = replace_with_awq_linear( - File ""/usr/local/lib/python3.10/dist-packages/transformers/integrations/awq.py"", line 179, in replace_with_awq_linear - _, has_been_replaced = replace_with_awq_linear( - File ""/usr/local/lib/python3.10/dist-packages/transformers/integrations/awq.py"", line 179, in replace_with_awq_linear - _, has_been_replaced = replace_with_awq_linear( - File 
""/usr/local/lib/python3.10/dist-packages/transformers/integrations/awq.py"", line 179, in replace_with_awq_linear - _, has_been_replaced = replace_with_awq_linear( - [Previous line repeated 1 more time] - File ""/usr/local/lib/python3.10/dist-packages/transformers/integrations/awq.py"", line 166, in replace_with_awq_linear - model._modules[name] = target_cls( - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/gemv.py"", line 47, in __init__ - assert out_features % (32 // self.w_bit) == 0 -AssertionError - -",qwen2_moe,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemv-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,mistralai/Mixtral-8x7B-v0.1,mistralai/Mixtral-8x7B-v0.1,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - self.load_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3904, in from_pretrained - dispatch_model(model, **device_map_kwargs) - File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 489, in dispatch_model - model.to(device) - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 2796, in to - return super().to(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1173, in to - return self._apply(convert) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 779, in _apply - module._apply(fn) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 779, in _apply - module._apply(fn) - File 
""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 779, in _apply - module._apply(fn) - [Previous line repeated 2 more times] - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 853, in _apply - self._buffers[key] = fn(buf) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1159, in convert - return t.to( -torch.cuda.OutOfMemoryError: CUDA out of memory. Tried to allocate 20.00 MiB. GPU - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemv-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,EleutherAI/pythia-410m,EleutherAI/pythia-410m,cuda,0,42,,,True,,,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,gpt_neox,MB,1065.988096,1074.266112,0.0,488.636416,482.553856,s,1,7.62,7.62,0.0,7.62,7.62,7.62,7.62,[7.62],,kWh,8.014612021520406e-06,4.376844214493915e-06,1.1462509169946067e-05,2.3853965405960388e-05,,MB,1446.281216,1263.009792,0.0,616.562688,582.974464,s,10,0.1985794239044189,0.019857942390441894,0.0006272681341513647,0.019660639762878417,0.020351802444458007,0.020932093429565427,0.021396326217651366,"[0.01896041679382324, 0.01968124771118164, 0.019925983428955078, 0.019771871566772462, 0.01963360023498535, 0.019624319076538086, 0.019640031814575194, 0.019606719970703124, 0.020222848892211914, 0.02151238441467285]",tokens/s,12891.56726143082,kWh,2.2473322704507807e-07,1.231204523851367e-07,7.161835049277649e-07,1.0640371843579797e-06,tokens/kWh,240593095.58290073,MB,1471.066112,1275.592704,0.0,629.1456,597.192192,s,10,11.240380004882812,1.1240380004882813,0.015241313513433047,1.1221922607421875,1.1506776977539062,1.1516855285644532,1.1524917932128906,"[1.10483642578125, 1.1222066650390625, 1.1233826904296875, 1.1111002197265625, 1.119251220703125, 1.1221778564453124, 1.1086944580078124, 1.1255833740234376, 1.1504537353515625, 1.152693359375]",tokens/s,56.047927180960826,kWh,1.2764852048927712e-05,6.99468851345875e-06,2.1260806298868045e-05,4.10203468612545e-05,tokens/kWh,1535823.1906982297,,s,630,11.235577875137336,0.017834250595456076,0.00045018842218108,0.01777612781524658,0.01830297660827637,0.01846533079147339,0.019205795154571537,"[0.016842752456665038, 0.017113088607788086, 0.017144832611083984, 0.01703731155395508, 0.017047552108764647, 0.01703219223022461, 0.017084415435791016, 0.017022975921630858, 0.017055744171142577, 0.017040384292602538, 0.01704140853881836, 0.017105920791625977, 0.017084415435791016, 0.018440191268920898, 0.018769920349121092, 0.018945024490356444, 0.017923072814941408, 0.017853439331054686, 0.017756160736083985, 0.017762304306030274, 0.017738752365112305, 0.017811456680297853, 0.017666048049926757, 0.017689599990844726, 0.017681407928466796, 0.017760351181030275, 0.017697696685791017, 0.017723392486572266, 0.01780019187927246, 0.01742438316345215, 0.017115135192871094, 0.017179647445678712, 0.017177600860595704, 0.017175552368164062, 0.01719808006286621, 0.017068031311035157, 0.017081344604492187, 0.017229824066162108, 
0.017229824066162108, 0.017138687133789063, 0.01698099136352539, 0.017185792922973633, 0.017665023803710937, 0.01777561569213867, 0.017184768676757813, 0.017187839508056642, 0.01816985511779785, 0.017880064010620117, 0.01785139274597168, 0.017737728118896484, 0.01776639938354492, 0.017732608795166017, 0.017697792053222656, 0.017711103439331053, 0.017788000106811523, 0.01780828857421875, 0.017719295501708983, 0.01782476806640625, 0.017596416473388672, 0.017985536575317384, 0.017754112243652344, 0.017918975830078124, 0.017770496368408203, 0.017648639678955077, 0.018172927856445312, 0.017690624237060547, 0.017819648742675782, 0.01755340766906738, 0.01779302406311035, 0.01777663993835449, 0.01780838394165039, 0.017732608795166017, 0.017765375137329103, 0.017777664184570312, 0.017747968673706056, 0.017769472122192383, 0.017711103439331053, 0.01781760025024414, 0.017737728118896484, 0.017768447875976562, 0.017771520614624024, 0.017715200424194336, 0.01767628860473633, 0.01781350326538086, 0.017804288864135744, 0.017704959869384765, 0.017737728118896484, 0.017727487564086913, 0.017762304306030274, 0.017752063751220702, 0.017789951324462892, 0.017769472122192383, 0.017727487564086913, 0.017736703872680663, 0.017716224670410157, 0.01768550491333008, 0.01774284744262695, 0.01784320068359375, 0.018050048828125, 0.018381824493408205, 0.018184192657470705, 0.01779199981689453, 0.01785753631591797, 0.017760255813598632, 0.017917951583862304, 0.01785856056213379, 0.017745920181274414, 0.017785856246948242, 0.017888256072998047, 0.017878015518188475, 0.017754112243652344, 0.01765376091003418, 0.017724416732788087, 0.01782067108154297, 0.017758207321166994, 0.017911808013916015, 0.017827840805053712, 0.017880064010620117, 0.017743871688842772, 0.017785888671875, 0.01769468879699707, 0.017715200424194336, 0.017768447875976562, 0.017696767807006835, 0.017656831741333007, 0.018581504821777343, 0.017701887130737306, 0.017781759262084963, 0.017740800857543947, 0.017733631134033204, 0.017846271514892577, 0.017819648742675782, 0.017687551498413084, 0.017731584548950196, 0.017724416732788087, 0.017892351150512697, 0.01782579231262207, 0.017624063491821287, 0.017754112243652344, 0.017740800857543947, 0.017768447875976562, 0.01779302406311035, 0.01780838394165039, 0.018095104217529297, 0.01866854476928711, 0.01862451171875, 0.017870847702026366, 0.017871871948242187, 0.017836032867431642, 0.017741823196411134, 0.017665023803710937, 0.017559551239013673, 0.017762304306030274, 0.017657855987548828, 0.017730560302734375, 0.017671167373657228, 0.019099647521972657, 0.018089984893798827, 0.017898496627807618, 0.018284543991088868, 0.017743871688842772, 0.01768550491333008, 0.01777561569213867, 0.017673215866088866, 0.017679359436035155, 0.017543167114257813, 0.01783296012878418, 0.017736703872680663, 0.017708032608032227, 0.017708032608032227, 0.017701887130737306, 0.017715200424194336, 0.01770086479187012, 0.017726463317871095, 0.017710079193115236, 0.017681407928466796, 0.01770086479187012, 0.017649663925170898, 0.017733631134033204, 0.017902591705322265, 0.017952768325805665, 0.01783193588256836, 0.017757183074951173, 0.017881088256835938, 0.01776742362976074, 0.017880064010620117, 0.01779814338684082, 0.017871871948242187, 0.017779712677001954, 0.017610815048217772, 0.017848255157470704, 0.01783193588256836, 0.017689599990844726, 0.017764352798461915, 0.01747148895263672, 0.017768447875976562, 0.01768448066711426, 0.01784115219116211, 0.017730592727661133, 0.01785647964477539, 0.01782476806640625, 
0.01782579231262207, 0.017732608795166017, 0.017780736923217775, 0.017847295761108398, 0.01775923156738281, 0.017657855987548828, 0.017829887390136717, 0.01794867134094238, 0.017944576263427735, 0.01781862449645996, 0.017908735275268553, 0.018172927856445312, 0.018507776260375978, 0.018062368392944336, 0.017781728744506835, 0.017768447875976562, 0.01780735969543457, 0.017777664184570312, 0.01776742362976074, 0.017542144775390626, 0.017321983337402345, 0.01703219223022461, 0.017085439682006837, 0.01699737548828125, 0.01703424072265625, 0.017044479370117188, 0.017059839248657227, 0.017154048919677735, 0.017091583251953125, 0.017144832611083984, 0.017076223373413087, 0.017102848052978514, 0.017074176788330078, 0.017060863494873048, 0.01761484718322754, 0.017812480926513673, 0.017773567199707033, 0.017716224670410157, 0.01784115219116211, 0.017902591705322265, 0.0176312313079834, 0.017720319747924804, 0.017724416732788087, 0.017666048049926757, 0.01761587142944336, 0.01784012794494629, 0.017697792053222656, 0.017707008361816406, 0.017544191360473634, 0.017680383682250975, 0.017716224670410157, 0.017562623977661132, 0.017763328552246094, 0.017713151931762695, 0.017679359436035155, 0.017728511810302734, 0.017725439071655275, 0.017727487564086913, 0.01777459144592285, 0.017724416732788087, 0.017719295501708983, 0.017695743560791014, 0.01773465538024902, 0.017959936141967774, 0.01780227279663086, 0.017755104064941407, 0.017829887390136717, 0.017702911376953127, 0.017878015518188475, 0.01860710334777832, 0.018151424407958985, 0.017696767807006835, 0.017632255554199217, 0.017744895935058593, 0.0178155517578125, 0.01759129524230957, 0.01770086479187012, 0.017733631134033204, 0.017704959869384765, 0.017754112243652344, 0.017725439071655275, 0.01776742362976074, 0.0176363525390625, 0.017730560302734375, 0.017780736923217775, 0.017694719314575197, 0.017739776611328126, 0.017702911376953127, 0.017773567199707033, 0.01804697608947754, 0.017896448135375977, 0.017694719314575197, 0.01779097557067871, 0.017679359436035155, 0.01765990447998047, 0.017724416732788087, 0.017715200424194336, 0.017682432174682617, 0.017699840545654297, 0.017701887130737306, 0.017714176177978515, 0.017822719573974608, 0.017761280059814453, 0.01760870361328125, 0.017768447875976562, 0.017768447875976562, 0.017698816299438477, 0.01779814338684082, 0.017725439071655275, 0.01781862449645996, 0.017711103439331053, 0.01781760025024414, 0.017633279800415038, 0.017695743560791014, 0.01761689567565918, 0.017748992919921876, 0.01760972785949707, 0.01765888023376465, 0.017754112243652344, 0.017702911376953127, 0.017797119140625, 0.017789951324462892, 0.017763328552246094, 0.01767731285095215, 0.018108415603637695, 0.017928192138671875, 0.01838489532470703, 0.01806540870666504, 0.0177838077545166, 0.017691648483276368, 0.01779097557067871, 0.017499135971069335, 0.017171455383300782, 0.017270784378051757, 0.0172728328704834, 0.017236991882324217, 0.017313791275024415, 0.017311744689941407, 0.017142784118652343, 0.017187839508056642, 0.017102848052978514, 0.017084415435791016, 0.017086463928222655, 0.01705881690979004, 0.017106943130493164, 0.01797427177429199, 0.01798246383666992, 0.017736703872680663, 0.017736703872680663, 0.01778278350830078, 0.017693695068359376, 0.017715200424194336, 0.01783500862121582, 0.017733631134033204, 0.017682432174682617, 0.017687551498413084, 0.01763430404663086, 0.018068479537963866, 0.017765375137329103, 0.017622016906738282, 0.017810432434082032, 0.017693695068359376, 0.01782374382019043, 0.017737728118896484, 
0.01775923156738281, 0.018282495498657226, 0.017728511810302734, 0.01766912078857422, 0.017657855987548828, 0.017911808013916015, 0.017696767807006835, 0.017769472122192383, 0.02044313621520996, 0.020336639404296874, 0.0192491512298584, 0.019725311279296876, 0.018058240890502928, 0.01762816047668457, 0.017727487564086913, 0.018119680404663087, 0.017992704391479493, 0.018075647354125975, 0.017727487564086913, 0.018428928375244142, 0.01743769645690918, 0.016913408279418944, 0.016928768157958983, 0.017661951065063478, 0.017747968673706056, 0.01779302406311035, 0.01776639938354492, 0.01776742362976074, 0.017760255813598632, 0.017390592575073242, 0.016874496459960937, 0.017106943130493164, 0.017068031311035157, 0.01704550361633301, 0.017136640548706054, 0.018118656158447266, 0.01783193588256836, 0.017743871688842772, 0.017757183074951173, 0.017690624237060547, 0.01782067108154297, 0.01781657600402832, 0.017758207321166994, 0.017770496368408203, 0.017787904739379884, 0.017743871688842772, 0.017383424758911133, 0.016887807846069337, 0.017111040115356444, 0.017133567810058595, 0.017047552108764647, 0.017031167984008787, 0.017108991622924806, 0.017926143646240233, 0.018001920700073244, 0.017969152450561524, 0.018465791702270508, 0.017934335708618163, 0.017811456680297853, 0.017374208450317383, 0.01714995193481445, 0.018587648391723634, 0.01782579231262207, 0.01802649688720703, 0.017696767807006835, 0.017687551498413084, 0.017340415954589843, 0.017138687133789063, 0.016892927169799805, 0.01721241569519043, 0.017067007064819336, 0.01705369567871094, 0.018160703659057618, 0.017888191223144532, 0.017724416732788087, 0.017663999557495116, 0.017943552017211914, 0.018375680923461913, 0.017819648742675782, 0.017950719833374023, 0.017752063751220702, 0.017531904220581054, 0.017064960479736328, 0.017112064361572265, 0.017043455123901367, 0.0169881591796875, 0.0168089599609375, 0.016857088088989256, 0.01698099136352539, 0.01779097557067871, 0.017731584548950196, 0.017693695068359376, 0.018283519744873047, 0.01799782371520996, 0.017872896194458008, 0.017683456420898438, 0.017514495849609374, 0.017747968673706056, 0.01716531181335449, 0.016946176528930663, 0.017092607498168946, 0.01696767997741699, 0.016907264709472656, 0.01681510353088379, 0.017031167984008787, 0.017044479370117188, 0.017089536666870117, 0.017909759521484374, 0.018206720352172853, 0.018488319396972656, 0.01825382423400879, 0.01824051284790039, 0.018119680404663087, 0.01821696090698242, 0.018256895065307616, 0.018184192657470705, 0.018366464614868162, 0.018276351928710938, 0.018273279190063475, 0.018176000595092775, 0.01814630317687988, 0.018198528289794923, 0.018882560729980468, 0.018540544509887694, 0.01821286392211914, 0.01817804718017578, 0.01824051284790039, 0.018379776000976563, 0.01839411163330078, 0.018166784286499024, 0.01820159912109375, 0.018152448654174806, 0.018250751495361327, 0.018165760040283203, 0.018520063400268554, 0.018922496795654296, 0.018330623626708984, 0.018283519744873047, 0.0183470401763916, 0.018153472900390624, 0.018379776000976563, 0.018296831130981444, 0.018298879623413086, 0.018229280471801758, 0.018187231063842773, 0.01840230369567871, 0.01843302345275879, 0.018301952362060548, 0.01824870491027832, 0.018308095932006836, 0.0182609920501709, 0.01825279998779297, 0.01821286392211914, 0.018288639068603514, 0.01819545555114746, 0.017872896194458008, 0.018233343124389647, 0.01824051284790039, 0.018101247787475586, 0.01794867134094238, 0.018280448913574218, 0.018176000595092775, 0.018145280838012694, 
0.017971200942993162, 0.0181790714263916, 0.018275327682495117, 0.017939456939697264, 0.018311168670654295, 0.01830297660827637, 0.018167808532714845, 0.01785241508483887, 0.017944576263427735, 0.017950719833374023, 0.018284543991088868, 0.018135040283203126, 0.017885183334350584, 0.018184192657470705, 0.018231296539306642, 0.01821183967590332, 0.018109439849853515, 0.018440191268920898, 0.01861529541015625, 0.01822003173828125, 0.018299903869628906, 0.018375680923461913, 0.018365440368652345, 0.018292736053466797, 0.018321407318115233, 0.018281471252441405, 0.018405376434326173, 0.0192675838470459, 0.018449407577514648, 0.01830297660827637, 0.018276351928710938, 0.0182609920501709, 0.018259967803955078, 0.01817705535888672, 0.018245599746704103, 0.018092031478881835, 0.017969152450561524, 0.01816985511779785, 0.01942732810974121, 0.018555904388427736, 0.018266111373901366, 0.017925119400024413, 0.01825484848022461, 0.01820876884460449, 0.01807257652282715, 0.018257919311523436, 0.01821183967590332, 0.018197504043579102, 0.018225151062011717, 0.01794867134094238, 0.017910783767700195, 0.018156543731689453, 0.018215936660766603, 0.018374656677246092, 0.018251775741577148, 0.018177024841308592, 0.017985536575317384, 0.018259967803955078, 0.018242559432983398, 0.018020351409912108, 0.01785651206970215, 0.01806438446044922, 0.017992704391479493, 0.01815449523925781, 0.018300928115844727, 0.01820979118347168, 0.018300928115844727, 0.018281471252441405, 0.01966592025756836, 0.018530303955078126, 0.018464767456054687, 0.018265087127685545, 0.018579456329345705, 0.018291711807250977, 0.018242559432983398, 0.018470943450927733, 0.01845039939880371, 0.018334720611572267, 0.018359296798706053, 0.01818726348876953, 0.018259967803955078, 0.018289663314819335, 0.01903104019165039, 0.018552831649780274, 0.018281471252441405, 0.018231296539306642, 0.018405376434326173, 0.018487295150756835, 0.01821494483947754, 0.018428895950317385, 0.018379776000976563, 0.018594816207885743, 0.01850060844421387, 0.018050048828125, 0.018284543991088868, 0.018183168411254884, 0.018276351928710938, 0.01825279998779297, 0.018111488342285157, 0.017951744079589844, 0.018296831130981444, 0.01843302345275879]",tokens/s,56.07188228334003,,,,,,,, -4bit-awq-gemv-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,databricks/dbrx-base,databricks/dbrx-base,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 304, in 
hf_raise_for_status - response.raise_for_status() - File ""/usr/local/lib/python3.10/dist-packages/requests/models.py"", line 1024, in raise_for_status - raise HTTPError(http_error_msg, response=self) -requests.exceptions.HTTPError: 403 Client Error: Forbidden for url: https://huggingface.co/databricks/dbrx-base/resolve/main/config.json - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1722, in _get_metadata_or_catch_error - metadata = get_hf_file_metadata(url=url, proxies=proxies, timeout=etag_timeout, headers=headers) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1645, in get_hf_file_metadata - r = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 372, in _request_wrapper - response = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 396, in _request_wrapper - hf_raise_for_status(response) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 367, in hf_raise_for_status - raise HfHubHTTPError(message, response=response) from e -huggingface_hub.utils._errors.HfHubHTTPError: (Request ID: Root=1-66947b3d-4aac973e3f3ff11b18e0c716;92390d91-62b7-4cb7-abc6-60f7d7e2567b) - -403 Forbidden: Please enable access to public gated repositories in your fine-grained token settings to view this repository.. -Cannot access content at: https://huggingface.co/databricks/dbrx-base/resolve/main/config.json. -If you are trying to create or update content,make sure you have a token with the `write` role. - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1221, in hf_hub_download - return _hf_hub_download_to_cache_dir( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1325, in _hf_hub_download_to_cache_dir - _raise_on_head_call_error(head_call_error, force_download, local_files_only) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1826, in _raise_on_head_call_error - raise LocalEntryNotFoundError( -huggingface_hub.utils._errors.LocalEntryNotFoundError: An error happened while trying to locate the file on the Hub and we cannot find the requested files in the local cache. Please check your connection and try again or make sure your Internet connection is on. 
- -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 445, in cached_file - raise EnvironmentError( -OSError: We couldn't connect to 'https://huggingface.co' to load this file, couldn't find it in the cached files and it looks like databricks/dbrx-base is not the path to a directory containing a file named config.json. -Checkout your internet connection or see how to run the library in offline mode at 'https://huggingface.co/docs/transformers/installation#offline-mode'. 
- -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemv-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,Qwen/Qwen1.5-7B,Qwen/Qwen1.5-7B,cuda,0,42,,,True,,,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,qwen2,MB,4935.008256,7434.928128,0.0,6849.298432,6445.09696,s,1,10.262,10.262,0.0,10.262,10.262,10.262,10.262,[10.262],,kWh,4.0576827624340696e-05,2.222360892968528e-05,5.5498933288045116e-05,0.00011829936984207109,,MB,2743.640064,7797.735424,0.0,7151.28832,6823.3216,s,10,1.0524322586059571,0.10524322586059569,0.00011403310227544977,0.10521894454956054,0.1052693115234375,0.10542322998046876,0.10554636474609376,"[0.1055771484375, 0.10515805053710937, 0.10520623779296875, 0.1051688003540039, 0.10523129272460938, 0.10522589111328125, 0.10522345733642578, 0.105235107421875, 0.1052144317626953, 0.10519184112548828]",tokens/s,2432.460596932818,kWh,1.2440948799488533e-06,6.816999479078663e-07,7.312058481221874e-06,9.237853309078594e-06,tokens/kWh,27712065.935103495,MB,2747.96544,7799.832576,0.0,7153.385472,6823.32416,s,10,18.089114746093752,1.8089114746093753,0.028097698570929208,1.7957100219726563,1.845297204589844,1.8500601623535156,1.853870528564453,"[1.80025634765625, 1.7827547607421874, 1.7911636962890625, 1.7881708984375, 1.84423876953125, 1.8548231201171874, 1.83407958984375, 1.8342572021484376, 1.774661376953125, 1.784708984375]",tokens/s,34.82757497218293,kWh,2.13549148892214e-05,1.1702930891277683e-05,6.47991936521781e-05,9.785703943267717e-05,tokens/kWh,643796.3008613416,,s,630,18.087116794586183,0.028709709197755843,0.0006863828404683962,0.02834943962097168,0.02969313316345215,0.029841612625122072,0.030455459899902344,"[0.028116992950439453, 0.02819993591308594, 0.028301311492919923, 0.028306432723999023, 0.028227584838867188, 0.028224512100219725, 0.02815385627746582, 0.02833203125, 0.028421119689941408, 0.028836864471435547, 0.028235776901245117, 0.02832076835632324, 0.028368896484375, 0.02838118362426758, 0.02857881546020508, 0.02873036766052246, 0.02833612823486328, 0.02834739112854004, 0.02837196731567383, 0.02872831916809082, 0.028596223831176756, 0.028494848251342773, 0.02854911994934082, 0.028375040054321288, 0.028271615982055662, 0.028298240661621094, 0.028257280349731444, 0.028267520904541016, 0.0283371524810791, 0.028077056884765625, 0.02829929542541504, 0.028291040420532227, 0.02816819190979004, 0.02830438423156738, 0.02851840019226074, 0.028280832290649413, 0.02832486343383789, 0.02839449691772461, 0.027857919692993165, 0.027963392257690428, 0.02834432029724121, 0.03182899284362793, 0.03002572822570801, 0.0294021110534668, 0.028449792861938477, 0.02832383918762207, 0.028250112533569335, 0.02834432029724121, 0.02832793617248535, 0.028420095443725587, 0.02875596809387207, 0.02901299285888672, 0.028448768615722656, 0.0283504638671875, 0.02830335998535156, 0.028290048599243164, 0.028246015548706056, 0.028297216415405273, 0.03059712028503418, 0.029953023910522462, 0.029647872924804686, 0.029489152908325194, 0.029500415802001953, 
0.028181503295898438, 0.028240896224975585, 0.028289024353027343, 0.02819174385070801, 0.02835353660583496, 0.028021760940551758, 0.028437503814697264, 0.028435455322265626, 0.028331008911132813, 0.028213247299194336, 0.02834943962097168, 0.02831667137145996, 0.028262399673461915, 0.028402687072753906, 0.028334079742431642, 0.02800127983093262, 0.027880447387695313, 0.028285951614379884, 0.02775551986694336, 0.029380607604980468, 0.030113792419433592, 0.028638208389282226, 0.028041215896606447, 0.028267520904541016, 0.02830745506286621, 0.02837708854675293, 0.02847030448913574, 0.02784252738952637, 0.028300287246704102, 0.028300287246704102, 0.028219392776489258, 0.028301311492919923, 0.028233728408813476, 0.028445695877075194, 0.02835251235961914, 0.028270591735839845, 0.02814668846130371, 0.028296192169189452, 0.028263423919677736, 0.028206079483032227, 0.027778047561645508, 0.028108800888061523, 0.02817433547973633, 0.028291072845458985, 0.028219392776489258, 0.02817228889465332, 0.028222463607788087, 0.028219392776489258, 0.028288000106811522, 0.028188671112060547, 0.02825116729736328, 0.02817737579345703, 0.028212223052978515, 0.02810163116455078, 0.02815795135498047, 0.028338176727294922, 0.02833203125, 0.028879871368408205, 0.02839449691772461, 0.028258304595947265, 0.028273664474487304, 0.02832383918762207, 0.02813542366027832, 0.028279808044433592, 0.028192768096923827, 0.02820812797546387, 0.028238847732543947, 0.029190143585205077, 0.029351936340332032, 0.028161024093627928, 0.029104127883911132, 0.030126079559326172, 0.028296192169189452, 0.02816204833984375, 0.02815283203125, 0.027849727630615235, 0.028096511840820314, 0.028115968704223632, 0.028275711059570312, 0.028262399673461915, 0.028239871978759764, 0.02816307258605957, 0.0281712646484375, 0.028181503295898438, 0.028236799240112305, 0.028052480697631835, 0.02833305549621582, 0.028270591735839845, 0.02809343910217285, 0.028486656188964843, 0.02889625549316406, 0.02817433547973633, 0.028285951614379884, 0.028260351181030274, 0.028060672760009765, 0.028273664474487304, 0.02819993591308594, 0.028185600280761718, 0.02813849639892578, 0.028196863174438477, 0.028279808044433592, 0.028207103729248048, 0.028251136779785156, 0.028196863174438477, 0.028321792602539062, 0.02834432029724121, 0.029723648071289063, 0.02958131217956543, 0.02979635238647461, 0.028421119689941408, 0.02835251235961914, 0.028305408477783203, 0.028301311492919923, 0.028292095184326172, 0.028879871368408205, 0.029636608123779298, 0.02834022331237793, 0.028318719863891603, 0.02834022331237793, 0.028334079742431642, 0.028267520904541016, 0.028192768096923827, 0.02838937568664551, 0.028439552307128906, 0.028370943069458008, 0.02813132858276367, 0.028651519775390624, 0.02834636878967285, 0.028342271804809572, 0.028263423919677736, 0.030871551513671876, 0.030204927444458008, 0.029474815368652343, 0.02950655937194824, 0.029343744277954102, 0.02902835273742676, 0.029511680603027345, 0.029470720291137696, 0.028279808044433592, 0.02794803237915039, 0.028262399673461915, 0.028188671112060547, 0.028236799240112305, 0.028082176208496092, 0.028078079223632812, 0.027821056365966795, 0.02815999984741211, 0.028196863174438477, 0.02813849639892578, 0.02813542366027832, 0.027829248428344725, 0.027805696487426756, 0.028091392517089843, 0.02817843246459961, 0.028212223052978515, 0.028180479049682617, 0.028238847732543947, 0.028417024612426758, 0.028545024871826172, 0.028639232635498047, 0.028203008651733398, 0.028102655410766602, 0.02819071960449219, 0.028231679916381838, 
0.02815283203125, 0.02815590476989746, 0.028249088287353515, 0.028257280349731444, 0.028252159118652344, 0.02834432029724121, 0.028205055236816406, 0.028226560592651367, 0.028112895965576173, 0.02819174385070801, 0.028215295791625978, 0.028256256103515624, 0.028264448165893553, 0.02836479949951172, 0.02772172737121582, 0.027842559814453126, 0.02811392021179199, 0.02819071960449219, 0.02812313652038574, 0.028196863174438477, 0.028290048599243164, 0.02817433547973633, 0.028206079483032227, 0.02812620735168457, 0.028338176727294922, 0.028289024353027343, 0.028279808044433592, 0.028470272064208983, 0.02855116844177246, 0.02831667137145996, 0.028222463607788087, 0.02816819190979004, 0.02832691192626953, 0.02814361572265625, 0.02934988784790039, 0.029447168350219727, 0.030469120025634764, 0.02973695945739746, 0.029843456268310548, 0.029748224258422853, 0.029435903549194335, 0.029698047637939453, 0.02983526420593262, 0.02975436782836914, 0.029741056442260744, 0.029775871276855468, 0.029922304153442384, 0.029902847290039062, 0.029834239959716798, 0.029069311141967775, 0.029566976547241212, 0.02957107162475586, 0.029718528747558592, 0.029557760238647462, 0.029499391555786132, 0.029748224258422853, 0.029609983444213867, 0.029500415802001953, 0.02962124824523926, 0.02972774314880371, 0.029295616149902344, 0.02980659294128418, 0.030310400009155275, 0.029970432281494142, 0.029656063079833983, 0.029645824432373048, 0.02954649543762207, 0.02932633590698242, 0.028991487503051756, 0.028817407608032225, 0.028875776290893555, 0.0297256965637207, 0.030076927185058593, 0.029813760757446288, 0.029361152648925783, 0.028814367294311524, 0.027922399520874025, 0.028220415115356445, 0.028223487854003908, 0.02832486343383789, 0.029165567398071288, 0.028710912704467774, 0.029038591384887694, 0.02949020767211914, 0.029504480361938475, 0.028226560592651367, 0.02925056076049805, 0.029484031677246093, 0.027905023574829102, 0.02815590476989746, 0.029315071105957033, 0.029420543670654296, 0.029844480514526366, 0.02892185592651367, 0.029839359283447265, 0.029740032196044923, 0.029458431243896483, 0.029516799926757813, 0.029486080169677735, 0.029660160064697266, 0.029718528747558592, 0.02960691261291504, 0.029061119079589845, 0.02898841667175293, 0.029772800445556642, 0.029585407257080077, 0.03060121536254883, 0.029702144622802733, 0.02953932762145996, 0.029616128921508788, 0.029410303115844725, 0.029245439529418944, 0.02957414436340332, 0.029809663772583008, 0.029692928314208986, 0.029502464294433595, 0.029467647552490234, 0.02914201545715332, 0.029222911834716796, 0.02877440071105957, 0.028922880172729492, 0.028922880172729492, 0.028701696395874023, 0.029100032806396486, 0.02944000053405762, 0.02916761589050293, 0.029859840393066408, 0.029426687240600585, 0.029560831069946288, 0.0299182071685791, 0.030029823303222656, 0.02934272003173828, 0.02940928077697754, 0.029618175506591796, 0.029632511138916014, 0.029699071884155274, 0.029009920120239258, 0.029378559112548826, 0.02911948776245117, 0.028831743240356447, 0.029303808212280274, 0.029635583877563477, 0.029651968002319336, 0.0291727352142334, 0.028925952911376954, 0.03221811294555664, 0.029905920028686524, 0.02956185531616211, 0.029594623565673828, 0.02925056076049805, 0.029019136428833008, 0.028075008392333983, 0.02817433547973633, 0.02817228889465332, 0.028668928146362304, 0.028836864471435547, 0.029905920028686524, 0.030422016143798827, 0.029882368087768556, 0.029594623565673828, 0.0293570556640625, 0.02999603271484375, 0.02978201675415039, 0.029293567657470702, 
0.029569023132324217, 0.029446144104003907, 0.02939289665222168, 0.028112895965576173, 0.02896998405456543, 0.02875494384765625, 0.028903423309326173, 0.028846080780029298, 0.028854272842407228, 0.0293703670501709, 0.029403135299682616, 0.02935603141784668, 0.028239871978759764, 0.028265472412109374, 0.029446144104003907, 0.029430784225463868, 0.029451263427734374, 0.029414400100708008, 0.02938163185119629, 0.02952396774291992, 0.02953932762145996, 0.02940825653076172, 0.029444095611572265, 0.02938982391357422, 0.028900352478027344, 0.028894208908081056, 0.02953830337524414, 0.02831667137145996, 0.02892902374267578, 0.02932326316833496, 0.028898303985595702, 0.02895359992980957, 0.029443071365356444, 0.03146240043640137, 0.0283504638671875, 0.02833203125, 0.028231679916381838, 0.028080127716064454, 0.02833612823486328, 0.02891366386413574, 0.028266496658325195, 0.029080575942993164, 0.029473791122436522, 0.02940006446838379, 0.028252159118652344, 0.02839449691772461, 0.029328384399414063, 0.029503488540649415, 0.02978099250793457, 0.029430784225463868, 0.028536832809448243, 0.028272640228271483, 0.029023231506347655, 0.02935398483276367, 0.029549568176269532, 0.02879078483581543, 0.02874880027770996, 0.02924236869812012, 0.028802047729492186, 0.029315071105957033, 0.029038591384887694, 0.029638656616210936, 0.028729343414306642, 0.02931711959838867, 0.02979430389404297, 0.028997631072998048, 0.029090816497802735, 0.029055999755859374, 0.02892185592651367, 0.030123008728027343, 0.03033907127380371, 0.029699071884155274, 0.029254655838012695, 0.029962240219116212, 0.029618175506591796, 0.02952396774291992, 0.029157375335693358, 0.029462528228759766, 0.029859840393066408, 0.029633535385131835, 0.029452287673950195, 0.029237247467041014, 0.02949734306335449, 0.02954751968383789, 0.029519872665405275, 0.029371391296386717, 0.029335552215576172, 0.027891712188720705, 0.027863040924072265, 0.027889663696289063, 0.027878400802612304, 0.028048383712768556, 0.02837708854675293, 0.02834943962097168, 0.028169216156005858, 0.028274688720703125, 0.029731840133666993, 0.029541376113891602, 0.02953625679016113, 0.02982707214355469, 0.0301844482421875, 0.029694976806640624, 0.029570047378540038, 0.029487104415893556, 0.029426687240600585, 0.02944819259643555, 0.029520896911621092, 0.029412351608276367, 0.028330015182495116, 0.027943904876708985, 0.028094463348388672, 0.028456960678100586, 0.028305408477783203, 0.028428287506103517, 0.02837708854675293, 0.028192768096923827, 0.02834739112854004, 0.02796031951904297, 0.02813337516784668, 0.02810982322692871, 0.028181503295898438, 0.02815180778503418, 0.02818662452697754, 0.02838630485534668, 0.02817945671081543, 0.028076032638549804, 0.028269567489624024, 0.028015615463256836, 0.028009471893310548, 0.028262399673461915, 0.027991039276123047, 0.028318719863891603, 0.028488704681396484, 0.028604415893554686, 0.028029951095581054, 0.028177408218383788, 0.028220415115356445, 0.02831257629394531, 0.028234752655029297, 0.028247039794921876, 0.027876352310180662, 0.028194816589355468, 0.028255231857299806, 0.02793164825439453, 0.02792755126953125, 0.028253183364868165, 0.028084224700927734, 0.027923456192016603, 0.02814771270751953, 0.027855871200561523, 0.027864063262939453, 0.02814566421508789, 0.027675647735595704, 0.028225536346435546, 0.0279552001953125, 0.02795008087158203, 0.02815999984741211, 0.028193792343139647, 0.028255231857299806, 0.028278783798217775, 0.028181503295898438, 0.028188671112060547, 0.028234752655029297, 0.027703296661376952, 
0.027867136001586915, 0.02777292823791504, 0.028212223052978515, 0.02817843246459961, 0.028006399154663086, 0.02855833625793457, 0.028668928146362304, 0.028275711059570312, 0.02834432029724121, 0.028488704681396484, 0.02832896041870117, 0.028417024612426758, 0.028473344802856446, 0.02838015937805176, 0.028313600540161132, 0.02814566421508789, 0.028334079742431642, 0.02836479949951172, 0.028247039794921876, 0.02975436782836914, 0.029456384658813478, 0.02830745506286621, 0.02832896041870117, 0.028395519256591797, 0.028368896484375, 0.028488704681396484, 0.028511232376098632, 0.02818764877319336, 0.028224512100219725, 0.02834943962097168, 0.028256256103515624, 0.028289024353027343, 0.028246015548706056, 0.02832691192626953, 0.02836479949951172, 0.028285951614379884, 0.02837606430053711, 0.028099584579467773, 0.028873727798461913, 0.02856243133544922, 0.028224512100219725, 0.02809753608703613, 0.027892736434936522, 0.028027904510498046, 0.02780467224121094, 0.028259328842163086, 0.027877376556396483, 0.02791628837585449, 0.02838425636291504, 0.028269567489624024, 0.02835763168334961, 0.028258304595947265, 0.02840985679626465, 0.02838835144042969, 0.028207103729248048, 0.02817535972595215, 0.028258304595947265, 0.02796953582763672, 0.02815999984741211, 0.027884544372558592, 0.027831296920776367, 0.028454912185668944, 0.028368896484375, 0.028225536346435546, 0.028196863174438477, 0.028290048599243164, 0.028201984405517577, 0.028283903121948242, 0.028215295791625978, 0.028248064041137694, 0.028331008911132813, 0.028251136779785156, 0.028681215286254884, 0.028860416412353516, 0.028851200103759765]",tokens/s,34.83142211966979,,,,,,,, -4bit-awq-gemv-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,Qwen/Qwen2-beta-14B,Qwen/Qwen2-beta-14B,cuda,0,42,,,True,,,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,,qwen2,MB,8199.622656,11294.736384,0.0,10701.766656,10468.923392,s,1,11.705384765625,11.705384765625,0.0,11.705384765625,11.705384765625,11.705384765625,11.705384765625,[11.705384765625],,kWh,5.7591413624310744e-05,3.1514543357986524e-05,8.49011790319959e-05,0.00017400713601429315,,MB,3786.4448,11842.093056,0.0,11188.30592,10924.283904,s,10,1.9916258392333983,0.19916258392333983,7.260825582216576e-05,0.19914740753173826,0.19923951110839844,0.19927415924072264,0.19930187774658203,"[0.19913687133789063, 0.1990388488769531, 0.1992318115234375, 0.1991670684814453, 0.19909481811523438, 0.19930880737304688, 0.19912655639648438, 0.19915609741210938, 0.19913871765136718, 0.1992262420654297]",tokens/s,1285.3819977478179,kWh,2.354537124305504e-06,1.2901092028861892e-06,1.4349510390274536e-05,1.799415671746623e-05,tokens/kWh,14226840.63607775,MB,3786.4448,11844.190208,0.0,11190.403072,10924.286464,s,10,22.86395068359375,2.286395068359375,0.014575000210446271,2.2836083984375,2.303109545898437,2.3087353149414063,2.313235930175781,"[2.3011513671875, 2.286754638671875, 2.28372607421875, 2.2625380859375, 2.301859375, 2.275701171875, 2.274318115234375, 2.28349072265625, 2.314361083984375, 
2.280050048828125]",tokens/s,27.55429316299491,kWh,2.721626590993032e-05,1.4915364271905097e-05,0.00010149974895572551,0.00014363137913756094,tokens/kWh,438622.8161164047,,s,630,22.861863979339624,0.036288672983078724,0.0009060653197057868,0.035897855758667,0.03737190246582031,0.037651866340637205,0.03818897315979004,"[0.03552870559692383, 0.03562496185302735, 0.035692543029785154, 0.037212158203125, 0.037198848724365234, 0.03755110549926758, 0.037203968048095705, 0.03710976028442383, 0.0369090576171875, 0.03729919815063477, 0.03706163024902344, 0.037144577026367184, 0.03732787322998047, 0.037082111358642575, 0.03782860946655273, 0.037424129486083986, 0.03742617416381836, 0.03815116882324219, 0.03772415924072266, 0.03729817581176758, 0.03726540756225586, 0.03727155303955078, 0.037449726104736326, 0.03732787322998047, 0.03535257720947266, 0.03550515365600586, 0.03551846313476562, 0.035530750274658206, 0.03500646209716797, 0.03529216003417969, 0.035783679962158206, 0.03575500869750976, 0.03571712112426758, 0.0354068489074707, 0.035302398681640625, 0.03554611206054688, 0.03543142318725586, 0.03559731292724609, 0.035579902648925785, 0.036280319213867186, 0.03739136123657227, 0.037348350524902346, 0.037341182708740234, 0.03712716674804688, 0.03687833786010742, 0.03828326416015625, 0.03564134216308594, 0.03622707366943359, 0.03741491317749023, 0.03559936141967773, 0.035509246826171875, 0.03545087814331055, 0.0353966064453125, 0.03549593734741211, 0.03537100982666016, 0.035378177642822264, 0.03749478530883789, 0.03771084976196289, 0.03713433456420898, 0.03736371231079102, 0.037176319122314457, 0.03706982421875, 0.035432449340820314, 0.0355788803100586, 0.03515596771240234, 0.03557785415649414, 0.03559423828125, 0.03629568099975586, 0.038079486846923825, 0.0356864013671875, 0.03552972793579102, 0.03552972793579102, 0.03540070343017578, 0.0351539192199707, 0.035160064697265625, 0.03539865493774414, 0.03544268798828125, 0.03536588668823242, 0.03670220947265625, 0.03707699203491211, 0.03542425537109375, 0.03548364639282227, 0.03524710464477539, 0.03537612915039062, 0.03542015838623047, 0.035383296966552735, 0.035356670379638674, 0.03539763259887695, 0.035345409393310545, 0.035364864349365234, 0.035410945892333984, 0.035383296966552735, 0.03540787124633789, 0.03540991973876953, 0.03535257720947266, 0.03543961715698242, 0.035438591003417966, 0.03613798522949219, 0.03791360092163086, 0.035253246307373046, 0.03718041610717773, 0.03705753707885742, 0.0371599349975586, 0.037130241394042966, 0.03748044967651367, 0.03709030532836914, 0.03706163024902344, 0.03782963180541992, 0.03726540756225586, 0.03737190246582031, 0.037424129486083986, 0.03705855941772461, 0.03536383819580078, 0.037048320770263675, 0.03701760101318359, 0.037479423522949216, 0.03737190246582031, 0.037029888153076174, 0.03704524612426758, 0.03712716674804688, 0.03736883163452148, 0.0370513916015625, 0.0373043212890625, 0.036953086853027346, 0.0380313606262207, 0.037553150177001955, 0.03591372680664062, 0.03544575881958008, 0.03546112060546875, 0.035620864868164064, 0.035563518524169925, 0.03697971343994141, 0.03559936141967773, 0.035585025787353515, 0.03543142318725586, 0.035517440795898435, 0.03553996658325195, 0.03546623992919922, 0.03541401672363281, 0.0355153923034668, 0.03558297729492187, 0.035588096618652344, 0.03560550308227539, 0.03677798461914063, 0.0351907844543457, 0.03550310516357422, 0.03543040084838867, 0.0372305908203125, 0.0361451530456543, 0.035757057189941405, 0.035501056671142575, 0.03546931076049804, 0.03614003372192383, 
0.03712409591674805, 0.0370063362121582, 0.03712716674804688, 0.03709952163696289, 0.03562803268432617, 0.035410945892333984, 0.03802624130249024, 0.03769036865234375, 0.03722854232788086, 0.03717836761474609, 0.037147647857666014, 0.03715686416625977, 0.0355860481262207, 0.035522560119628906, 0.035492862701416016, 0.03547545623779297, 0.03550003051757813, 0.035550209045410154, 0.03537100982666016, 0.03545702362060547, 0.035507198333740234, 0.038547454833984376, 0.03763097763061524, 0.035775489807128906, 0.03655475234985352, 0.0371701774597168, 0.036961280822753906, 0.03728486251831055, 0.03697663879394531, 0.036173824310302735, 0.03702169418334961, 0.037059585571289064, 0.036997119903564454, 0.037032958984375, 0.03703910446166992, 0.03703807830810547, 0.03542323303222656, 0.035179519653320314, 0.03544985580444336, 0.03546316909790039, 0.034976768493652347, 0.03549798583984375, 0.035574783325195314, 0.03589427185058594, 0.037190654754638675, 0.037166080474853515, 0.0358922233581543, 0.03585433578491211, 0.03517440032958984, 0.0353966064453125, 0.03546419143676758, 0.03532287979125977, 0.035335166931152344, 0.03555430221557617, 0.03540377426147461, 0.03545600128173828, 0.03524915313720703, 0.035297279357910154, 0.03543756866455078, 0.03496755218505859, 0.034977790832519534, 0.03546828842163086, 0.0353546257019043, 0.035389438629150394, 0.03536588668823242, 0.03538022232055664, 0.034941951751708986, 0.035416065216064455, 0.035334144592285156, 0.03542937469482422, 0.035350528717041016, 0.03544166564941406, 0.03546112060546875, 0.0354068489074707, 0.03548057556152344, 0.03568742370605469, 0.036071422576904294, 0.03561574554443359, 0.035536895751953124, 0.035517440795898435, 0.035490814208984374, 0.035501056671142575, 0.035517440795898435, 0.035560447692871096, 0.035593215942382815, 0.03765555191040039, 0.037456897735595705, 0.038204414367675785, 0.03709030532836914, 0.037466110229492186, 0.03808153533935547, 0.03801497650146484, 0.03742310333251953, 0.03717836761474609, 0.037184513092041016, 0.03702579116821289, 0.037108734130859376, 0.036929534912109374, 0.03560550308227539, 0.036670463562011715, 0.037294078826904296, 0.03712716674804688, 0.03712204742431641, 0.03552665710449219, 0.03545600128173828, 0.03602227020263672, 0.03678412628173828, 0.037084159851074217, 0.03611443328857422, 0.03712204742431641, 0.037335041046142575, 0.03707494354248047, 0.037215232849121094, 0.03710464096069336, 0.03729612731933594, 0.035419136047363284, 0.03545600128173828, 0.03645542526245117, 0.03830374526977539, 0.03791257476806641, 0.03727974319458008, 0.03547955322265625, 0.03563315200805664, 0.037010433197021485, 0.037168128967285156, 0.03502284622192383, 0.035487743377685545, 0.037440513610839846, 0.03696844863891602, 0.036874240875244144, 0.037647361755371096, 0.03539251327514648, 0.03531366348266601, 0.03707699203491211, 0.03725414276123047, 0.03695001602172852, 0.03700940704345703, 0.03701657485961914, 0.036585472106933595, 0.0352911376953125, 0.03544268798828125, 0.0354252815246582, 0.035422206878662106, 0.03534745788574219, 0.03542323303222656, 0.03532287979125977, 0.035402751922607424, 0.03542937469482422, 0.035901439666748046, 0.037438465118408204, 0.03697868728637695, 0.03693260955810547, 0.0370780143737793, 0.03654553604125976, 0.03712819290161133, 0.03770268630981445, 0.03538940811157226, 0.035383296966552735, 0.0368455696105957, 0.038335487365722655, 0.037749759674072264, 0.037233665466308595, 0.03543142318725586, 0.0352911376953125, 0.035286014556884765, 0.03548876953125, 0.03542425537109375, 
0.03543961715698242, 0.0354252815246582, 0.03541401672363281, 0.03558399963378906, 0.035490814208984374, 0.03538022232055664, 0.0374466552734375, 0.03729510498046875, 0.03718963241577149, 0.03719168090820312, 0.03726131057739258, 0.037789695739746096, 0.03734732818603516, 0.0371701774597168, 0.03712102508544922, 0.03731353759765625, 0.03730636978149414, 0.037988353729248046, 0.037177345275878904, 0.03549900817871094, 0.035678207397460936, 0.03714252853393555, 0.037182464599609374, 0.03736064147949219, 0.03714252853393555, 0.03700428771972656, 0.037130241394042966, 0.0354785270690918, 0.03545087814331055, 0.03552870559692383, 0.03542323303222656, 0.03574272155761719, 0.03552665710449219, 0.03551129531860352, 0.03549388885498047, 0.035465217590332034, 0.0354856948852539, 0.035530750274658206, 0.03557068634033203, 0.03592601776123047, 0.03592704010009766, 0.036370433807373044, 0.035544063568115236, 0.03561369705200195, 0.03554611206054688, 0.0354703369140625, 0.03548364639282227, 0.03545804977416992, 0.03546112060546875, 0.03546316909790039, 0.03545087814331055, 0.03568947219848633, 0.035361793518066405, 0.03621068954467774, 0.037070846557617186, 0.03701964950561523, 0.03540889739990234, 0.03542630386352539, 0.035535873413085936, 0.03564031982421875, 0.0371599349975586, 0.036988929748535154, 0.03730944061279297, 0.037792766571044925, 0.03712204742431641, 0.03717324829101563, 0.03696844863891602, 0.036977664947509765, 0.03704422378540039, 0.037010433197021485, 0.03700121688842774, 0.03538534545898438, 0.03615948867797852, 0.037425151824951174, 0.035438591003417966, 0.03544371032714844, 0.03588915252685547, 0.03726131057739258, 0.03691417694091797, 0.03707494354248047, 0.037410816192626956, 0.03544371032714844, 0.03527475357055664, 0.03536281585693359, 0.03538739013671875, 0.03675033569335937, 0.03534438323974609, 0.03550003051757813, 0.03522048187255859, 0.03611852645874023, 0.037149696350097655, 0.036185089111328124, 0.035272705078125, 0.03543142318725586, 0.035381248474121094, 0.03534950256347656, 0.03540172958374024, 0.0350904312133789, 0.03528704071044922, 0.035323902130126955, 0.03481804656982422, 0.03551027297973633, 0.035337215423583986, 0.03539251327514648, 0.03526758575439453, 0.03551641464233399, 0.035416065216064455, 0.03522662353515625, 0.035929088592529294, 0.0355860481262207, 0.03545600128173828, 0.03542835235595703, 0.035460094451904296, 0.0354334716796875, 0.035388416290283206, 0.03605606460571289, 0.03705753707885742, 0.03717532730102539, 0.03711280059814453, 0.03724697494506836, 0.037884929656982425, 0.0357386245727539, 0.03546419143676758, 0.03577139282226562, 0.03534131240844727, 0.0354252815246582, 0.03552972793579102, 0.0354703369140625, 0.03552460861206055, 0.035266559600830076, 0.03536896133422852, 0.037256191253662106, 0.0376258544921875, 0.03728076934814453, 0.03722956848144531, 0.037177345275878904, 0.037169151306152344, 0.03557273483276367, 0.03552972793579102, 0.035108863830566404, 0.03512934494018555, 0.03542937469482422, 0.03550515365600586, 0.03603968048095703, 0.03566694259643555, 0.035432449340820314, 0.03544985580444336, 0.0354150390625, 0.03540991973876953, 0.035552257537841796, 0.03547238540649414, 0.036931583404541016, 0.037160961151123044, 0.03682611083984375, 0.0354150390625, 0.03540582275390625, 0.03543552017211914, 0.03541401672363281, 0.03659366226196289, 0.03670937728881836, 0.03706777572631836, 0.03637247848510742, 0.035422206878662106, 0.035332096099853515, 0.03540377426147461, 0.0358389778137207, 0.03706572723388672, 0.03700735855102539, 
0.037032958984375, 0.03706060791015625, 0.03541708755493164, 0.03708620834350586, 0.037678081512451174, 0.03714252853393555, 0.03703807830810547, 0.03705344009399414, 0.0362977294921875, 0.037479423522949216, 0.038437889099121096, 0.037528575897216795, 0.03626496124267578, 0.037212158203125, 0.037144577026367184, 0.036657150268554685, 0.035775489807128906, 0.036482048034667966, 0.03715584182739258, 0.03710566329956055, 0.03696537780761719, 0.03705344009399414, 0.03711897659301758, 0.03706982421875, 0.037212158203125, 0.03711078262329102, 0.037591041564941405, 0.03743948745727539, 0.0354703369140625, 0.03547750473022461, 0.0354785270690918, 0.035460094451904296, 0.037103614807128905, 0.0371517448425293, 0.03711283111572266, 0.037236736297607424, 0.03760332870483398, 0.038593536376953126, 0.037891071319580076, 0.0367718391418457, 0.03712409591674805, 0.036569087982177735, 0.03716198348999023, 0.03738726425170898, 0.037217281341552735, 0.03716198348999023, 0.0356577262878418, 0.03552665710449219, 0.03548364639282227, 0.035520511627197264, 0.03550003051757813, 0.03628646469116211, 0.03725823974609375, 0.0372408332824707, 0.03578675079345703, 0.03617484664916992, 0.03546623992919922, 0.03560038375854492, 0.035506175994873046, 0.03552665710449219, 0.037163009643554686, 0.037031936645507815, 0.037116928100585936, 0.03732787322998047, 0.0370247688293457, 0.03711897659301758, 0.03702579116821289, 0.03751116943359375, 0.035659774780273434, 0.03545087814331055, 0.037250049591064455, 0.03777228927612305, 0.03722444915771484, 0.0372408332824707, 0.037233665466308595, 0.03725209426879883, 0.037130241394042966, 0.03590553665161133, 0.03809689712524414, 0.03782963180541992, 0.037207038879394534, 0.037341182708740234, 0.0371486701965332, 0.03711283111572266, 0.03718656158447266, 0.03705753707885742, 0.03540787124633789, 0.03546316909790039, 0.03658444976806641, 0.03715891265869141, 0.03712204742431641, 0.037000190734863284, 0.03702067184448242, 0.03724492645263672, 0.037028865814208986, 0.03707392120361328, 0.03703705596923828, 0.03709235382080078, 0.0372592658996582, 0.03718963241577149, 0.037572608947753904, 0.03724697494506836, 0.03554099273681641, 0.03551232147216797, 0.035389438629150394, 0.03581951904296875, 0.03590860748291016, 0.035383296966552735, 0.03537408065795898, 0.03540991973876953, 0.03541708755493164, 0.035373054504394534, 0.03542118453979492, 0.03537408065795898, 0.03536076736450195, 0.035372032165527346, 0.03642879867553711, 0.03718143844604492, 0.03529011154174805, 0.035064830780029296, 0.03545087814331055, 0.035438591003417966, 0.035448833465576174, 0.03547443389892578, 0.034994174957275394, 0.035372032165527346, 0.0354856948852539, 0.035373054504394534, 0.03544371032714844, 0.03690291213989258, 0.03706880187988281, 0.037187583923339845, 0.03688652801513672, 0.036362239837646484, 0.03593318557739258, 0.03584511947631836, 0.035547134399414065, 0.035550209045410154, 0.03548876953125, 0.03554611206054688, 0.035573760986328126, 0.035454975128173825]",tokens/s,27.55680816618167,,,,,,,, -4bit-awq-gemv-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,a,a,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 
7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 304, in hf_raise_for_status - response.raise_for_status() - File ""/usr/local/lib/python3.10/dist-packages/requests/models.py"", line 1024, in raise_for_status - raise HTTPError(http_error_msg, response=self) -requests.exceptions.HTTPError: 404 Client Error: Not Found for url: https://huggingface.co/a/resolve/main/config.json - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1221, in hf_hub_download - return _hf_hub_download_to_cache_dir( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1325, in _hf_hub_download_to_cache_dir - _raise_on_head_call_error(head_call_error, force_download, local_files_only) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1823, in _raise_on_head_call_error - raise head_call_error - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1722, in _get_metadata_or_catch_error - metadata = get_hf_file_metadata(url=url, proxies=proxies, timeout=etag_timeout, headers=headers) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1645, in get_hf_file_metadata - r = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 372, in _request_wrapper - response = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 396, in _request_wrapper - hf_raise_for_status(response) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status - raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-6694916b-5c81fcc12f4a975e21f2ee46;d6e973a0-f217-425f-aa51-7e20b8750b92) - -Repository Not Found for url: https://huggingface.co/a/resolve/main/config.json. -Please make sure you specified the correct `repo_id` and `repo_type`. -If you are trying to access a private or gated repo, make sure you are authenticated. 
- -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 425, in cached_file - raise EnvironmentError( -OSError: a is not a local folder and is not a valid model identifier listed on 'https://huggingface.co/models' -If this is a private repository, make sure to pass a token having permission to this repo either by logging in with `huggingface-cli login` or by passing `token=` - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemv-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,-,-,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 106, in _inner_fn - validate_repo_id(arg_value) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 160, in validate_repo_id - raise HFValidationError( 
-huggingface_hub.errors.HFValidationError: Repo id must use alphanumeric chars or '-', '_', '.', '--' and '..' are forbidden, '-' and '.' cannot start or end the name, max length is 96: '-'. - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 466, in cached_file - raise EnvironmentError( -OSError: Incorrect path_or_model_id: '-'. Please provide either the path to a local folder or the repo_id of a model on the Hub. 
- -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemv-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,facebook/opt-350m,facebook/opt-350m,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - self.load_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3704, in from_pretrained - config = cls._autoset_attn_implementation( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1490, in _autoset_attn_implementation - config = cls._check_and_enable_sdpa( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1656, in _check_and_enable_sdpa - raise ValueError( -ValueError: OPTForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. 
Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemv-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,m,m,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 304, in hf_raise_for_status - response.raise_for_status() - File ""/usr/local/lib/python3.10/dist-packages/requests/models.py"", line 1024, in raise_for_status - raise HTTPError(http_error_msg, response=self) -requests.exceptions.HTTPError: 404 Client Error: Not Found for url: https://huggingface.co/m/resolve/main/config.json - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1221, in hf_hub_download - return _hf_hub_download_to_cache_dir( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1325, in _hf_hub_download_to_cache_dir - _raise_on_head_call_error(head_call_error, force_download, local_files_only) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1823, in _raise_on_head_call_error - raise head_call_error - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1722, in _get_metadata_or_catch_error - metadata = get_hf_file_metadata(url=url, proxies=proxies, timeout=etag_timeout, headers=headers) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1645, in get_hf_file_metadata - r = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 372, in _request_wrapper - response = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 396, in _request_wrapper - hf_raise_for_status(response) - File 
""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status - raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-66948c4b-56e9a1be339f2285213886dd;24438d7c-1431-4b02-af49-71e675c3fedd) - -Repository Not Found for url: https://huggingface.co/m/resolve/main/config.json. -Please make sure you specified the correct `repo_id` and `repo_type`. -If you are trying to access a private or gated repo, make sure you are authenticated. - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 425, in cached_file - raise EnvironmentError( -OSError: m is not a local folder and is not a valid model identifier listed on 'https://huggingface.co/models' -If this is a private repository, make sure to pass a token having permission to this repo either by logging in with `huggingface-cli login` or by passing `token=` - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemv-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,stabilityai/stablelm-base-alpha-7b,stabilityai/stablelm-base-alpha-7b,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File 
""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run - report = scenario.run(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 105, in run - _ = backend.generate(self.inputs, self.config.generate_kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context - return func(*args, **kwargs) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 400, in generate - return self.pretrained_model.generate(**inputs, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context - return func(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 1914, in generate - result = self._sample( - File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2651, in _sample - outputs = self( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1097, in forward - outputs = self.gpt_neox( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 988, in forward - outputs = layer( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 753, in forward - attention_layer_outputs = self.attention( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 545, in forward - query, key, value, present = self._attn_projections_and_rope( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 224, in _attn_projections_and_rope - qkv = self.query_key_value(hidden_states) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return 
forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context - return func(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/gemv.py"", line 162, in forward - assert AWQ_INSTALLED, ( -AssertionError: AWQ kernels could not be loaded. Please install them from https://github.com/casper-hansen/AutoAWQ_kernels - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemv-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,M,M,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 304, in hf_raise_for_status - response.raise_for_status() - File ""/usr/local/lib/python3.10/dist-packages/requests/models.py"", line 1024, in raise_for_status - raise HTTPError(http_error_msg, response=self) -requests.exceptions.HTTPError: 404 Client Error: Not Found for url: https://huggingface.co/M/resolve/main/config.json - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1221, in hf_hub_download - return _hf_hub_download_to_cache_dir( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1325, in _hf_hub_download_to_cache_dir - _raise_on_head_call_error(head_call_error, force_download, local_files_only) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1823, in _raise_on_head_call_error - raise head_call_error - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1722, in _get_metadata_or_catch_error - metadata = get_hf_file_metadata(url=url, proxies=proxies, timeout=etag_timeout, headers=headers) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1645, in get_hf_file_metadata - r = _request_wrapper( - File 
""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 372, in _request_wrapper - response = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 396, in _request_wrapper - hf_raise_for_status(response) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status - raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-66948fc3-59bca28f3b130fdb7f28ad70;6ca362e5-613d-43e9-89fb-90090ff76b41) - -Repository Not Found for url: https://huggingface.co/M/resolve/main/config.json. -Please make sure you specified the correct `repo_id` and `repo_type`. -If you are trying to access a private or gated repo, make sure you are authenticated. - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 425, in cached_file - raise EnvironmentError( -OSError: M is not a local folder and is not a valid model identifier listed on 'https://huggingface.co/models' -If this is a private repository, make sure to pass a token having permission to this repo either by logging in with `huggingface-cli login` or by passing `token=` - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemv-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,8,8,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in 
benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 304, in hf_raise_for_status - response.raise_for_status() - File ""/usr/local/lib/python3.10/dist-packages/requests/models.py"", line 1024, in raise_for_status - raise HTTPError(http_error_msg, response=self) -requests.exceptions.HTTPError: 404 Client Error: Not Found for url: https://huggingface.co/8/resolve/main/config.json - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1221, in hf_hub_download - return _hf_hub_download_to_cache_dir( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1325, in _hf_hub_download_to_cache_dir - _raise_on_head_call_error(head_call_error, force_download, local_files_only) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1823, in _raise_on_head_call_error - raise head_call_error - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1722, in _get_metadata_or_catch_error - metadata = get_hf_file_metadata(url=url, proxies=proxies, timeout=etag_timeout, headers=headers) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1645, in get_hf_file_metadata - r = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 372, in _request_wrapper - response = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 396, in _request_wrapper - hf_raise_for_status(response) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status - raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-66949272-30fc1f382cb5d26b08532336;08adbcbe-7fb4-4822-a2c5-f98111028f67) - -Repository Not Found for url: https://huggingface.co/8/resolve/main/config.json. -Please make sure you specified the correct `repo_id` and `repo_type`. -If you are trying to access a private or gated repo, make sure you are authenticated. 
- -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 425, in cached_file - raise EnvironmentError( -OSError: 8 is not a local folder and is not a valid model identifier listed on 'https://huggingface.co/models' -If this is a private repository, make sure to pass a token having permission to this repo either by logging in with `huggingface-cli login` or by passing `token=` - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemv-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,EleutherAI/gpt-neox-20b,EleutherAI/gpt-neox-20b,cuda,0,42,,,True,,,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,gpt_neox,MB,11152.748544,12432.441344,0.0,11846.811648,11814.785024,s,1,11.960677734375,11.960677734375,0.0,11.960677734375,11.960677734375,11.960677734375,11.960677734375,[11.960677734375],,kWh,6.109958083472407e-05,3.3471528154071284e-05,8.904312678997828e-05,0.00018361423577877364,,MB,2127.290368,13397.131264,0.0,12750.68416,12632.68864,s,10,3.384455780029297,0.33844557800292974,0.00025202906153631133,0.3383465270996094,0.3387694152832031,0.3388287689208984,0.3388762518310547,"[0.3379932861328125, 0.33827227783203123, 0.3385042419433594, 0.3386874694824219, 0.33835653686523437, 0.338330810546875, 0.33888812255859374, 0.3387562255859375, 0.33833029174804685, 
0.3383365173339844]",tokens/s,756.3993050539544,kWh,3.997406776180401e-06,2.1903869440606114e-06,2.2498453183933397e-05,2.868624690417441e-05,tokens/kWh,8924137.091030441,MB,2130.264064,13669.761024,0.0,13023.31392,12936.608256,s,10,22.274345214843752,2.2274345214843754,0.013562171348773626,2.22290283203125,2.2420615966796875,2.2496875366210936,2.2557882885742186,"[2.219746826171875, 2.228745361328125, 2.240366943359375, 2.216859375, 2.219094970703125, 2.2573134765625, 2.23928564453125, 2.214644287109375, 2.2122294921875, 2.226058837890625]",tokens/s,28.283659695646826,kWh,2.673288587048664e-05,1.46508837117903e-05,0.00012139410637446723,0.00016277787595674422,tokens/kWh,387030.483287184,,s,630,22.271672317504894,0.03535186082143632,0.0006181706203911879,0.035075073242187504,0.03627264099121094,0.03663733787536621,0.037484420356750495,"[0.036934654235839845, 0.03506380844116211, 0.03498495864868164, 0.03502182388305664, 0.03491430282592774, 0.034890750885009765, 0.03506073760986328, 0.03510784149169922, 0.0349409294128418, 0.035062782287597655, 0.034950145721435545, 0.03505049514770508, 0.03466649627685547, 0.03487641525268555, 0.03484467315673828, 0.03486003112792969, 0.03493580627441406, 0.03498905563354492, 0.034915328979492184, 0.03485184097290039, 0.03503411102294922, 0.035542015075683595, 0.03541708755493164, 0.035053569793701174, 0.0354068489074707, 0.03488665771484375, 0.03484569549560547, 0.035176448822021485, 0.03503923034667969, 0.03501567840576172, 0.03488256072998047, 0.03490611267089844, 0.03499929428100586, 0.03488972854614258, 0.035020801544189455, 0.035192832946777344, 0.03494911956787109, 0.03537919998168945, 0.03739136123657227, 0.036722686767578124, 0.03530137634277344, 0.034991104125976565, 0.03506073760986328, 0.03489177703857422, 0.03500339126586914, 0.03501875305175781, 0.0349409294128418, 0.03495116806030273, 0.0350013427734375, 0.03496243286132812, 0.03575296020507813, 0.03685887908935547, 0.035555328369140625, 0.03517337417602539, 0.03503104019165039, 0.03570892715454101, 0.035522560119628906, 0.03647795104980469, 0.03587481689453125, 0.03507814407348633, 0.03511500930786133, 0.0350382080078125, 0.0354969596862793, 0.035102718353271486, 0.03508531188964844, 0.03505152130126953, 0.03523891067504883, 0.03501567840576172, 0.035007488250732424, 0.03530547332763672, 0.03553279876708984, 0.03552153778076172, 0.03504127883911133, 0.03495423889160156, 0.03479040145874023, 0.035248126983642575, 0.03520614242553711, 0.03507712173461914, 0.037353473663330077, 0.03543961715698242, 0.03500032043457031, 0.034871295928955076, 0.035192832946777344, 0.034993152618408206, 0.03508633422851563, 0.035350528717041016, 0.03508531188964844, 0.035166206359863283, 0.03489177703857422, 0.0352911376953125, 0.03481292724609375, 0.03504742431640625, 0.035148799896240236, 0.03514572906494141, 0.03567308807373047, 0.035138561248779294, 0.0350300178527832, 0.03499622344970703, 0.035059711456298825, 0.03516108703613281, 0.03587481689453125, 0.03513241577148438, 0.03490816116333008, 0.035639297485351565, 0.038629375457763675, 0.03616665649414062, 0.03508224105834961, 0.03500646209716797, 0.03555123138427734, 0.035760128021240234, 0.03499622344970703, 0.03514572906494141, 0.03520204925537109, 0.036299774169921875, 0.03633663940429688, 0.03590860748291016, 0.03496448135375976, 0.03574476623535156, 0.03603148651123047, 0.035151870727539065, 0.03496345520019531, 0.03513753509521484, 0.03536793518066406, 0.035102718353271486, 0.03580416107177734, 0.03645337677001953, 0.035448833465576174, 
0.03501465606689453, 0.034769920349121096, 0.0353259506225586, 0.03589017486572266, 0.03477913665771484, 0.03563212966918945, 0.035053569793701174, 0.03618304061889648, 0.03449856185913086, 0.03501260757446289, 0.034945022583007815, 0.035272705078125, 0.03565055847167969, 0.03507302474975586, 0.035860481262207033, 0.03620454406738281, 0.036299774169921875, 0.0354856948852539, 0.03606016159057617, 0.03546214294433594, 0.0355860481262207, 0.035318782806396484, 0.03656192016601562, 0.03678412628173828, 0.036590591430664066, 0.035716094970703126, 0.034974720001220705, 0.03612160110473633, 0.03616972732543945, 0.0361451530456543, 0.03554099273681641, 0.035113983154296875, 0.03581542587280274, 0.036574207305908206, 0.035817470550537106, 0.035299327850341795, 0.03503923034667969, 0.035095550537109374, 0.03506687927246094, 0.03496755218505859, 0.03504844665527344, 0.03515903854370117, 0.035140609741210936, 0.03663872146606445, 0.03495935821533203, 0.03505152130126953, 0.035138561248779294, 0.03513241577148438, 0.0350013427734375, 0.03499008178710938, 0.03645439910888672, 0.036501502990722655, 0.03638886260986328, 0.035737598419189456, 0.03601715087890625, 0.03625676727294922, 0.03611033630371094, 0.03510067367553711, 0.03522662353515625, 0.03565158462524414, 0.03500032043457031, 0.03516928100585937, 0.03518259048461914, 0.03507302474975586, 0.03485696029663086, 0.036155391693115234, 0.03585945510864258, 0.03507302474975586, 0.034971649169921876, 0.03503615951538086, 0.03627212905883789, 0.0353966064453125, 0.03501055908203125, 0.03508633422851563, 0.0349224967956543, 0.034933761596679686, 0.034925567626953126, 0.034909183502197266, 0.034956287384033204, 0.03496857452392578, 0.035492862701416016, 0.03595161437988281, 0.03486617660522461, 0.035796993255615236, 0.034802688598632815, 0.03503923034667969, 0.035004417419433595, 0.03497369766235352, 0.035156993865966796, 0.0349224967956543, 0.036119552612304685, 0.03483852767944336, 0.0359741439819336, 0.03582566452026367, 0.035386367797851564, 0.03504537582397461, 0.0350013427734375, 0.03502796936035156, 0.03501772689819336, 0.03660595321655274, 0.035418113708496096, 0.03491328048706055, 0.034988033294677735, 0.03489791870117188, 0.03495731353759766, 0.03492966461181641, 0.03506687927246094, 0.03497369766235352, 0.035550209045410154, 0.0351201286315918, 0.035097599029541016, 0.03483340835571289, 0.03499827194213867, 0.0348590087890625, 0.03503104019165039, 0.034885631561279294, 0.03505561447143555, 0.035286014556884765, 0.035053569793701174, 0.03495731353759766, 0.035059711456298825, 0.034947071075439456, 0.035079166412353514, 0.03505561447143555, 0.035140609741210936, 0.03561779022216797, 0.035138561248779294, 0.03510067367553711, 0.03513446426391602, 0.03503104019165039, 0.034947071075439456, 0.03484569549560547, 0.034890750885009765, 0.03486617660522461, 0.03488153457641602, 0.034939903259277344, 0.03486515045166016, 0.035525630950927735, 0.0351580810546875, 0.03500128173828125, 0.03493478393554687, 0.03500851058959961, 0.03488256072998047, 0.03486105728149414, 0.034941951751708986, 0.03490508651733398, 0.03498495864868164, 0.03480678558349609, 0.0353361930847168, 0.03553177642822265, 0.03496038436889649, 0.03480678558349609, 0.034994174957275394, 0.034852863311767575, 0.035166206359863283, 0.035748863220214845, 0.037130241394042966, 0.036734977722167966, 0.0355153923034668, 0.03486515045166016, 0.03514777755737305, 0.034852863311767575, 0.035947521209716796, 0.03493580627441406, 0.03513958358764648, 0.03527884674072266, 0.036190208435058595, 
0.03503104019165039, 0.03499212646484375, 0.034947071075439456, 0.034971649169921876, 0.034955265045166016, 0.0350382080078125, 0.03506995010375977, 0.036211711883544925, 0.03607654571533203, 0.03518668746948242, 0.03512115097045899, 0.035743743896484374, 0.03514777755737305, 0.035089408874511716, 0.03508224105834961, 0.03505561447143555, 0.03509964752197266, 0.03524095916748047, 0.03525939178466797, 0.03576422500610352, 0.03533107376098633, 0.03539865493774414, 0.0350300178527832, 0.034976768493652347, 0.03488665771484375, 0.03504537582397461, 0.03644211196899414, 0.03501363372802734, 0.03501567840576172, 0.036703231811523435, 0.03714559936523437, 0.03794124984741211, 0.03777228927612305, 0.03530649566650391, 0.036596736907958984, 0.035939327239990236, 0.0351016960144043, 0.03515596771240234, 0.03641753768920898, 0.03627724838256836, 0.036176895141601564, 0.03619839859008789, 0.036195327758789066, 0.03471462249755859, 0.03491635131835937, 0.03601408004760742, 0.03622809600830078, 0.03576422500610352, 0.037580799102783204, 0.0365588493347168, 0.035302398681640625, 0.035410945892333984, 0.03651071929931641, 0.0370513916015625, 0.03701862335205078, 0.03574784088134766, 0.03624448013305664, 0.03530035018920898, 0.03699302291870117, 0.036975616455078124, 0.03530342483520508, 0.03643801498413086, 0.03643494415283203, 0.035759105682373046, 0.03538431930541992, 0.035064830780029296, 0.03504844665527344, 0.035023872375488284, 0.03508531188964844, 0.03498905563354492, 0.035127296447753906, 0.035146751403808595, 0.03492966461181641, 0.03592499160766602, 0.036190208435058595, 0.03530342483520508, 0.03594035339355469, 0.0363059196472168, 0.03546112060546875, 0.03505254364013672, 0.034955265045166016, 0.03673907089233398, 0.035225601196289064, 0.035141632080078124, 0.03545600128173828, 0.034988033294677735, 0.03494911956787109, 0.03477913665771484, 0.03547238540649414, 0.03519385528564453, 0.03501772689819336, 0.035095550537109374, 0.03634175872802734, 0.03623321533203125, 0.036116481781005856, 0.035394561767578124, 0.03495731353759766, 0.03500032043457031, 0.03491328048706055, 0.03546419143676758, 0.03698175811767578, 0.03543552017211914, 0.03637247848510742, 0.03592499160766602, 0.03625676727294922, 0.03623116683959961, 0.036342784881591796, 0.036790271759033204, 0.03658342361450195, 0.03635609436035156, 0.03729100799560547, 0.03663974380493164, 0.03502899169921875, 0.03615334320068359, 0.03558092880249023, 0.0348671989440918, 0.034933761596679686, 0.034955265045166016, 0.035124225616455076, 0.03542118453979492, 0.03611443328857422, 0.036190208435058595, 0.03505868911743164, 0.03511705780029297, 0.035138561248779294, 0.035081214904785156, 0.03498086547851562, 0.03577958297729492, 0.036195327758789066, 0.03526144027709961, 0.0350013427734375, 0.03502489471435547, 0.03505868911743164, 0.03499520111083984, 0.03498905563354492, 0.035095550537109374, 0.035105792999267575, 0.03495116806030273, 0.035102718353271486, 0.0348671989440918, 0.03510681533813476, 0.036318206787109376, 0.0362239990234375, 0.03624345779418945, 0.03527372741699219, 0.03498393630981445, 0.03511603164672852, 0.034969600677490234, 0.03583488082885742, 0.03487948989868164, 0.03494400024414063, 0.03490816116333008, 0.03446579360961914, 0.03454054260253906, 0.03497062301635742, 0.03488051223754883, 0.03505152130126953, 0.03498700714111328, 0.034969600677490234, 0.03484672164916992, 0.03480780792236328, 0.034912254333496096, 0.03550207901000976, 0.036106239318847655, 0.03614003372192383, 0.03597721481323242, 0.03497062301635742, 
0.03483955383300781, 0.035059711456298825, 0.034988033294677735, 0.03477811050415039, 0.03486515045166016, 0.03484774398803711, 0.034830337524414064, 0.034976768493652347, 0.03494604873657227, 0.034958335876464845, 0.03482419204711914, 0.034993152618408206, 0.034874366760253905, 0.034904064178466795, 0.03488051223754883, 0.03483443069458008, 0.03488870239257812, 0.03588710403442383, 0.0359106559753418, 0.03518054580688477, 0.03501055908203125, 0.03500851058959961, 0.0349194221496582, 0.03478015899658203, 0.03491020965576172, 0.03477503967285156, 0.034985984802246094, 0.03502489471435547, 0.03498905563354492, 0.035122177124023435, 0.035165184020996096, 0.035522560119628906, 0.03559423828125, 0.03503308868408203, 0.0350382080078125, 0.035119102478027346, 0.035095550537109374, 0.03501260757446289, 0.03577753448486328, 0.036192256927490236, 0.03615334320068359, 0.03630284881591797, 0.03591987228393555, 0.03483135986328125, 0.035198974609375, 0.03481190490722656, 0.03501772689819336, 0.03486105728149414, 0.03508224105834961, 0.03489484786987305, 0.03496755218505859, 0.0348590087890625, 0.035138561248779294, 0.034857982635498046, 0.035004417419433595, 0.03488153457641602, 0.03501567840576172, 0.034902015686035154, 0.03491839981079101, 0.034900993347167966, 0.03494297790527344, 0.03483955383300781, 0.0350013427734375, 0.03487539291381836, 0.034971649169921876, 0.03752243041992188, 0.03685273742675781, 0.03645747375488281, 0.03510784149169922, 0.035451904296875, 0.03502489471435547, 0.035059711456298825, 0.03495116806030273, 0.03482726287841797, 0.03489996719360351, 0.03489791870117188, 0.03486105728149414, 0.03495935821533203, 0.035151870727539065, 0.03487641525268555, 0.03506687927246094, 0.03488051223754883, 0.03497062301635742, 0.034920448303222655, 0.036219905853271485, 0.03556249618530274, 0.0349306869506836, 0.03498905563354492, 0.03483340835571289, 0.034945022583007815, 0.03500851058959961, 0.03498188781738281, 0.034928638458251955, 0.03498393630981445, 0.034971649169921876, 0.03503206253051758, 0.034933761596679686, 0.035019775390625, 0.0350013427734375, 0.03506995010375977, 0.03502489471435547, 0.03504742431640625, 0.03549900817871094, 0.03529011154174805, 0.03506790542602539, 0.035108863830566404, 0.03542835235595703, 0.03507097625732422, 0.03763711929321289, 0.03663564682006836, 0.03521638488769531, 0.035133438110351564, 0.0350013427734375, 0.03495219039916992, 0.0349194221496582, 0.03514265441894531, 0.035089408874511716, 0.03497062301635742, 0.035053569793701174, 0.03480166244506836, 0.03499827194213867, 0.03492454528808594, 0.03499929428100586, 0.034917377471923826, 0.03518975830078125, 0.03790848159790039, 0.035410945892333984, 0.03497881698608398, 0.035004417419433595, 0.03499622344970703, 0.03502796936035156, 0.035342334747314456, 0.03728076934814453, 0.036722686767578124, 0.03484979248046875, 0.03497369766235352, 0.034871295928955076, 0.0349306869506836, 0.03486822509765625, 0.03492761611938477, 0.03487744140625, 0.03488153457641602, 0.03497062301635742, 0.03522457504272461, 0.03515084838867188, 0.03502489471435547, 0.03688140869140625, 0.035110912322998046, 0.03503206253051758, 0.03667148971557617, 0.03569868850708008, 0.03501875305175781, 0.035056640625, 0.035138561248779294, 0.03597312164306641, 0.036347904205322266, 0.03511603164672852, 0.03498188781738281, 0.035983360290527344, 0.03518771362304687, 0.03509657669067383, 0.03495423889160156, 0.03508224105834961, 0.034933761596679686, 0.03507302474975586, 0.034953216552734374, 0.035108863830566404, 0.03498700714111328, 
0.035095550537109374]",tokens/s,28.28705411155131,,,,,,,, -4bit-awq-gemv-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,s,s,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 304, in hf_raise_for_status - response.raise_for_status() - File ""/usr/local/lib/python3.10/dist-packages/requests/models.py"", line 1024, in raise_for_status - raise HTTPError(http_error_msg, response=self) -requests.exceptions.HTTPError: 404 Client Error: Not Found for url: https://huggingface.co/s/resolve/main/config.json - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1221, in hf_hub_download - return _hf_hub_download_to_cache_dir( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1325, in _hf_hub_download_to_cache_dir - _raise_on_head_call_error(head_call_error, force_download, local_files_only) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1823, in _raise_on_head_call_error - raise head_call_error - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1722, in _get_metadata_or_catch_error - metadata = get_hf_file_metadata(url=url, proxies=proxies, timeout=etag_timeout, headers=headers) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1645, in get_hf_file_metadata - r = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 372, in _request_wrapper - response = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 396, in _request_wrapper - hf_raise_for_status(response) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status - raise RepositoryNotFoundError(message, response) from e 
-huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-66948cee-5d3c281c356bac8654d3c923;e2f4e06c-196f-4500-b11c-ae1ca59aed45) - -Repository Not Found for url: https://huggingface.co/s/resolve/main/config.json. -Please make sure you specified the correct `repo_id` and `repo_type`. -If you are trying to access a private or gated repo, make sure you are authenticated. - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 425, in cached_file - raise EnvironmentError( -OSError: s is not a local folder and is not a valid model identifier listed on 'https://huggingface.co/models' -If this is a private repository, make sure to pass a token having permission to this repo either by logging in with `huggingface-cli login` or by passing `token=` - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemv-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,EleutherAI/pythia-12b,EleutherAI/pythia-12b,cuda,0,42,,,True,,,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,gpt_neox,MB,6754.865152,7732.723712,0.0,7147.094016,7138.9184,s,1,10.310689453125,10.310689453125,0.0,10.310689453125,10.310689453125,10.310689453125,10.310689453125,[10.310689453125],,kWh,4.098254867986068e-05,2.244598500470359e-05,5.609310042997695e-05,0.00011952163411454123,,MB,1692.143616,8475.11552,0.0,7828.668416,7715.649536,s,10,1.7879208984375,0.17879208984375,0.00022720329250849707,0.1788534927368164,0.17900536956787108,0.17901586837768554,0.17902426742553712,"[0.17829728698730468, 0.17882882690429688, 0.17844630432128905, 
0.17893190002441406, 0.17883750915527344, 0.1790263671875, 0.17874295043945312, 0.17893724060058594, 0.17886947631835937, 0.17900303649902344]",tokens/s,1431.8306823513476,kWh,2.1115216688988386e-06,1.1567518257005334e-06,1.1933893475679041e-05,1.5202166970278413e-05,tokens/kWh,16839704.53031484,MB,1700.524032,8600.94464,0.0,7954.497536,7906.468352,s,10,16.4619755859375,1.6461975585937498,0.004025620272553259,1.6465095825195313,1.6508674682617188,1.6509516784667968,1.6510190466308594,"[1.65056201171875, 1.651035888671875, 1.6418956298828125, 1.642655029296875, 1.64730322265625, 1.6389302978515625, 1.6508487548828126, 1.6490084228515625, 1.6457159423828125, 1.6440203857421876]",tokens/s,38.27001180454745,kWh,1.9494108499435114e-05,1.0683387665543385e-05,7.677401082391625e-05,0.00010695150698889475,tokens/kWh,589052.0084634391,,s,630,16.459568111419685,0.026126298589555043,0.000361992311311702,0.02599526405334473,0.02657320899963379,0.02677841854095459,0.02751847414016724,"[0.02694655990600586, 0.025964544296264647, 0.026104831695556642, 0.02615091133117676, 0.02610380744934082, 0.025971712112426756, 0.026015743255615235, 0.026017791748046876, 0.026025983810424806, 0.026597375869750976, 0.026051584243774413, 0.026002431869506838, 0.026405887603759767, 0.025867263793945314, 0.025965568542480468, 0.026034175872802736, 0.026785791397094725, 0.026266624450683593, 0.026016767501831056, 0.026077184677124023, 0.026693632125854492, 0.026298368453979492, 0.02616012763977051, 0.025952255249023438, 0.026031103134155274, 0.02592563247680664, 0.02612428855895996, 0.025956352233886718, 0.026001407623291017, 0.02591641616821289, 0.026050559997558592, 0.025955327987670897, 0.026064895629882814, 0.026181631088256836, 0.02616831970214844, 0.02592767906188965, 0.026053632736206055, 0.026189823150634766, 0.02629734420776367, 0.026007551193237305, 0.02610585594177246, 0.02598297691345215, 0.02614169692993164, 0.026023935317993165, 0.02612428855895996, 0.025965568542480468, 0.026033151626586915, 0.026220544815063477, 0.026714111328125, 0.026389503479003908, 0.02647859191894531, 0.025999359130859375, 0.026081279754638673, 0.02596659278869629, 0.02631270408630371, 0.02634444808959961, 0.02689945602416992, 0.026021888732910156, 0.02608332824707031, 0.026844160079956055, 0.02674483108520508, 0.027249664306640626, 0.026274816513061523, 0.02594508743286133, 0.026630144119262695, 0.026481664657592774, 0.026048511505126954, 0.02592870330810547, 0.02611507225036621, 0.02612428855895996, 0.0269117431640625, 0.02629734420776367, 0.026025983810424806, 0.027458560943603515, 0.026208255767822267, 0.02591641616821289, 0.026438655853271483, 0.025911296844482422, 0.026058752059936522, 0.02647859191894531, 0.025819135665893556, 0.026253311157226563, 0.025959423065185547, 0.025890815734863282, 0.025827327728271485, 0.025868288040161135, 0.02592972755432129, 0.02594099235534668, 0.02660966491699219, 0.026292224884033204, 0.02693529510498047, 0.02816716766357422, 0.026833919525146483, 0.02646937561035156, 0.026050559997558592, 0.02612428855895996, 0.026224639892578124, 0.02590412712097168, 0.025857023239135742, 0.025891839981079103, 0.025964544296264647, 0.027435007095336913, 0.025845760345458983, 0.02592563247680664, 0.025854976654052734, 0.025873407363891602, 0.025860095977783205, 0.02594611167907715, 0.02594713592529297, 0.025899007797241212, 0.026550271987915038, 0.025998336791992187, 0.025952255249023438, 0.02592767906188965, 0.025837568283081053, 0.02591744041442871, 0.025883647918701173, 0.026620927810668944, 
0.026496000289916992, 0.02655948829650879, 0.026702848434448243, 0.026204160690307617, 0.026037248611450195, 0.025903104782104492, 0.02592255973815918, 0.025911296844482422, 0.026634239196777345, 0.025777151107788086, 0.026194944381713867, 0.025963520050048827, 0.02590105628967285, 0.02591948890686035, 0.02594611167907715, 0.025874431610107423, 0.02592665672302246, 0.02587750434875488, 0.02589286422729492, 0.025836544036865236, 0.02590412712097168, 0.025968639373779297, 0.025923583984375, 0.025944063186645508, 0.025952255249023438, 0.026277887344360353, 0.026067968368530273, 0.02590105628967285, 0.02589695930480957, 0.02596249580383301, 0.026207231521606447, 0.026570751190185548, 0.026060800552368164, 0.026286079406738282, 0.02591231918334961, 0.025684991836547853, 0.02595123291015625, 0.02596147155761719, 0.02594304084777832, 0.026056703567504884, 0.026010623931884767, 0.026084352493286132, 0.02691276741027832, 0.026476543426513673, 0.026027008056640624, 0.02592972755432129, 0.02597068786621094, 0.026236928939819337, 0.02593894386291504, 0.026036224365234374, 0.02593894386291504, 0.025907199859619142, 0.025915391921997072, 0.025960447311401368, 0.02590208053588867, 0.026104831695556642, 0.02736844825744629, 0.0267458553314209, 0.02611507225036621, 0.026078208923339844, 0.02591231918334961, 0.025863168716430664, 0.025843711853027345, 0.02591948890686035, 0.02595123291015625, 0.025840639114379883, 0.025894912719726562, 0.025939968109130858, 0.026037248611450195, 0.026466304779052735, 0.02615705680847168, 0.025955327987670897, 0.02587238311767578, 0.026070016860961914, 0.026250240325927734, 0.026648576736450196, 0.02648678398132324, 0.026092544555664062, 0.02591641616821289, 0.026011648178100585, 0.025867263793945314, 0.026067968368530273, 0.02593484878540039, 0.025931776046752928, 0.025852928161621092, 0.025903104782104492, 0.025857023239135742, 0.025860095977783205, 0.02592665672302246, 0.02593382453918457, 0.02591744041442871, 0.02591744041442871, 0.025860095977783205, 0.026034175872802736, 0.025862144470214843, 0.026047487258911133, 0.025862144470214843, 0.02589286422729492, 0.025785343170166015, 0.02588057518005371, 0.02607411193847656, 0.025997312545776367, 0.025869312286376952, 0.02637107276916504, 0.025816064834594726, 0.0259420166015625, 0.025911296844482422, 0.02592563247680664, 0.02589593505859375, 0.025986047744750978, 0.025865215301513672, 0.02591948890686035, 0.026355712890625, 0.02595737648010254, 0.026213375091552735, 0.02711039924621582, 0.026241024017333983, 0.02593382453918457, 0.025903104782104492, 0.025907199859619142, 0.027453439712524414, 0.02789580726623535, 0.02676736068725586, 0.0267458553314209, 0.025886720657348632, 0.025971712112426756, 0.025845760345458983, 0.025939968109130858, 0.025871360778808594, 0.025868288040161135, 0.025850879669189454, 0.025886720657348632, 0.025834495544433594, 0.025903104782104492, 0.026202112197875976, 0.026358783721923826, 0.02590003204345703, 0.02592563247680664, 0.025971712112426756, 0.026025983810424806, 0.025891839981079103, 0.02590105628967285, 0.02589798355102539, 0.02608742332458496, 0.02593894386291504, 0.02593075180053711, 0.026019840240478515, 0.026425344467163086, 0.026524671554565428, 0.025915391921997072, 0.025983999252319336, 0.026042367935180662, 0.02592153549194336, 0.026608640670776368, 0.026068992614746093, 0.025964544296264647, 0.02592665672302246, 0.025959423065185547, 0.025956352233886718, 0.02590412712097168, 0.02592767906188965, 0.025935871124267578, 0.02594508743286133, 0.026001407623291017, 
0.02591744041442871, 0.02591641616821289, 0.026092544555664062, 0.02592153549194336, 0.025915391921997072, 0.025977855682373048, 0.026481664657592774, 0.025976831436157227, 0.02612838363647461, 0.026650623321533205, 0.025977855682373048, 0.026019840240478515, 0.026170368194580077, 0.026015743255615235, 0.02595737648010254, 0.02592972755432129, 0.026019840240478515, 0.025935871124267578, 0.0271011848449707, 0.026040319442749024, 0.027459583282470702, 0.028067840576171874, 0.026821632385253907, 0.026846208572387696, 0.026216447830200194, 0.026053632736206055, 0.025967615127563477, 0.026000383377075196, 0.026287103652954103, 0.026298368453979492, 0.02595020866394043, 0.02593791961669922, 0.025956352233886718, 0.026089471817016603, 0.02592665672302246, 0.025790464401245116, 0.02592563247680664, 0.026020864486694335, 0.026041343688964845, 0.025891839981079103, 0.026364927291870118, 0.025968639373779297, 0.026071039199829102, 0.02594508743286133, 0.025865215301513672, 0.02555904006958008, 0.025862144470214843, 0.025845760345458983, 0.025899007797241212, 0.026298368453979492, 0.02593484878540039, 0.025967615127563477, 0.02588979148864746, 0.02591744041442871, 0.025967615127563477, 0.026208255767822267, 0.026670080184936523, 0.025956352233886718, 0.02587648010253906, 0.02587750434875488, 0.02591846466064453, 0.02698240089416504, 0.02732339286804199, 0.02654515266418457, 0.02614681625366211, 0.0265482234954834, 0.026201087951660155, 0.02592972755432129, 0.025785343170166015, 0.025774080276489256, 0.025944063186645508, 0.02652774429321289, 0.026181631088256836, 0.02593075180053711, 0.025969663619995118, 0.02594304084777832, 0.025959423065185547, 0.02592972755432129, 0.026415103912353514, 0.02594304084777832, 0.02589593505859375, 0.02589286422729492, 0.02574028778076172, 0.025867263793945314, 0.026005504608154296, 0.02588876724243164, 0.025701375961303712, 0.025881599426269532, 0.02572083282470703, 0.02571980857849121, 0.025614336013793947, 0.025604095458984375, 0.025511936187744142, 0.025645055770874024, 0.02588979148864746, 0.02655129623413086, 0.026059776306152343, 0.027248640060424805, 0.026177536010742186, 0.02614271926879883, 0.026044416427612304, 0.026054655075073242, 0.026452991485595705, 0.026679296493530274, 0.026003456115722655, 0.02591744041442871, 0.02592870330810547, 0.025981952667236328, 0.025944063186645508, 0.02596147155761719, 0.02594713592529297, 0.026246143341064454, 0.02592460823059082, 0.026206207275390626, 0.026260480880737305, 0.026030080795288086, 0.025661439895629884, 0.0261212158203125, 0.02630143928527832, 0.026827775955200195, 0.026643455505371092, 0.025831424713134765, 0.025669631958007814, 0.02592255973815918, 0.026008575439453126, 0.026306560516357422, 0.025987071990966795, 0.025968639373779297, 0.02676940727233887, 0.02632089614868164, 0.025973760604858398, 0.026089471817016603, 0.026660863876342773, 0.026089471817016603, 0.026007551193237305, 0.02656051254272461, 0.02632294464111328, 0.026405887603759767, 0.02652672004699707, 0.02648575973510742, 0.02597068786621094, 0.02652364730834961, 0.026028032302856444, 0.026418176651000977, 0.02592460823059082, 0.026145792007446288, 0.02631372833251953, 0.026041343688964845, 0.02595020866394043, 0.026392576217651367, 0.02594611167907715, 0.02590924835205078, 0.025975807189941406, 0.02589388847351074, 0.025860095977783205, 0.026009599685668947, 0.025849855422973633, 0.027146240234375, 0.027608064651489257, 0.026010623931884767, 0.026270719528198243, 0.028037120819091797, 0.026010623931884767, 0.026557439804077147, 
0.02614476776123047, 0.025939968109130858, 0.026031103134155274, 0.026215423583984376, 0.02595327949523926, 0.02590617561340332, 0.026161151885986327, 0.02657177543640137, 0.025994239807128908, 0.025948160171508788, 0.026021888732910156, 0.025923583984375, 0.02587648010253906, 0.026586111068725587, 0.02609561538696289, 0.026269695281982423, 0.02575257682800293, 0.026237951278686524, 0.02637414360046387, 0.026650623321533205, 0.026261503219604493, 0.025830400466918944, 0.026101760864257813, 0.02590003204345703, 0.027065343856811523, 0.02631884765625, 0.026234880447387695, 0.026003456115722655, 0.026396671295166017, 0.02654515266418457, 0.026754047393798826, 0.026755071640014647, 0.02595327949523926, 0.025915391921997072, 0.02592051124572754, 0.0259368953704834, 0.026658815383911134, 0.026275840759277344, 0.0259051513671875, 0.026019840240478515, 0.026391551971435546, 0.026327039718627928, 0.026053632736206055, 0.02597478485107422, 0.025894912719726562, 0.02594918441772461, 0.02594099235534668, 0.025967615127563477, 0.02591744041442871, 0.02594918441772461, 0.025890815734863282, 0.026006528854370117, 0.026018815994262694, 0.02636185646057129, 0.0259420166015625, 0.026001407623291017, 0.02594099235534668, 0.025960447311401368, 0.02609152030944824, 0.026007551193237305, 0.02570854377746582, 0.02609561538696289, 0.025972736358642577, 0.025657344818115234, 0.025613311767578126, 0.025791488647460937, 0.025996288299560546, 0.026007551193237305, 0.026431488037109374, 0.02754252815246582, 0.02832076835632324, 0.02649907112121582, 0.026594303131103517, 0.02647859191894531, 0.026174463272094727, 0.02593484878540039, 0.026437631607055666, 0.02627174377441406, 0.025985023498535157, 0.02590208053588867, 0.025977855682373048, 0.025886720657348632, 0.026023935317993165, 0.026175487518310548, 0.026244096755981446, 0.02614169692993164, 0.026565631866455077, 0.02593791961669922, 0.025867263793945314, 0.025915391921997072, 0.025992191314697266, 0.02591846466064453, 0.025977855682373048, 0.025486335754394532, 0.026013696670532226, 0.025973760604858398, 0.025976831436157227, 0.025994239807128908, 0.025964544296264647, 0.025964544296264647, 0.025948160171508788, 0.026384384155273437, 0.026389503479003908, 0.026192895889282225, 0.02608742332458496, 0.025848831176757812, 0.02588057518005371, 0.02590003204345703, 0.02687283134460449, 0.02651955223083496, 0.02607513618469238, 0.025907199859619142, 0.02589593505859375, 0.026251264572143555, 0.02592870330810547, 0.025911296844482422, 0.026040319442749024, 0.025972736358642577, 0.025976831436157227, 0.02590412712097168, 0.026059776306152343, 0.02596147155761719, 0.02588979148864746, 0.02592870330810547, 0.025907199859619142, 0.025972736358642577, 0.026210304260253905, 0.02595840072631836, 0.025968639373779297, 0.026000383377075196, 0.0259102725982666, 0.026019840240478515, 0.026402816772460938, 0.026630144119262695, 0.0263505916595459, 0.02607411193847656, 0.026045440673828125, 0.0259051513671875, 0.026104831695556642, 0.025996288299560546, 0.025899007797241212, 0.02648575973510742, 0.026439680099487304, 0.02592972755432129, 0.025853952407836913, 0.02594611167907715, 0.025831424713134765, 0.02590003204345703, 0.02671308708190918, 0.025992191314697266, 0.02609561538696289, 0.02598297691345215, 0.02613862419128418, 0.026283008575439453, 0.02594713592529297, 0.026433536529541016, 0.026018815994262694, 0.025977855682373048, 0.025972736358642577, 0.025956352233886718, 0.02589695930480957, 0.026089471817016603, 0.026096639633178712, 0.026208255767822267, 0.02631987190246582, 
0.026096639633178712, 0.026045440673828125, 0.026052608489990234, 0.02594713592529297, 0.025990144729614258, 0.02592665672302246, 0.025990144729614258, 0.02592563247680664, 0.026011648178100585, 0.026017791748046876, 0.025975807189941406, 0.025851903915405275, 0.02670796775817871, 0.026178560256958007, 0.025911296844482422, 0.02608639907836914, 0.026686464309692383, 0.026622976303100586, 0.02611814308166504]",tokens/s,38.27560940453261,,,,,,,, -4bit-awq-gemv-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,2,2,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 304, in hf_raise_for_status - response.raise_for_status() - File ""/usr/local/lib/python3.10/dist-packages/requests/models.py"", line 1024, in raise_for_status - raise HTTPError(http_error_msg, response=self) -requests.exceptions.HTTPError: 404 Client Error: Not Found for url: https://huggingface.co/2/resolve/main/config.json - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1221, in hf_hub_download - return _hf_hub_download_to_cache_dir( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1325, in _hf_hub_download_to_cache_dir - _raise_on_head_call_error(head_call_error, force_download, local_files_only) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1823, in _raise_on_head_call_error - raise head_call_error - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1722, in _get_metadata_or_catch_error - metadata = get_hf_file_metadata(url=url, proxies=proxies, timeout=etag_timeout, headers=headers) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1645, in get_hf_file_metadata - r = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 372, in _request_wrapper - 
response = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 396, in _request_wrapper - hf_raise_for_status(response) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status - raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-66949371-407674857434fc174bd383a3;ec6ab7ea-adb7-435f-b82a-4272733aa337) - -Repository Not Found for url: https://huggingface.co/2/resolve/main/config.json. -Please make sure you specified the correct `repo_id` and `repo_type`. -If you are trying to access a private or gated repo, make sure you are authenticated. - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 425, in cached_file - raise EnvironmentError( -OSError: 2 is not a local folder and is not a valid model identifier listed on 'https://huggingface.co/models' -If this is a private repository, make sure to pass a token having permission to this repo either by logging in with `huggingface-cli login` or by passing `token=` - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemv-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,tiiuae/falcon-40b,tiiuae/falcon-40b,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File 
""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - self.load_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 559, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3704, in from_pretrained - config = cls._autoset_attn_implementation( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1490, in _autoset_attn_implementation - config = cls._check_and_enable_sdpa( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1656, in _check_and_enable_sdpa - raise ValueError( -ValueError: FalconForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. 
Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemv-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,EleutherAI/pythia-160m,EleutherAI/pythia-160m,cuda,0,42,,,True,,,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,gpt_neox,MB,915.554304,845.676544,0.0,260.046848,253.883392,s,1,7.21129736328125,7.21129736328125,0.0,7.21129736328125,7.21129736328125,7.21129736328125,7.21129736328125,[7.21129736328125],,kWh,5.261660095134681e-06,2.862550796337855e-06,8.148895408011114e-06,1.6273106299483652e-05,,MB,1399.033856,988.28288,0.0,341.835776,312.754176,s,17,0.17877142333984375,0.010515966078814339,0.0002288984160006703,0.010449407577514648,0.010713779067993165,0.010972134208679199,0.0112174027633667,"[0.010246111869812011, 0.010454943656921387, 0.010498144149780274, 0.01043558406829834, 0.010390975952148437, 0.010448736190795898, 0.010406335830688477, 0.010475584030151367, 0.010449407577514648, 0.010389760017395019, 0.010592639923095703, 0.01047372817993164, 0.010420672416687012, 0.010895487785339355, 0.010413855552673339, 0.011278719902038574, 0.010500736236572265]",tokens/s,24343.935505435147,kWh,1.2473468524141157e-07,6.834855140450184e-08,3.2082537692289063e-07,5.139086135688041e-07,tokens/kWh,498143041.85762733,MB,1423.4624,1015.545856,0.0,369.098752,313.412096,s,17,9.860448974609376,0.5800264102711397,0.0036231496997287415,0.5793173828125,0.58472900390625,0.5871739379882813,0.5876752661132812,"[0.5830091552734376, 0.5769498291015625, 0.5832034912109375, 0.5826878051757812, 0.5762576293945313, 0.5783075561523437, 0.5878005981445312, 0.5781835327148438, 0.5768833618164062, 0.5750750122070313, 0.5759237670898437, 0.5870172729492188, 0.5796244506835937, 0.5803236083984376, 0.58164013671875, 0.578244384765625, 0.5793173828125]",tokens/s,108.61574384268117,kWh,6.725752996934768e-06,3.685257175054284e-06,1.0874106418524872e-05,2.1285116590513923e-05,tokens/kWh,2959814.6541549615,,s,1071,9.85335602378847,0.009200145680474746,0.00024955476476517255,0.009117695808410644,0.009455615997314454,0.009563136100769042,0.00998307819366455,"[0.009416704177856445, 0.009340928077697755, 0.009137151718139648, 0.009203712463378906, 0.009118720054626465, 0.009117695808410644, 0.009111552238464356, 0.009132032394409179, 0.009178112030029297, 0.00911359977722168, 0.009174015998840332, 0.00913920021057129, 0.0091146240234375, 0.009120767593383788, 0.009189375877380371, 0.009143296241760255, 0.009225215911865235, 0.009185279846191406, 0.009790464401245117, 0.010061823844909668, 0.009782272338867188, 0.009484288215637206, 0.009470975875854493, 0.00941260814666748, 0.009452544212341308, 0.009455615997314454, 0.00929587173461914, 0.009847807884216308, 0.009470975875854493, 0.009448448181152343, 0.009307135581970214, 0.009127936363220214, 0.009110527992248535, 0.009179136276245118, 0.00913100814819336, 0.009094143867492676, 0.009115648269653321, 0.009127936363220214, 0.00909721565246582, 
0.009092096328735352, 0.009095168113708496, 0.009183232307434081, 0.009178112030029297, 0.009167872428894042, 0.009125887870788574, 0.00908083152770996, 0.009086976051330567, 0.00910540771484375, 0.009119744300842286, 0.009085951805114746, 0.009096192359924317, 0.00910848045349121, 0.009201663970947266, 0.009508864402770996, 0.009273344039916993, 0.009170944213867188, 0.009126912117004395, 0.009357312202453612, 0.009276415824890137, 0.009111552238464356, 0.009171968460083007, 0.009240575790405273, 0.009194496154785157, 0.009165823936462402, 0.009628671646118164, 0.009423871994018555, 0.00923852825164795, 0.009190400123596192, 0.00920576000213623, 0.009117695808410644, 0.009059328079223633, 0.009107456207275391, 0.00910643196105957, 0.00910643196105957, 0.009081855773925781, 0.00921292781829834, 0.009085951805114746, 0.009112575531005859, 0.009355263710021973, 0.00912384033203125, 0.008961024284362793, 0.009060352325439454, 0.009128959655761718, 0.009135104179382325, 0.009120767593383788, 0.00912384033203125, 0.009145343780517578, 0.009086976051330567, 0.00912281608581543, 0.009135104179382325, 0.009145343780517578, 0.009110527992248535, 0.0091494722366333, 0.009143263816833495, 0.009163776397705077, 0.00913920021057129, 0.009168895721435547, 0.009085951805114746, 0.009115648269653321, 0.009107456207275391, 0.009130016326904297, 0.009106399536132813, 0.009083904266357423, 0.009126912117004395, 0.009178112030029297, 0.00943616008758545, 0.009554944038391112, 0.009240575790405273, 0.00910540771484375, 0.009083904266357423, 0.009127936363220214, 0.009103360176086426, 0.009116671562194823, 0.009119775772094727, 0.009096159934997558, 0.009096192359924317, 0.009077759742736816, 0.009134079933166504, 0.009102335929870605, 0.009133055686950683, 0.00909721565246582, 0.009129983901977539, 0.009137151718139648, 0.00910643196105957, 0.009136128425598144, 0.00910643196105957, 0.009378848075866699, 0.009194463729858398, 0.009483263969421387, 0.009677824020385742, 0.00932249641418457, 0.009142271995544434, 0.009126912117004395, 0.009071776390075684, 0.00907862377166748, 0.009146368026733399, 0.009504768371582031, 0.009138175964355469, 0.00930406379699707, 0.009192447662353515, 0.009151488304138184, 0.009125887870788574, 0.009141247749328613, 0.009390080451965332, 0.009134079933166504, 0.009117695808410644, 0.00908291244506836, 0.009126879692077637, 0.009162752151489258, 0.009117695808410644, 0.00912281608581543, 0.009183232307434081, 0.00914739227294922, 0.009117695808410644, 0.009132032394409179, 0.009208831787109375, 0.009144319534301757, 0.009136128425598144, 0.009174015998840332, 0.00941158390045166, 0.009240575790405273, 0.009096192359924317, 0.009270272254943847, 0.00919654369354248, 0.009128959655761718, 0.009153535842895508, 0.009104384422302245, 0.009226240158081055, 0.009109503746032714, 0.009739263534545899, 0.009461759567260742, 0.009475071907043458, 0.009479167938232422, 0.009468928337097168, 0.009457663536071777, 0.009500672340393066, 0.009431039810180664, 0.009662464141845703, 0.00941260814666748, 0.009128959655761718, 0.00908083152770996, 0.00910540771484375, 0.009104384422302245, 0.009235456466674804, 0.009430015563964844, 0.009374719619750976, 0.009497599601745605, 0.009128992080688476, 0.009002976417541504, 0.009107456207275391, 0.009151488304138184, 0.009150464057922364, 0.00910643196105957, 0.009288703918457031, 0.00940544033050537, 0.009388031959533692, 0.00954265594482422, 0.009171968460083007, 0.009148415565490722, 0.009117695808410644, 0.009149439811706543, 0.009131072044372558, 
0.009132991790771485, 0.009187328338623046, 0.00918835163116455, 0.009142271995544434, 0.009268256187438964, 0.0094934720993042, 0.00912384033203125, 0.00909004783630371, 0.009149439811706543, 0.009111552238464356, 0.009116671562194823, 0.009117695808410644, 0.008987648010253906, 0.009459712028503419, 0.009373696327209472, 0.009356287956237793, 0.009142271995544434, 0.009095168113708496, 0.009132032394409179, 0.009075712203979493, 0.009158656120300293, 0.009093119621276855, 0.009110527992248535, 0.009088000297546387, 0.009502719879150391, 0.00940544033050537, 0.009350144386291503, 0.009157631874084473, 0.009149439811706543, 0.009112575531005859, 0.009062399864196777, 0.009187328338623046, 0.00913920021057129, 0.009127936363220214, 0.009400320053100587, 0.009438207626342773, 0.00970956802368164, 0.010275839805603027, 0.009669631958007812, 0.009363455772399902, 0.009600000381469726, 0.00910752010345459, 0.009088959693908692, 0.009481216430664062, 0.009066495895385742, 0.009107456207275391, 0.00910540771484375, 0.009052160263061524, 0.009341952323913574, 0.00922111988067627, 0.009119744300842286, 0.009220095634460449, 0.009133055686950683, 0.009036800384521485, 0.008993791580200196, 0.009102335929870605, 0.009096223831176758, 0.009137151718139648, 0.009424863815307617, 0.009099295616149902, 0.009120736122131348, 0.00903987216949463, 0.008980480194091797, 0.00931942367553711, 0.00954470443725586, 0.009324543952941895, 0.009525247573852539, 0.009398271560668945, 0.00941977596282959, 0.009259008407592773, 0.009104384422302245, 0.009086976051330567, 0.009062399864196777, 0.00894156837463379, 0.009078783988952637, 0.00911359977722168, 0.009110527992248535, 0.009165823936462402, 0.009091072082519532, 0.009110527992248535, 0.009078783988952637, 0.00903987216949463, 0.008985600471496581, 0.008969216346740723, 0.00899071979522705, 0.00910848045349121, 0.009088000297546387, 0.009084927558898925, 0.009103360176086426, 0.009115648269653321, 0.009236479759216308, 0.009529343605041504, 0.009236479759216308, 0.008936448097229004, 0.009010175704956054, 0.00909721565246582, 0.00909721565246582, 0.009093119621276855, 0.009120800018310548, 0.009176032066345215, 0.009104384422302245, 0.00913920021057129, 0.00913308811187744, 0.009149408340454102, 0.008996864318847657, 0.009004032135009766, 0.00903270435333252, 0.00899891185760498, 0.009146368026733399, 0.009082880020141602, 0.009236479759216308, 0.009224191665649414, 0.009332736015319825, 0.00908902359008789, 0.009102335929870605, 0.00914739227294922, 0.009093119621276855, 0.009085951805114746, 0.009160703659057617, 0.008950783729553222, 0.009082880020141602, 0.009116767883300781, 0.009193375587463378, 0.0091146240234375, 0.009118720054626465, 0.009137151718139648, 0.009102335929870605, 0.009092096328735352, 0.009103360176086426, 0.00913920021057129, 0.009091072082519532, 0.009056256294250489, 0.009158656120300293, 0.009273344039916993, 0.0091146240234375, 0.009070624351501464, 0.009117664337158203, 0.009099264144897461, 0.00918835163116455, 0.009134079933166504, 0.00909823989868164, 0.009162752151489258, 0.009116671562194823, 0.009136128425598144, 0.009200639724731445, 0.00907475185394287, 0.00901318359375, 0.00990726375579834, 0.009745344161987305, 0.009365504264831542, 0.009136128425598144, 0.009094143867492676, 0.009119744300842286, 0.009099264144897461, 0.009107456207275391, 0.009276415824890137, 0.009438207626342773, 0.00941875171661377, 0.009283583641052246, 0.009111552238464356, 0.009083904266357423, 0.00909721565246582, 0.009174015998840332, 
0.009077759742736816, 0.009101311683654785, 0.009227264404296874, 0.009434111595153808, 0.009342975616455078, 0.009357312202453612, 0.009215999603271484, 0.009151488304138184, 0.009145343780517578, 0.009140224456787109, 0.009049087524414063, 0.009083904266357423, 0.009138175964355469, 0.00908083152770996, 0.009102335929870605, 0.00909004783630371, 0.009077759742736816, 0.00912281608581543, 0.009103360176086426, 0.009104384422302245, 0.009081855773925781, 0.009192480087280273, 0.009112544059753418, 0.009133055686950683, 0.009152511596679687, 0.009178112030029297, 0.009116671562194823, 0.009081855773925781, 0.009117695808410644, 0.009104384422302245, 0.009084927558898925, 0.00908083152770996, 0.009099264144897461, 0.009060352325439454, 0.009085951805114746, 0.009137151718139648, 0.00910643196105957, 0.008954879760742187, 0.009155584335327148, 0.009225215911865235, 0.009172991752624511, 0.00973516845703125, 0.009739263534545899, 0.012907520294189453, 0.011762687683105469, 0.009634816169738769, 0.009517056465148926, 0.00941875171661377, 0.009448448181152343, 0.009406463623046875, 0.009602047920227052, 0.009644031524658203, 0.009456640243530273, 0.00941977596282959, 0.009490431785583496, 0.009438207626342773, 0.00909823989868164, 0.009107456207275391, 0.009081919670104981, 0.009069503784179688, 0.00910540771484375, 0.009082880020141602, 0.009005056381225587, 0.008839167594909669, 0.00897433567047119, 0.009057279586791991, 0.009348095893859864, 0.009159680366516113, 0.009118720054626465, 0.009107456207275391, 0.009762816429138184, 0.009552895545959473, 0.009444352149963378, 0.00941055965423584, 0.009142271995544434, 0.009183232307434081, 0.009409536361694336, 0.009354240417480468, 0.009111552238464356, 0.00911359977722168, 0.00919961643218994, 0.009571328163146972, 0.009524224281311035, 0.00929792022705078, 0.009115648269653321, 0.00910540771484375, 0.009086976051330567, 0.0092457275390625, 0.009500639915466309, 0.008978431701660156, 0.009081855773925781, 0.009099264144897461, 0.009142271995544434, 0.009548800468444824, 0.009452544212341308, 0.009343999862670899, 0.009442303657531738, 0.00942182445526123, 0.009492480278015136, 0.009408512115478516, 0.009153535842895508, 0.009150464057922364, 0.00910028839111328, 0.00910540771484375, 0.009060352325439454, 0.009058303833007812, 0.009079808235168458, 0.00909004783630371, 0.009110527992248535, 0.009063424110412598, 0.00908083152770996, 0.009099264144897461, 0.009066495895385742, 0.00908083152770996, 0.009170944213867188, 0.009094143867492676, 0.009028608322143555, 0.009018367767333984, 0.009054207801818847, 0.008975359916687011, 0.009059328079223633, 0.00912179183959961, 0.009164863586425782, 0.00912992000579834, 0.009064448356628419, 0.00910643196105957, 0.008985600471496581, 0.009044992446899413, 0.009168895721435547, 0.009170944213867188, 0.009416704177856445, 0.009347071647644043, 0.009037823677062988, 0.009027584075927735, 0.008962047576904298, 0.009000960350036622, 0.009096192359924317, 0.009082880020141602, 0.00909721565246582, 0.008971263885498047, 0.009054207801818847, 0.00909721565246582, 0.00899788761138916, 0.008828927993774414, 0.008983551979064941, 0.00898252773284912, 0.00894976043701172, 0.008943615913391113, 0.008966143608093263, 0.008994815826416015, 0.009019392013549805, 0.009219103813171387, 0.009242591857910156, 0.009170944213867188, 0.00897433567047119, 0.008969216346740723, 0.009074687957763672, 0.009055232048034668, 0.008964096069335938, 0.009027584075927735, 0.00923136043548584, 0.009146400451660156, 0.009221088409423829, 
0.009374719619750976, 0.009431039810180664, 0.009052160263061524, 0.009560064315795898, 0.009227264404296874, 0.009598976135253906, 0.010333184242248536, 0.009432064056396485, 0.0094136323928833, 0.009389056205749511, 0.009515007972717286, 0.009553919792175293, 0.009027584075927735, 0.00912179183959961, 0.009115648269653321, 0.00902451229095459, 0.009061375617980956, 0.00902348804473877, 0.009036800384521485, 0.008976384162902832, 0.008938495635986327, 0.009058303833007812, 0.008957951545715333, 0.008970239639282226, 0.008937472343444825, 0.009656319618225098, 0.009505791664123535, 0.009145343780517578, 0.009137151718139648, 0.009063424110412598, 0.008968192100524902, 0.008944640159606934, 0.009102335929870605, 0.009085951805114746, 0.009061408042907716, 0.009028575897216796, 0.009432095527648926, 0.009068511962890624, 0.009059328079223633, 0.008980544090270996, 0.009234368324279785, 0.009832448005676269, 0.009847807884216308, 0.009494527816772461, 0.009356287956237793, 0.00908083152770996, 0.00908083152770996, 0.00909823989868164, 0.009070591926574707, 0.009066495895385742, 0.00912281608581543, 0.009062399864196777, 0.009094143867492676, 0.009116671562194823, 0.009067520141601563, 0.008979455947875976, 0.008965120315551758, 0.009250816345214843, 0.009234432220458985, 0.009077759742736816, 0.00907472038269043, 0.00904905605316162, 0.009094143867492676, 0.009070591926574707, 0.00921395206451416, 0.00919654369354248, 0.009081888198852538, 0.009082847595214844, 0.009111552238464356, 0.009120767593383788, 0.009083968162536621, 0.009051072120666503, 0.00932863998413086, 0.009292799949645996, 0.009363455772399902, 0.00900812816619873, 0.008962047576904298, 0.00901734447479248, 0.009035776138305664, 0.009077759742736816, 0.009056256294250489, 0.009062399864196777, 0.009065471649169921, 0.009083904266357423, 0.009033727645874023, 0.009036800384521485, 0.009060352325439454, 0.009061375617980956, 0.00900812816619873, 0.009000960350036622, 0.009067520141601563, 0.009084927558898925, 0.009049087524414063, 0.009051136016845703, 0.009095168113708496, 0.009060352325439454, 0.009048064231872559, 0.009067520141601563, 0.009084927558898925, 0.009029631614685058, 0.009051136016845703, 0.009057279586791991, 0.009325568199157714, 0.009067520141601563, 0.009071616172790528, 0.009082880020141602, 0.00910028839111328, 0.009092096328735352, 0.00909721565246582, 0.009068575859069825, 0.009140192031860351, 0.009070591926574707, 0.00909004783630371, 0.009066559791564942, 0.009073599815368653, 0.008954879760742187, 0.009079808235168458, 0.00910028839111328, 0.009078783988952637, 0.009084927558898925, 0.009054207801818847, 0.0091146240234375, 0.009066495895385742, 0.009055232048034668, 0.009067520141601563, 0.00909004783630371, 0.009049087524414063, 0.009077792167663573, 0.009286623954772949, 0.009135104179382325, 0.009027584075927735, 0.009041919708251953, 0.009065471649169921, 0.009093119621276855, 0.009071616172790528, 0.009037823677062988, 0.009115648269653321, 0.009081855773925781, 0.009072640419006347, 0.009290816307067871, 0.009132991790771485, 0.009067520141601563, 0.009365504264831542, 0.00910028839111328, 0.009063424110412598, 0.009035776138305664, 0.009065471649169921, 0.009063424110412598, 0.009034751892089844, 0.009067520141601563, 0.009277440071105958, 0.009407487869262696, 0.009660415649414063, 0.009763839721679688, 0.009354240417480468, 0.009144319534301757, 0.009111552238464356, 0.009063424110412598, 0.009054207801818847, 0.009095168113708496, 0.009137151718139648, 0.009203712463378906, 
0.009431039810180664, 0.009178112030029297, 0.009093119621276855, 0.009977855682373048, 0.009656352043151855, 0.009920479774475098, 0.010166272163391114, 0.009541631698608399, 0.0107642879486084, 0.011099136352539063, 0.009995264053344726, 0.009612288475036621, 0.009631744384765625, 0.009433088302612304, 0.009533503532409667, 0.009413567543029785, 0.009695232391357422, 0.00921497631072998, 0.009277440071105958, 0.009481216430664062, 0.009516032218933105, 0.009475071907043458, 0.009201663970947266, 0.009099264144897461, 0.009066495895385742, 0.009050111770629882, 0.009266176223754884, 0.009671680450439453, 0.009128959655761718, 0.009366527557373047, 0.009104384422302245, 0.009054207801818847, 0.009316351890563965, 0.00920473575592041, 0.009112575531005859, 0.009064448356628419, 0.009102335929870605, 0.009061375617980956, 0.009042943954467773, 0.00909721565246582, 0.009125920295715332, 0.009184224128723145, 0.009059328079223633, 0.00909823989868164, 0.009388031959533692, 0.009076736450195312, 0.009037823677062988, 0.009107456207275391, 0.009063424110412598, 0.009067584037780761, 0.009027520179748534, 0.009163776397705077, 0.009099264144897461, 0.009046015739440917, 0.009104384422302245, 0.00909004783630371, 0.009012224197387696, 0.009049087524414063, 0.009099264144897461, 0.009054207801818847, 0.008952960014343262, 0.009027456283569336, 0.009077759742736816, 0.009085951805114746, 0.009027584075927735, 0.009046015739440917, 0.00901734447479248, 0.00912281608581543, 0.009084927558898925, 0.00912384033203125, 0.009125887870788574, 0.00910643196105957, 0.00909823989868164, 0.00908083152770996, 0.00909721565246582, 0.009038847923278808, 0.009043968200683594, 0.00908902359008789, 0.00910643196105957, 0.009036800384521485, 0.009077759742736816, 0.009250816345214843, 0.00943616008758545, 0.009463808059692384, 0.009438207626342773, 0.009571328163146972, 0.009029631614685058, 0.00908083152770996, 0.009134079933166504, 0.009098272323608398, 0.009183199882507324, 0.009127936363220214, 0.009050111770629882, 0.009141247749328613, 0.009275391578674316, 0.009177087783813476, 0.00910540771484375, 0.009135104179382325, 0.009262080192565919, 0.009125887870788574, 0.009458687782287598, 0.009835519790649415, 0.009455615997314454, 0.009203712463378906, 0.009198592185974122, 0.009415679931640625, 0.009367551803588867, 0.009393152236938476, 0.009293824195861817, 0.009488384246826171, 0.009190400123596192, 0.009155584335327148, 0.0091146240234375, 0.00909721565246582, 0.008965120315551758, 0.009065471649169921, 0.009279583930969238, 0.009342880249023437, 0.009071616172790528, 0.009050111770629882, 0.009076800346374512, 0.0091145601272583, 0.00910540771484375, 0.009053183555603026, 0.009441280364990234, 0.009099264144897461, 0.009112575531005859, 0.009337856292724609, 0.00908902359008789, 0.009352191925048828, 0.009358336448669433, 0.009103360176086426, 0.009068544387817384, 0.009074687957763672, 0.009091072082519532, 0.009075712203979493, 0.00923852825164795, 0.009358336448669433, 0.009268223762512207, 0.009439231872558594, 0.009197567939758301, 0.009078783988952637, 0.009062399864196777, 0.009057279586791991, 0.009077759742736816, 0.009069567680358886, 0.009060352325439454, 0.009553919792175293, 0.009481216430664062, 0.009302016258239745, 0.009323519706726074, 0.009118720054626465, 0.009034751892089844, 0.009050111770629882, 0.00980070400238037, 0.00932044792175293, 0.009225215911865235, 0.009178112030029297, 0.009399295806884766, 0.009208831787109375, 0.009189375877380371, 0.009401344299316406, 
0.009063424110412598, 0.009057279586791991, 0.009065471649169921, 0.009030655860900879, 0.009041919708251953, 0.008953856468200684, 0.009022463798522949, 0.00932044792175293, 0.009373696327209472, 0.009359359741210938, 0.009453568458557129, 0.00929587173461914, 0.009093119621276855, 0.009078783988952637, 0.009041919708251953, 0.009082880020141602, 0.009060352325439454, 0.009134079933166504, 0.009400320053100587, 0.009142271995544434, 0.009252863883972168, 0.009277440071105958, 0.009141247749328613, 0.009065471649169921, 0.009067520141601563, 0.009091072082519532, 0.009179136276245118, 0.009077759742736816, 0.009116671562194823, 0.009943039894104003, 0.009072640419006347, 0.00899071979522705, 0.009035776138305664, 0.009424896240234374, 0.009417728424072265, 0.00937987232208252, 0.009401311874389648, 0.009382911682128906, 0.009613311767578125, 0.00951296043395996, 0.009207807540893554, 0.009109503746032714, 0.009138175964355469, 0.009107456207275391, 0.009254912376403808, 0.009566207885742188, 0.009158656120300293, 0.009165823936462402, 0.00982528018951416, 0.009737215995788574, 0.009490431785583496, 0.009218048095703125, 0.009274368286132812, 0.009156607627868652, 0.009150464057922364, 0.009076736450195312, 0.009047039985656738, 0.009050111770629882, 0.009003007888793945, 0.009088000297546387, 0.009273344039916993, 0.009414655685424805, 0.009335807800292969, 0.009346048355102539, 0.009422847747802734, 0.009374719619750976, 0.009352191925048828, 0.00943616008758545, 0.009094143867492676, 0.009079808235168458, 0.00910540771484375, 0.009126912117004395, 0.009093119621276855, 0.009073663711547851, 0.009148415565490722, 0.009061375617980956, 0.009044992446899413, 0.009048128128051757, 0.009105343818664551, 0.009072640419006347, 0.00909004783630371, 0.009104384422302245, 0.009261055946350098, 0.009233407974243164, 0.009075712203979493, 0.009057279586791991, 0.008970303535461426, 0.009110464096069335, 0.009143327713012695, 0.009083904266357423, 0.009041888236999512, 0.009185279846191406, 0.009780223846435548, 0.010115072250366211, 0.009558015823364258, 0.009347071647644043, 0.009432064056396485, 0.00960102367401123, 0.009447423934936524, 0.009394240379333497, 0.009477055549621582, 0.009409536361694336, 0.009450495719909668, 0.009235456466674804, 0.009079808235168458, 0.009115648269653321, 0.009141247749328613, 0.009060352325439454, 0.009059328079223633, 0.009115648269653321, 0.009085951805114746, 0.009047039985656738, 0.009037823677062988, 0.00930303955078125, 0.009088000297546387, 0.009138175964355469, 0.009041919708251953, 0.009142271995544434, 0.009132032394409179, 0.009059328079223633, 0.009057279586791991, 0.009047039985656738, 0.009048064231872559, 0.009038847923278808, 0.009172991752624511, 0.00902451229095459, 0.009037856101989747, 0.009059295654296876, 0.009179136276245118, 0.009182208061218262, 0.009150464057922364, 0.009073663711547851, 0.00910028839111328, 0.009092096328735352, 0.009064448356628419, 0.009156607627868652, 0.009052160263061524, 0.009054207801818847, 0.009450495719909668, 0.008987648010253906, 0.009056256294250489, 0.009086976051330567, 0.009133055686950683, 0.009104384422302245, 0.009060352325439454, 0.00909823989868164, 0.009156607627868652, 0.00910643196105957, 0.009053183555603026, 0.009118720054626465, 0.009174015998840332, 0.009126912117004395, 0.00910643196105957, 0.009176063537597656, 0.009065471649169921, 0.009069567680358886, 0.009255935668945312, 0.009361408233642577, 0.009602047920227052, 0.010516480445861816, 0.009357312202453612, 0.009179136276245118, 
0.009172991752624511, 0.009110527992248535, 0.009070591926574707, 0.009060352325439454, 0.00940339183807373, 0.009276415824890137, 0.009458687782287598, 0.009102335929870605, 0.009059328079223633, 0.00908083152770996, 0.009094143867492676, 0.009088000297546387, 0.009396224021911622, 0.009474047660827637, 0.00918015956878662, 0.009495552062988282, 0.009170944213867188, 0.009126912117004395, 0.00908902359008789, 0.009120767593383788, 0.009066495895385742, 0.009084927558898925, 0.009067520141601563, 0.009242624282836913, 0.009362431526184082, 0.009182208061218262, 0.00928767967224121, 0.009465855598449707, 0.009357312202453612, 0.009265151977539063, 0.00902451229095459, 0.009005056381225587, 0.009067520141601563, 0.009054207801818847, 0.009079808235168458, 0.009077759742736816, 0.009067520141601563, 0.009176063537597656, 0.009095168113708496, 0.009043968200683594, 0.009070591926574707, 0.009064448356628419, 0.009085951805114746, 0.009052160263061524, 0.009073663711547851, 0.00908902359008789, 0.009065471649169921, 0.009066495895385742, 0.009127936363220214, 0.009140224456787109, 0.009068544387817384, 0.009044992446899413, 0.009138175964355469, 0.00910028839111328, 0.009088000297546387, 0.009091072082519532, 0.009166848182678223]",tokens/s,108.69393102353548,,,,,,,, -4bit-awq-gemv-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,meta-llama/Meta-Llama-3-70B,meta-llama/Meta-Llama-3-70B,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - self.load_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3904, in from_pretrained - dispatch_model(model, **device_map_kwargs) - File 
""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 489, in dispatch_model - model.to(device) - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 2796, in to - return super().to(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1173, in to - return self._apply(convert) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 779, in _apply - module._apply(fn) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 779, in _apply - module._apply(fn) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 779, in _apply - module._apply(fn) - [Previous line repeated 2 more times] - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 853, in _apply - self._buffers[key] = fn(buf) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1159, in convert - return t.to( -torch.cuda.OutOfMemoryError: CUDA out of memory. Tried to allocate 20.00 MiB. GPU - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemv-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,Qwen/Qwen1.5-14B,Qwen/Qwen1.5-14B,cuda,0,42,,,True,,,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,qwen2,MB,8197.869568,11287.396352,0.0,10701.766656,10468.923392,s,1,11.77667578125,11.77667578125,0.0,11.77667578125,11.77667578125,11.77667578125,11.77667578125,[11.77667578125],,kWh,5.85412957312782e-05,3.2059763506521074e-05,8.151506521192342e-05,0.0001721161244497227,,MB,2736.41472,11834.753024,0.0,11188.30592,10924.283904,s,10,1.9910370635986325,0.19910370635986324,4.7260783204637515e-05,0.19908956909179687,0.19914786224365236,0.19917823257446288,0.19920252883911133,"[0.1990630340576172, 0.1991375732421875, 0.19914111328125, 0.1990765380859375, 0.19908831787109374, 0.19912661743164062, 0.19906130981445314, 0.19920860290527342, 0.1990908203125, 0.1990431365966797]",tokens/s,1285.7621019736391,kWh,2.3555931016205624e-06,1.2907472471000921e-06,1.4106705185138609e-05,1.7753045533859263e-05,tokens/kWh,14420061.026247432,MB,2740.740096,11836.850176,0.0,11190.403072,10924.286464,s,10,22.505097167968753,2.2505097167968753,0.011792297520471078,2.25305615234375,2.263860815429687,2.2655295288085937,2.2668644995117186,"[2.263489990234375, 2.2349384765625, 2.235915771484375, 2.233608642578125, 2.249439208984375, 2.2459560546875, 2.25945166015625, 2.2671982421875, 2.258426025390625, 2.256673095703125]",tokens/s,27.99365829429396,kWh,2.6860327224764697e-05,1.472053349615598e-05,9.985724709705364e-05,0.00014143810781797433,tokens/kWh,445424.51091808086,,s,630,22.503112705230716,0.035719226516239225,0.0005782629818726287,0.03556966400146484,0.03642910537719726,0.03698140239715576,0.0378296529006958,"[0.03582668685913086, 0.0357314567565918, 0.03642675018310547, 0.03567411041259766, 0.035661823272705076, 0.036544513702392575, 0.03584204864501953, 0.03590655899047852, 
0.03582463836669922, 0.03614003372192383, 0.03565465545654297, 0.03576934432983398, 0.03553484725952148, 0.03558911895751953, 0.035550209045410154, 0.03579596710205078, 0.036209663391113284, 0.03594854354858398, 0.03668070220947266, 0.03613183975219727, 0.03598745727539063, 0.03573350524902344, 0.03568435287475586, 0.03572633743286133, 0.03561471939086914, 0.035707904815673826, 0.035507198333740234, 0.03566694259643555, 0.03558707046508789, 0.03631206512451172, 0.03583180618286133, 0.035373054504394534, 0.03588198471069336, 0.03548364639282227, 0.035707904815673826, 0.03730636978149414, 0.03718963241577149, 0.037070846557617186, 0.03559936141967773, 0.03838771057128906, 0.03774156951904297, 0.036377601623535157, 0.03556966400146484, 0.03718860626220703, 0.03737190246582031, 0.036029441833496094, 0.03559219360351563, 0.03553792190551758, 0.03560857772827149, 0.03543142318725586, 0.0355860481262207, 0.03553996658325195, 0.0355676155090332, 0.035643390655517575, 0.03544780731201172, 0.03558297729492187, 0.035579902648925785, 0.03556454467773437, 0.035509246826171875, 0.03529830551147461, 0.035149822235107424, 0.03505152130126953, 0.035517440795898435, 0.035850238800048825, 0.03547955322265625, 0.03521638488769531, 0.03554611206054688, 0.0357386245727539, 0.03669606399536133, 0.03581849670410156, 0.03514470291137695, 0.03560345458984375, 0.03564441680908203, 0.03551334381103516, 0.035571712493896485, 0.03544473648071289, 0.03560755157470703, 0.0349378547668457, 0.03580825424194336, 0.03558297729492187, 0.03567001724243164, 0.03561574554443359, 0.03548057556152344, 0.03550207901000976, 0.035310592651367184, 0.03544985580444336, 0.035606529235839846, 0.03543654251098633, 0.035507198333740234, 0.035381248474121094, 0.0354856948852539, 0.03520000076293945, 0.03531980895996094, 0.03554304122924805, 0.03552870559692383, 0.03555123138427734, 0.0354785270690918, 0.03623014450073242, 0.03543142318725586, 0.035438591003417966, 0.035451904296875, 0.035460094451904296, 0.03545804977416992, 0.03546623992919922, 0.0354334716796875, 0.0353259506225586, 0.03544063949584961, 0.03553792190551758, 0.03549900817871094, 0.035460094451904296, 0.035197952270507815, 0.03503513717651367, 0.03530547332763672, 0.034931713104248044, 0.03492659378051758, 0.03523583984375, 0.03537715148925781, 0.03521945571899414, 0.03530137634277344, 0.03529011154174805, 0.03534643173217773, 0.03538739013671875, 0.03536896133422852, 0.03537408065795898, 0.03592704010009766, 0.035590145111083986, 0.035591167449951173, 0.03540787124633789, 0.03526144027709961, 0.03542118453979492, 0.03544780731201172, 0.03548262405395508, 0.03507199859619141, 0.0354334716796875, 0.03538227081298828, 0.03546828842163086, 0.03517337417602539, 0.03558707046508789, 0.035471359252929685, 0.035509246826171875, 0.03540991973876953, 0.0355153923034668, 0.035364864349365234, 0.035501056671142575, 0.03544780731201172, 0.03552153778076172, 0.03542732620239258, 0.035178497314453126, 0.03527884674072266, 0.035555328369140625, 0.036334590911865236, 0.035620864868164064, 0.0354703369140625, 0.03545395278930664, 0.03547545623779297, 0.03549184036254883, 0.03548672103881836, 0.03550515365600586, 0.03548057556152344, 0.03553996658325195, 0.035369983673095705, 0.035560447692871096, 0.035625984191894534, 0.03534950256347656, 0.035490814208984374, 0.035576831817626955, 0.0358656005859375, 0.03545907211303711, 0.03518668746948242, 0.035573760986328126, 0.03551232147216797, 0.03550207901000976, 0.035533824920654294, 0.03652505493164063, 0.035544063568115236, 0.035197952270507815, 
0.03537100982666016, 0.035588096618652344, 0.03568742370605469, 0.03614310455322266, 0.03551129531860352, 0.03539865493774414, 0.035487743377685545, 0.03542323303222656, 0.03501465606689453, 0.03538022232055664, 0.035356670379638674, 0.03540991973876953, 0.03530649566650391, 0.03592806243896484, 0.03528192138671875, 0.03568435287475586, 0.03554099273681641, 0.03532185745239258, 0.035266559600830076, 0.035043327331542966, 0.03575807952880859, 0.03682099151611328, 0.03557068634033203, 0.035574783325195314, 0.035040256500244144, 0.03608883285522461, 0.03604991912841797, 0.03582566452026367, 0.03543961715698242, 0.035315711975097655, 0.035122177124023435, 0.03512115097045899, 0.035119102478027346, 0.03511705780029297, 0.03550207901000976, 0.03537715148925781, 0.03548057556152344, 0.03523583984375, 0.0350832633972168, 0.03552972793579102, 0.03559833526611328, 0.035388416290283206, 0.035353599548339845, 0.03530649566650391, 0.03525836944580078, 0.035490814208984374, 0.0350832633972168, 0.035399681091308595, 0.03550822448730469, 0.03543142318725586, 0.035504127502441404, 0.03499622344970703, 0.035280895233154294, 0.0350904312133789, 0.035542015075683595, 0.03592499160766602, 0.03597721481323242, 0.035507198333740234, 0.035639297485351565, 0.03565363311767578, 0.03553484725952148, 0.03545087814331055, 0.035536895751953124, 0.03551027297973633, 0.03563417434692383, 0.035509246826171875, 0.035538944244384765, 0.035522560119628906, 0.035507198333740234, 0.03517542266845703, 0.035062782287597655, 0.03542015838623047, 0.03524710464477539, 0.035312641143798826, 0.0349378547668457, 0.035335166931152344, 0.03630080032348633, 0.035672065734863284, 0.0356864013671875, 0.03630284881591797, 0.03569049453735352, 0.03556966400146484, 0.035388416290283206, 0.03556454467773437, 0.035544063568115236, 0.03560038375854492, 0.03545600128173828, 0.035081214904785156, 0.03513139343261719, 0.03544063949584961, 0.03558399963378906, 0.035639297485351565, 0.0355491828918457, 0.035555328369140625, 0.03550003051757813, 0.03502796936035156, 0.03491430282592774, 0.035151870727539065, 0.03533926391601563, 0.03534438323974609, 0.0352542724609375, 0.03576422500610352, 0.035588096618652344, 0.035329025268554685, 0.03815935897827148, 0.03728486251831055, 0.03582259368896484, 0.03561574554443359, 0.036528129577636716, 0.03575296020507813, 0.03546828842163086, 0.035509246826171875, 0.03548160171508789, 0.035574783325195314, 0.03651071929931641, 0.037394432067871096, 0.03628543853759766, 0.0361267204284668, 0.03584000015258789, 0.035566593170166014, 0.03533824157714844, 0.03557580947875977, 0.03554816055297851, 0.03558092880249023, 0.03547750473022461, 0.03551334381103516, 0.03558297729492187, 0.03559731292724609, 0.03550515365600586, 0.03597824096679687, 0.03620556640625, 0.03544985580444336, 0.035716094970703126, 0.03563520050048828, 0.03545804977416992, 0.035198974609375, 0.035389438629150394, 0.03645951843261719, 0.03513958358764648, 0.035081214904785156, 0.035871742248535156, 0.03821670532226563, 0.03719782257080078, 0.0368455696105957, 0.036977664947509765, 0.03684966278076172, 0.03515084838867188, 0.03529830551147461, 0.03505561447143555, 0.035046398162841795, 0.035310592651367184, 0.03554611206054688, 0.035492862701416016, 0.03547340774536133, 0.035560447692871096, 0.03575603103637695, 0.03562700653076172, 0.03556249618530274, 0.03539763259887695, 0.03530649566650391, 0.036211711883544925, 0.0359741439819336, 0.03561164855957031, 0.035783679962158206, 0.035279872894287106, 0.035555328369140625, 0.03565055847167969, 
0.03563008117675781, 0.03546112060546875, 0.03615846252441406, 0.03569561767578125, 0.03509862518310547, 0.03552665710449219, 0.03686707305908203, 0.035591167449951173, 0.035432449340820314, 0.03560755157470703, 0.03562700653076172, 0.03548876953125, 0.03542630386352539, 0.03522355270385742, 0.0355676155090332, 0.03522048187255859, 0.03541708755493164, 0.03517542266845703, 0.03522048187255859, 0.03573452758789063, 0.03529830551147461, 0.035609600067138675, 0.03610214233398437, 0.03587686538696289, 0.03552972793579102, 0.035604480743408204, 0.035381248474121094, 0.035079166412353514, 0.035019775390625, 0.035093505859375, 0.035552257537841796, 0.035296257019042966, 0.03529523086547852, 0.03555737686157227, 0.035600414276123045, 0.03784806442260742, 0.03711385726928711, 0.036857856750488284, 0.03562803268432617, 0.03570278549194336, 0.035604480743408204, 0.03584716796875, 0.036982784271240236, 0.03574476623535156, 0.0357283821105957, 0.035970046997070314, 0.03609088134765625, 0.03553177642822265, 0.035389438629150394, 0.03518975830078125, 0.03558195114135742, 0.03572531127929687, 0.03552972793579102, 0.03549593734741211, 0.03552972793579102, 0.035504127502441404, 0.03530649566650391, 0.03499827194213867, 0.03498291015625, 0.03499827194213867, 0.03553177642822265, 0.03543040084838867, 0.0354252815246582, 0.035507198333740234, 0.03551232147216797, 0.035492862701416016, 0.03555737686157227, 0.035542015075683595, 0.03539971160888672, 0.03511907196044922, 0.03553792190551758, 0.03548057556152344, 0.035590145111083986, 0.03541299057006836, 0.03562803268432617, 0.03640627288818359, 0.03551334381103516, 0.03546726226806641, 0.03548876953125, 0.03548364639282227, 0.03542937469482422, 0.03547955322265625, 0.03560345458984375, 0.03555123138427734, 0.03558911895751953, 0.035236862182617186, 0.03679846572875976, 0.03740979385375977, 0.03693772888183594, 0.03772108840942383, 0.03757875061035156, 0.037498878479003905, 0.037144577026367184, 0.0354785270690918, 0.037026817321777344, 0.03620454406738281, 0.03538739013671875, 0.03577241516113281, 0.03685478210449219, 0.03587686538696289, 0.03600281524658203, 0.03679846572875976, 0.0369356803894043, 0.03577241516113281, 0.03565670394897461, 0.03630284881591797, 0.035225601196289064, 0.03570483016967774, 0.03554611206054688, 0.03553792190551758, 0.03659161758422851, 0.03697971343994141, 0.03690086364746094, 0.035737598419189456, 0.03700121688842774, 0.03693670272827149, 0.03718348693847656, 0.03573350524902344, 0.03568537521362305, 0.03609907150268555, 0.03577753448486328, 0.03557273483276367, 0.03563827133178711, 0.035883007049560545, 0.03580825424194336, 0.035800064086914066, 0.036117504119873044, 0.03604991912841797, 0.03566387176513672, 0.03560755157470703, 0.035746814727783204, 0.036144126892089845, 0.035773441314697264, 0.03556966400146484, 0.0356864013671875, 0.035280895233154294, 0.03510988616943359, 0.035192832946777344, 0.036070400238037106, 0.0363397102355957, 0.03622604751586914, 0.03621785736083984, 0.03523379135131836, 0.03508019256591797, 0.0359628791809082, 0.03554099273681641, 0.035266559600830076, 0.036087806701660154, 0.0368455696105957, 0.03921100616455078, 0.035604480743408204, 0.03540991973876953, 0.036324352264404294, 0.03560140609741211, 0.03540172958374024, 0.036279296875, 0.035871742248535156, 0.035855358123779296, 0.03582156753540039, 0.035860481262207033, 0.035399681091308595, 0.03725823974609375, 0.03743743896484375, 0.03575193786621094, 0.035192832946777344, 0.035568641662597655, 0.03711180877685547, 0.036316158294677735, 
0.035716094970703126, 0.036574207305908206, 0.035683326721191407, 0.035743743896484374, 0.035576831817626955, 0.036316158294677735, 0.03689574432373047, 0.035697662353515625, 0.036450302124023434, 0.036324352264404294, 0.03605913543701172, 0.036383743286132815, 0.035707904815673826, 0.03559628677368164, 0.03560243225097656, 0.03599462509155273, 0.03552054214477539, 0.03523273468017578, 0.035166206359863283, 0.035487743377685545, 0.03605196762084961, 0.035727359771728515, 0.035860481262207033, 0.03625574493408203, 0.03724390411376953, 0.03542323303222656, 0.03562496185302735, 0.03563520050048828, 0.035675136566162106, 0.0360079345703125, 0.03585228729248047, 0.035609600067138675, 0.03539148712158203, 0.03522457504272461, 0.035591167449951173, 0.0350750732421875, 0.03559833526611328, 0.03532287979125977, 0.03568230438232422, 0.0367718391418457, 0.03599257659912109, 0.03561062240600586, 0.03568947219848633, 0.03570380783081055, 0.035576831817626955, 0.03560755157470703, 0.03556966400146484, 0.035402751922607424, 0.03568947219848633, 0.035299327850341795, 0.035727359771728515, 0.03561369705200195, 0.03569561767578125, 0.03570175933837891, 0.035678207397460936, 0.035683326721191407, 0.03589017486572266, 0.03558911895751953, 0.03525632095336914, 0.0351539192199707, 0.0352542724609375, 0.03569561767578125, 0.03688550567626953, 0.03863040161132812, 0.03610009765625, 0.0356577262878418, 0.03555942535400391, 0.03578572845458984, 0.03559731292724609, 0.035544063568115236, 0.03557273483276367, 0.03606220626831055, 0.037784576416015625, 0.037923839569091795, 0.03693772888183594, 0.03672371292114258, 0.03595775985717774, 0.03683327865600586, 0.036357120513916014, 0.03577241516113281, 0.036296703338623046, 0.035388416290283206, 0.035490814208984374, 0.0353177604675293, 0.03605811309814453, 0.03562393569946289, 0.03543142318725586, 0.0350904312133789, 0.03498905563354492, 0.03510988616943359, 0.035550209045410154, 0.03609395217895508, 0.03602739334106445, 0.03589120101928711, 0.037015552520751956, 0.03640627288818359, 0.035643390655517575, 0.03565363311767578, 0.03563212966918945, 0.03569356918334961, 0.03566080093383789, 0.03561779022216797, 0.03557580947875977, 0.035471359252929685, 0.03513753509521484, 0.03570278549194336, 0.035555328369140625, 0.03568537521362305, 0.035517440795898435, 0.035272705078125, 0.03558297729492187, 0.035593215942382815, 0.035776512145996094, 0.035053569793701174, 0.03506175994873047, 0.03512319946289062, 0.035416065216064455, 0.03513139343261719, 0.0356126708984375]",tokens/s,27.996126947076096,,,,,,,, -4bit-awq-gemv-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,mistralai/Mistral-7B-v0.1,mistralai/Mistral-7B-v0.1,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, 
worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run - report = scenario.run(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 105, in run - _ = backend.generate(self.inputs, self.config.generate_kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context - return func(*args, **kwargs) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 400, in generate - return self.pretrained_model.generate(**inputs, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context - return func(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 1914, in generate - result = self._sample( - File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2651, in _sample - outputs = self( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mistral/modeling_mistral.py"", line 1200, in forward - outputs = self.model( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mistral/modeling_mistral.py"", line 976, in forward - layer_outputs = decoder_layer( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mistral/modeling_mistral.py"", line 718, in forward - hidden_states, self_attn_weights, present_key_value = self.self_attn( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mistral/modeling_mistral.py"", line 613, in forward - query_states = self.q_proj(hidden_states) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context - return 
func(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/gemv.py"", line 162, in forward - assert AWQ_INSTALLED, ( -AssertionError: AWQ kernels could not be loaded. Please install them from https://github.com/casper-hansen/AutoAWQ_kernels - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemv-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,google/gemma-7b,google/gemma-7b,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 304, in hf_raise_for_status - response.raise_for_status() - File ""/usr/local/lib/python3.10/dist-packages/requests/models.py"", line 1024, in raise_for_status - raise HTTPError(http_error_msg, response=self) -requests.exceptions.HTTPError: 403 Client Error: Forbidden for url: https://huggingface.co/google/gemma-7b/resolve/main/config.json - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1722, in _get_metadata_or_catch_error - metadata = get_hf_file_metadata(url=url, proxies=proxies, timeout=etag_timeout, headers=headers) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1645, in get_hf_file_metadata - r = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 372, in _request_wrapper - response = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 396, in _request_wrapper - hf_raise_for_status(response) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 367, in hf_raise_for_status - raise HfHubHTTPError(message, response=response) from e -huggingface_hub.utils._errors.HfHubHTTPError: (Request ID: Root=1-6694816c-6ff1b14d48c00850208281c1;3413ef81-8cc1-4aa5-9667-1b9ab447b385) - -403 Forbidden: Please enable access to public gated repositories in your fine-grained token settings to view this repository.. -Cannot access content at: https://huggingface.co/google/gemma-7b/resolve/main/config.json. -If you are trying to create or update content,make sure you have a token with the `write` role. 
- -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1221, in hf_hub_download - return _hf_hub_download_to_cache_dir( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1325, in _hf_hub_download_to_cache_dir - _raise_on_head_call_error(head_call_error, force_download, local_files_only) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1826, in _raise_on_head_call_error - raise LocalEntryNotFoundError( -huggingface_hub.utils._errors.LocalEntryNotFoundError: An error happened while trying to locate the file on the Hub and we cannot find the requested files in the local cache. Please check your connection and try again or make sure your Internet connection is on. - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 445, in cached_file - raise EnvironmentError( -OSError: We couldn't connect to 'https://huggingface.co' to load this file, couldn't find it in the cached files and it looks like google/gemma-7b is not the path to a directory containing a file named config.json. -Checkout your internet connection or see how to run the library in offline mode at 'https://huggingface.co/docs/transformers/installation#offline-mode'. 
- -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemv-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,Qwen/Qwen1.5-1.8B,Qwen/Qwen1.5-1.8B,cuda,0,42,,,True,,,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,qwen2,MB,1870.442496,2926.051328,0.0,2340.421632,2285.568,s,1,8.6807001953125,8.6807001953125,0.0,8.6807001953125,8.6807001953125,8.6807001953125,8.6807001953125,[8.6807001953125],,kWh,2.232381517640584e-05,1.2219121859755752e-05,2.90147454340417e-05,6.355768247020329e-05,,MB,1626.48064,3305.63584,0.0,2659.188736,2578.857984,s,10,0.32739574432373053,0.032739574432373054,2.8607856957292097e-05,0.032739103317260744,0.032772690582275395,0.03278098526000977,0.032787621002197266,"[0.032672576904296875, 0.03277084732055664, 0.03273855972290039, 0.032739646911621095, 0.03272880172729492, 0.03274409484863281, 0.032744289398193356, 0.0327305908203125, 0.032789279937744144, 0.032737056732177736]",tokens/s,7819.2830676157455,kWh,3.874237640345493e-07,2.1228842525872347e-07,1.8077036319542769e-06,2.4074158212475497e-06,tokens/kWh,106338089.88899057,MB,1647.751168,3305.63584,0.0,2659.188736,2578.860544,s,10,13.083309326171875,1.3083309326171875,0.007979860104445145,1.307180847167969,1.321175927734375,1.322069775390625,1.322784853515625,"[1.2997330322265626, 1.3029073486328124, 1.320977294921875, 1.308014892578125, 1.311153564453125, 1.3031741943359374, 1.3104853515625, 1.29755322265625, 1.3063468017578126, 1.322963623046875]",tokens/s,48.15295459993038,kWh,1.598921268394991e-05,8.759414962175048e-06,3.187846220583311e-05,5.662708985195805e-05,tokens/kWh,1112541.7210155572,,s,630,13.08163173484803,0.020764494817219082,0.0004027650281508144,0.020592639923095703,0.02125619125366211,0.021366988849639893,0.022245949115753175,"[0.020584447860717774, 0.020537343978881836, 0.020412416458129884, 0.020381696701049806, 0.0204902400970459, 0.0204769287109375, 0.020592639923095703, 0.020558847427368163, 0.020552703857421875, 0.020531200408935548, 0.020599807739257812, 0.020486143112182616, 0.02048307228088379, 0.020468736648559572, 0.02043903923034668, 0.02038374328613281, 0.020485120773315428, 0.020563968658447264, 0.02047283172607422, 0.02046976089477539, 0.02050048065185547, 0.021303295135498047, 0.021113855361938477, 0.020489215850830078, 0.020512767791748047, 0.020593664169311524, 0.020592639923095703, 0.02062848091125488, 0.020528127670288086, 0.02059775924682617, 0.020493312835693358, 0.020368383407592772, 0.020552703857421875, 0.02057318305969238, 0.020512767791748047, 0.0206878719329834, 0.020591615676879883, 0.020708351135253905, 0.020485120773315428, 0.020379648208618165, 0.020520959854125977, 0.02062950325012207, 0.020617216110229493, 0.020564992904663085, 0.020547584533691408, 0.020591615676879883, 0.02049126434326172, 0.020534271240234374, 0.020555776596069338, 0.020787200927734374, 0.02109644889831543, 0.020800512313842775, 0.022219776153564453, 0.021154815673828126, 0.021369855880737306, 0.02122137641906738, 0.02084454345703125, 0.020231168746948244, 
0.020503551483154296, 0.020446207046508787, 0.020520959854125977, 0.020593664169311524, 0.020538368225097657, 0.020632575988769532, 0.020612096786499022, 0.02046156883239746, 0.0204902400970459, 0.020786176681518553, 0.021195775985717775, 0.021127168655395507, 0.02029465675354004, 0.020345855712890625, 0.020523008346557618, 0.020531200408935548, 0.02045849609375, 0.020593664169311524, 0.020569087982177735, 0.02047385597229004, 0.02045849609375, 0.020542463302612304, 0.02060697555541992, 0.020577280044555665, 0.020908031463623047, 0.020981760025024415, 0.021328895568847657, 0.021202943801879884, 0.02108518409729004, 0.022132736206054687, 0.020904960632324218, 0.020548608779907225, 0.020531200408935548, 0.020550655364990233, 0.020564992904663085, 0.021072895050048827, 0.021157888412475585, 0.021106687545776368, 0.020969472885131835, 0.02082099151611328, 0.020780031204223632, 0.02067046356201172, 0.02024038314819336, 0.020612096786499022, 0.020582399368286132, 0.021173248291015623, 0.020563968658447264, 0.020609024047851563, 0.020529151916503906, 0.02049945640563965, 0.020570112228393556, 0.020528127670288086, 0.020454399108886717, 0.020551679611206054, 0.020591615676879883, 0.020571136474609376, 0.020509695053100584, 0.020505599975585938, 0.02069196891784668, 0.02051584053039551, 0.020487167358398437, 0.020552703857421875, 0.020520959854125977, 0.020246528625488282, 0.02046668815612793, 0.02041753578186035, 0.02065203285217285, 0.020488191604614257, 0.020814847946166993, 0.020544511795043945, 0.020380672454833985, 0.020700159072875975, 0.020455423355102538, 0.02059878349304199, 0.020463615417480468, 0.020271104812622072, 0.02042572784423828, 0.020549631118774413, 0.020758527755737305, 0.02388787269592285, 0.02146201515197754, 0.021215232849121093, 0.021183488845825195, 0.020569087982177735, 0.020568063735961914, 0.0214466552734375, 0.021347328186035155, 0.021122047424316406, 0.02126540756225586, 0.021163007736206055, 0.02044927978515625, 0.02048409652709961, 0.02047488021850586, 0.02067865562438965, 0.020832256317138673, 0.020342784881591795, 0.020553728103637696, 0.020525056838989256, 0.020497407913208008, 0.020402175903320312, 0.020571136474609376, 0.020550655364990233, 0.020578304290771485, 0.020564992904663085, 0.020745216369628908, 0.02119987106323242, 0.021134336471557616, 0.021140480041503908, 0.021111808776855468, 0.02123366355895996, 0.021485567092895508, 0.020560895919799805, 0.020521984100341797, 0.020626432418823244, 0.02064793586730957, 0.02067046356201172, 0.021308416366577147, 0.02124185562133789, 0.02123366355895996, 0.021939199447631837, 0.022709247589111328, 0.021597183227539063, 0.021349376678466796, 0.021151744842529296, 0.021227519989013673, 0.02131865692138672, 0.021219327926635743, 0.021193727493286133, 0.021160959243774414, 0.021177343368530274, 0.021204992294311522, 0.02064896011352539, 0.02047385597229004, 0.020388864517211915, 0.020531200408935548, 0.020497407913208008, 0.02064793586730957, 0.020513792037963868, 0.02049228858947754, 0.020529151916503906, 0.02067148780822754, 0.02046463966369629, 0.020374528884887694, 0.020549631118774413, 0.020610048294067384, 0.020535295486450195, 0.020478975296020507, 0.02069196891784668, 0.020685823440551757, 0.02059878349304199, 0.020822015762329102, 0.021445632934570313, 0.021180416107177736, 0.021186559677124024, 0.020587520599365236, 0.02062335968017578, 0.020876287460327148, 0.021358591079711914, 0.021123071670532227, 0.021195775985717775, 0.02045747184753418, 0.020536319732666015, 0.020700159072875975, 0.02066431999206543, 
0.02058956718444824, 0.020941823959350587, 0.022615039825439453, 0.021053440093994142, 0.021279743194580078, 0.021223424911499023, 0.021187583923339845, 0.021115903854370118, 0.02125619125366211, 0.021113855361938477, 0.021123071670532227, 0.02104729652404785, 0.02060697555541992, 0.020624383926391602, 0.020559871673583984, 0.020526079177856444, 0.020714496612548827, 0.020358144760131838, 0.020454399108886717, 0.020528127670288086, 0.02062233543395996, 0.020523008346557618, 0.020479999542236327, 0.020543487548828124, 0.020599807739257812, 0.020596736907958983, 0.020582399368286132, 0.020530176162719727, 0.02059775924682617, 0.020714496612548827, 0.020779008865356444, 0.020626432418823244, 0.020502527236938475, 0.021185535430908203, 0.021113855361938477, 0.021194751739501954, 0.021123071670532227, 0.020564992904663085, 0.020526079177856444, 0.02131046485900879, 0.020548608779907225, 0.020624383926391602, 0.020428800582885744, 0.02043801689147949, 0.02045849609375, 0.020759552001953126, 0.02068070411682129, 0.020580352783203124, 0.02083839988708496, 0.02062848091125488, 0.02058956718444824, 0.020339712142944336, 0.0204769287109375, 0.02063871955871582, 0.020521984100341797, 0.020579328536987306, 0.020554752349853517, 0.020519935607910156, 0.020497407913208008, 0.020547584533691408, 0.020501504898071288, 0.0206561279296875, 0.020594688415527345, 0.020572160720825194, 0.020756479263305663, 0.020768768310546876, 0.02225663948059082, 0.021377023696899415, 0.02311680030822754, 0.022395904541015626, 0.021383167266845703, 0.02127359962463379, 0.02048409652709961, 0.020612096786499022, 0.020562944412231447, 0.020552703857421875, 0.02050764846801758, 0.02053222465515137, 0.02064691162109375, 0.020625408172607423, 0.020855808258056642, 0.021358591079711914, 0.02124492835998535, 0.021192703247070312, 0.021217279434204102, 0.020677631378173827, 0.02062745666503906, 0.020602880477905275, 0.020557823181152343, 0.020611072540283205, 0.020568063735961914, 0.020562944412231447, 0.020551679611206054, 0.0206376953125, 0.020376575469970702, 0.020462591171264647, 0.020518911361694335, 0.020505599975585938, 0.020502527236938475, 0.020343807220458983, 0.020510719299316405, 0.020518911361694335, 0.020516864776611327, 0.020528127670288086, 0.020544511795043945, 0.02049126434326172, 0.020525056838989256, 0.0204902400970459, 0.0204083194732666, 0.020544511795043945, 0.020545536041259766, 0.02046771240234375, 0.020574207305908202, 0.020620288848876952, 0.020478975296020507, 0.02045132827758789, 0.02050048065185547, 0.02052403259277344, 0.020533248901367186, 0.020711423873901368, 0.020527103424072265, 0.020553728103637696, 0.020527103424072265, 0.020559871673583984, 0.02039193534851074, 0.020516864776611327, 0.020527103424072265, 0.02050662422180176, 0.020692991256713866, 0.020436992645263673, 0.02049843215942383, 0.02049843215942383, 0.020495359420776366, 0.02052403259277344, 0.020880384445190428, 0.020932607650756836, 0.0206878719329834, 0.021106687545776368, 0.02109542465209961, 0.02104729652404785, 0.020518911361694335, 0.020493312835693358, 0.021323776245117186, 0.0219289608001709, 0.021212160110473634, 0.02110873603820801, 0.021171199798583985, 0.021386240005493166, 0.020997119903564454, 0.021154815673828126, 0.021141504287719725, 0.020412416458129884, 0.020265983581542968, 0.02040115165710449, 0.021444608688354492, 0.021201919555664063, 0.02064896011352539, 0.020582399368286132, 0.02088755226135254, 0.021082111358642578, 0.021183488845825195, 0.02127872085571289, 0.021190656661987304, 0.020617216110229493, 
0.020809728622436522, 0.020454399108886717, 0.020410367965698242, 0.020793344497680662, 0.020900863647460938, 0.020587520599365236, 0.020426752090454102, 0.02061311912536621, 0.0208855037689209, 0.02122035217285156, 0.02127052879333496, 0.02130636787414551, 0.020668415069580077, 0.02104422378540039, 0.020572160720825194, 0.02068377685546875, 0.020577280044555665, 0.021556224822998047, 0.02105446434020996, 0.020582399368286132, 0.020574207305908202, 0.02070425605773926, 0.020590591430664062, 0.020574207305908202, 0.02107084846496582, 0.021587968826293946, 0.021364736557006835, 0.021210111618041993, 0.021218303680419923, 0.021158912658691405, 0.021127168655395507, 0.02122547149658203, 0.02059775924682617, 0.021195775985717775, 0.021223424911499023, 0.020726783752441406, 0.020641792297363282, 0.020641792297363282, 0.02036735916137695, 0.020468736648559572, 0.020572160720825194, 0.020537343978881836, 0.02047283172607422, 0.020539392471313478, 0.02041548728942871, 0.020510719299316405, 0.020539392471313478, 0.020715520858764647, 0.020428800582885744, 0.02058956718444824, 0.020537343978881836, 0.020642816543579103, 0.020609024047851563, 0.020521984100341797, 0.020529151916503906, 0.020582399368286132, 0.020546560287475587, 0.021146623611450196, 0.021142528533935546, 0.02047590446472168, 0.020355072021484375, 0.02033459281921387, 0.020488191604614257, 0.020884479522705078, 0.020537343978881836, 0.020377599716186523, 0.020569087982177735, 0.02047488021850586, 0.020766719818115235, 0.020510719299316405, 0.02045849609375, 0.020520959854125977, 0.020572160720825194, 0.020961280822753905, 0.021149696350097655, 0.021111808776855468, 0.021111808776855468, 0.02036735916137695, 0.02042163276672363, 0.020561920166015626, 0.020455423355102538, 0.020331520080566406, 0.020502527236938475, 0.02059775924682617, 0.020534271240234374, 0.020495359420776366, 0.020446207046508787, 0.020641792297363282, 0.020537343978881836, 0.020470783233642577, 0.02064691162109375, 0.020503551483154296, 0.02046976089477539, 0.020168703079223634, 0.020454399108886717, 0.0204400634765625, 0.02067148780822754, 0.02050662422180176, 0.020533248901367186, 0.0204902400970459, 0.020612096786499022, 0.02062745666503906, 0.02048409652709961, 0.020592639923095703, 0.02058956718444824, 0.02085785675048828, 0.021181440353393553, 0.020488191604614257, 0.020588544845581053, 0.020548608779907225, 0.020559871673583984, 0.02065100860595703, 0.020792320251464845, 0.020626432418823244, 0.020161535263061522, 0.020557823181152343, 0.020518911361694335, 0.02062233543395996, 0.020946943283081054, 0.02065407943725586, 0.020479999542236327, 0.020684799194335936, 0.0204769287109375, 0.020580352783203124, 0.020444160461425782, 0.020525056838989256, 0.020503551483154296, 0.02049945640563965, 0.020551679611206054, 0.020540416717529295, 0.020517887115478514, 0.020535295486450195, 0.020502527236938475, 0.020586496353149415, 0.02103910446166992, 0.022166528701782227, 0.021353471755981446, 0.02123673629760742, 0.021342208862304687, 0.021214208602905273, 0.020273151397705077, 0.02045132827758789, 0.020560895919799805, 0.020516864776611327, 0.020582399368286132, 0.020566015243530272, 0.020372480392456056, 0.020331520080566406, 0.02043289566040039, 0.0204769287109375, 0.020550655364990233, 0.020554752349853517, 0.020974592208862306, 0.02124492835998535, 0.021214208602905273, 0.02127257537841797, 0.021072895050048827, 0.020526079177856444, 0.02052403259277344, 0.021349376678466796, 0.02123980712890625, 0.02106777572631836, 0.020923391342163086, 0.020447231292724608, 
0.020530176162719727, 0.02045952033996582, 0.020299776077270508, 0.020609024047851563, 0.020626432418823244, 0.02045952033996582, 0.020798463821411133, 0.021338111877441408, 0.021173248291015623, 0.020535295486450195, 0.020504575729370117, 0.02061414337158203, 0.020541439056396483, 0.021032960891723632, 0.02067148780822754, 0.020560895919799805, 0.020516864776611327, 0.020649984359741212, 0.020603904724121092, 0.020676607131958007, 0.021145599365234375, 0.02171801567077637, 0.02231808090209961, 0.02125619125366211, 0.021097471237182617, 0.021209087371826172, 0.021411840438842773, 0.02124799919128418, 0.02127462387084961, 0.0212992000579834, 0.021331968307495116, 0.02062848091125488, 0.020523008346557618, 0.02063667106628418, 0.0214466552734375, 0.021424127578735352, 0.020899839401245117, 0.02062335968017578, 0.02063871955871582, 0.020831232070922853, 0.0212992000579834, 0.02163199996948242, 0.021359615325927735, 0.02126335906982422, 0.021176319122314453, 0.02128691291809082, 0.021368831634521485, 0.021219327926635743, 0.021485567092895508, 0.0218920955657959, 0.020766719818115235, 0.020553728103637696, 0.02066534423828125, 0.020618240356445314, 0.020711423873901368, 0.020381696701049806, 0.020925439834594727, 0.021187583923339845, 0.021238784790039062, 0.020558847427368163, 0.020553728103637696, 0.020563968658447264, 0.020369407653808593, 0.02059775924682617, 0.02119987106323242, 0.02123263931274414, 0.021177343368530274, 0.020979711532592774, 0.020741119384765624, 0.02087014389038086, 0.021227519989013673, 0.02128179168701172, 0.021174272537231444, 0.02069196891784668, 0.020508672714233397, 0.020523008346557618, 0.020559871673583984, 0.020508672714233397, 0.02030182456970215, 0.0212490234375]",tokens/s,48.15912974539327,,,,,,,, -4bit-awq-gemv-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,meta-llama/Meta-Llama-3-8B,meta-llama/Meta-Llama-3-8B,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run - report = scenario.run(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 105, in run - _ = backend.generate(self.inputs, self.config.generate_kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context - return func(*args, **kwargs) - File 
""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 400, in generate - return self.pretrained_model.generate(**inputs, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context - return func(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 1914, in generate - result = self._sample( - File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2651, in _sample - outputs = self( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1174, in forward - outputs = self.model( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 978, in forward - layer_outputs = decoder_layer( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 718, in forward - hidden_states, self_attn_weights, present_key_value = self.self_attn( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 614, in forward - query_states = self.q_proj(hidden_states) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context - return func(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/gemv.py"", line 162, in forward - assert AWQ_INSTALLED, ( -AssertionError: AWQ kernels could not be loaded. 
Please install them from https://github.com/casper-hansen/AutoAWQ_kernels - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemv-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,microsoft/phi-1_5,microsoft/phi-1_5,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run - report = scenario.run(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 105, in run - _ = backend.generate(self.inputs, self.config.generate_kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context - return func(*args, **kwargs) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 400, in generate - return self.pretrained_model.generate(**inputs, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context - return func(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 1914, in generate - result = self._sample( - File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2651, in _sample - outputs = self( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/phi/modeling_phi.py"", line 1268, in forward - outputs = self.model( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/phi/modeling_phi.py"", line 1064, in forward - layer_outputs = decoder_layer( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 
1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/phi/modeling_phi.py"", line 804, in forward - attn_outputs, self_attn_weights, present_key_value = self.self_attn( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/phi/modeling_phi.py"", line 666, in forward - query_states = self.q_proj(hidden_states) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context - return func(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/gemv.py"", line 162, in forward - assert AWQ_INSTALLED, ( -AssertionError: AWQ kernels could not be loaded. Please install them from https://github.com/casper-hansen/AutoAWQ_kernels - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemv-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,togethercomputer/RedPajama-INCITE-Base-3B-v1,togethercomputer/RedPajama-INCITE-Base-3B-v1,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run - report = scenario.run(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 105, in run - _ = backend.generate(self.inputs, self.config.generate_kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context - return func(*args, **kwargs) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 400, in generate - return self.pretrained_model.generate(**inputs, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context - return func(*args, **kwargs) - File 
""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 1914, in generate - result = self._sample( - File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2651, in _sample - outputs = self( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1097, in forward - outputs = self.gpt_neox( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 988, in forward - outputs = layer( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 753, in forward - attention_layer_outputs = self.attention( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 545, in forward - query, key, value, present = self._attn_projections_and_rope( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 224, in _attn_projections_and_rope - qkv = self.query_key_value(hidden_states) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context - return func(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/gemv.py"", line 162, in forward - assert AWQ_INSTALLED, ( -AssertionError: AWQ kernels could not be loaded. 
Please install them from https://github.com/casper-hansen/AutoAWQ_kernels - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemv-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,Qwen/Qwen2-beta-72B,Qwen/Qwen2-beta-72B,cuda,0,42,,,True,,,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run - report = scenario.run(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run - self.run_model_loading_tracking(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking - backend.load() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load - self.load_transformers_model() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model - self.load_transformers_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights - self.load_transformers_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained - self.pretrained_model = self.automodel_loader.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4034, in from_pretrained - dispatch_model(model, **device_map_kwargs) - File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 494, in dispatch_model - model.to(device) - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 2905, in to - return super().to(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1174, in to - return self._apply(convert) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply - module._apply(fn) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply - module._apply(fn) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply - module._apply(fn) - 
[Previous line repeated 2 more times] - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 854, in _apply - self._buffers[key] = fn(buf) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1160, in convert - return t.to( -torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 96.00 MiB. GPU 0 has a total capacity of 22.18 GiB of which 68.50 MiB is free. Process 125608 has 22.11 GiB memory in use. Of the allocated memory 21.86 GiB is allocated by PyTorch, and 10.74 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) - -",qwen2,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemv-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,meta-llama/Llama-2-70b-hf,meta-llama/Llama-2-70b-hf,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - self.load_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3904, in from_pretrained - dispatch_model(model, **device_map_kwargs) - File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 489, in dispatch_model - model.to(device) - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 2796, in to - return super().to(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1173, in to - return self._apply(convert) - File 
""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 779, in _apply - module._apply(fn) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 779, in _apply - module._apply(fn) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 779, in _apply - module._apply(fn) - [Previous line repeated 2 more times] - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 853, in _apply - self._buffers[key] = fn(buf) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1159, in convert - return t.to( -torch.cuda.OutOfMemoryError: CUDA out of memory. Tried to allocate 112.00 MiB. GPU - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemv-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,meta-llama/Llama-2-7b-hf,meta-llama/Llama-2-7b-hf,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run - report = scenario.run(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 105, in run - _ = backend.generate(self.inputs, self.config.generate_kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context - return func(*args, **kwargs) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 400, in generate - return self.pretrained_model.generate(**inputs, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context - return func(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 1914, in generate - result = self._sample( - File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2651, in _sample - outputs = self( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1174, in forward - outputs = self.model( - File 
""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 978, in forward - layer_outputs = decoder_layer( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 718, in forward - hidden_states, self_attn_weights, present_key_value = self.self_attn( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 614, in forward - query_states = self.q_proj(hidden_states) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context - return func(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/gemv.py"", line 162, in forward - assert AWQ_INSTALLED, ( -AssertionError: AWQ kernels could not be loaded. 
Please install them from https://github.com/casper-hansen/AutoAWQ_kernels - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemv-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,r,r,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 304, in hf_raise_for_status - response.raise_for_status() - File ""/usr/local/lib/python3.10/dist-packages/requests/models.py"", line 1024, in raise_for_status - raise HTTPError(http_error_msg, response=self) -requests.exceptions.HTTPError: 404 Client Error: Not Found for url: https://huggingface.co/r/resolve/main/config.json - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1221, in hf_hub_download - return _hf_hub_download_to_cache_dir( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1325, in _hf_hub_download_to_cache_dir - _raise_on_head_call_error(head_call_error, force_download, local_files_only) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1823, in _raise_on_head_call_error - raise head_call_error - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1722, in _get_metadata_or_catch_error - metadata = get_hf_file_metadata(url=url, proxies=proxies, timeout=etag_timeout, headers=headers) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1645, in get_hf_file_metadata - r = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 372, in _request_wrapper - response = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 396, in _request_wrapper - hf_raise_for_status(response) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in 
hf_raise_for_status - raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-66949118-79007d023fd65816461713b3;8fdbdf07-c644-4548-b48b-e6085f36dc4a) - -Repository Not Found for url: https://huggingface.co/r/resolve/main/config.json. -Please make sure you specified the correct `repo_id` and `repo_type`. -If you are trying to access a private or gated repo, make sure you are authenticated. - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 425, in cached_file - raise EnvironmentError( -OSError: r is not a local folder and is not a valid model identifier listed on 'https://huggingface.co/models' -If this is a private repository, make sure to pass a token having permission to this repo either by logging in with `huggingface-cli login` or by passing `token=` - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemv-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,google/recurrentgemma-7b,google/recurrentgemma-7b,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise 
ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 304, in hf_raise_for_status - response.raise_for_status() - File ""/usr/local/lib/python3.10/dist-packages/requests/models.py"", line 1024, in raise_for_status - raise HTTPError(http_error_msg, response=self) -requests.exceptions.HTTPError: 404 Client Error: Not Found for url: https://huggingface.co/google/recurrentgemma-7b/resolve/main/config.json - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1221, in hf_hub_download - return _hf_hub_download_to_cache_dir( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1325, in _hf_hub_download_to_cache_dir - _raise_on_head_call_error(head_call_error, force_download, local_files_only) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1823, in _raise_on_head_call_error - raise head_call_error - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1722, in _get_metadata_or_catch_error - metadata = get_hf_file_metadata(url=url, proxies=proxies, timeout=etag_timeout, headers=headers) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1645, in get_hf_file_metadata - r = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 372, in _request_wrapper - response = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 396, in _request_wrapper - hf_raise_for_status(response) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status - raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-66948248-18d2409c7f4a20813d8e82a7;bc8c46ef-4899-4a2d-a8b2-cab0dd019f37) - -Repository Not Found for url: https://huggingface.co/google/recurrentgemma-7b/resolve/main/config.json. -Please make sure you specified the correct `repo_id` and `repo_type`. -If you are trying to access a private or gated repo, make sure you are authenticated. 
- -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 425, in cached_file - raise EnvironmentError( -OSError: google/recurrentgemma-7b is not a local folder and is not a valid model identifier listed on 'https://huggingface.co/models' -If this is a private repository, make sure to pass a token having permission to this repo either by logging in with `huggingface-cli login` or by passing `token=` - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemv-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,huggyllama/llama-13b,huggyllama/llama-13b,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run - report = scenario.run(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 105, in run - _ = backend.generate(self.inputs, self.config.generate_kwargs) - File 
""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context - return func(*args, **kwargs) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 400, in generate - return self.pretrained_model.generate(**inputs, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context - return func(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 1914, in generate - result = self._sample( - File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2651, in _sample - outputs = self( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1174, in forward - outputs = self.model( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 978, in forward - layer_outputs = decoder_layer( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 718, in forward - hidden_states, self_attn_weights, present_key_value = self.self_attn( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 614, in forward - query_states = self.q_proj(hidden_states) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context - return func(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/gemv.py"", line 162, in forward - assert AWQ_INSTALLED, ( -AssertionError: AWQ kernels could not be loaded. 
Please install them from https://github.com/casper-hansen/AutoAWQ_kernels - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemv-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,Deci/DeciCoder-1b,Deci/DeciCoder-1b,cuda,0,42,,,True,,,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run - report = scenario.run(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run - self.run_model_loading_tracking(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking - backend.load() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load - self.load_transformers_model() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model - self.load_transformers_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights - self.load_transformers_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained - self.pretrained_model = self.automodel_loader.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 559, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained - model = cls(config, *model_args, **model_kwargs) - File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 248, in __init__ - self.model = DeciCoderModel(config) - File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 215, in __init__ - self.layers = nn.ModuleList([DeciCoderDecoderLayer(config) for _ in range(config.num_hidden_layers)]) - File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 215, in - self.layers = nn.ModuleList([DeciCoderDecoderLayer(config) for _ in range(config.num_hidden_layers)]) - File 
""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 181, in __init__ - self.self_attn = DeciCoderAttention(config=config) - File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 54, in __init__ - self._init_rope() - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1729, in __getattr__ - raise AttributeError(f""'{type(self).__name__}' object has no attribute '{name}'"") -AttributeError: 'DeciCoderAttention' object has no attribute '_init_rope' - -",llama,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemv-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,google/recurrentgemma-2b,google/recurrentgemma-2b,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 304, in hf_raise_for_status - response.raise_for_status() - File ""/usr/local/lib/python3.10/dist-packages/requests/models.py"", line 1024, in raise_for_status - raise HTTPError(http_error_msg, response=self) -requests.exceptions.HTTPError: 403 Client Error: Forbidden for url: https://huggingface.co/google/recurrentgemma-2b/resolve/main/config.json - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1722, in _get_metadata_or_catch_error - metadata = get_hf_file_metadata(url=url, proxies=proxies, timeout=etag_timeout, headers=headers) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1645, in get_hf_file_metadata - r = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 372, in _request_wrapper - response = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 396, in _request_wrapper - hf_raise_for_status(response) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 367, in hf_raise_for_status - raise HfHubHTTPError(message, response=response) from e -huggingface_hub.utils._errors.HfHubHTTPError: 
(Request ID: Root=1-669481d5-7a8ff08d2b374de832affc38;7e8766eb-a2b6-4ae2-a3a5-1838dd20a502) - -403 Forbidden: Please enable access to public gated repositories in your fine-grained token settings to view this repository.. -Cannot access content at: https://huggingface.co/google/recurrentgemma-2b/resolve/main/config.json. -If you are trying to create or update content,make sure you have a token with the `write` role. - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1221, in hf_hub_download - return _hf_hub_download_to_cache_dir( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1325, in _hf_hub_download_to_cache_dir - _raise_on_head_call_error(head_call_error, force_download, local_files_only) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1826, in _raise_on_head_call_error - raise LocalEntryNotFoundError( -huggingface_hub.utils._errors.LocalEntryNotFoundError: An error happened while trying to locate the file on the Hub and we cannot find the requested files in the local cache. Please check your connection and try again or make sure your Internet connection is on. - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 445, in cached_file - raise EnvironmentError( -OSError: We couldn't connect to 'https://huggingface.co' to load this file, couldn't find it in the cached files and it looks like google/recurrentgemma-2b is not the path to a directory containing a file named config.json. -Checkout your internet connection or see how to run the library in offline mode at 'https://huggingface.co/docs/transformers/installation#offline-mode'. 
- -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemv-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,v,v,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 304, in hf_raise_for_status - response.raise_for_status() - File ""/usr/local/lib/python3.10/dist-packages/requests/models.py"", line 1024, in raise_for_status - raise HTTPError(http_error_msg, response=self) -requests.exceptions.HTTPError: 404 Client Error: Not Found for url: https://huggingface.co/v/resolve/main/config.json - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1221, in hf_hub_download - return _hf_hub_download_to_cache_dir( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1325, in _hf_hub_download_to_cache_dir - _raise_on_head_call_error(head_call_error, force_download, local_files_only) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1823, in _raise_on_head_call_error - raise head_call_error - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1722, in _get_metadata_or_catch_error - metadata = get_hf_file_metadata(url=url, proxies=proxies, timeout=etag_timeout, headers=headers) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1645, in get_hf_file_metadata - r = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 372, in _request_wrapper - response = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 396, in _request_wrapper - hf_raise_for_status(response) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status - raise RepositoryNotFoundError(message, response) from e 
-huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-6694948b-3a42593a022553ab6e8816a4;83b705e0-c1b2-4fd0-ab11-c897f8600238) - -Repository Not Found for url: https://huggingface.co/v/resolve/main/config.json. -Please make sure you specified the correct `repo_id` and `repo_type`. -If you are trying to access a private or gated repo, make sure you are authenticated. - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 425, in cached_file - raise EnvironmentError( -OSError: v is not a local folder and is not a valid model identifier listed on 'https://huggingface.co/models' -If this is a private repository, make sure to pass a token having permission to this repo either by logging in with `huggingface-cli login` or by passing `token=` - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemv-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,EleutherAI/pythia-2.7b,EleutherAI/pythia-2.7b,cuda,0,42,,,True,,,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,gpt_neox,MB,2218.835968,2693.267456,0.0,2107.63776,1984.899072,s,1,8.0579384765625,8.0579384765625,0.0,8.0579384765625,8.0579384765625,8.0579384765625,8.0579384765625,[8.0579384765625],,kWh,1.468873475416154e-05,8.034848902150202e-06,2.149585053001779e-05,4.4219434186329534e-05,,MB,2320.777216,3020.423168,0.0,2373.976064,2247.84384,s,10,0.5128084144592285,0.051280841445922855,0.00015117602927932903,0.05135111999511719,0.05143772048950195,0.05144157943725586,0.05144466659545898,"[0.05117462539672851, 0.05105401611328125, 0.05111145782470703, 
0.05135244750976563, 0.051418880462646484, 0.05134979248046875, 0.05139379119873047, 0.05143686294555664, 0.05144543838500976, 0.051071102142333985]",tokens/s,4992.117773066565,kWh,6.038448411811689e-07,3.307659213188193e-07,3.108673121857255e-06,4.043283884357243e-06,tokens/kWh,63314871.60484059,MB,2329.010176,3104.309248,0.0,2457.862144,2341.362176,s,10,14.676592041015626,1.4676592041015624,0.011291549895625315,1.4639102783203124,1.4735424072265624,1.486644921875,1.49712693359375,"[1.4997474365234376, 1.46099560546875, 1.4626693115234375, 1.4706307373046874, 1.4634158935546875, 1.4671356201171875, 1.4644046630859375, 1.468495849609375, 1.4578072509765625, 1.4612896728515625]",tokens/s,42.92549648033985,kWh,1.728905209465148e-05,9.474494736187714e-06,3.9468860940142124e-05,6.623240777098133e-05,tokens/kWh,951195.9797361081,,s,630,14.673584148406986,0.023291403410169813,0.0005339644198673089,0.023457280158996584,0.02382878704071045,0.024014130687713624,0.024883804302215578,"[0.02303385543823242, 0.02349260711669922, 0.023990272521972656, 0.023575551986694337, 0.023790592193603514, 0.02364723205566406, 0.023364608764648437, 0.023630847930908205, 0.023561216354370116, 0.023831552505493164, 0.023571456909179687, 0.02407219123840332, 0.024984575271606444, 0.024820735931396484, 0.023993343353271485, 0.023641088485717773, 0.023600128173828124, 0.023581695556640626, 0.02353459167480469, 0.023734272003173826, 0.023572479248046875, 0.023455743789672853, 0.0236759033203125, 0.023591936111450194, 0.02351206398010254, 0.023568384170532225, 0.024130559921264647, 0.0239554557800293, 0.02329804801940918, 0.023739391326904297, 0.024543231964111328, 0.023665664672851562, 0.02347929573059082, 0.023650304794311523, 0.023785472869873047, 0.023595008850097656, 0.02349875259399414, 0.023933952331542968, 0.024431615829467773, 0.024060928344726562, 0.023573503494262696, 0.02349567985534668, 0.023550975799560548, 0.023614463806152345, 0.02346188735961914, 0.026529792785644532, 0.024833023071289064, 0.024337408065795898, 0.023734272003173826, 0.02392166328430176, 0.02347007942199707, 0.02391551971435547, 0.023574527740478517, 0.023557119369506836, 0.023787519454956055, 0.023576576232910155, 0.02370457649230957, 0.02365644836425781, 0.023641088485717773, 0.023524351119995117, 0.02373017692565918, 0.02371788787841797, 0.02370457649230957, 0.022809600830078124, 0.022841344833374022, 0.02367283248901367, 0.02386534309387207, 0.02348441505432129, 0.02349465560913086, 0.023780351638793946, 0.02369024085998535, 0.023566335678100587, 0.023548927307128906, 0.023550975799560548, 0.023538688659667968, 0.023537664413452147, 0.023581695556640626, 0.023554048538208007, 0.023828479766845705, 0.023776256561279296, 0.023598079681396485, 0.023426048278808592, 0.023640064239501952, 0.024224767684936522, 0.023985151290893555, 0.023602176666259765, 0.023183359146118163, 0.022951936721801756, 0.02351103973388672, 0.023548927307128906, 0.023578624725341796, 0.024010751724243166, 0.023443456649780273, 0.023646207809448243, 0.023370752334594725, 0.02264678382873535, 0.022437887191772463, 0.0227061767578125, 0.022760448455810548, 0.022574079513549804, 0.022700031280517577, 0.02269900894165039, 0.022776832580566408, 0.022701055526733398, 0.022830080032348633, 0.0235100154876709, 0.023351295471191406, 0.023366655349731445, 0.02254745674133301, 0.02251571273803711, 0.022552576065063477, 0.02270207977294922, 0.023130111694335938, 0.022862848281860353, 0.022781951904296875, 0.02307174491882324, 0.022775808334350587, 0.0226693115234375, 
0.02268876838684082, 0.022746112823486327, 0.022708223342895507, 0.0224768009185791, 0.023162879943847657, 0.02327347183227539, 0.022760448455810548, 0.02244710350036621, 0.02245734405517578, 0.022593536376953126, 0.022832128524780275, 0.022433792114257813, 0.022335487365722655, 0.02263039970397949, 0.022786048889160155, 0.023233535766601563, 0.023524351119995117, 0.02349875259399414, 0.023598079681396485, 0.022594560623168947, 0.02268876838684082, 0.02282803153991699, 0.02351820755004883, 0.023661567687988282, 0.023538688659667968, 0.023412736892700195, 0.0224901123046875, 0.022794240951538085, 0.0228853759765625, 0.02354380798339844, 0.023793664932250977, 0.02374143981933594, 0.023665664672851562, 0.02368000030517578, 0.02350387191772461, 0.023345151901245118, 0.02305740737915039, 0.023007232666015624, 0.022779903411865234, 0.022748159408569335, 0.022758399963378906, 0.022734848022460938, 0.022790143966674805, 0.022766592025756836, 0.022760448455810548, 0.023863296508789062, 0.024016895294189454, 0.023747583389282227, 0.023632896423339843, 0.023738367080688477, 0.023833599090576172, 0.02366054344177246, 0.02368921661376953, 0.023576576232910155, 0.023590911865234376, 0.023513088226318358, 0.0237076473236084, 0.02367692756652832, 0.023985151290893555, 0.02372915267944336, 0.02374553680419922, 0.02368819236755371, 0.023616512298583983, 0.023060480117797853, 0.02272768020629883, 0.022790143966674805, 0.022687744140625, 0.022806528091430665, 0.022778879165649413, 0.022800384521484376, 0.022755327224731444, 0.022730752944946288, 0.022839296340942384, 0.02327961540222168, 0.022714368820190428, 0.022611967086791994, 0.023060480117797853, 0.02274508857727051, 0.023900159835815428, 0.0237127685546875, 0.02351206398010254, 0.023181312561035155, 0.022747135162353514, 0.02269593620300293, 0.022715391159057616, 0.022815744400024415, 0.02310041618347168, 0.02287308883666992, 0.023353343963623048, 0.02369536018371582, 0.023606271743774415, 0.023590911865234376, 0.02351206398010254, 0.023358463287353515, 0.02364313507080078, 0.02368921661376953, 0.02349567985534668, 0.023610368728637695, 0.023546880722045898, 0.02430668830871582, 0.024862720489501954, 0.02408243179321289, 0.023734272003173826, 0.02386124801635742, 0.023779327392578126, 0.023636991500854493, 0.023532543182373047, 0.023419904708862304, 0.02353152084350586, 0.02345267105102539, 0.02352332878112793, 0.023517183303833008, 0.023650304794311523, 0.023592960357666014, 0.02349465560913086, 0.024000511169433594, 0.023937023162841797, 0.02352025604248047, 0.02323865509033203, 0.023183359146118163, 0.022801408767700194, 0.022715391159057616, 0.0228351993560791, 0.022762496948242186, 0.022800384521484376, 0.022767616271972657, 0.022794240951538085, 0.02275328063964844, 0.023137279510498047, 0.023924736022949217, 0.02331340789794922, 0.022730752944946288, 0.02309222412109375, 0.023763967514038087, 0.022772735595703125, 0.02290790367126465, 0.022762496948242186, 0.02269388771057129, 0.02351411247253418, 0.023766016006469725, 0.023418880462646483, 0.023800832748413086, 0.023841791152954102, 0.023793664932250977, 0.02366054344177246, 0.023598079681396485, 0.023560192108154295, 0.02369126319885254, 0.02345881652832031, 0.0236810245513916, 0.023560192108154295, 0.02365132713317871, 0.02280243110656738, 0.022979583740234375, 0.022750207901000977, 0.022742015838623047, 0.023009279251098632, 0.02288128089904785, 0.022715391159057616, 0.023104511260986327, 0.022737920761108397, 0.022793216705322264, 0.022731775283813475, 0.022763519287109374, 
0.022725631713867187, 0.022746112823486327, 0.022742015838623047, 0.022822912216186524, 0.02269081687927246, 0.02352025604248047, 0.023793664932250977, 0.02366054344177246, 0.023630847930908205, 0.023634944915771484, 0.023613439559936524, 0.023634944915771484, 0.023607295989990236, 0.023615488052368162, 0.02489241600036621, 0.02508083152770996, 0.023746559143066406, 0.024156160354614258, 0.02384486389160156, 0.02370150375366211, 0.02249113655090332, 0.02351820755004883, 0.02265907287597656, 0.022809600830078124, 0.022754304885864256, 0.02275328063964844, 0.02288025665283203, 0.022649856567382814, 0.02249113655090332, 0.022527999877929687, 0.02243071937561035, 0.022611967086791994, 0.022607872009277344, 0.02267136001586914, 0.022766592025756836, 0.022779903411865234, 0.023076864242553712, 0.02285055923461914, 0.022785024642944338, 0.022732799530029296, 0.02269696044921875, 0.022761472702026365, 0.02272870445251465, 0.022763519287109374, 0.02272768020629883, 0.02285158348083496, 0.023625728607177734, 0.024985599517822265, 0.02432204818725586, 0.024037376403808593, 0.023800832748413086, 0.02373017692565918, 0.023561216354370116, 0.023590911865234376, 0.023574527740478517, 0.023781375885009767, 0.023394304275512694, 0.02269696044921875, 0.022940671920776368, 0.02287615966796875, 0.02280243110656738, 0.022773759841918945, 0.023383039474487305, 0.0237127685546875, 0.023620607376098633, 0.023589887619018556, 0.023669759750366212, 0.02369228744506836, 0.023488512039184572, 0.023572479248046875, 0.02351923179626465, 0.02367897605895996, 0.02364825630187988, 0.023597055435180665, 0.023604223251342774, 0.023654399871826173, 0.022803455352783202, 0.022794240951538085, 0.02272051239013672, 0.022800384521484376, 0.023237632751464843, 0.022723583221435546, 0.022750207901000977, 0.022780927658081054, 0.023151615142822265, 0.023653375625610353, 0.02350796890258789, 0.023545856475830077, 0.022817792892456053, 0.02289664077758789, 0.023411712646484374, 0.023603200912475586, 0.023545856475830077, 0.023545856475830077, 0.023706623077392578, 0.023776256561279296, 0.022770687103271483, 0.02308710479736328, 0.022936576843261718, 0.02291302490234375, 0.022777856826782225, 0.022724607467651366, 0.022782976150512696, 0.02371686363220215, 0.02390937614440918, 0.02286899185180664, 0.02282803153991699, 0.022787071228027343, 0.022780927658081054, 0.022656000137329102, 0.022742015838623047, 0.022757375717163086, 0.022742015838623047, 0.02274406433105469, 0.022840320587158205, 0.02288332748413086, 0.02274508857727051, 0.022784000396728517, 0.022840320587158205, 0.022772735595703125, 0.02268262481689453, 0.023326719284057617, 0.02283417510986328, 0.022746112823486327, 0.022784000396728517, 0.022831104278564454, 0.022765567779541016, 0.022779903411865234, 0.022737920761108397, 0.022791168212890626, 0.023601152420043944, 0.023985151290893555, 0.022898687362670898, 0.02288640022277832, 0.02369843292236328, 0.02369024085998535, 0.023638015747070314, 0.023677951812744142, 0.023615488052368162, 0.023603200912475586, 0.022840320587158205, 0.022794240951538085, 0.02295910453796387, 0.02368000030517578, 0.02367897605895996, 0.02451251220703125, 0.02550681686401367, 0.0241582088470459, 0.023812095642089845, 0.02386227226257324, 0.023855104446411132, 0.023710720062255858, 0.023642112731933593, 0.02384998321533203, 0.023631872177124022, 0.02370355224609375, 0.02367283248901367, 0.02369024085998535, 0.023634944915771484, 0.02290278434753418, 0.02264575958251953, 0.023230464935302734, 0.022814720153808594, 0.022722560882568358, 
0.0227194881439209, 0.022665216445922853, 0.022607872009277344, 0.02267750358581543, 0.02271232032775879, 0.02267136001586914, 0.024428543090820314, 0.024199167251586915, 0.023630847930908205, 0.02346188735961914, 0.023650304794311523, 0.02351820755004883, 0.023632896423339843, 0.02272972869873047, 0.022755327224731444, 0.024459264755249024, 0.02570649528503418, 0.02406809616088867, 0.023812095642089845, 0.02369536018371582, 0.02368716812133789, 0.023119871139526366, 0.02371993637084961, 0.023996416091918944, 0.023051263809204102, 0.02271334457397461, 0.022991872787475585, 0.023425024032592775, 0.02289151954650879, 0.022737920761108397, 0.022717439651489257, 0.022793216705322264, 0.02273689651489258, 0.023051263809204102, 0.02361039924621582, 0.02351203155517578, 0.023842815399169923, 0.0236943359375, 0.023440383911132814, 0.02387763214111328, 0.023550975799560548, 0.02313113594055176, 0.022768640518188478, 0.022775808334350587, 0.02272870445251465, 0.02273587226867676, 0.022790143966674805, 0.022808576583862306, 0.022796287536621093, 0.023109632492065428, 0.024023040771484375, 0.023576576232910155, 0.023548927307128906, 0.023616512298583983, 0.023573503494262696, 0.023772159576416017, 0.023604223251342774, 0.023623680114746092, 0.02306764793395996, 0.02346700859069824, 0.023034879684448242, 0.022766592025756836, 0.02284851264953613, 0.023611391067504883, 0.023966720581054687, 0.02349875259399414, 0.02264678382873535, 0.022691839218139647, 0.02260479927062988, 0.02269900894165039, 0.022726655960083008, 0.022750207901000977, 0.022642688751220705, 0.022692863464355468, 0.02308198356628418, 0.023604223251342774, 0.023573535919189453, 0.023621599197387697, 0.023649280548095702, 0.023536640167236327, 0.023577600479125976, 0.024178688049316405, 0.023581695556640626, 0.02353971290588379, 0.023214080810546874, 0.02245631980895996, 0.022734848022460938, 0.022824960708618162, 0.022816768646240236, 0.02273587226867676, 0.022739967346191405, 0.022778879165649413, 0.022775808334350587, 0.022773759841918945, 0.022780927658081054, 0.022754304885864256, 0.023447551727294923, 0.023059455871582032, 0.022733823776245117, 0.022928384780883788, 0.022757375717163086, 0.02261299133300781, 0.022684671401977538, 0.022708223342895507, 0.0227194881439209, 0.02268057632446289, 0.022765567779541016, 0.023417856216430662, 0.023742464065551756, 0.023606271743774415, 0.023576576232910155, 0.023748607635498048, 0.023546880722045898, 0.023549951553344727, 0.023472127914428712, 0.02351206398010254, 0.023500799179077148, 0.023600128173828124, 0.023575551986694337, 0.023536640167236327, 0.0226693115234375, 0.02266111946105957, 0.023234560012817384, 0.022824960708618162, 0.02264371109008789, 0.022689792633056642, 0.022774784088134766, 0.02269081687927246, 0.022977535247802734, 0.022756351470947265, 0.02270207977294922, 0.023227392196655275, 0.023872512817382813, 0.023550975799560548, 0.02348134422302246, 0.02287615966796875, 0.02273689651489258, 0.023553024291992186, 0.023196672439575194, 0.022767616271972657, 0.022700031280517577, 0.02269696044921875, 0.02264473533630371, 0.022687744140625, 0.022830080032348633, 0.022794240951538085, 0.022734848022460938, 0.023950336456298828, 0.022970367431640625, 0.022813695907592774, 0.022890495300292968, 0.022756351470947265, 0.022849536895751952, 0.023825408935546875, 0.023573503494262696, 0.023723007202148438, 0.023615488052368162, 0.023541759490966797, 0.023608320236206053, 0.023635967254638672, 0.02365132713317871, 0.023575551986694337, 0.02370457649230957, 0.02353971290588379, 
0.023554048538208007, 0.024056831359863282, 0.02368819236755371, 0.023566335678100587, 0.02350182342529297, 0.023563264846801758, 0.023577600479125976, 0.023554048538208007, 0.023533567428588868, 0.023593984603881835, 0.023649280548095702, 0.023600128173828124, 0.023604223251342774, 0.022804479598999023, 0.022765567779541016, 0.022766592025756836, 0.02286079978942871, 0.022707199096679686, 0.022798336029052735, 0.022789119720458984]",tokens/s,42.934295645034695,,,,,,,, -4bit-awq-gemv-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,Qwen/Qwen-7B,Qwen/Qwen-7B,cuda,0,42,,,True,,,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run - report = scenario.run(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run - self.run_model_loading_tracking(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking - backend.load() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load - self.load_transformers_model() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model - self.load_transformers_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights - self.load_transformers_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained - self.pretrained_model = self.automodel_loader.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 551, in from_pretrained - model_class = get_class_from_dynamic_module( - File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 502, in get_class_from_dynamic_module - final_module = get_cached_module_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 327, in get_cached_module_file - modules_needed = check_imports(resolved_module_file) - File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 182, in check_imports - raise ImportError( -ImportError: This modeling file requires the following packages that were not found in your environment: 
transformers_stream_generator. Run `pip install transformers_stream_generator` - -",qwen,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemv-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,microsoft/rho-math-1b-v0.1,microsoft/rho-math-1b-v0.1,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run - report = scenario.run(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 105, in run - _ = backend.generate(self.inputs, self.config.generate_kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context - return func(*args, **kwargs) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 400, in generate - return self.pretrained_model.generate(**inputs, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context - return func(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 1914, in generate - result = self._sample( - File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2651, in _sample - outputs = self( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1174, in forward - outputs = self.model( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 978, in forward - layer_outputs = decoder_layer( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File 
""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 718, in forward - hidden_states, self_attn_weights, present_key_value = self.self_attn( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 614, in forward - query_states = self.q_proj(hidden_states) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context - return func(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/gemv.py"", line 162, in forward - assert AWQ_INSTALLED, ( -AssertionError: AWQ kernels could not be loaded. Please install them from https://github.com/casper-hansen/AutoAWQ_kernels - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemv-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,01-ai/Yi-6B,01-ai/Yi-6B,cuda,0,42,,,True,,,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,llama,MB,3573.08416,4698.144768,0.0,4112.515072,3976.487424,s,1,9.6717333984375,9.6717333984375,0.0,9.6717333984375,9.6717333984375,9.6717333984375,9.6717333984375,[9.6717333984375],,kWh,3.3852561644443966e-05,1.853844722436595e-05,4.78972605400009e-05,0.00010028826940881082,,MB,1629.908992,4928.831488,0.0,4282.384384,4102.201856,s,10,0.8844129257202149,0.08844129257202149,5.4411583600818026e-05,0.08843313598632813,0.08849000396728515,0.08853329010009765,0.08856791900634765,"[0.08845692443847657, 0.08848038482666015, 0.08838092803955078, 0.08840064239501953, 0.08841165161132812, 0.08857657623291015, 0.08844143676757812, 0.08842483520507813, 0.08845263671875, 0.08838690948486329]",tokens/s,2894.5755150686923,kWh,1.0455875713126844e-06,5.729277208303392e-07,5.8192401519469055e-06,7.4377554440899285e-06,tokens/kWh,34418985.932566345,MB,1671.426048,4937.220096,0.0,4290.772992,4102.204416,s,10,16.61282360839844,1.6612823608398437,0.021837891606323274,1.6718255615234376,1.6762251831054686,1.678409100341797,1.6801562341308593,"[1.6346978759765625, 1.6727110595703125, 1.6072100830078124, 1.658299072265625, 1.680593017578125, 1.6723753662109375, 1.6712757568359375, 1.6653023681640624, 1.674619140625, 
1.6757398681640625]",tokens/s,37.92251184088358,kWh,1.907035448215951e-05,1.0450762424366128e-05,5.252944541585293e-05,8.205056232237856e-05,tokens/kWh,767819.2350769216,,s,630,16.610826263427732,0.02636639089432974,0.0006581063712693775,0.026644991874694823,0.026907750511169434,0.027049625492095948,0.027677583618164064,"[0.02552115249633789, 0.025565183639526368, 0.025554943084716796, 0.02570751953125, 0.025615360260009764, 0.02557439994812012, 0.025590784072875978, 0.025643007278442383, 0.02556620788574219, 0.02553036880493164, 0.025734144210815428, 0.02553241539001465, 0.025607168197631838, 0.02555392074584961, 0.02573209571838379, 0.02552524757385254, 0.025608192443847655, 0.025539583206176757, 0.025612287521362305, 0.025593856811523437, 0.025547775268554687, 0.025603071212768554, 0.02553343963623047, 0.025935871124267578, 0.028100608825683594, 0.02714726448059082, 0.02673459243774414, 0.026711040496826172, 0.026834943771362304, 0.026787839889526367, 0.026802175521850585, 0.026828800201416016, 0.02666803169250488, 0.026992639541625976, 0.025610240936279297, 0.025552896499633788, 0.02558361625671387, 0.025663488388061522, 0.025595903396606445, 0.025605119705200196, 0.025544704437255858, 0.025654272079467775, 0.025619455337524414, 0.02545254325866699, 0.02557542419433594, 0.025586687088012695, 0.025444351196289062, 0.02547302436828613, 0.025632768630981444, 0.025555967330932617, 0.02549452781677246, 0.025478143692016602, 0.02551296043395996, 0.026714111328125, 0.026599424362182617, 0.026786815643310546, 0.026598400115966796, 0.026467327117919923, 0.02667622375488281, 0.026645503997802734, 0.02551296043395996, 0.02636185646057129, 0.026368000030517577, 0.02551296043395996, 0.02547711944580078, 0.025418752670288085, 0.025547775268554687, 0.025568256378173827, 0.02549862480163574, 0.02554982376098633, 0.02554368019104004, 0.025595903396606445, 0.025622528076171876, 0.025623552322387694, 0.025435136795043944, 0.025577472686767577, 0.025511936187744142, 0.025607168197631838, 0.026688512802124024, 0.026073087692260744, 0.026652671813964843, 0.026657791137695314, 0.026762239456176756, 0.026776575088500978, 0.02680729675292969, 0.026669055938720702, 0.02673459243774414, 0.02674176025390625, 0.026694656372070313, 0.027241472244262696, 0.027695104598999022, 0.027058176040649414, 0.02680012893676758, 0.026970111846923828, 0.02689023971557617, 0.02673971176147461, 0.02693734359741211, 0.027051008224487305, 0.026831872940063478, 0.026969087600708007, 0.025646080017089845, 0.026908672332763672, 0.026747903823852538, 0.026770431518554686, 0.0267509765625, 0.026693632125854492, 0.026694656372070313, 0.026727424621582032, 0.026858495712280273, 0.026811391830444335, 0.026672128677368165, 0.026877952575683595, 0.026789888381958008, 0.0267827205657959, 0.02671308708190918, 0.026796031951904296, 0.026817535400390623, 0.026894336700439454, 0.026694656372070313, 0.025677824020385744, 0.03037900733947754, 0.027538431167602538, 0.02693836784362793, 0.027047935485839843, 0.02694758415222168, 0.026793983459472655, 0.02553241539001465, 0.025375743865966797, 0.02536140823364258, 0.025495552062988282, 0.025495552062988282, 0.02546892738342285, 0.025417728424072264, 0.025478143692016602, 0.025440256118774415, 0.025455615997314454, 0.025475072860717773, 0.025423871994018556, 0.02550476837158203, 0.02554265594482422, 0.025474048614501952, 0.025469951629638672, 0.02545254325866699, 0.025392127990722657, 0.025456640243530275, 0.025536512374877928, 0.025388032913208007, 0.02546892738342285, 0.02553036880493164, 
0.025520128250122072, 0.025465856552124022, 0.025210880279541017, 0.025423871994018556, 0.025440256118774415, 0.025440256118774415, 0.02509004783630371, 0.025672704696655273, 0.02571571159362793, 0.025648128509521483, 0.025433088302612306, 0.025589759826660157, 0.025667583465576172, 0.02553036880493164, 0.025486335754394532, 0.025564159393310547, 0.02550067138671875, 0.025540607452392578, 0.025487360000610353, 0.02549862480163574, 0.02549862480163574, 0.025503744125366212, 0.02555187225341797, 0.025491455078125, 0.025569280624389647, 0.025474048614501952, 0.02556723213195801, 0.025596927642822266, 0.025605119705200196, 0.02549247932434082, 0.026210304260253905, 0.025644031524658203, 0.025560064315795897, 0.02552524757385254, 0.025556991577148438, 0.025620479583740235, 0.02547711944580078, 0.025487360000610353, 0.02553446388244629, 0.02548531150817871, 0.02551091194152832, 0.025414655685424805, 0.025423871994018556, 0.025479167938232423, 0.025554943084716796, 0.02552115249633789, 0.025442304611206053, 0.025544704437255858, 0.025539583206176757, 0.026175487518310548, 0.026472448348999023, 0.026044416427612304, 0.026263551712036134, 0.026688512802124024, 0.026645503997802734, 0.027382783889770508, 0.02754560089111328, 0.02669875144958496, 0.026647552490234375, 0.02668339157104492, 0.02667519950866699, 0.026659839630126952, 0.026650623321533205, 0.026662912368774414, 0.02666803169250488, 0.026690559387207033, 0.026704896926879884, 0.026591232299804687, 0.026604543685913085, 0.02677452850341797, 0.026833919525146483, 0.025601024627685546, 0.02551910400390625, 0.025587711334228515, 0.025561088562011718, 0.02549862480163574, 0.02550169563293457, 0.025475072860717773, 0.02554473686218262, 0.026523616790771483, 0.02678374481201172, 0.02671513557434082, 0.02675302314758301, 0.02629734420776367, 0.026222591400146485, 0.02675200080871582, 0.026702848434448243, 0.026057727813720705, 0.026415103912353514, 0.026582015991210937, 0.0263874568939209, 0.026570751190185548, 0.026636287689208983, 0.026846208572387696, 0.026689535140991212, 0.026670080184936523, 0.02675302314758301, 0.026764287948608398, 0.026763263702392577, 0.026615808486938477, 0.02670796775817871, 0.026440704345703125, 0.025972736358642577, 0.025592832565307616, 0.025868288040161135, 0.02679193687438965, 0.026674175262451173, 0.02675200080871582, 0.02676736068725586, 0.026688512802124024, 0.026622976303100586, 0.026644479751586913, 0.026674175262451173, 0.026705919265747072, 0.02666803169250488, 0.027123712539672853, 0.02692198371887207, 0.02735513687133789, 0.02698854446411133, 0.026885120391845704, 0.02671718406677246, 0.026487808227539062, 0.02674892807006836, 0.025657344818115234, 0.025584640502929686, 0.025564159393310547, 0.025838592529296874, 0.026913824081420897, 0.02680828857421875, 0.02547302436828613, 0.026610687255859376, 0.02696499252319336, 0.02672947120666504, 0.026891263961791992, 0.026813440322875977, 0.0267007999420166, 0.026805248260498047, 0.026672128677368165, 0.02631372833251953, 0.02674380874633789, 0.02710323143005371, 0.02686566352844238, 0.026841087341308592, 0.02630451202392578, 0.02671308708190918, 0.026860544204711914, 0.026630144119262695, 0.026702848434448243, 0.02617241668701172, 0.02613657569885254, 0.02676736068725586, 0.026789888381958008, 0.026987520217895508, 0.02819174385070801, 0.02729267120361328, 0.02677555274963379, 0.02692300796508789, 0.02711859130859375, 0.026780672073364258, 0.026703872680664063, 0.026686464309692383, 0.026811391830444335, 0.026864639282226564, 0.02656358337402344, 
0.026652671813964843, 0.02739302444458008, 0.025634815216064453, 0.026364927291870118, 0.026594303131103517, 0.026816511154174806, 0.026705919265747072, 0.02672230339050293, 0.02668339157104492, 0.026630144119262695, 0.02652876853942871, 0.026762239456176756, 0.026831872940063478, 0.026808319091796876, 0.026764287948608398, 0.02673356819152832, 0.026866687774658202, 0.026856447219848634, 0.026614784240722656, 0.025455615997314454, 0.02545254325866699, 0.0255098876953125, 0.026034175872802736, 0.02695577621459961, 0.026868736267089844, 0.02660147285461426, 0.026638336181640625, 0.026805248260498047, 0.02676019287109375, 0.026471424102783202, 0.02673766326904297, 0.026184736251831056, 0.02675606346130371, 0.026360832214355468, 0.02512179183959961, 0.026181631088256836, 0.025777151107788086, 0.02565836715698242, 0.0259420166015625, 0.026252288818359375, 0.026409984588623047, 0.0267827205657959, 0.026645503997802734, 0.026762239456176756, 0.02669977569580078, 0.026681343078613282, 0.027067392349243165, 0.02799001693725586, 0.027256832122802735, 0.02690457534790039, 0.026933248519897462, 0.026770431518554686, 0.026788864135742187, 0.026756095886230468, 0.026200063705444337, 0.026655744552612305, 0.026310655593872072, 0.026013696670532226, 0.026492927551269533, 0.026689535140991212, 0.02671308708190918, 0.026803199768066405, 0.02690764808654785, 0.02673971176147461, 0.026736640930175783, 0.025608192443847655, 0.025432064056396485, 0.025568256378173827, 0.02553241539001465, 0.025619455337524414, 0.025624576568603515, 0.02736128044128418, 0.027290624618530275, 0.026970111846923828, 0.026811391830444335, 0.02673356819152832, 0.026687488555908204, 0.026797056198120117, 0.026529792785644532, 0.026192895889282225, 0.026730495452880858, 0.026617855072021485, 0.026756095886230468, 0.02653900718688965, 0.026655744552612305, 0.026847232818603517, 0.02652364730834961, 0.026693632125854492, 0.02675200080871582, 0.026735616683959962, 0.026179584503173828, 0.025472000122070314, 0.025520128250122072, 0.026594303131103517, 0.026647552490234375, 0.026805248260498047, 0.026467327117919923, 0.02690355110168457, 0.026209280014038085, 0.026642431259155275, 0.02715648078918457, 0.026624000549316407, 0.026677248001098632, 0.02672640037536621, 0.02677555274963379, 0.026686464309692383, 0.026690559387207033, 0.02671820831298828, 0.026694656372070313, 0.026605567932128905, 0.026677248001098632, 0.026060800552368164, 0.026068992614746093, 0.026068992614746093, 0.02653081512451172, 0.026720256805419923, 0.02672230339050293, 0.026600448608398438, 0.026770431518554686, 0.026627071380615236, 0.02674073600769043, 0.02689023971557617, 0.026818559646606444, 0.026695680618286134, 0.026646528244018555, 0.026728448867797853, 0.026648576736450196, 0.026635263442993166, 0.025436159133911132, 0.02550476837158203, 0.025754623413085938, 0.026736640930175783, 0.026829824447631836, 0.02617344093322754, 0.026672128677368165, 0.02690355110168457, 0.026772480010986328, 0.026613759994506835, 0.026659839630126952, 0.02673356819152832, 0.026655744552612305, 0.02675712013244629, 0.026681343078613282, 0.026677248001098632, 0.02673459243774414, 0.027511808395385744, 0.02680729675292969, 0.026834943771362304, 0.026220544815063477, 0.026263551712036134, 0.02612428855895996, 0.026295295715332033, 0.026755071640014647, 0.02673459243774414, 0.026688512802124024, 0.026802175521850585, 0.026719232559204102, 0.026892288208007813, 0.026756095886230468, 0.026845184326171875, 0.02674176025390625, 0.026284032821655274, 0.025602048873901367, 
0.025659391403198242, 0.025181184768676756, 0.025454591751098633, 0.025899007797241212, 0.025757696151733397, 0.026787839889526367, 0.02532352066040039, 0.02609766387939453, 0.02611404800415039, 0.02671001625061035, 0.026274816513061523, 0.02674278450012207, 0.026652671813964843, 0.026513408660888672, 0.02668441581726074, 0.02615705680847168, 0.02634649658203125, 0.026534912109375, 0.026187776565551758, 0.02669875144958496, 0.0265031681060791, 0.02660147285461426, 0.027205631256103514, 0.02607513618469238, 0.026012672424316406, 0.026411008834838868, 0.02657587242126465, 0.026772480010986328, 0.025610240936279297, 0.02678169631958008, 0.026697727203369142, 0.026908672332763672, 0.026908672332763672, 0.02651238441467285, 0.02630348777770996, 0.026408960342407226, 0.026610687255859376, 0.02675814437866211, 0.026426368713378907, 0.026652671813964843, 0.026824703216552736, 0.026826751708984374, 0.02672947120666504, 0.02708787155151367, 0.025581567764282227, 0.025523199081420898, 0.026589183807373046, 0.026840063095092775, 0.02681548881530762, 0.026607616424560547, 0.02670796775817871, 0.026573823928833007, 0.02657689666748047, 0.02615193557739258, 0.026887168884277345, 0.026876928329467774, 0.026747903823852538, 0.026626047134399415, 0.026859519958496093, 0.026705919265747072, 0.02678374481201172, 0.026735616683959962, 0.02690457534790039, 0.026856447219848634, 0.02555904006958008, 0.02555904006958008, 0.027634687423706054, 0.02695577621459961, 0.026663936614990235, 0.026756095886230468, 0.02671615982055664, 0.026062847137451172, 0.026476543426513673, 0.02660966491699219, 0.02649395179748535, 0.027107328414916993, 0.026942464828491212, 0.02695884895324707, 0.026771455764770507, 0.02675814437866211, 0.026586111068725587, 0.026632192611694337, 0.026418176651000977, 0.026366975784301756, 0.0251463680267334, 0.025479167938232423, 0.02553548812866211, 0.02614067268371582, 0.02715648078918457, 0.02795417594909668, 0.02698240089416504, 0.02612326431274414, 0.02672537612915039, 0.02633318328857422, 0.026608640670776368, 0.026869760513305665, 0.026849279403686522, 0.026178560256958007, 0.026859519958496093, 0.026986495971679687, 0.026887168884277345, 0.027000831604003905, 0.026786815643310546, 0.027007999420166014, 0.02572697639465332, 0.02656051254272461, 0.026720256805419923, 0.026792959213256837, 0.026812416076660156, 0.02674995231628418, 0.026860544204711914, 0.026993663787841796, 0.026866687774658202, 0.025577472686767577, 0.026661888122558593, 0.027215871810913086, 0.02688204765319824, 0.02680012893676758, 0.026825727462768553, 0.02675712013244629, 0.026824703216552736, 0.02675814437866211, 0.027038719177246092, 0.02676019287109375, 0.026869760513305665, 0.026824703216552736, 0.031932416915893554, 0.02692300796508789, 0.026785791397094725, 0.026693632125854492, 0.026839040756225587, 0.02673151969909668, 0.02673356819152832, 0.02676838493347168, 0.026595327377319337, 0.02671308708190918, 0.026916864395141602, 0.025686016082763673, 0.02553036880493164, 0.0255283203125, 0.025428991317749023, 0.02554265594482422, 0.025378816604614256, 0.025440256118774415, 0.025351167678833008, 0.02537779235839844, 0.025412607192993163, 0.02547711944580078, 0.027044864654541017, 0.026878976821899415, 0.026851327896118164, 0.026736640930175783, 0.026643455505371092, 0.026508287429809572]",tokens/s,37.92707177890837,,,,,,,, 
-4bit-awq-gemv-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,EleutherAI/gpt-neo-2.7B,EleutherAI/gpt-neo-2.7B,cuda,0,42,,,True,,,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run - report = scenario.run(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run - self.run_model_loading_tracking(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking - backend.load() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load - self.load_transformers_model() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model - self.load_transformers_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights - self.load_transformers_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained - self.pretrained_model = self.automodel_loader.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3826, in from_pretrained - config = cls._autoset_attn_implementation( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1565, in _autoset_attn_implementation - config = cls._check_and_enable_sdpa( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1731, in _check_and_enable_sdpa - raise ValueError( -ValueError: GPTNeoForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. 
Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` - -",gpt_neo,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemv-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,facebook/opt-66b,facebook/opt-66b,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - self.load_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3704, in from_pretrained - config = cls._autoset_attn_implementation( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1490, in _autoset_attn_implementation - config = cls._check_and_enable_sdpa( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1656, in _check_and_enable_sdpa - raise ValueError( -ValueError: OPTForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. 
Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemv-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,0,0,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 304, in hf_raise_for_status - response.raise_for_status() - File ""/usr/local/lib/python3.10/dist-packages/requests/models.py"", line 1024, in raise_for_status - raise HTTPError(http_error_msg, response=self) -requests.exceptions.HTTPError: 404 Client Error: Not Found for url: https://huggingface.co/0/resolve/main/config.json - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1221, in hf_hub_download - return _hf_hub_download_to_cache_dir( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1325, in _hf_hub_download_to_cache_dir - _raise_on_head_call_error(head_call_error, force_download, local_files_only) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1823, in _raise_on_head_call_error - raise head_call_error - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1722, in _get_metadata_or_catch_error - metadata = get_hf_file_metadata(url=url, proxies=proxies, timeout=etag_timeout, headers=headers) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1645, in get_hf_file_metadata - r = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 372, in _request_wrapper - response = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 396, in _request_wrapper - hf_raise_for_status(response) - File 
""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status - raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-669494e8-0a566c5b19e6b2165a2bc06a;a878e53e-764f-4e93-bff1-470e428c5181) - -Repository Not Found for url: https://huggingface.co/0/resolve/main/config.json. -Please make sure you specified the correct `repo_id` and `repo_type`. -If you are trying to access a private or gated repo, make sure you are authenticated. - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 425, in cached_file - raise EnvironmentError( -OSError: 0 is not a local folder and is not a valid model identifier listed on 'https://huggingface.co/models' -If this is a private repository, make sure to pass a token having permission to this repo either by logging in with `huggingface-cli login` or by passing `token=` - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemv-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,facebook/xglm-4.5B,facebook/xglm-4.5B,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File 
""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - self.load_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3704, in from_pretrained - config = cls._autoset_attn_implementation( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1490, in _autoset_attn_implementation - config = cls._check_and_enable_sdpa( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1656, in _check_and_enable_sdpa - raise ValueError( -ValueError: XGLMForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. 
Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemv-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,TencentARC/Mistral_Pro_8B_v0.1,TencentARC/Mistral_Pro_8B_v0.1,cuda,0,42,,,True,,,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,,mistral,MB,5166.927872,5651.300352,0.0,5058.330624,5057.441792,s,1,11.2009345703125,11.2009345703125,0.0,11.2009345703125,11.2009345703125,11.2009345703125,11.2009345703125,[11.2009345703125],,kWh,5.186874821388807e-05,2.8409514090951658e-05,7.527478244201413e-05,0.00015555304474685385,,MB,1790.427136,5869.40416,0.0,5215.617024,5189.707776,s,10,1.3316333160400389,0.1331633316040039,7.052766358341119e-05,0.13315169525146486,0.13324486999511717,0.13327843322753907,0.13330528381347656,"[0.13322096252441407, 0.1331188507080078, 0.13331199645996095, 0.13315773010253906, 0.13314566040039064, 0.13308335876464844, 0.13317219543457032, 0.13323741149902343, 0.13308927917480468, 0.1330958709716797]",tokens/s,1922.4511501505772,kWh,1.5749511867129177e-06,8.629485278419648e-07,8.955659016373815e-06,1.1393558730928697e-05,tokens/kWh,22468835.773415394,MB,1798.639616,5888.278528,0.0,5234.491392,5189.710336,s,10,24.611257568359374,2.4611257568359375,0.03677725850690704,2.4783171386718754,2.493844482421875,2.49514306640625,2.49618193359375,"[2.391347412109375, 2.40199462890625, 2.47711279296875, 2.429751220703125, 2.470335693359375, 2.493555908203125, 2.479521484375, 2.496441650390625, 2.48525146484375, 2.4859453125]",tokens/s,25.59804180059202,kWh,2.8393310958564203e-05,1.5560354345452882e-05,7.857593878662397e-05,0.00012252960409064105,tokens/kWh,514161.45891890634,,s,630,24.608345085144023,0.03906086521451436,0.000875116998583955,0.039394304275512694,0.03982940139770508,0.040112486457824705,0.04120421310424805,"[0.03794124984741211, 0.03753574371337891, 0.03747430419921875, 0.037602302551269534, 0.03770675277709961, 0.037705726623535156, 0.03754291152954101, 0.037539871215820315, 0.03808047866821289, 0.038556671142578124, 0.03853414535522461, 0.03778355026245117, 0.03746099090576172, 0.03792281723022461, 0.03746815872192383, 0.037703678131103514, 0.03765657424926758, 0.03766886520385742, 0.03741593551635742, 0.0373196792602539, 0.03748044967651367, 0.04010291290283203, 0.03979673767089844, 0.03838566589355469, 0.03803750228881836, 0.03875020980834961, 0.038160385131835936, 0.037910526275634765, 0.03814092636108399, 0.0383559684753418, 0.03821977615356445, 0.03781836700439453, 0.03782656097412109, 0.03788800048828125, 0.038188030242919925, 0.03773132705688476, 0.03813273620605469, 0.038904830932617186, 0.038184959411621096, 0.03764940643310547, 0.037610496520996094, 0.03805491256713867, 0.037628929138183595, 0.037716991424560545, 0.037718017578125, 0.03782860946655273, 0.03773747253417969, 0.03794636917114258, 0.03809791946411133, 0.03786547088623047, 0.03787571334838867, 0.03775283050537109, 0.037749759674072264, 0.03788288116455078, 
0.03769139099121094, 0.037651454925537106, 0.037735424041748046, 0.037920768737792966, 0.0376627197265625, 0.037822463989257815, 0.03855052947998047, 0.03808768081665039, 0.03822387313842773, 0.037533695220947266, 0.03752959823608398, 0.037716991424560545, 0.03778047943115234, 0.0376627197265625, 0.037651454925537106, 0.037599231719970705, 0.03772313690185547, 0.037733375549316404, 0.037733375549316404, 0.03772825622558594, 0.037814273834228515, 0.03776409530639648, 0.037735424041748046, 0.037722110748291016, 0.03800371170043945, 0.03772723388671875, 0.03755212783813477, 0.037749759674072264, 0.0376995849609375, 0.03778355026245117, 0.03782860946655273, 0.03772313690185547, 0.037754878997802735, 0.0377784309387207, 0.037515262603759765, 0.03741593551635742, 0.03770163345336914, 0.03735039901733399, 0.0375654411315918, 0.0376995849609375, 0.03778355026245117, 0.03789209747314453, 0.03788083267211914, 0.037628929138183595, 0.037479423522949216, 0.03760435104370117, 0.037958656311035156, 0.03787059020996094, 0.03757056045532227, 0.03767603302001953, 0.037370880126953124, 0.03763507080078125, 0.037768192291259765, 0.03769036865234375, 0.03767603302001953, 0.037823486328125, 0.037678081512451174, 0.03772108840942383, 0.038004737854003906, 0.039623680114746096, 0.04123955154418945, 0.04005376052856445, 0.03963596725463867, 0.04006399917602539, 0.03928268814086914, 0.03920281600952148, 0.03974655914306641, 0.039602176666259765, 0.03966873550415039, 0.0393891830444336, 0.039539710998535156, 0.03962265777587891, 0.03877068710327149, 0.03996979141235352, 0.040079360961914064, 0.03954073715209961, 0.039400447845458986, 0.03935846328735351, 0.039403518676757815, 0.0394598388671875, 0.03948646545410156, 0.039588863372802735, 0.0395335693359375, 0.03823820877075195, 0.03781222534179687, 0.037612545013427735, 0.03757056045532227, 0.03787571334838867, 0.03934207916259766, 0.03937996673583984, 0.039347198486328124, 0.03895808029174805, 0.03771289443969727, 0.03779379272460937, 0.039657470703125, 0.03905228805541992, 0.0394700813293457, 0.03936153411865234, 0.03926630401611328, 0.03931033706665039, 0.040185855865478515, 0.03922227096557617, 0.03933695983886719, 0.039375873565673826, 0.03930214309692383, 0.03942195129394531, 0.03942092895507812, 0.03965030288696289, 0.03942297744750976, 0.03930214309692383, 0.039311359405517575, 0.039277568817138675, 0.03930624008178711, 0.039962623596191404, 0.03951513671875, 0.03964313507080078, 0.03938508987426758, 0.03935641479492188, 0.039373825073242184, 0.039800830841064457, 0.03952947235107422, 0.03943219375610352, 0.039406593322753904, 0.039390209197998044, 0.04132761764526367, 0.03990425491333008, 0.0401868782043457, 0.03974553680419922, 0.03937484741210937, 0.03951718521118164, 0.039428096771240234, 0.03942092895507812, 0.03950387191772461, 0.03947417449951172, 0.03930316925048828, 0.03785932922363281, 0.03767603302001953, 0.0375623664855957, 0.037664768218994144, 0.03761663818359375, 0.037852161407470705, 0.03817574310302734, 0.03780505752563477, 0.03756032180786133, 0.03768115234375, 0.037689342498779296, 0.037539840698242184, 0.037157886505126955, 0.03742822265625, 0.038128639221191404, 0.03829043197631836, 0.03769548797607422, 0.037647361755371096, 0.037700607299804685, 0.037814273834228515, 0.03772927856445313, 0.03763507080078125, 0.03759718322753906, 0.037569534301757815, 0.037628929138183595, 0.03760639953613281, 0.03803852844238281, 0.039229438781738284, 0.03935232162475586, 0.039346176147460936, 0.03925708770751953, 0.039413761138916016, 
0.03956326293945313, 0.039512065887451174, 0.03936972808837891, 0.03918438339233398, 0.03803033447265625, 0.03749990463256836, 0.03762995147705078, 0.03749785614013672, 0.03767398452758789, 0.03808256149291992, 0.03821875381469726, 0.03774156951904297, 0.03943423843383789, 0.03955814361572266, 0.03959500885009765, 0.03950284957885742, 0.03943936157226562, 0.04008755111694336, 0.04126105499267578, 0.04056371307373047, 0.03980595016479492, 0.03943116760253906, 0.039416831970214845, 0.03952230453491211, 0.03943219375610352, 0.039556095123291016, 0.03962572860717774, 0.03961958312988281, 0.039678974151611326, 0.03958169555664062, 0.03942502212524414, 0.03783475112915039, 0.038199295043945314, 0.038351871490478515, 0.037910526275634765, 0.03863552093505859, 0.04067327880859375, 0.03960934448242188, 0.03928678512573242, 0.0393891830444336, 0.039390209197998044, 0.03926937484741211, 0.03955507278442383, 0.03928575897216797, 0.039362560272216796, 0.03924787139892578, 0.039754753112792966, 0.03924889755249023, 0.03811123275756836, 0.03786547088623047, 0.038484992980957033, 0.03977523040771484, 0.03941888046264649, 0.039501823425292966, 0.039327743530273435, 0.03927961730957031, 0.039231487274169925, 0.03930624008178711, 0.03967078399658203, 0.04026367950439453, 0.03969023895263672, 0.039777278900146484, 0.03941273498535156, 0.03952844619750977, 0.03813683319091797, 0.03767603302001953, 0.03910860824584961, 0.03948646545410156, 0.03928166580200195, 0.03930316925048828, 0.039259136199951174, 0.03935129547119141, 0.03925094223022461, 0.039346176147460936, 0.03950694274902344, 0.03944243240356445, 0.03935129547119141, 0.039588863372802735, 0.038351871490478515, 0.037343231201171875, 0.03786444854736328, 0.03943116760253906, 0.03930828857421875, 0.03938816070556641, 0.039706623077392575, 0.040569854736328126, 0.040049663543701174, 0.03970150375366211, 0.03940966415405273, 0.039504894256591795, 0.03931545639038086, 0.039444480895996094, 0.039482368469238284, 0.039482368469238284, 0.03721113586425781, 0.038091777801513675, 0.03959500885009765, 0.040120319366455076, 0.04009471893310547, 0.039591934204101564, 0.03942707061767578, 0.03949055862426758, 0.039430145263671876, 0.03944038391113281, 0.039408641815185545, 0.03947110366821289, 0.03886899185180664, 0.039376895904541014, 0.040720382690429685, 0.040022014617919925, 0.03945062255859375, 0.03967488098144531, 0.039534591674804685, 0.03967488098144531, 0.039521278381347655, 0.03963187026977539, 0.04017356872558594, 0.040648704528808595, 0.03957145690917969, 0.039577598571777346, 0.03960422515869141, 0.039792640686035156, 0.039828479766845705, 0.03952435302734375, 0.039686145782470705, 0.03964211273193359, 0.03952640151977539, 0.03953664016723633, 0.038882305145263675, 0.03944038391113281, 0.039610366821289066, 0.03961446380615234, 0.03976192092895508, 0.03991142272949219, 0.04053094482421875, 0.04148428726196289, 0.040858623504638675, 0.03969126510620117, 0.039446529388427735, 0.03955916976928711, 0.03932262420654297, 0.03935846328735351, 0.03956838226318359, 0.03954380798339844, 0.039636993408203126, 0.039403518676757815, 0.03996364974975586, 0.03961548614501953, 0.039547904968261716, 0.039462913513183595, 0.039521278381347655, 0.03942195129394531, 0.039446529388427735, 0.03950899124145508, 0.03930112075805664, 0.03817574310302734, 0.038744064331054685, 0.03877785491943359, 0.03799039840698242, 0.03766579055786133, 0.03759308624267578, 0.03767705535888672, 0.0394700813293457, 0.0395335693359375, 0.03964518356323242, 0.039378944396972655, 
0.03948748779296875, 0.03941888046264649, 0.03966259384155273, 0.039534591674804685, 0.03950694274902344, 0.039329792022705076, 0.039414783477783204, 0.03933695983886719, 0.03939123153686523, 0.03934515380859375, 0.03938816070556641, 0.03936358261108398, 0.03948646545410156, 0.03942092895507812, 0.03923455810546875, 0.03962572860717774, 0.03938816070556641, 0.03991961669921875, 0.03995750427246094, 0.039384063720703126, 0.03932262420654297, 0.039433216094970705, 0.03951513671875, 0.03944755172729492, 0.03880857467651367, 0.03933900833129883, 0.03931033706665039, 0.03964211273193359, 0.039636993408203126, 0.03922739028930664, 0.039346176147460936, 0.039299072265625, 0.039174144744873046, 0.039314430236816404, 0.039357440948486325, 0.03929702377319336, 0.03925299072265625, 0.03940249633789063, 0.039550975799560545, 0.03931340789794922, 0.03955712127685547, 0.03981107330322266, 0.03931340789794922, 0.04022886276245117, 0.040052734375, 0.03946086502075195, 0.039414783477783204, 0.03955199813842773, 0.03968102264404297, 0.03951103973388672, 0.03944755172729492, 0.03949055862426758, 0.03948441696166992, 0.039894016265869144, 0.03897753524780274, 0.03968102264404297, 0.038932479858398435, 0.03896627044677734, 0.03955199813842773, 0.03944140625, 0.039605247497558595, 0.039365631103515625, 0.03992166519165039, 0.039792640686035156, 0.0393809928894043, 0.039537662506103514, 0.040150016784667966, 0.04073574447631836, 0.03983769607543945, 0.039532543182373044, 0.03942707061767578, 0.039398399353027344, 0.0393994255065918, 0.039512065887451174, 0.039577598571777346, 0.03957555389404297, 0.039349246978759765, 0.03944243240356445, 0.039387134552001955, 0.039382015228271484, 0.03938816070556641, 0.0393779182434082, 0.03926835250854492, 0.03931238555908203, 0.03934822463989258, 0.03944345474243164, 0.03976192092895508, 0.03957145690917969, 0.04000358581542969, 0.03958272171020508, 0.0393963508605957, 0.040256511688232424, 0.041332736968994144, 0.04057292938232422, 0.03988684844970703, 0.03968000030517578, 0.039626750946044925, 0.039392257690429686, 0.03952435302734375, 0.03932262420654297, 0.03971993637084961, 0.03990323257446289, 0.03988991928100586, 0.0395786247253418, 0.03954687881469727, 0.040046592712402344, 0.039532543182373044, 0.03937279891967774, 0.03934207916259766, 0.03945779037475586, 0.03974246215820312, 0.03974348831176758, 0.03951718521118164, 0.039823360443115234, 0.03987558364868164, 0.03969126510620117, 0.03949465560913086, 0.03950592041015625, 0.0396124153137207, 0.0397916145324707, 0.04111769485473633, 0.04038451385498047, 0.039847934722900394, 0.03916799926757813, 0.03949158477783203, 0.03943731307983398, 0.03872051239013672, 0.039408641815185545, 0.039597057342529295, 0.03934105682373047, 0.0393809928894043, 0.03951103973388672, 0.039452671051025394, 0.03962879943847656, 0.039636993408203126, 0.04007526397705078, 0.03960627365112305, 0.03958988952636719, 0.03942911911010742, 0.03936153411865234, 0.03948953628540039, 0.04051148986816406, 0.039792640686035156, 0.039618560791015625, 0.039634944915771485, 0.03960319900512695, 0.03950284957885742, 0.03975065612792969, 0.039623680114746096, 0.03943423843383789, 0.041680896759033206, 0.04020019149780273, 0.039537662506103514, 0.0394598388671875, 0.03946700668334961, 0.039518207550048826, 0.03804159927368164, 0.03779174423217774, 0.03781836700439453, 0.03790233612060547, 0.03784908676147461, 0.03834368133544922, 0.03997183990478516, 0.039618560791015625, 0.039109630584716795, 0.039229438781738284, 0.03953868865966797, 0.040120319366455076, 
0.04007731246948242, 0.039465984344482424, 0.03943219375610352, 0.039657470703125, 0.03988172912597656, 0.038577152252197267, 0.03787673568725586, 0.03891302490234375, 0.03942092895507812, 0.0393994255065918, 0.03967795181274414, 0.03936870574951172, 0.038583297729492184, 0.03950899124145508, 0.03942502212524414, 0.03956326293945313, 0.038989822387695314, 0.03952230453491211, 0.039397377014160156, 0.03933695983886719, 0.039406593322753904, 0.03950387191772461, 0.041319423675537106, 0.0401899528503418, 0.03944550323486328, 0.03944755172729492, 0.03952435302734375, 0.03982233428955078, 0.03960319900512695, 0.039532543182373044, 0.03945574569702148, 0.03942195129394531, 0.03936870574951172, 0.03951718521118164, 0.03932672119140625, 0.03942604827880859, 0.03924582290649414, 0.039518207550048826, 0.03965030288696289, 0.039329792022705076, 0.03959500885009765, 0.03940966415405273, 0.04058931350708008, 0.04047257614135742, 0.038435840606689455, 0.03767910385131836, 0.03823206329345703, 0.039444480895996094, 0.03992268753051758, 0.039657470703125, 0.0393256950378418, 0.03924582290649414, 0.03975987243652344, 0.03950694274902344, 0.039406593322753904, 0.03938611221313477, 0.03927859115600586, 0.03922534561157227, 0.03924070358276367, 0.03935334396362305, 0.039413761138916016, 0.039330814361572264, 0.039452671051025394, 0.039618560791015625, 0.039436286926269534, 0.03941068649291992, 0.03954073715209961, 0.03943731307983398, 0.03932672119140625, 0.03945471954345703, 0.03940147018432617, 0.03932262420654297, 0.039462913513183595, 0.039446529388427735, 0.039809024810791016]",tokens/s,25.601071417855252,,,,,,,, -4bit-awq-gemv-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,Salesforce/codegen-6B-nl,Salesforce/codegen-6B-nl,cuda,0,42,,,True,,,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run - report = scenario.run(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run - self.run_model_loading_tracking(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking - backend.load() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load - self.load_transformers_model() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model - 
self.load_transformers_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights - self.load_transformers_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained - self.pretrained_model = self.automodel_loader.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3826, in from_pretrained - config = cls._autoset_attn_implementation( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1565, in _autoset_attn_implementation - config = cls._check_and_enable_sdpa( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1731, in _check_and_enable_sdpa - raise ValueError( -ValueError: CodeGenForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` - -",codegen,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemv-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,facebook/opt-125m,facebook/opt-125m,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - self.load_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3704, in from_pretrained - config = cls._autoset_attn_implementation( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1490, in _autoset_attn_implementation - config = cls._check_and_enable_sdpa( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1656, in _check_and_enable_sdpa - raise ValueError( -ValueError: OPTForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemv-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,Qwen/Qwen1.5-110B,Qwen/Qwen1.5-110B,cuda,0,42,,,True,,,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run - report = scenario.run(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run - self.run_model_loading_tracking(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking - backend.load() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load - self.load_transformers_model() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model - self.load_transformers_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights - self.load_transformers_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained - self.pretrained_model = self.automodel_loader.from_pretrained( - File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4034, in from_pretrained - dispatch_model(model, **device_map_kwargs) - File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 494, in dispatch_model - model.to(device) - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 2905, in to - return super().to(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1174, in to - return self._apply(convert) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply - module._apply(fn) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply - module._apply(fn) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply - module._apply(fn) - [Previous line repeated 2 more times] - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 854, in _apply - self._buffers[key] = fn(buf) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1160, in convert - return t.to( -torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 32.00 MiB. GPU 0 has a total capacity of 22.18 GiB of which 8.50 MiB is free. Process 87799 has 22.17 GiB memory in use. Of the allocated memory 21.91 GiB is allocated by PyTorch, and 17.87 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) - -",qwen2,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemv-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,B,B,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 304, in hf_raise_for_status - response.raise_for_status() - File ""/usr/local/lib/python3.10/dist-packages/requests/models.py"", line 1024, in raise_for_status - raise HTTPError(http_error_msg, response=self) -requests.exceptions.HTTPError: 404 Client Error: Not Found for url: 
https://huggingface.co/B/resolve/main/config.json - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1221, in hf_hub_download - return _hf_hub_download_to_cache_dir( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1325, in _hf_hub_download_to_cache_dir - _raise_on_head_call_error(head_call_error, force_download, local_files_only) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1823, in _raise_on_head_call_error - raise head_call_error - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1722, in _get_metadata_or_catch_error - metadata = get_hf_file_metadata(url=url, proxies=proxies, timeout=etag_timeout, headers=headers) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1645, in get_hf_file_metadata - r = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 372, in _request_wrapper - response = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 396, in _request_wrapper - hf_raise_for_status(response) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status - raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-669493d2-53e5d6e0771e6cca3e572585;3bd21c41-644c-4ec7-bc91-12e19074b5d8) - -Repository Not Found for url: https://huggingface.co/B/resolve/main/config.json. -Please make sure you specified the correct `repo_id` and `repo_type`. -If you are trying to access a private or gated repo, make sure you are authenticated. 
- -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 425, in cached_file - raise EnvironmentError( -OSError: B is not a local folder and is not a valid model identifier listed on 'https://huggingface.co/models' -If this is a private repository, make sure to pass a token having permission to this repo either by logging in with `huggingface-cli login` or by passing `token=` - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemv-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,t,t,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 304, in hf_raise_for_status - response.raise_for_status() - File ""/usr/local/lib/python3.10/dist-packages/requests/models.py"", line 1024, in raise_for_status - raise HTTPError(http_error_msg, response=self) -requests.exceptions.HTTPError: 404 Client Error: Not Found for url: https://huggingface.co/t/resolve/main/config.json - -The above exception was the direct cause of 
the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1221, in hf_hub_download - return _hf_hub_download_to_cache_dir( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1325, in _hf_hub_download_to_cache_dir - _raise_on_head_call_error(head_call_error, force_download, local_files_only) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1823, in _raise_on_head_call_error - raise head_call_error - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1722, in _get_metadata_or_catch_error - metadata = get_hf_file_metadata(url=url, proxies=proxies, timeout=etag_timeout, headers=headers) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1645, in get_hf_file_metadata - r = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 372, in _request_wrapper - response = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 396, in _request_wrapper - hf_raise_for_status(response) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status - raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-669490c6-227ed1d3643fef5743285def;b6987a35-3757-4f11-a09a-1c25c34296b6) - -Repository Not Found for url: https://huggingface.co/t/resolve/main/config.json. -Please make sure you specified the correct `repo_id` and `repo_type`. -If you are trying to access a private or gated repo, make sure you are authenticated. 
- -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 425, in cached_file - raise EnvironmentError( -OSError: t is not a local folder and is not a valid model identifier listed on 'https://huggingface.co/models' -If this is a private repository, make sure to pass a token having permission to this repo either by logging in with `huggingface-cli login` or by passing `token=` - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemv-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,stabilityai/stablelm-base-alpha-3b,stabilityai/stablelm-base-alpha-3b,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run - report = scenario.run(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 105, in run - _ = backend.generate(self.inputs, self.config.generate_kwargs) - File 
""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context - return func(*args, **kwargs) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 400, in generate - return self.pretrained_model.generate(**inputs, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context - return func(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 1914, in generate - result = self._sample( - File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2651, in _sample - outputs = self( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1097, in forward - outputs = self.gpt_neox( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 988, in forward - outputs = layer( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 753, in forward - attention_layer_outputs = self.attention( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 545, in forward - query, key, value, present = self._attn_projections_and_rope( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 224, in _attn_projections_and_rope - qkv = self.query_key_value(hidden_states) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context - return func(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/gemv.py"", line 162, in forward - assert AWQ_INSTALLED, ( -AssertionError: AWQ kernels could not be loaded. 
Please install them from https://github.com/casper-hansen/AutoAWQ_kernels - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemv-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,Salesforce/codegen-16B-nl,Salesforce/codegen-16B-nl,cuda,0,42,,,True,,,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run - report = scenario.run(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run - self.run_model_loading_tracking(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking - backend.load() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load - self.load_transformers_model() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model - self.load_transformers_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights - self.load_transformers_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained - self.pretrained_model = self.automodel_loader.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3826, in from_pretrained - config = cls._autoset_attn_implementation( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1565, in _autoset_attn_implementation - config = cls._check_and_enable_sdpa( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1731, in _check_and_enable_sdpa - raise ValueError( -ValueError: CodeGenForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. 
Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` - -",codegen,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemv-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,facebook/opt-6.7b,facebook/opt-6.7b,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - self.load_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3704, in from_pretrained - config = cls._autoset_attn_implementation( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1490, in _autoset_attn_implementation - config = cls._check_and_enable_sdpa( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1656, in _check_and_enable_sdpa - raise ValueError( -ValueError: OPTForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. 
Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-exllama-v2-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,.,.,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 373, in cached_file - raise EnvironmentError( -OSError: . does not appear to have a file named config.json. Checkout 'https://huggingface.co/./tree/None' for available files. 
- -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,2,64,1,,, -4bit-awq-exllama-v2-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,l,l,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 304, in hf_raise_for_status - response.raise_for_status() - File ""/usr/local/lib/python3.10/dist-packages/requests/models.py"", line 1024, in raise_for_status - raise HTTPError(http_error_msg, response=self) -requests.exceptions.HTTPError: 404 Client Error: Not Found for url: https://huggingface.co/l/resolve/main/config.json - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1221, in hf_hub_download - return _hf_hub_download_to_cache_dir( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1325, in _hf_hub_download_to_cache_dir - _raise_on_head_call_error(head_call_error, force_download, local_files_only) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1823, in _raise_on_head_call_error - raise head_call_error - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1722, in _get_metadata_or_catch_error - metadata = get_hf_file_metadata(url=url, proxies=proxies, timeout=etag_timeout, headers=headers) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1645, in get_hf_file_metadata - r = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 372, in _request_wrapper - response = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 396, in _request_wrapper - hf_raise_for_status(response) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status - raise 
RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-669491e9-1b19c4010aafe9e57f7345d0;b70b3100-21e1-41b3-99df-a98dfad0a9c6) - -Repository Not Found for url: https://huggingface.co/l/resolve/main/config.json. -Please make sure you specified the correct `repo_id` and `repo_type`. -If you are trying to access a private or gated repo, make sure you are authenticated. - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 425, in cached_file - raise EnvironmentError( -OSError: l is not a local folder and is not a valid model identifier listed on 'https://huggingface.co/models' -If this is a private repository, make sure to pass a token having permission to this repo either by logging in with `huggingface-cli login` or by passing `token=` - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,2,64,1,,, -4bit-awq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,Qwen/Qwen1.5-32B,Qwen/Qwen1.5-32B,cuda,0,42,,,True,,,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA 
A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,,qwen2,MB,17838.415872,24081.072128,0.0,23444.062208,22019.972096,s,1,18.3287421875,18.3287421875,0.0,18.3287421875,18.3287421875,18.3287421875,18.3287421875,[18.3287421875],,kWh,0.00013623980195277455,7.465048256269771e-05,0.0002943844021739972,0.0005052746866894694,,MB,4451.106816,24146.08384,0.0,23496.491008,20926.605824,s,10,4.918377197265625,0.4918377197265625,0.0003760529525239721,0.4918617553710938,0.49218525085449216,0.4924477279663086,0.4926577096557617,"[0.492710205078125, 0.4919119873046875, 0.4918916015625, 0.49147958374023437, 0.49212692260742186, 0.4914801940917969, 0.49191433715820315, 0.4918319091796875, 0.4917283935546875, 0.4913020629882813]",tokens/s,520.4968828790182,kWh,5.812386391931152e-06,3.184424457178385e-06,2.420303523542875e-05,3.3199846084538285e-05,tokens/kWh,7710879.121190366,MB,4455.432192,23005.233152,0.0,22347.251712,20926.608384,s,10,48.66162158203125,4.866162158203125,0.007468603151705288,4.865214599609375,4.877979296875,4.878231103515625,4.878432548828125,"[4.85516259765625, 4.85863671875, 4.8639814453125, 4.8594140625, 4.86644775390625, 4.862896484375, 4.8714404296875, 4.87792333984375, 4.86723583984375, 4.87848291015625]",tokens/s,12.946547597843168,kWh,5.7634642681054924e-05,3.1588189990339226e-05,0.0001690266471259716,0.00025824947979736574,tokens/kWh,243950.15257894289,,s,630,48.65896754455573,0.07723645641992963,0.0005316764344481234,0.07704064178466796,0.07794503631591797,0.07835867881774902,0.07926602897644043,"[0.07710002899169922, 0.07693106842041016, 0.07692185974121094, 0.07785472106933594, 0.07696998596191407, 0.07693721771240235, 0.07679283142089843, 0.07685939025878906, 0.07690342712402344, 0.07686553955078125, 0.07680716705322266, 0.07743385314941406, 0.0774307861328125, 0.07742566680908203, 0.07671807861328125, 0.07669862365722656, 0.07679897308349609, 0.07816191864013672, 0.07742361450195312, 0.07680818939208985, 0.07672319793701173, 0.07692594909667969, 0.07678771209716796, 0.07700275421142579, 0.07699967956542969, 0.07708672332763672, 0.07720243072509765, 0.07683277130126953, 0.07693414306640625, 0.07701606750488281, 0.07743795013427734, 0.07701197052001953, 0.07729869079589843, 0.07700172424316407, 0.07687474822998047, 0.07655014038085937, 0.07684505462646485, 0.07935794830322265, 0.07794483184814453, 0.07740620422363281, 0.07676518249511718, 0.07666483306884765, 0.07680000305175781, 0.07794278717041016, 0.07683277130126953, 0.07693516540527344, 0.07669657897949218, 0.07700685119628906, 0.07720140838623046, 0.07699251556396484, 0.07725878143310547, 0.07687267303466797, 0.07722188568115235, 0.07698841857910156, 0.07685939025878906, 0.07677133178710938, 0.07713177490234376, 0.07688294219970702, 0.07671091461181641, 0.0768532485961914, 0.07678463745117188, 0.07682662200927734, 0.07682764434814453, 0.07717273712158203, 0.07691366577148437, 0.0768880615234375, 0.07699558258056641, 0.07699763488769532, 0.07691161346435547, 0.07681638336181641, 0.07695462036132812, 0.07677439880371094, 0.07728537750244141, 0.07714406585693359, 0.07705702209472656, 0.0768901138305664, 0.07696998596191407, 0.07688191986083984, 0.07685529327392578, 0.07715430450439453, 0.0766924819946289, 0.0768768310546875, 0.07683379364013672, 0.07683376312255859, 0.07693414306640625, 0.07678463745117188, 0.07735295867919922, 0.07702841949462891, 0.07697401428222657, 0.07704678344726562, 0.07694131469726563, 0.077085693359375, 0.07722496032714844, 0.07696998596191407, 0.07709388732910157, 0.07683379364013672, 
0.07693516540527344, 0.07680409240722656, 0.07691366577148437, 0.07703244781494141, 0.07724543762207031, 0.07686348724365234, 0.07686553955078125, 0.07754956817626953, 0.07826739501953126, 0.07824384307861328, 0.07800115203857422, 0.07697510528564454, 0.07701708984375, 0.07696588897705078, 0.07704678344726562, 0.07698636627197265, 0.07698636627197265, 0.07794687652587891, 0.07720243072509765, 0.07698329925537109, 0.07843840026855468, 0.07706829071044922, 0.07702937316894531, 0.07692902374267578, 0.07851213073730469, 0.07732121276855469, 0.0770723876953125, 0.07715840148925782, 0.07700787353515624, 0.07685017395019532, 0.07703040313720703, 0.07694847869873046, 0.07862579345703125, 0.07812812805175781, 0.07695155334472656, 0.07671091461181641, 0.0768532485961914, 0.07884083557128906, 0.07758131408691406, 0.07846604919433593, 0.07672115325927735, 0.07702528381347656, 0.07696691131591797, 0.07680409240722656, 0.07732736206054687, 0.07717068481445312, 0.07687782287597657, 0.07766937255859375, 0.07690137481689453, 0.07736831665039062, 0.07750860595703125, 0.07835135650634766, 0.07689727783203125, 0.07702528381347656, 0.07709593963623047, 0.07699456024169922, 0.07691161346435547, 0.07701708984375, 0.07815270233154296, 0.07753113555908203, 0.07696697235107422, 0.07652140808105469, 0.07668121337890625, 0.07693516540527344, 0.07741439819335938, 0.0771409912109375, 0.07693824005126954, 0.07673036956787109, 0.07693824005126954, 0.07833497619628907, 0.07798579406738282, 0.07738572692871094, 0.07793561553955078, 0.07688191986083984, 0.07680512237548828, 0.07693004608154297, 0.07682662200927734, 0.0773232650756836, 0.07724441528320312, 0.07703961944580077, 0.07695155334472656, 0.07689215850830078, 0.07711129760742187, 0.07673958587646484, 0.0767457275390625, 0.07693209838867188, 0.07681536102294922, 0.07679078674316406, 0.07675494384765626, 0.07697714996337891, 0.07684095764160156, 0.0775167999267578, 0.07724134063720703, 0.07710822296142578, 0.07696076965332031, 0.07697408294677735, 0.0771747817993164, 0.0772495346069336, 0.07736934661865234, 0.07753011322021484, 0.07710208129882813, 0.07680512237548828, 0.07684710693359376, 0.07689933013916016, 0.07681126403808594, 0.0767119369506836, 0.07694131469726563, 0.0767262725830078, 0.07709388732910157, 0.07684607696533204, 0.07677747344970703, 0.07700685119628906, 0.07729971313476562, 0.07692390441894531, 0.07682867431640625, 0.0769095687866211, 0.07676416015625, 0.07684812927246094, 0.076653564453125, 0.07674674987792969, 0.07698534393310547, 0.07672831726074218, 0.0768880615234375, 0.07677133178710938, 0.07673958587646484, 0.0798740463256836, 0.07854694366455078, 0.07792332458496094, 0.077159423828125, 0.07693824005126954, 0.07715020751953125, 0.07691366577148437, 0.07694233703613282, 0.0769781723022461, 0.07711846160888672, 0.07702528381347656, 0.07698534393310547, 0.07678873443603515, 0.07754956817626953, 0.07698738861083984, 0.07712665557861328, 0.07937229156494141, 0.078166015625, 0.07706009674072266, 0.07703756713867188, 0.07695974731445313, 0.0769587173461914, 0.07725670623779297, 0.0771061782836914, 0.07694028472900391, 0.07695155334472656, 0.07708159637451172, 0.07723725128173828, 0.07671501159667969, 0.07699148559570312, 0.07728435516357422, 0.07709388732910157, 0.07699353790283203, 0.07718502044677734, 0.07790796661376953, 0.07698841857910156, 0.07735910034179687, 0.07754649353027344, 0.07769190216064453, 0.0774676513671875, 0.07759871673583985, 0.07699763488769532, 0.07863603210449219, 0.0775167999267578, 0.07700172424316407, 0.07728844451904297, 
0.07726182556152343, 0.077517822265625, 0.07676518249511718, 0.07696383666992188, 0.07780659484863281, 0.07812710571289062, 0.0770508804321289, 0.07689727783203125, 0.07687577819824219, 0.07717990112304687, 0.0767979507446289, 0.07698636627197265, 0.07708159637451172, 0.077264892578125, 0.07772876739501954, 0.0784189453125, 0.07722291564941407, 0.07741747283935548, 0.07748812866210937, 0.07694233703613282, 0.07703040313720703, 0.07656038665771485, 0.07699251556396484, 0.07669554901123046, 0.07688294219970702, 0.07677030181884766, 0.07702937316894531, 0.0769587173461914, 0.077517822265625, 0.07699046325683594, 0.07717171478271484, 0.07714918518066406, 0.0768901138305664, 0.07693004608154297, 0.07699353790283203, 0.07687577819824219, 0.07690137481689453, 0.07687474822998047, 0.07686656188964844, 0.07826943969726563, 0.07743795013427734, 0.0777349090576172, 0.07737446594238281, 0.07689625549316406, 0.07825305938720703, 0.07712767791748047, 0.07696896362304688, 0.0769781723022461, 0.07752601623535156, 0.07756697845458985, 0.07712255859375, 0.07707852935791015, 0.07697618865966797, 0.07703135681152344, 0.07690137481689453, 0.07694643402099609, 0.07694233703613282, 0.07700479888916016, 0.07683277130126953, 0.07689113616943359, 0.0768358383178711, 0.07686758422851563, 0.07727410888671875, 0.07725875091552735, 0.07699353790283203, 0.07673856353759766, 0.07682969665527344, 0.07668940734863282, 0.07706521606445313, 0.07688396453857421, 0.07698636627197265, 0.07862169647216796, 0.07706829071044922, 0.07820492553710938, 0.07675084686279297, 0.07860326385498047, 0.07727104187011719, 0.07749836730957031, 0.0769454116821289, 0.07689727783203125, 0.0769648666381836, 0.07709798431396485, 0.07715225219726562, 0.07706317138671875, 0.07691571044921874, 0.07681536102294922, 0.07754959869384766, 0.07856022644042969, 0.07732838439941406, 0.07815679931640625, 0.07725567626953125, 0.07701299285888671, 0.07695667266845703, 0.0771962890625, 0.07689830780029297, 0.07705907440185547, 0.07683891296386719, 0.07717683410644531, 0.0768174057006836, 0.07711641693115234, 0.07745126342773437, 0.0777154541015625, 0.07776358032226563, 0.07789875030517578, 0.07706111907958985, 0.07710105895996094, 0.07710310363769532, 0.07700991821289062, 0.07687884521484376, 0.07695155334472656, 0.07665869140625, 0.07792947387695312, 0.07705599975585938, 0.07854080200195312, 0.07720857238769531, 0.07702015686035156, 0.07690239715576172, 0.07686860656738281, 0.07686246490478515, 0.07715225219726562, 0.07810662078857422, 0.07724339294433594, 0.08019149017333985, 0.0777553939819336, 0.07733657836914062, 0.07761408233642578, 0.07764889526367187, 0.0770877456665039, 0.07687474822998047, 0.07686553955078125, 0.07669657897949218, 0.07700991821289062, 0.07697920227050781, 0.07792230224609376, 0.07742054748535156, 0.0771809310913086, 0.07738674926757813, 0.076801025390625, 0.07714713287353515, 0.07722803497314454, 0.07727001953125, 0.07746867370605469, 0.07722291564941407, 0.0776079330444336, 0.07696691131591797, 0.07748198699951171, 0.07770111846923829, 0.0769648666381836, 0.07708364868164062, 0.07817318725585938, 0.07756185913085938, 0.07691673278808593, 0.07695053100585937, 0.07708057403564453, 0.07686656188964844, 0.07690547180175782, 0.07689421081542969, 0.07670783996582031, 0.07705292510986328, 0.07822745513916016, 0.0774471664428711, 0.07704166412353515, 0.07737139129638672, 0.07743590545654297, 0.07760384368896485, 0.07700275421142579, 0.0770334701538086, 0.07727104187011719, 0.07703449249267579, 0.07686246490478515, 0.0771666259765625, 
0.0769044189453125, 0.07862783813476562, 0.07722598266601563, 0.07729049682617188, 0.07689933013916016, 0.0769443817138672, 0.07699353790283203, 0.07710720062255859, 0.07715634918212891, 0.0769249267578125, 0.07693929290771484, 0.07736214447021485, 0.07781581115722656, 0.07755366516113281, 0.07713279724121094, 0.07688294219970702, 0.07829196929931641, 0.07925657653808593, 0.07721676635742188, 0.0769617919921875, 0.07695260620117188, 0.0790937271118164, 0.07702835083007813, 0.07744306945800782, 0.07686962890625, 0.07738982391357421, 0.07780352020263671, 0.07707852935791015, 0.07704370880126953, 0.07691366577148437, 0.07781887817382813, 0.07685427093505859, 0.07917772674560547, 0.07729254150390626, 0.07708467102050781, 0.07691571044921874, 0.07705497741699219, 0.07676518249511718, 0.07751168060302735, 0.07806156921386719, 0.07696588897705078, 0.07673343658447265, 0.0769269790649414, 0.07715328216552735, 0.07684095764160156, 0.07773696136474609, 0.07761817932128906, 0.07808512115478515, 0.07708672332763672, 0.07948595428466797, 0.077264892578125, 0.077412353515625, 0.07816191864013672, 0.0774502410888672, 0.07692082977294921, 0.07693926239013672, 0.07699148559570312, 0.0765880355834961, 0.07690854644775391, 0.07748095703125, 0.07775641632080078, 0.0770877456665039, 0.07827967834472656, 0.07760076904296875, 0.07784960174560547, 0.07943270111083985, 0.07821414184570312, 0.07721984100341797, 0.07926988983154297, 0.07710514831542968, 0.07775334167480469, 0.07730073547363281, 0.0772894744873047, 0.07764991760253906, 0.07685836791992187, 0.07698226928710937, 0.07688909149169922, 0.07701299285888671, 0.0770334701538086, 0.07706623840332032, 0.0769249267578125, 0.07759980773925781, 0.07696176147460937, 0.07674979400634765, 0.07717068481445312, 0.07787315368652344, 0.07780249786376953, 0.07682457733154296, 0.07735295867919922, 0.07689113616943359, 0.07752089691162109, 0.0768358383178711, 0.07839231872558594, 0.07755980682373047, 0.07717581176757812, 0.07689215850830078, 0.07679283142089843, 0.07720448303222656, 0.07727206420898437, 0.07868825531005859, 0.07708467102050781, 0.07707647705078124, 0.07685734558105468, 0.07686962890625, 0.07691571044921874, 0.0769966049194336, 0.0770334701538086, 0.07719939422607422, 0.07684502410888672, 0.07696281433105469, 0.07721574401855469, 0.07729357147216796, 0.07762432098388672, 0.07752294158935547, 0.07704370880126953, 0.07695257568359375, 0.07705292510986328, 0.07703961944580077, 0.07810364532470702, 0.07700572967529297, 0.07712973022460938, 0.07689113616943359, 0.07699148559570312, 0.07691161346435547, 0.0772495346069336, 0.07712767791748047, 0.07793049621582031, 0.07712153625488281, 0.07680512237548828, 0.07771647644042969, 0.07706317138671875, 0.07726080322265624, 0.07689421081542969, 0.0790804443359375, 0.07800627136230469, 0.07704780578613281, 0.07691161346435547, 0.07866368103027344, 0.07730278778076172, 0.07782297515869141, 0.07694847869873046, 0.07727410888671875, 0.07689113616943359, 0.07806976318359375, 0.07694950103759765, 0.07687065887451172, 0.07904768371582031, 0.07729561614990234, 0.07773798370361328, 0.07873741149902344, 0.07738470458984376, 0.07689113616943359, 0.0776263656616211, 0.07711334228515625, 0.076906494140625, 0.07691776275634765, 0.07745126342773437, 0.07715737915039063, 0.0769280014038086, 0.07735398101806641, 0.07715634918212891, 0.07745536041259765, 0.0768901138305664, 0.07782809448242188, 0.07707852935791015, 0.07699251556396484, 0.07722291564941407, 0.07700383758544922, 0.07704876708984375, 0.07822643280029297, 
0.07706623840332032, 0.0773560333251953, 0.07723417663574218, 0.07756902313232422, 0.07738368225097657, 0.07731199645996094, 0.07769497680664063, 0.07698329925537109, 0.07700685119628906, 0.07886643218994141, 0.07685427093505859, 0.07850188446044921, 0.07801344299316407, 0.07702732849121094, 0.07680614471435547, 0.07697714996337891, 0.0773785629272461, 0.0769781723022461, 0.07728025817871094, 0.07827455902099609, 0.07701503753662109, 0.07836466979980469]",tokens/s,12.947253749745645,,,2,64,1,,, -4bit-awq-exllama-v2-flash_attention_2,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,openai-community/gpt2,openai-community/gpt2,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.0,,0.30.1,,,,1.19.2,,,,0.11.1,,,,,,,,,,,,,,,,,,,,,,,,,,,MB,1224.593408,1005.060096,0.0,358.612992,318.913024,s,23,0.170830846786499,0.007427428121152131,0.00024413026802978468,0.007357215881347657,0.0075946943283081055,0.007852940797805786,0.008269951295852663,"[0.008380415916442872, 0.00787830400466919, 0.007307871818542481, 0.007413631916046143, 0.007416895866394043, 0.0072780799865722655, 0.007253759860992431, 0.007340799808502197, 0.00735097599029541, 0.0072897601127624514, 0.007357215881347657, 0.007351200103759766, 0.007452095985412598, 0.007375872135162354, 0.007336959838867187, 0.007404831886291504, 0.007378367900848388, 0.007350815773010254, 0.007241919994354248, 0.007624671936035156, 0.007474783897399902, 0.00736736011505127, 0.007204256057739258]",tokens/s,34466.843141968995,kWh,8.354114192041346e-08,4.5776604383460934e-08,1.7923360244695143e-07,3.0855134875082585e-07,tokens/kWh,829683620.0406166,MB,1224.593408,1005.060096,0.0,358.612992,328.804864,s,23,10.12951983642578,0.44041390593155577,0.009542742855595688,0.4373620910644531,0.4437781982421875,0.45109108581542967,0.474916849975586,"[0.4814091796875, 0.45189859008789063, 0.4373620910644531, 0.43599356079101564, 0.4377264404296875, 0.4396331787109375, 0.4402783508300781, 0.43823895263671875, 0.43630859375, 0.4375812683105469, 0.43703091430664065, 0.43696951293945313, 0.4364114990234375, 0.43596710205078126, 0.43623919677734374, 0.43678070068359376, 0.43731332397460937, 0.43723104858398437, 0.4438235473632812, 0.44186441040039065, 0.4435968017578125, 0.4382613220214844, 0.4316002502441406]",tokens/s,143.04725430216266,kWh,5.045933914928732e-06,2.7649476769741185e-06,8.713782771215293e-06,1.6524664363118138e-05,tokens/kWh,3812482.8810811713,,s,1448,10.284382205963144,0.007102473899145811,0.0009630348618537469,0.006931456089019775,0.00721827836036682,0.007574528217315674,0.014597724084854125,"[0.007964672088623047, 0.00796569585800171, 0.007897088050842285, 0.00785203218460083, 0.007930880069732665, 0.007737343788146973, 0.007622655868530274, 0.00760422420501709, 0.007505919933319092, 0.0074967041015625, 0.0075335679054260255, 0.007648255825042725, 0.007678976058959961, 0.007658495903015137, 0.007508992195129394, 0.007552000045776367, 0.007705599784851074, 0.00764415979385376, 0.007676928043365478, 0.007573503971099854, 0.007642111778259277, 0.007664639949798584, 0.00765337610244751, 
0.0074629120826721195, 0.007479296207427978, 0.007540736198425293, 0.007504896163940429, 0.007546879768371582, 0.007592959880828858, 0.007724031925201416, 0.007574528217315674, 0.007631872177124023, 0.007477248191833496, 0.007482367992401123, 0.007755775928497314, 0.0074967041015625, 0.007511040210723877, 0.007269375801086426, 0.007320576190948487, 0.007468031883239746, 0.007742464065551758, 0.00800051212310791, 0.007601151943206787, 0.007601151943206787, 0.008147968292236327, 0.008087552070617676, 0.007609344005584716, 0.007886847972869874, 0.007772160053253174, 0.007862271785736084, 0.0077844481468200685, 0.00765235185623169, 0.007622655868530274, 0.007872511863708496, 0.007607295989990235, 0.007804927825927735, 0.007388160228729248, 0.007648255825042725, 0.007409664154052734, 0.007129087924957276, 0.007120895862579346, 0.007165952205657959, 0.01587097644805908, 0.007638016223907471, 0.007574528217315674, 0.007745535850524903, 0.007613440036773681, 0.007448575973510742, 0.00725708818435669, 0.00749567985534668, 0.007291903972625732, 0.007451648235321045, 0.007557119846343994, 0.007046144008636474, 0.007103487968444824, 0.006964223861694336, 0.007108607769012451, 0.007080959796905518, 0.007032832145690918, 0.006998047828674316, 0.007235551834106445, 0.006897664070129395, 0.007023615837097168, 0.007216127872467041, 0.007019552230834961, 0.006999008178710938, 0.007031807899475098, 0.007042079925537109, 0.006928351879119873, 0.007155712127685547, 0.006977536201477051, 0.007123968124389648, 0.006990848064422607, 0.007591936111450195, 0.007071743965148926, 0.006994944095611572, 0.006910975933074951, 0.0069928960800170895, 0.007258111953735351, 0.006973440170288086, 0.007234591960906983, 0.0069836478233337405, 0.007014400005340577, 0.006899712085723877, 0.007013376235961914, 0.007256063938140869, 0.006994944095611572, 0.007252992153167725, 0.007035903930664063, 0.007158783912658692, 0.006909952163696289, 0.006960127830505371, 0.0070594558715820314, 0.007104512214660645, 0.006937600135803222, 0.0069632000923156735, 0.0069324798583984375, 0.007048192024230957, 0.00692633581161499, 0.00800153636932373, 0.007895040035247802, 0.007285759925842285, 0.007116799831390381, 0.00709119987487793, 0.007049215793609619, 0.01459609603881836, 0.00693555212020874, 0.006933504104614257, 0.006979584217071533, 0.006875135898590088, 0.00694271993637085, 0.006948863983154297, 0.006976511955261231, 0.006959104061126709, 0.0069283838272094726, 0.00693452787399292, 0.006937600135803222, 0.006903808116912841, 0.006919167995452881, 0.0069283838272094726, 0.006905856132507324, 0.007003136157989502, 0.007137279987335205, 0.006895616054534912, 0.006892543792724609, 0.0068986878395080565, 0.006917119979858399, 0.00692633581161499, 0.006939648151397705, 0.00692633581161499, 0.006933504104614257, 0.00690176010131836, 0.006895616054534912, 0.006918144226074219, 0.00692633581161499, 0.0068986878395080565, 0.006986752033233643, 0.006941696166992187, 0.00695091199874878, 0.0068853759765625, 0.0069324798583984375, 0.006906879901885986, 0.006952960014343262, 0.006914048194885254, 0.00693555212020874, 0.00689356803894043, 0.006964223861694336, 0.006908927917480469, 0.006937600135803222, 0.00693555212020874, 0.007006207942962647, 0.006982656002044678, 0.007010303974151612, 0.006971392154693603, 0.006910975933074951, 0.006913023948669434, 0.006918144226074219, 0.006919167995452881, 0.006913023948669434, 0.006947840213775635, 0.006929408073425293, 0.006910975933074951, 0.0069027838706970214, 0.006880256175994873, 0.006883327960968018, 
0.0069283838272094726, 0.007147520065307617, 0.006989823818206787, 0.014668800354003907, 0.006913023948669434, 0.00693452787399292, 0.006882304191589355, 0.00694374418258667, 0.006876160144805908, 0.006906879901885986, 0.006904831886291504, 0.0068884482383728025, 0.006895616054534912, 0.006920191764831543, 0.006875135898590088, 0.006978559970855713, 0.006938623905181885, 0.006918144226074219, 0.007023615837097168, 0.006938623905181885, 0.006894591808319092, 0.006882304191589355, 0.006928415775299073, 0.006888415813446045, 0.006894591808319092, 0.006958079814910889, 0.006945792198181152, 0.006919167995452881, 0.006920191764831543, 0.006819839954376221, 0.006814720153808594, 0.006752255916595459, 0.006714367866516113, 0.006791168212890625, 0.00672870397567749, 0.0067870721817016606, 0.006815743923187256, 0.006841343879699707, 0.006661119937896728, 0.00682700777053833, 0.0067348480224609375, 0.006766592025756836, 0.0067870721817016606, 0.006717440128326416, 0.006973440170288086, 0.007019519805908203, 0.006986752033233643, 0.007194623947143554, 0.006993919849395752, 0.0069632000923156735, 0.006990848064422607, 0.007023615837097168, 0.006989823818206787, 0.006961152076721191, 0.007007232189178467, 0.006951935768127441, 0.007015423774719238, 0.006973440170288086, 0.006993919849395752, 0.006976511955261231, 0.0072765440940856935, 0.006922239780426025, 0.007046144008636474, 0.006909952163696289, 0.007103487968444824, 0.006964223861694336, 0.014816287994384766, 0.0069242558479309085, 0.007008255958557129, 0.006899712085723877, 0.006957056045532227, 0.006952960014343262, 0.006903808116912841, 0.007002111911773682, 0.007018496036529541, 0.0069847040176391605, 0.007208960056304932, 0.006906879901885986, 0.007000127792358399, 0.006973375797271728, 0.006986783981323242, 0.007034848213195801, 0.006924287796020508, 0.006998015880584717, 0.006948863983154297, 0.00688640022277832, 0.00694374418258667, 0.006968319892883301, 0.006874112129211426, 0.006914048194885254, 0.00687718391418457, 0.006922239780426025, 0.00687820816040039, 0.0068986878395080565, 0.006887423992156983, 0.006941696166992187, 0.006927360057830811, 0.006922239780426025, 0.006933504104614257, 0.006924287796020508, 0.006929408073425293, 0.00690176010131836, 0.006960127830505371, 0.006919167995452881, 0.006922239780426025, 0.006909952163696289, 0.006914048194885254, 0.006906879901885986, 0.00694374418258667, 0.006897664070129395, 0.007015423774719238, 0.006918144226074219, 0.006923264026641846, 0.006927360057830811, 0.00693555212020874, 0.006977536201477051, 0.006917119979858399, 0.006937600135803222, 0.0069212160110473635, 0.00692633581161499, 0.006949888229370117, 0.006940671920776367, 0.006956031799316406, 0.006988800048828125, 0.0069283838272094726, 0.006905856132507324, 0.006929408073425293, 0.006905888080596924, 0.006922207832336426, 0.014533632278442383, 0.006973440170288086, 0.006919167995452881, 0.006920191764831543, 0.006881279945373535, 0.006919167995452881, 0.006936575889587402, 0.006895616054534912, 0.006916096210479736, 0.006924287796020508, 0.006922239780426025, 0.006907904148101806, 0.006941696166992187, 0.006944767951965332, 0.007005184173583984, 0.006919167995452881, 0.00692633581161499, 0.00687820816040039, 0.0069324798583984375, 0.006889472007751465, 0.006889472007751465, 0.007050240039825439, 0.00713318395614624, 0.007243775844573975, 0.007284736156463623, 0.0071905279159545895, 0.007258111953735351, 0.007041024208068848, 0.006989823818206787, 0.007035903930664063, 0.007030784130096435, 0.0069918718338012695, 
0.006993919849395752, 0.006941696166992187, 0.0069253120422363285, 0.006946815967559815, 0.006966271877288818, 0.006931456089019775, 0.007050240039825439, 0.006999040126800537, 0.006977536201477051, 0.006973440170288086, 0.006953983783721924, 0.006923264026641846, 0.006951935768127441, 0.006907904148101806, 0.007014431953430176, 0.006917088031768799, 0.006968319892883301, 0.006927360057830811, 0.006930431842803955, 0.00694271993637085, 0.006913023948669434, 0.007008255958557129, 0.006936575889587402, 0.00692633581161499, 0.006958079814910889, 0.007015423774719238, 0.006990848064422607, 0.006922239780426025, 0.00692633581161499, 0.00693452787399292, 0.007012351989746094, 0.014345215797424317, 0.00676966381072998, 0.006661119937896728, 0.0067717118263244626, 0.006789120197296142, 0.006773759841918945, 0.00672051191329956, 0.006830080032348633, 0.006714367866516113, 0.006793216228485107, 0.006797311782836914, 0.00676358413696289, 0.0067491202354431155, 0.006756351947784424, 0.006776832103729248, 0.006715392112731934, 0.006752255916595459, 0.007037951946258545, 0.0069959678649902345, 0.006970367908477783, 0.00693555212020874, 0.006985727787017822, 0.0073062400817871095, 0.006953983783721924, 0.00690176010131836, 0.007269375801086426, 0.006917119979858399, 0.006968319892883301, 0.007024640083312988, 0.007193600177764893, 0.007230463981628418, 0.006991936206817627, 0.007226304054260254, 0.006900735855102539, 0.007223296165466309, 0.0069970240592956544, 0.006896607875823975, 0.007322624206542969, 0.007062528133392334, 0.006974463939666748, 0.00688640022277832, 0.007178239822387696, 0.0069621758460998535, 0.007023615837097168, 0.0069212160110473635, 0.007299071788787842, 0.0071833600997924804, 0.007261184215545655, 0.007209983825683594, 0.007198719978332519, 0.007049215793609619, 0.0069959678649902345, 0.007003136157989502, 0.007105535984039306, 0.007074816226959229, 0.006998015880584717, 0.0069918718338012695, 0.00690176010131836, 0.006965248107910156, 0.007762944221496582, 0.006952960014343262, 0.006976511955261231, 0.0069202561378479, 0.01471072006225586, 0.0072724480628967286, 0.006916096210479736, 0.006939648151397705, 0.006899712085723877, 0.007030784130096435, 0.006989823818206787, 0.006977536201477051, 0.007060480117797851, 0.0072120318412780765, 0.006953983783721924, 0.006909952163696289, 0.006884352207183838, 0.006938623905181885, 0.006926400184631348, 0.00695084810256958, 0.006960127830505371, 0.006939648151397705, 0.0068853759765625, 0.006904831886291504, 0.006896639823913574, 0.006917119979858399, 0.006908959865570068, 0.006914015769958496, 0.006940671920776367, 0.006941696166992187, 0.006894591808319092, 0.006892543792724609, 0.007236608028411865, 0.007029759883880615, 0.006912000179290771, 0.006895679950714112, 0.006928319931030273, 0.006906879901885986, 0.006958079814910889, 0.006897664070129395, 0.006916096210479736, 0.006875167846679688, 0.006910943984985351, 0.00690176010131836, 0.00689356803894043, 0.0069027838706970214, 0.007012351989746094, 0.006970367908477783, 0.006946879863739014, 0.006910912036895752, 0.006938623905181885, 0.006899712085723877, 0.0069028158187866215, 0.006964191913604736, 0.006917119979858399, 0.006930431842803955, 0.006939648151397705, 0.0070594558715820314, 0.006999040126800537, 0.006949888229370117, 0.00695091199874878, 0.007007232189178467, 0.006937600135803222, 0.006905856132507324, 0.006920191764831543, 0.006880256175994873, 0.00692633581161499, 0.014599167823791503, 0.0069253120422363285, 0.006918144226074219, 0.006918144226074219, 
0.006866943836212158, 0.006870016098022461, 0.00694374418258667, 0.0069816322326660156, 0.006914048194885254, 0.006931456089019775, 0.006946815967559815, 0.006905856132507324, 0.006940671920776367, 0.006916096210479736, 0.007143424034118652, 0.006927360057830811, 0.006937600135803222, 0.006957056045532227, 0.006896639823913574, 0.006907904148101806, 0.006887423992156983, 0.006913023948669434, 0.006900735855102539, 0.006895616054534912, 0.006890495777130127, 0.0068915200233459475, 0.006884352207183838, 0.0069324798583984375, 0.006916096210479736, 0.006897664070129395, 0.006910975933074951, 0.006903808116912841, 0.00693452787399292, 0.007013376235961914, 0.00692633581161499, 0.006889472007751465, 0.006912000179290771, 0.0068986878395080565, 0.006977536201477051, 0.006904895782470703, 0.006909887790679931, 0.006899712085723877, 0.006912000179290771, 0.006887423992156983, 0.006910975933074951, 0.006879231929779053, 0.006884352207183838, 0.0068915200233459475, 0.006903808116912841, 0.006946815967559815, 0.006875135898590088, 0.006924287796020508, 0.006918144226074219, 0.006905856132507324, 0.006973440170288086, 0.006930431842803955, 0.006915071964263916, 0.00691206407546997, 0.007011263847351074, 0.0069212160110473635, 0.006913023948669434, 0.006920191764831543, 0.006917119979858399, 0.014561280250549317, 0.006904831886291504, 0.0069283838272094726, 0.006930431842803955, 0.006904831886291504, 0.006907904148101806, 0.006908927917480469, 0.006931456089019775, 0.0069283838272094726, 0.006903808116912841, 0.006924287796020508, 0.006895616054534912, 0.006883327960968018, 0.007070720195770264, 0.006931456089019775, 0.006934559822082519, 0.006951903820037842, 0.007005184173583984, 0.0070225920677185055, 0.006919167995452881, 0.006883327960968018, 0.006973440170288086, 0.006927360057830811, 0.006957056045532227, 0.006895616054534912, 0.006903808116912841, 0.007022624015808106, 0.007080927848815918, 0.006919167995452881, 0.006929408073425293, 0.006917119979858399, 0.006916096210479736, 0.006899712085723877, 0.006899712085723877, 0.006895616054534912, 0.006917119979858399, 0.006946815967559815, 0.006915071964263916, 0.006927360057830811, 0.006884352207183838, 0.006908927917480469, 0.00687820816040039, 0.006930431842803955, 0.006919167995452881, 0.006918144226074219, 0.007233535766601563, 0.007015423774719238, 0.006924287796020508, 0.006957056045532227, 0.0069253120422363285, 0.006894591808319092, 0.006938623905181885, 0.006927360057830811, 0.006912000179290771, 0.006912000179290771, 0.006947840213775635, 0.007037951946258545, 0.006979584217071533, 0.006904831886291504, 0.006974463939666748, 0.006968319892883301, 0.006972415924072266, 0.006949888229370117, 0.014623744010925293, 0.006917119979858399, 0.006960127830505371, 0.006922239780426025, 0.0069283838272094726, 0.00698367977142334, 0.0068986878395080565, 0.006946815967559815, 0.006912000179290771, 0.0069027838706970214, 0.00688640022277832, 0.0069324798583984375, 0.006915071964263916, 0.00689356803894043, 0.006929408073425293, 0.006899712085723877, 0.006931456089019775, 0.006884352207183838, 0.006897664070129395, 0.006930431842803955, 0.006949888229370117, 0.006898752212524414, 0.006896575927734375, 0.006960127830505371, 0.006882304191589355, 0.006930431842803955, 0.006897664070129395, 0.006989823818206787, 0.006908927917480469, 0.006970367908477783, 0.007172095775604248, 0.007028736114501953, 0.006956031799316406, 0.00694374418258667, 0.006889472007751465, 0.0068915200233459475, 0.006998015880584717, 0.006939648151397705, 0.0069283838272094726, 
0.006924352169036865, 0.006933440208435058, 0.0069253120422363285, 0.006907904148101806, 0.007010303974151612, 0.006948863983154297, 0.006965248107910156, 0.0069324798583984375, 0.006885407924652099, 0.006927328109741211, 0.006943808078765869, 0.006981567859649658, 0.006958079814910889, 0.006923295974731445, 0.006906847953796386, 0.00693452787399292, 0.0068986878395080565, 0.006922239780426025, 0.006908927917480469, 0.006931456089019775, 0.006900735855102539, 0.006931456089019775, 0.006889472007751465, 0.006872096061706543, 0.01465238380432129, 0.006907904148101806, 0.006927360057830811, 0.006946815967559815, 0.006900735855102539, 0.006948863983154297, 0.006927360057830811, 0.006892543792724609, 0.006960127830505371, 0.006889503955841064, 0.00689353609085083, 0.0069621758460998535, 0.006900735855102539, 0.006875135898590088, 0.006910975933074951, 0.006908927917480469, 0.006912000179290771, 0.006855679988861084, 0.006907904148101806, 0.007006207942962647, 0.006874112129211426, 0.006908927917480469, 0.006940671920776367, 0.006881279945373535, 0.00693452787399292, 0.0068986878395080565, 0.006946815967559815, 0.006880256175994873, 0.00688640022277832, 0.006918144226074219, 0.0069283838272094726, 0.0068915200233459475, 0.00698367977142334, 0.0069283838272094726, 0.006909952163696289, 0.007021567821502686, 0.006881279945373535, 0.0070522880554199216, 0.0069324798583984375, 0.00693555212020874, 0.006896639823913574, 0.006949920177459717, 0.006969312191009522, 0.006952960014343262, 0.006909952163696289, 0.006976511955261231, 0.0068853759765625, 0.006871039867401123, 0.006940671920776367, 0.006964223861694336, 0.006941696166992187, 0.006859776020050049, 0.006858751773834228, 0.006897664070129395, 0.00692633581161499, 0.006903808116912841, 0.006887423992156983, 0.006958079814910889, 0.006865920066833496, 0.006944767951965332, 0.007127039909362793, 0.00709119987487793, 0.007139328002929687, 0.014806015968322754, 0.006964223861694336, 0.00694271993637085, 0.006897664070129395, 0.00694271993637085, 0.006931456089019775, 0.0069110398292541505, 0.006891456127166748, 0.006899712085723877, 0.006905856132507324, 0.0069027838706970214, 0.006922239780426025, 0.006948863983154297, 0.00690176010131836, 0.006965248107910156, 0.00694271993637085, 0.006922304153442383, 0.006911935806274414, 0.006879231929779053, 0.007000063896179199, 0.007001088142395019, 0.006974463939666748, 0.006907904148101806, 0.006933504104614257, 0.00693452787399292, 0.006912000179290771, 0.006887423992156983, 0.00692633581161499, 0.006903808116912841, 0.006927360057830811, 0.006933504104614257, 0.006930431842803955, 0.006933504104614257, 0.0069253120422363285, 0.006906879901885986, 0.00693452787399292, 0.007009280204772949, 0.0068915200233459475, 0.006912000179290771, 0.006920191764831543, 0.006969344139099121, 0.006940671920776367, 0.006894591808319092, 0.006872064113616944, 0.006859776020050049, 0.0069253120422363285, 0.006975488185882568, 0.007011328220367432, 0.006889472007751465, 0.006876160144805908, 0.00688643217086792, 0.006930399894714356, 0.006944767951965332, 0.006916096210479736, 0.006895616054534912, 0.006900735855102539, 0.006912000179290771, 0.0068986878395080565, 0.006919167995452881, 0.00687718391418457, 0.006874112129211426, 0.0068915200233459475, 0.006866943836212158, 0.014652416229248047, 0.0069212160110473635, 0.006944767951965332, 0.00687718391418457, 0.0069642882347106935, 0.006937535762786865, 0.006914048194885254, 0.0069212160110473635, 0.006927360057830811, 0.0069253120422363285, 0.006930431842803955, 
0.006933504104614257, 0.006915103912353515, 0.0069283838272094726, 0.006927328109741211, 0.006971392154693603, 0.006904831886291504, 0.006937600135803222, 0.006914048194885254, 0.006896639823913574, 0.00689356803894043, 0.006919167995452881, 0.006933504104614257, 0.006907904148101806, 0.0068689918518066405, 0.006903808116912841, 0.007013376235961914, 0.006936575889587402, 0.006941696166992187, 0.006883359909057617, 0.006904799938201905, 0.00687718391418457, 0.00688643217086792, 0.006876128196716309, 0.006919167995452881, 0.006908927917480469, 0.0068925762176513675, 0.0069201598167419436, 0.006894591808319092, 0.0068689918518066405, 0.006906879901885986, 0.006959104061126709, 0.006904831886291504, 0.006931488037109375, 0.0068873920440673825, 0.006906879901885986, 0.006896671772003174, 0.006886367797851563, 0.006938623905181885, 0.00687820816040039, 0.00687718391418457, 0.006907904148101806, 0.006998015880584717, 0.0069027838706970214, 0.006923264026641846, 0.006890495777130127, 0.0068986878395080565, 0.006960127830505371, 0.0069027838706970214, 0.0069212160110473635, 0.0068884482383728025, 0.006894591808319092, 0.006938623905181885, 0.014593024253845215, 0.006933504104614257, 0.006898719787597657, 0.0069242558479309085, 0.006897664070129395, 0.006896639823913574, 0.0069253120422363285, 0.006961152076721191, 0.006929408073425293, 0.006920191764831543, 0.006914048194885254, 0.0069283838272094726, 0.0068915200233459475, 0.006884352207183838, 0.006931456089019775, 0.006946815967559815, 0.0069027838706970214, 0.006856704235076904, 0.0069253120422363285, 0.0069212160110473635, 0.006968319892883301, 0.006905856132507324, 0.006894591808319092, 0.006916096210479736, 0.006867968082427979, 0.0069324798583984375, 0.006917119979858399, 0.006860799789428711, 0.007000063896179199, 0.007016448020935059, 0.007054336071014404, 0.006895679950714112, 0.006895552158355713, 0.006907904148101806, 0.0069283838272094726, 0.006920191764831543, 0.006859776020050049, 0.007019519805908203, 0.006895616054534912, 0.006855679988861084, 0.006979584217071533, 0.006900735855102539, 0.006946815967559815, 0.006892543792724609, 0.006904831886291504, 0.006873087882995605, 0.006938623905181885, 0.006903808116912841, 0.006917119979858399, 0.006920191764831543, 0.006860799789428711, 0.006915071964263916, 0.006945792198181152, 0.006906879901885986, 0.006913023948669434, 0.0068884482383728025, 0.006949888229370117, 0.006906879901885986, 0.007002111911773682, 0.006951935768127441, 0.006909952163696289, 0.0069027838706970214, 0.006880256175994873, 0.014742527961730957, 0.006971392154693603, 0.006909952163696289, 0.006938623905181885, 0.0068884482383728025, 0.006948863983154297, 0.006922239780426025, 0.006908927917480469, 0.0069283838272094726, 0.006914048194885254, 0.00687820816040039, 0.006920191764831543, 0.0068884482383728025, 0.006875135898590088, 0.006957056045532227, 0.0069212160110473635, 0.00693452787399292, 0.006905856132507324, 0.006978559970855713, 0.006904831886291504, 0.0069253120422363285, 0.006929408073425293, 0.006948863983154297, 0.006897664070129395, 0.006883327960968018, 0.00692633581161499, 0.006916096210479736, 0.006927360057830811, 0.007006207942962647, 0.006929408073425293, 0.0069212160110473635, 0.006900735855102539, 0.0069253120422363285, 0.006931456089019775, 0.006907904148101806, 0.00709939193725586, 0.006915071964263916, 0.00694374418258667, 0.006969344139099121, 0.006910975933074951, 0.006994944095611572, 0.006936575889587402, 0.0069069118499755855, 0.006930399894714356, 0.006910975933074951, 
0.0069253120422363285, 0.00690176010131836, 0.00689356803894043, 0.006899712085723877, 0.0068853759765625, 0.006951935768127441, 0.006900735855102539, 0.006927360057830811, 0.006890495777130127, 0.0068915200233459475, 0.0069027838706970214, 0.006995999813079834, 0.006994912147521973, 0.006920191764831543, 0.0069324798583984375, 0.006895616054534912, 0.0069550080299377445, 0.006908927917480469, 0.014665727615356445, 0.006914048194885254, 0.006918144226074219, 0.006918144226074219, 0.006912000179290771, 0.00694374418258667, 0.00690176010131836, 0.006952960014343262, 0.006883327960968018, 0.006876160144805908, 0.006899712085723877, 0.006956031799316406, 0.006949888229370117, 0.006945792198181152, 0.0069253120422363285, 0.006944767951965332, 0.006945792198181152, 0.006855679988861084, 0.006905856132507324, 0.006890495777130127, 0.006905856132507324, 0.0069027838706970214, 0.006953983783721924, 0.006895616054534912, 0.007087103843688965, 0.007011328220367432, 0.006944767951965332, 0.0069253120422363285, 0.007228415966033935, 0.006970367908477783, 0.00694271993637085, 0.006924287796020508, 0.0069959678649902345, 0.007029759883880615, 0.006946815967559815, 0.006916096210479736, 0.0068915200233459475, 0.006931456089019775, 0.0068915200233459475, 0.006907904148101806, 0.006862847805023193, 0.006905856132507324, 0.006903808116912841, 0.006897664070129395, 0.006985727787017822, 0.0068986878395080565, 0.006974463939666748, 0.007048192024230957, 0.006961152076721191, 0.00692633581161499, 0.006879231929779053, 0.0068853759765625, 0.006927360057830811, 0.0069283838272094726, 0.006991936206817627, 0.006887360095977783, 0.006966271877288818, 0.006919167995452881, 0.006979584217071533, 0.006908927917480469, 0.006952960014343262, 0.006882304191589355, 0.006916096210479736, 0.014645248413085938, 0.006903808116912841, 0.0069550080299377445, 0.006904831886291504, 0.006919167995452881, 0.006909952163696289, 0.00693452787399292, 0.006914048194885254, 0.006996992111206054, 0.006957056045532227, 0.006909952163696289, 0.006961152076721191, 0.006930431842803955, 0.00694374418258667, 0.006899712085723877, 0.007104512214660645, 0.006967296123504638, 0.0068915200233459475, 0.006910975933074951, 0.006908927917480469, 0.007009280204772949, 0.006941696166992187, 0.006906879901885986, 0.006910975933074951, 0.006908927917480469, 0.006881279945373535, 0.006944767951965332, 0.0070225920677185055, 0.007060480117797851, 0.006876160144805908, 0.006990848064422607, 0.006943808078765869, 0.006904767990112305, 0.006899712085723877, 0.006931456089019775, 0.006919167995452881, 0.006939648151397705, 0.006909952163696289, 0.0069325118064880375, 0.006895584106445312, 0.006896639823913574, 0.006879231929779053, 0.00690176010131836, 0.006894591808319092, 0.0068884482383728025, 0.006903808116912841, 0.006913087844848633, 0.006928319931030273, 0.006899712085723877, 0.006903840065002441, 0.0069242558479309085, 0.0068915200233459475, 0.006914048194885254, 0.006912000179290771, 0.006933504104614257, 0.006919167995452881, 0.006971392154693603, 0.007261184215545655, 0.006969344139099121, 0.006930431842803955, 0.006971392154693603, 0.006887423992156983, 0.006945792198181152, 0.014636063575744629, 0.007252960205078125, 0.007076863765716553, 0.007001088142395019, 0.006970367908477783, 0.006974463939666748, 0.006895616054534912, 0.0069847040176391605, 0.006874112129211426, 0.0070522880554199216, 0.0069263682365417484, 0.007037919998168945, 0.006966271877288818, 0.007018496036529541, 0.007205887794494629, 0.006953983783721924, 0.0069959678649902345, 
0.00704307222366333, 0.00689356803894043, 0.006999040126800537, 0.006931456089019775, 0.0070225920677185055, 0.006976511955261231, 0.00693452787399292, 0.007004159927368164, 0.007181312084197998, 0.006958079814910889, 0.006990848064422607, 0.006975488185882568, 0.006904831886291504, 0.007112703800201416, 0.007269375801086426, 0.0069632000923156735, 0.006927360057830811, 0.00698473596572876, 0.006968287944793701, 0.006958079814910889, 0.007260159969329834, 0.007002111911773682, 0.006919167995452881, 0.006956031799316406, 0.007010303974151612, 0.006985727787017822, 0.007192575931549072, 0.006986752033233643, 0.007312384128570557, 0.007241727828979493, 0.007263232231140137, 0.007411712169647216, 0.007120895862579346, 0.007129087924957276, 0.007027711868286133, 0.007016448020935059, 0.00725708818435669, 0.006986752033233643, 0.007013376235961914, 0.006919167995452881, 0.007062528133392334, 0.007166975975036621, 0.007005184173583984, 0.007010303974151612, 0.007048192024230957, 0.00693452787399292, 0.015050751686096191, 0.006931456089019775, 0.007106560230255127, 0.007230463981628418, 0.007049215793609619, 0.006980607986450196, 0.006979584217071533, 0.0069621758460998535, 0.006921279907226563, 0.007070655822753906, 0.007021567821502686, 0.006990880012512207, 0.0069836478233337405, 0.007192575931549072, 0.007269375801086426, 0.006907904148101806, 0.007000063896179199, 0.006956031799316406, 0.0068915200233459475, 0.006993919849395752, 0.006939648151397705, 0.0068915200233459475, 0.006939648151397705, 0.006964223861694336, 0.006922239780426025, 0.007088128089904785, 0.006985727787017822, 0.00687820816040039, 0.006965248107910156, 0.006884352207183838, 0.007039999961853028, 0.00694271993637085, 0.007050240039825439, 0.006924287796020508, 0.006986752033233643, 0.007234560012817383, 0.007004159927368164, 0.007000063896179199, 0.006998015880584717, 0.0069550080299377445, 0.006915071964263916, 0.007016448020935059, 0.006907904148101806, 0.006964223861694336, 0.0069918718338012695, 0.006897664070129395, 0.00707583999633789, 0.006890495777130127, 0.007128064155578613, 0.007211040019989014, 0.006999008178710938, 0.006974463939666748, 0.006976511955261231, 0.006882304191589355, 0.006985727787017822, 0.00693555212020874, 0.00790015983581543, 0.007143424034118652, 0.006940671920776367, 0.0069632000923156735, 0.006894591808319092, 0.0069816322326660156, 0.0068915200233459475, 0.014715904235839844, 0.00690176010131836, 0.006988800048828125, 0.006951935768127441, 0.006908927917480469, 0.006919167995452881, 0.00688640022277832, 0.0069212160110473635, 0.0068884482383728025, 0.006951935768127441, 0.006890495777130127, 0.0068689918518066405, 0.006906879901885986, 0.007001120090484619, 0.006945759773254395, 0.006952960014343262, 0.0069918718338012695, 0.006864895820617676, 0.006909952163696289, 0.006866943836212158, 0.006892543792724609, 0.006919167995452881, 0.0070348801612854, 0.0070563840866088865, 0.0070052480697631835, 0.006940608024597168, 0.006910975933074951, 0.006906879901885986, 0.007321599960327148, 0.007094272136688232, 0.007371776103973389, 0.007673855781555176, 0.007137279987335205, 0.006964223861694336, 0.006982656002044678, 0.007284736156463623, 0.006924287796020508, 0.007064576148986816, 0.007103487968444824, 0.006976511955261231, 0.006929408073425293, 0.006939648151397705, 0.0069816322326660156, 0.006917119979858399, 0.006888480186462403, 0.007059423923492431, 0.006910975933074951, 0.006922239780426025, 0.006870016098022461, 0.006909952163696289, 0.006894591808319092, 0.007114751815795899, 
0.007982079982757568, 0.007064576148986816, 0.007649280071258545, 0.0077281279563903805, 0.00708403205871582, 0.0070225920677185055, 0.007300096035003662, 0.007173120021820068, 0.007039999961853028, 0.006969344139099121, 0.006939648151397705, 0.014486528396606446, 0.006723584175109864, 0.006748159885406494, 0.006717440128326416, 0.0067758078575134275, 0.0067573761940002445, 0.006811647891998291, 0.0067983360290527345, 0.006649856090545654, 0.006770688056945801, 0.006931456089019775, 0.0067983360290527345, 0.0067348480224609375, 0.006781951904296875, 0.006737919807434082, 0.0067573761940002445, 0.006744063854217529, 0.006760447978973389, 0.006822912216186523, 0.006897664070129395, 0.007024640083312988, 0.006904831886291504, 0.007005184173583984, 0.00687820816040039, 0.0070266880989074704, 0.007021567821502686, 0.006913023948669434, 0.006964223861694336, 0.007012351989746094, 0.006915071964263916, 0.007185408115386963, 0.007006207942962647, 0.006979584217071533, 0.00723967981338501, 0.007209983825683594, 0.006987775802612305, 0.0069212160110473635, 0.0069621758460998535, 0.006931456089019775, 0.007063551902770996, 0.0069847040176391605, 0.00698367977142334, 0.007051263809204102, 0.007016448020935059, 0.007003136157989502, 0.007104512214660645, 0.007120895862579346, 0.007079936027526855, 0.007358463764190673, 0.00728166389465332, 0.006987775802612305, 0.0068915200233459475, 0.006978559970855713, 0.007234560012817383, 0.007006207942962647, 0.0069283838272094726, 0.007095295906066895, 0.007061503887176514, 0.007015423774719238, 0.007035903930664063, 0.007011328220367432, 0.007107615947723389, 0.006958047866821289, 0.014362624168395996, 0.006820864200592041, 0.006730751991271973, 0.00682700777053833, 0.00679423999786377, 0.0068280320167541505, 0.006766592025756836, 0.0067276802062988285, 0.006837247848510742, 0.006752255916595459, 0.006815807819366455, 0.006806464195251465, 0.006740992069244385, 0.006767615795135498, 0.006711296081542969, 0.006871039867401123, 0.006770688056945801, 0.006800384044647217, 0.0068393278121948245, 0.00679318380355835, 0.006830080032348633, 0.006724607944488525, 0.00673689603805542, 0.006818816184997558, 0.00675328016281128, 0.006808576107025147, 0.006770688056945801, 0.006814720153808594, 0.006792191982269287, 0.006724607944488525, 0.00677785587310791, 0.006762495994567871, 0.0067041277885437015, 0.006730751991271973, 0.006803455829620361, 0.00672160005569458, 0.006800320148468018, 0.006797376155853271, 0.006750144004821777, 0.006681600093841553, 0.0067348480224609375, 0.0067358717918396, 0.006731776237487793, 0.006802432060241699, 0.006825984001159668, 0.006759424209594727, 0.006781951904296875, 0.007107583999633789, 0.007262207984924316, 0.006866943836212158, 0.006956031799316406, 0.00722431993484497, 0.006994944095611572, 0.007202847957611084, 0.007022560119628906, 0.006889472007751465, 0.007014400005340577, 0.006987775802612305, 0.007175168037414551, 0.0074291200637817386, 0.0069550080299377445, 0.006973440170288086, 0.0067645440101623535]",tokens/s,140.7960119529994,,,2,64,1,,, -4bit-awq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,Qwen/Qwen1.5-4B,Qwen/Qwen1.5-4B,cuda,0,42,,,True,,,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD 
EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,qwen2,MB,3156.463616,4874.305536,0.0,4244.635648,4125.520384,s,1,10.6742138671875,10.6742138671875,0.0,10.6742138671875,10.6742138671875,10.6742138671875,10.6742138671875,[10.6742138671875],,kWh,4.487446893191797e-05,2.4578391358224637e-05,7.254672470402568e-05,0.0001419995849941683,,MB,3081.592832,5052.563456,0.0,4404.0192,4310.79936,s,10,0.7337405776977539,0.07337405776977539,0.0003911040659991897,0.07317011260986328,0.07409628601074218,0.07410951766967773,0.07412010299682617,"[0.07308048248291016, 0.07339206695556641, 0.07310777282714843, 0.07412274932861328, 0.07309053039550781, 0.07313597106933593, 0.07303215789794922, 0.07409334564208984, 0.07320425415039063, 0.0734812469482422]",tokens/s,3488.9715490895587,kWh,8.735044363716169e-07,4.786343003211514e-07,3.2894225661767683e-06,4.641561302869537e-06,tokens/kWh,55153855.19991602,MB,3085.918208,5056.75776,0.0,4406.116352,4310.80192,s,10,23.475072998046876,2.3475072998046875,0.017922478063043534,2.3428026123046877,2.3729294921875,2.3815111328125,2.3883764453125,"[2.32936279296875, 2.345328369140625, 2.346780517578125, 2.34321435546875, 2.331931640625, 2.34083984375, 2.3710224609375, 2.3900927734375, 2.334109375, 2.342390869140625]",tokens/s,26.836977250397304,kWh,2.795156608265586e-05,1.5318391604205438e-05,5.4402818032029334e-05,9.767277571889062e-05,tokens/kWh,645010.8490959507,,s,630,23.47301276016236,0.03725875041295612,0.0005933017915341776,0.037036544799804685,0.038195304107666014,0.03836078224182129,0.0391454716873169,"[0.03711590576171875, 0.03704627227783203, 0.03736678314208984, 0.036729854583740236, 0.03685273742675781, 0.03679846572875976, 0.0369244155883789, 0.037029888153076174, 0.038994945526123044, 0.037953536987304685, 0.037085216522216795, 0.03671651077270508, 0.036913150787353514, 0.03651379013061523, 0.03659366226196289, 0.036618240356445314, 0.03679641723632812, 0.03691110229492187, 0.036890625, 0.03704217529296875, 0.0368721923828125, 0.03695206451416016, 0.036944896697998046, 0.03744255828857422, 0.03706163024902344, 0.03663974380493164, 0.036512767791748044, 0.03688857650756836, 0.036999168395996096, 0.03696025466918945, 0.03697971343994141, 0.037184513092041016, 0.03690598297119141, 0.03689779281616211, 0.03663257598876953, 0.036708351135253905, 0.03695718383789062, 0.03678822326660156, 0.03687628936767578, 0.03703500747680664, 0.036959232330322264, 0.036918270111083985, 0.036910079956054685, 0.036926464080810545, 0.036738048553466796, 0.03629363250732422, 0.036896766662597655, 0.03670016098022461, 0.0366827507019043, 0.03684864044189453, 0.0378603515625, 0.03701657485961914, 0.036618240356445314, 0.03707904052734375, 0.03692748641967773, 0.037082111358642575, 0.0379422721862793, 0.03697151947021484, 0.03702579116821289, 0.03686297607421875, 0.03693772888183594, 0.036874240875244144, 0.036961280822753906, 0.037174270629882815, 0.03647180938720703, 0.036340736389160154, 0.03679641723632812, 0.03706880187988281, 0.036874240875244144, 0.03688857650756836, 0.03670425415039062, 0.03689984130859375, 0.03677695846557617, 0.03744563293457031, 0.037424129486083986, 0.03706572723388672, 0.03684044647216797, 0.03681894302368164, 0.03680767822265625, 0.036967422485351564, 0.036939777374267575, 0.03699507141113281, 0.03667660903930664, 0.036552703857421875, 0.03695001602172852, 0.036999168395996096, 0.036934654235839845, 0.03719270324707031, 
0.03707904052734375, 0.037738494873046875, 0.038659072875976565, 0.03823001480102539, 0.03802521514892578, 0.037288959503173826, 0.037823486328125, 0.03725823974609375, 0.03657318496704102, 0.03833958435058594, 0.03871334457397461, 0.03727052688598633, 0.037185535430908204, 0.03750400161743164, 0.03693670272827149, 0.037969921112060545, 0.03762688064575195, 0.03715071868896484, 0.03676979064941406, 0.03755929565429687, 0.03744768142700195, 0.036547584533691405, 0.037048320770263675, 0.03737395095825195, 0.03695206451416016, 0.03679334259033203, 0.037577728271484374, 0.037940223693847655, 0.037233665466308595, 0.03760639953613281, 0.03753472137451172, 0.038942718505859376, 0.03725721740722656, 0.03709132766723633, 0.03685580825805664, 0.03667865753173828, 0.03657727813720703, 0.03736064147949219, 0.03789516830444336, 0.03681075286865235, 0.037305343627929685, 0.03709235382080078, 0.037771263122558595, 0.03744153594970703, 0.03787366485595703, 0.03702579116821289, 0.03751935958862305, 0.036961280822753906, 0.03668582534790039, 0.03681382369995117, 0.036741119384765625, 0.03700428771972656, 0.03654246520996094, 0.03643494415283203, 0.03656806564331055, 0.0374200325012207, 0.03783782577514649, 0.03643801498413086, 0.036657150268554685, 0.037238784790039066, 0.036877311706542966, 0.03792588806152344, 0.036517887115478515, 0.03733913421630859, 0.0373831672668457, 0.037372928619384765, 0.03676979064941406, 0.03714355087280274, 0.037101566314697264, 0.036722686767578124, 0.03685887908935547, 0.03875942230224609, 0.039037952423095705, 0.03748863983154297, 0.03694182586669922, 0.036383743286132815, 0.03795558547973633, 0.03685478210449219, 0.03691212844848633, 0.03717324829101563, 0.03771187210083008, 0.038765567779541016, 0.037631999969482424, 0.036908031463623044, 0.03741798400878906, 0.03709235382080078, 0.03674009704589844, 0.037012481689453126, 0.03692031860351563, 0.03705344009399414, 0.03711795043945312, 0.0371599349975586, 0.03775385665893555, 0.03711795043945312, 0.03874611282348633, 0.03699814224243164, 0.0366295051574707, 0.03786137771606445, 0.037988353729248046, 0.037015552520751956, 0.03733606338500976, 0.03724288177490234, 0.036982784271240236, 0.03719372940063476, 0.036985855102539066, 0.03709542465209961, 0.03746201705932617, 0.03776921463012695, 0.03710771179199219, 0.03710259246826172, 0.037114879608154294, 0.037389312744140625, 0.037324798583984374, 0.03685580825805664, 0.03704524612426758, 0.03722956848144531, 0.037820415496826174, 0.03700838470458984, 0.037043201446533204, 0.03781942367553711, 0.03817264175415039, 0.037013504028320314, 0.036982784271240236, 0.03715379333496094, 0.03708108901977539, 0.03702169418334961, 0.03646054458618164, 0.036689918518066404, 0.03759001541137695, 0.036994049072265625, 0.03654348754882813, 0.03688652801513672, 0.038340606689453126, 0.03921920013427734, 0.03787059020996094, 0.03700121688842774, 0.03694694519042969, 0.03652710342407227, 0.03684454345703125, 0.036828159332275394, 0.03690598297119141, 0.03773235321044922, 0.0387061767578125, 0.037305343627929685, 0.03769241714477539, 0.037459968566894535, 0.03829248046875, 0.03701964950561523, 0.03688550567626953, 0.03749990463256836, 0.03746713638305664, 0.036523006439208985, 0.036450302124023434, 0.03694079971313476, 0.036673534393310545, 0.03717529678344727, 0.03693772888183594, 0.03688652801513672, 0.036929534912109374, 0.036350975036621096, 0.036913150787353514, 0.03676671981811523, 0.03689574432373047, 0.03683737564086914, 0.03707699203491211, 0.03674521636962891, 0.036956161499023435, 
0.03684044647216797, 0.03646976089477539, 0.03709030532836914, 0.03712102508544922, 0.036994049072265625, 0.03680153656005859, 0.036432895660400394, 0.036603904724121096, 0.03709952163696289, 0.0368455696105957, 0.037119998931884765, 0.0369879035949707, 0.03685171127319336, 0.03705548858642578, 0.03704115295410156, 0.03694079971313476, 0.03687833786010742, 0.03688857650756836, 0.0373309440612793, 0.03760639953613281, 0.03725823974609375, 0.03705753707885742, 0.03702579116821289, 0.03686809539794922, 0.03694387054443359, 0.03697049713134765, 0.03704012680053711, 0.03713536071777344, 0.03680665588378906, 0.03689984130859375, 0.03699609756469727, 0.03711897659301758, 0.03700121688842774, 0.03698995208740234, 0.036945919036865234, 0.03739852905273437, 0.03694694519042969, 0.03705548858642578, 0.03689267349243164, 0.03758182525634766, 0.03731660842895508, 0.0367011833190918, 0.036847614288330076, 0.036729854583740236, 0.03710464096069336, 0.038029312133789066, 0.037154815673828126, 0.036959232330322264, 0.03691110229492187, 0.036601856231689454, 0.0368353271484375, 0.03720499038696289, 0.03712102508544922, 0.036910079956054685, 0.03735244750976562, 0.03732582473754883, 0.03684864044189453, 0.03702169418334961, 0.03692236709594727, 0.03712204742431641, 0.03746099090576172, 0.03766886520385742, 0.037269504547119144, 0.03693875122070313, 0.03671244812011719, 0.03710976028442383, 0.0372408332824707, 0.03682918548583984, 0.03688550567626953, 0.0369244155883789, 0.03703705596923828, 0.036929534912109374, 0.03689164733886719, 0.03691110229492187, 0.03674726486206055, 0.03648409652709961, 0.03668582534790039, 0.036918270111083985, 0.03679436874389649, 0.03718143844604492, 0.03669913482666016, 0.037174270629882815, 0.03683635330200195, 0.037064704895019535, 0.03657625579833984, 0.03676774215698242, 0.03666227340698242, 0.03697868728637695, 0.03710771179199219, 0.03668889617919922, 0.03647385787963867, 0.03690086364746094, 0.03684966278076172, 0.03732582473754883, 0.03680767822265625, 0.03743436813354492, 0.039175167083740234, 0.0384266242980957, 0.03712921524047851, 0.03697151947021484, 0.037389312744140625, 0.03755929565429687, 0.03647795104980469, 0.03669708633422852, 0.038284286499023434, 0.03907276916503906, 0.03755110549926758, 0.03716198348999023, 0.03730022430419922, 0.03789209747314453, 0.03782656097412109, 0.03713536071777344, 0.036779006958007815, 0.03734220886230469, 0.03782451248168945, 0.03698483276367188, 0.03700428771972656, 0.037185535430908204, 0.036666431427001954, 0.03701548767089844, 0.036635711669921876, 0.03792582321166992, 0.037236736297607424, 0.03863759994506836, 0.03772515106201172, 0.037084159851074217, 0.036985855102539066, 0.03697356796264648, 0.03744255828857422, 0.036603904724121096, 0.036956161499023435, 0.03728793716430664, 0.037607425689697264, 0.03699302291870117, 0.036972545623779295, 0.037719039916992186, 0.037303295135498044, 0.03727360153198242, 0.03775692749023438, 0.03732992172241211, 0.036985855102539066, 0.037574657440185545, 0.03685068893432617, 0.036790271759033204, 0.036857856750488284, 0.039191551208496093, 0.0387327995300293, 0.03716403198242187, 0.03687116622924805, 0.03744563293457031, 0.03647180938720703, 0.036918270111083985, 0.03714252853393555, 0.03695718383789062, 0.0370513916015625, 0.03687321472167969, 0.036980735778808595, 0.03768524932861328, 0.03699097442626953, 0.03733401489257813, 0.03690188980102539, 0.036980735778808595, 0.03757670211791992, 0.03719168090820312, 0.036757503509521484, 0.03867443084716797, 0.039913471221923826, 
0.03827609634399414, 0.03832729721069336, 0.03817574310302734, 0.038199295043945314, 0.038255615234375, 0.03832524871826172, 0.03826073455810547, 0.03848294448852539, 0.03824947357177735, 0.038520832061767575, 0.03798015975952149, 0.03835289764404297, 0.03824947357177735, 0.03826278305053711, 0.038335487365722655, 0.038133758544921875, 0.038056961059570314, 0.03849420928955078, 0.03833651351928711, 0.0382740478515625, 0.03836723327636719, 0.03806412887573242, 0.03830579376220703, 0.038217727661132815, 0.037803009033203126, 0.03808563232421875, 0.039365631103515625, 0.0382105598449707, 0.03825971221923828, 0.03827814483642578, 0.038163455963134765, 0.03826278305053711, 0.03803340911865234, 0.038196224212646485, 0.03807846450805664, 0.038166526794433595, 0.0382105598449707, 0.038201343536376955, 0.038184959411621096, 0.03772927856445313, 0.03809996795654297, 0.03825151824951172, 0.0380497932434082, 0.03853414535522461, 0.03817574310302734, 0.03834265518188477, 0.03829248046875, 0.03798220825195313, 0.03810201644897461, 0.03812966537475586, 0.037969921112060545, 0.0381952018737793, 0.038035457611083984, 0.03807027053833008, 0.03817779159545898, 0.03788288116455078, 0.037501953125, 0.03802316665649414, 0.040018943786621096, 0.03803955078125, 0.037449726104736326, 0.03691622543334961, 0.03737702560424805, 0.03677798461914063, 0.036934654235839845, 0.03697459030151367, 0.036942848205566405, 0.03821363067626953, 0.037634048461914066, 0.03787059020996094, 0.037733375549316404, 0.037370880126953124, 0.038586368560791014, 0.038371326446533204, 0.03710566329956055, 0.03919769668579102, 0.03749478530883789, 0.036953086853027346, 0.03679641723632812, 0.036928512573242187, 0.03711078262329102, 0.03681075286865235, 0.03705548858642578, 0.036947967529296875, 0.03700838470458984, 0.03694387054443359, 0.03700428771972656, 0.03658444976806641, 0.037032958984375, 0.03783270263671875, 0.03707187271118164, 0.03697459030151367, 0.03800883102416992, 0.03738623809814453, 0.03739648056030274, 0.03750297546386719, 0.036926464080810545, 0.037408767700195314, 0.036918270111083985, 0.03727360153198242, 0.03714048004150391, 0.03697049713134765, 0.036972545623779295, 0.036964351654052735, 0.036947967529296875, 0.03687526321411133, 0.03679846572875976, 0.03700940704345703, 0.03703705596923828, 0.037160961151123044, 0.03678515243530273, 0.0367646713256836, 0.03685580825805664, 0.03685273742675781, 0.036929569244384765, 0.036987873077392576, 0.03754291152954101, 0.03702374267578125, 0.03686297607421875, 0.03749990463256836, 0.036896766662597655, 0.037154815673828126, 0.036926464080810545, 0.036765697479248044, 0.03708927917480469, 0.03696230316162109, 0.036772865295410156, 0.03751116943359375, 0.03691929626464844, 0.03708927917480469, 0.03681075286865235, 0.036961280822753906, 0.03684966278076172, 0.036841472625732424, 0.03683430480957031, 0.03688550567626953, 0.037176319122314457, 0.036752384185791014, 0.03678515243530273, 0.036569087982177735, 0.03685990524291992, 0.03683225631713867, 0.03688140869140625, 0.03767091369628906, 0.03784703826904297, 0.037928958892822266, 0.03887411117553711, 0.037163009643554686, 0.036972545623779295, 0.03811328125, 0.03695206451416016, 0.036757503509521484, 0.03672063827514648, 0.03683020782470703, 0.036890625, 0.037064704895019535, 0.0369541130065918, 0.03690291213989258, 0.0366929931640625, 0.037174270629882815, 0.037397502899169925, 0.036594718933105466, 0.03692643356323242, 0.03702579116821289, 0.03705548858642578, 0.03682099151611328, 0.037012481689453126, 0.03691110229492187, 
0.03707596969604492, 0.03654553604125976, 0.03686502456665039, 0.03686707305908203, 0.037029888153076174, 0.036977664947509765, 0.03655372619628906, 0.036915199279785156, 0.037160961151123044, 0.03679436874389649, 0.036790271759033204, 0.03729305648803711, 0.036822017669677735, 0.03699097442626953, 0.037166080474853515, 0.03699302291870117, 0.036961280822753906, 0.03679743957519531, 0.03699097442626953, 0.03703603363037109, 0.03854848098754883, 0.037988353729248046, 0.03794636917114258, 0.036975616455078124, 0.036841472625732424, 0.037002239227294925, 0.03691622543334961, 0.0371517448425293, 0.03743027114868164, 0.03723980712890625, 0.039065601348876954, 0.038537216186523435, 0.03721318435668945, 0.036997119903564454, 0.03786751937866211, 0.03718963241577149, 0.03705548858642578, 0.036923393249511716, 0.03766681671142578, 0.037272575378417966]",tokens/s,26.839332745101036,,,2,64,1,,, -4bit-awq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,Qwen/Qwen1.5-0.5B,Qwen/Qwen1.5-0.5B,cuda,0,42,,,True,,,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,qwen2,MB,1398.992896,1382.547456,0.0,752.877568,710.554112,s,1,8.10747412109375,8.10747412109375,0.0,8.10747412109375,8.10747412109375,8.10747412109375,8.10747412109375,[8.10747412109375],,kWh,1.5256206406216835e-05,8.345869786701836e-06,1.99011270320959e-05,4.350320322501457e-05,,MB,1645.494272,1667.760128,0.0,1019.215872,949.099008,s,10,0.28068710327148433,0.028068710327148437,0.00041774229539086734,0.0280949764251709,0.02845760612487793,0.02864181079864502,0.028789174537658693,"[0.027597055435180665, 0.02794976043701172, 0.028348640441894533, 0.02841667175292969, 0.02809222412109375, 0.027213024139404296, 0.02810380744934082, 0.02804217529296875, 0.028097728729248046, 0.02882601547241211]",tokens/s,9120.476039556164,kWh,3.300916473133637e-07,1.8087285550992022e-07,7.561605987204985e-07,1.2671251015437825e-06,tokens/kWh,202032143.2257212,MB,1683.095552,1678.245888,0.0,1027.60448,949.101568,s,10,14.301846191406248,1.430184619140625,0.008921900045389813,1.4267332763671874,1.4425214965820312,1.442625372314453,1.4427084729003905,"[1.442729248046875, 1.42425244140625, 1.42574072265625, 1.4274154052734376, 1.4202757568359374, 1.4380875244140625, 1.4381480712890624, 1.4424984130859375, 1.4260511474609374, 1.4166474609375]",tokens/s,44.05025697861,kWh,1.769182608726952e-05,9.695005121474231e-06,2.887927977627716e-05,5.62661109850209e-05,tokens/kWh,1119679.304240021,,s,630,14.29984877967834,0.022698172666156105,0.0005304154471981052,0.0226933765411377,0.02326794261932373,0.02347801628112793,0.024102962818145755,"[0.022394880294799805, 0.02289356803894043, 0.023192575454711914, 0.023079935073852538, 0.023181312561035155, 0.02325196838378906, 0.02314854431152344, 0.022169599533081053, 0.02246553611755371, 0.023154687881469727, 0.022326271057128907, 0.02225971221923828, 0.023043071746826172, 0.0230645751953125, 0.02309734344482422, 0.023171072006225587, 0.023267328262329103, 0.023170047760009766, 0.022676479339599608, 0.024433664321899414, 
0.02411929512023926, 0.0233123836517334, 0.023387136459350585, 0.023228416442871092, 0.02332876777648926, 0.02223308753967285, 0.022280191421508787, 0.022177791595458983, 0.022235136032104492, 0.02227302360534668, 0.02204876708984375, 0.02270515251159668, 0.022977535247802734, 0.022583295822143554, 0.022278144836425783, 0.021993471145629884, 0.022008832931518556, 0.02230271911621094, 0.022370304107666016, 0.022222848892211915, 0.022193151473999022, 0.022055936813354493, 0.02225971221923828, 0.022219776153564453, 0.02231705665588379, 0.023023616790771483, 0.023085056304931642, 0.02302566337585449, 0.023564287185668945, 0.02347417640686035, 0.02311577606201172, 0.02305638313293457, 0.02211123275756836, 0.022938623428344726, 0.024015871047973633, 0.023367679595947266, 0.023151615142822265, 0.023022592544555662, 0.023027711868286133, 0.024427520751953126, 0.02369331169128418, 0.024609792709350587, 0.0232806396484375, 0.023113727569580078, 0.022467584609985353, 0.023017471313476562, 0.023038976669311522, 0.022955007553100586, 0.023192575454711914, 0.022335487365722655, 0.021953535079956055, 0.02225766372680664, 0.02211737632751465, 0.022154239654541014, 0.022132736206054687, 0.022199296951293947, 0.022166528701782227, 0.022141952514648438, 0.022157312393188477, 0.022176767349243166, 0.022210559844970702, 0.02274406433105469, 0.02255462455749512, 0.022166528701782227, 0.022174720764160157, 0.022196256637573242, 0.022235103607177734, 0.02227609634399414, 0.022296575546264647, 0.022199296951293947, 0.02234880065917969, 0.02242252731323242, 0.02304819107055664, 0.02308608055114746, 0.023047168731689452, 0.022872064590454103, 0.023232511520385742, 0.0231014404296875, 0.023800832748413086, 0.023553024291992186, 0.023243776321411135, 0.02310553550720215, 0.023037952423095705, 0.02289971160888672, 0.022166528701782227, 0.022237184524536133, 0.022153215408325197, 0.022181888580322266, 0.022235136032104492, 0.022940671920776368, 0.022391807556152343, 0.022350847244262697, 0.0222740478515625, 0.02224332809448242, 0.022221824645996095, 0.022195199966430663, 0.02246553611755371, 0.022090751647949217, 0.022178815841674804, 0.022185983657836913, 0.0222423038482666, 0.023702527999877928, 0.02457907295227051, 0.02345471954345703, 0.02309529685974121, 0.02305638313293457, 0.022334463119506837, 0.022136831283569337, 0.022213632583618165, 0.022793216705322264, 0.023166976928710937, 0.022998016357421876, 0.023206911087036132, 0.023434240341186522, 0.023468032836914062, 0.023169023513793945, 0.02328883171081543, 0.02306252861022949, 0.02304921531677246, 0.02307174491882324, 0.022658048629760744, 0.022839296340942384, 0.023592960357666014, 0.0235100154876709, 0.023160831451416015, 0.023006208419799806, 0.022784000396728517, 0.022260736465454102, 0.022196224212646484, 0.022180864334106445, 0.022146047592163084, 0.022221824645996095, 0.022205440521240235, 0.022269952774047853, 0.022175743103027345, 0.022177791595458983, 0.022236160278320313, 0.022211584091186523, 0.022180864334106445, 0.022221824645996095, 0.02209587287902832, 0.02211942481994629, 0.0221265926361084, 0.022193151473999022, 0.022140928268432617, 0.022153215408325197, 0.022174720764160157, 0.022212608337402344, 0.022666240692138673, 0.02225971221923828, 0.02213478469848633, 0.022009855270385743, 0.022222848892211915, 0.022164480209350586, 0.022303743362426756, 0.022162431716918944, 0.022215679168701173, 0.022288383483886717, 0.023018495559692383, 0.02307583999633789, 0.023008256912231444, 0.02311884880065918, 0.022995967864990235, 0.02304204750061035, 
0.02285772705078125, 0.023026687622070312, 0.023214080810546874, 0.02312396812438965, 0.02365132713317871, 0.023340032577514647, 0.023195648193359376, 0.02301644706726074, 0.022946815490722656, 0.02307788848876953, 0.023222272872924804, 0.02238368034362793, 0.022077375411987305, 0.02189516830444336, 0.02192076873779297, 0.022182912826538087, 0.022175743103027345, 0.02265395164489746, 0.023568384170532225, 0.022406143188476564, 0.022230016708374024, 0.022244352340698242, 0.022200319290161134, 0.022141952514648438, 0.02191974449157715, 0.021970943450927736, 0.021939199447631837, 0.022223871231079103, 0.02225049591064453, 0.022162431716918944, 0.02215116882324219, 0.02208768081665039, 0.022213632583618165, 0.022419519424438476, 0.02297235107421875, 0.02294988822937012, 0.023129087448120117, 0.023068672180175782, 0.023113727569580078, 0.023104511260986327, 0.023186431884765626, 0.022998016357421876, 0.023197696685791015, 0.02305638313293457, 0.022999040603637694, 0.022971391677856445, 0.022429695129394533, 0.022203392028808593, 0.022393856048583984, 0.023529472351074218, 0.023373823165893554, 0.023037952423095705, 0.022305791854858398, 0.022205440521240235, 0.022237184524536133, 0.022383615493774413, 0.02229964828491211, 0.023146495819091797, 0.022972415924072266, 0.022980607986450196, 0.023037952423095705, 0.023011327743530274, 0.023037952423095705, 0.022976512908935546, 0.022597631454467772, 0.022185983657836913, 0.023006208419799806, 0.022915136337280272, 0.023134208679199218, 0.02307788848876953, 0.023164928436279295, 0.022172672271728516, 0.022154239654541014, 0.022082559585571288, 0.02220134353637695, 0.022218751907348632, 0.02224742317199707, 0.022166528701782227, 0.022165504455566407, 0.022562816619873048, 0.02271129608154297, 0.023242752075195314, 0.02318956756591797, 0.023006143569946288, 0.022898687362670898, 0.023195648193359376, 0.02312294387817383, 0.022590463638305663, 0.02260479927062988, 0.022811647415161132, 0.023038976669311522, 0.02312396812438965, 0.023344127655029297, 0.02370969581604004, 0.022938623428344726, 0.02211327934265137, 0.02225049591064453, 0.022188032150268554, 0.022304767608642577, 0.022228992462158204, 0.02305536079406738, 0.023045120239257814, 0.02268057632446289, 0.022184959411621095, 0.022063104629516602, 0.02189926338195801, 0.02206822395324707, 0.02204876708984375, 0.02226278305053711, 0.022246400833129884, 0.022268991470336914, 0.022246335983276366, 0.022218751907348632, 0.022195199966430663, 0.02226585578918457, 0.022202367782592772, 0.022205440521240235, 0.022253568649291993, 0.022013952255249023, 0.022188032150268554, 0.022228992462158204, 0.022210559844970702, 0.022207487106323243, 0.02225049591064453, 0.02220134353637695, 0.022581247329711913, 0.023188480377197264, 0.023196672439575194, 0.023079935073852538, 0.0230328311920166, 0.022334463119506837, 0.022208511352539064, 0.02230169677734375, 0.022221824645996095, 0.02225663948059082, 0.022350847244262697, 0.022218751907348632, 0.02252288055419922, 0.022951936721801756, 0.02229555130004883, 0.022199296951293947, 0.022244352340698242, 0.022427648544311524, 0.02227609634399414, 0.02222489547729492, 0.022188032150268554, 0.022136831283569337, 0.022296575546264647, 0.022199296951293947, 0.02226688003540039, 0.022125568389892578, 0.022365184783935548, 0.022931455612182617, 0.024360960006713867, 0.02329599952697754, 0.023130111694335938, 0.023242752075195314, 0.0230830078125, 0.023004159927368165, 0.023004159927368165, 0.02308095932006836, 0.022090751647949217, 0.022183935165405275, 0.022946815490722656, 
0.023043071746826172, 0.023011327743530274, 0.023111679077148437, 0.023079935073852538, 0.023136255264282226, 0.023200767517089844, 0.023242752075195314, 0.023019519805908203, 0.02307481575012207, 0.022980607986450196, 0.023213056564331053, 0.023029760360717775, 0.023097408294677733, 0.023039936065673828, 0.022986751556396484, 0.023058496475219726, 0.023097280502319337, 0.02311884880065918, 0.023193599700927735, 0.023480319976806642, 0.02347520065307617, 0.023037952423095705, 0.023005184173583985, 0.023008256912231444, 0.023036928176879884, 0.023008256912231444, 0.023096319198608398, 0.0231014404296875, 0.023257087707519532, 0.023031808853149413, 0.023213056564331053, 0.023405567169189453, 0.022751232147216797, 0.02210201644897461, 0.02263654327392578, 0.023670783996582033, 0.022979583740234375, 0.023129087448120117, 0.02306662368774414, 0.023015424728393553, 0.02309427261352539, 0.022784000396728517, 0.02247065544128418, 0.022161407470703123, 0.02225152015686035, 0.02265907287597656, 0.02308608055114746, 0.023164928436279295, 0.023023616790771483, 0.023008256912231444, 0.02262015914916992, 0.022362112045288086, 0.02327347183227539, 0.022962175369262695, 0.02292736053466797, 0.022986751556396484, 0.022946815490722656, 0.023178239822387696, 0.022972448348999024, 0.022930400848388672, 0.023358463287353515, 0.02303385543823242, 0.02294988822937012, 0.023754751205444336, 0.02330316734313965, 0.02328166389465332, 0.023290880203247072, 0.023799808502197265, 0.023196672439575194, 0.022984703063964843, 0.022377471923828125, 0.022181888580322266, 0.02213478469848633, 0.022211584091186523, 0.0222423038482666, 0.022223871231079103, 0.022143999099731446, 0.022169599533081053, 0.022191104888916017, 0.0224849910736084, 0.023205888748168944, 0.02313113594055176, 0.02312704086303711, 0.022969343185424804, 0.022953983306884765, 0.023217151641845703, 0.02251468849182129, 0.022125568389892578, 0.022000640869140626, 0.02224332809448242, 0.022236160278320313, 0.023152639389038086, 0.022940671920776368, 0.023088127136230468, 0.022970367431640625, 0.022983680725097655, 0.022994943618774414, 0.022930431365966796, 0.02290483283996582, 0.022187007904052734, 0.022098943710327147, 0.022133760452270508, 0.02203647994995117, 0.02343731117248535, 0.024012800216674804, 0.023162879943847657, 0.023150592803955077, 0.02364825630187988, 0.023448575973510744, 0.02293350410461426, 0.02294988822937012, 0.0225894718170166, 0.02215216064453125, 0.022837247848510742, 0.022901824951171875, 0.02278700828552246, 0.02290380859375, 0.022968320846557616, 0.022924287796020508, 0.022830080032348633, 0.022956031799316406, 0.023005184173583985, 0.023044095993041993, 0.02244710350036621, 0.022268928527832032, 0.022198272705078126, 0.022188032150268554, 0.02226585578918457, 0.02232524871826172, 0.02224332809448242, 0.022168575286865236, 0.022338560104370117, 0.022191104888916017, 0.02285772705078125, 0.02328371238708496, 0.023165952682495116, 0.023026687622070312, 0.022961151123046874, 0.023564287185668945, 0.02310553550720215, 0.023031808853149413, 0.022945791244506835, 0.022970367431640625, 0.022962175369262695, 0.023035903930664063, 0.026400768280029296, 0.023977983474731446, 0.022562816619873048, 0.022180896759033203, 0.022261728286743165, 0.022339584350585938, 0.02302566337585449, 0.023654399871826173, 0.023310335159301757, 0.02305536079406738, 0.023018495559692383, 0.02292531204223633, 0.022184959411621095, 0.02206105613708496, 0.022190080642700196, 0.021894144058227538, 0.021876735687255858, 0.02184806442260742, 0.021897216796875, 
0.022165504455566407, 0.022238208770751954, 0.022967296600341795, 0.02251571273803711, 0.022288383483886717, 0.02207539176940918, 0.02226278305053711, 0.022178815841674804, 0.02225152015686035, 0.022165504455566407, 0.0222740478515625, 0.02226585578918457, 0.023230464935302734, 0.02310758399963379, 0.022432767868041992, 0.022237184524536133, 0.022147071838378905, 0.023360511779785157, 0.02314352035522461, 0.022963104248046876, 0.023013376235961915, 0.02292531204223633, 0.022846464157104493, 0.022952959060668944, 0.022943744659423827, 0.023008256912231444, 0.02310655975341797, 0.02284851264953613, 0.022996992111206056, 0.02311065673828125, 0.023047168731689452, 0.023023616790771483, 0.023425024032592775, 0.023451648712158202, 0.022232063293457033, 0.022014976501464844, 0.022296575546264647, 0.02227507209777832, 0.02214201545715332, 0.02214803123474121, 0.022791168212890626, 0.024062976837158204, 0.023613439559936524, 0.023370752334594725, 0.023152639389038086, 0.02311577606201172, 0.022866943359375, 0.022286336898803712, 0.022226943969726562, 0.02215116882324219, 0.022218751907348632, 0.022313983917236328, 0.022681600570678712, 0.022956031799316406, 0.02310655975341797, 0.02353049659729004, 0.02333798408508301, 0.023061504364013673, 0.02307583999633789, 0.022976512908935546, 0.023369728088378908, 0.022238208770751954, 0.022280191421508787, 0.022046720504760742, 0.021940223693847655, 0.02241535949707031, 0.022227968215942383, 0.022575103759765625, 0.02303385543823242, 0.02234880065917969, 0.022220800399780274, 0.022338560104370117, 0.022219871520996092, 0.02224527931213379, 0.022213695526123046, 0.02237331199645996, 0.022387712478637696, 0.02306252861022949, 0.02246143913269043, 0.02227609634399414, 0.022375423431396483, 0.022054912567138672, 0.022214656829833986, 0.02211840057373047, 0.02227302360534668, 0.022172672271728516, 0.022185983657836913, 0.022192127227783204, 0.02228223991394043, 0.022214656829833986, 0.022235136032104492, 0.022211584091186523, 0.022211584091186523, 0.022034431457519533, 0.02289664077758789, 0.02304204750061035, 0.022936576843261718, 0.023203840255737306, 0.023030784606933592, 0.023170047760009766, 0.022573055267333983, 0.022185983657836913, 0.022193151473999022, 0.02232729530334473, 0.02208153533935547, 0.022228992462158204, 0.022208511352539064, 0.022153215408325197, 0.02185523223876953, 0.022732799530029296, 0.022824960708618162, 0.02227507209777832, 0.022253568649291993, 0.02227609634399414, 0.022213632583618165, 0.022179840087890625, 0.023584768295288085, 0.022979583740234375]",tokens/s,44.05640994576804,,,2,64,1,,, -4bit-awq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,EleutherAI/pythia-1.3b,EleutherAI/pythia-1.3b,cuda,0,42,,,True,,,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA 
A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,gpt_neox,MB,1611.022336,2095.579136,0.0,1465.909248,1358.169088,s,1,8.38158203125,8.38158203125,0.0,8.38158203125,8.38158203125,8.38158203125,8.38158203125,[8.38158203125],,kWh,1.892617814097422e-05,1.0331055115144741e-05,2.9042523234046325e-05,5.829975649016529e-05,,MB,1750.40512,2122.842112,0.0,1474.297856,1356.544512,s,10,0.2577041912078858,0.025770419120788573,2.3254592825358703e-05,0.0257752161026001,0.02578468723297119,0.025801751232147216,0.025815402431488037,"[0.025765951156616212, 0.025734079360961913, 0.025780895233154296, 0.025772735595703124, 0.025733760833740234, 0.025777984619140625, 0.025779935836791994, 0.02577769660949707, 0.02576233673095703, 0.025818815231323244]",tokens/s,9933.870256440223,kWh,3.0419216013465725e-07,1.6666104934498948e-07,1.1473545311362109e-06,1.6182077406158575e-06,tokens/kWh,158199712.91360375,MB,1754.730496,2124.939264,0.0,1474.297856,1409.970176,s,10,11.231544067382814,1.123154406738281,0.012411083634588465,1.1238303222656252,1.1411046997070313,1.1436659973144532,1.1457150354003907,"[1.146227294921875, 1.1405355224609375, 1.110298583984375, 1.108239501953125, 1.1238349609375, 1.106463134765625, 1.1238265380859376, 1.128398681640625, 1.1238341064453126, 1.1198857421875]",tokens/s,56.0920204933856,kWh,1.3281805243268446e-05,7.277870077042851e-06,2.4312721392463903e-05,4.48723967127752e-05,tokens/kWh,1403981.169164157,,s,630,11.227800584793096,0.017821905690147764,0.0004085771722067965,0.017873904228210447,0.018264167022705075,0.018422527694702148,0.019092662792205825,"[0.017500160217285156, 0.01805619239807129, 0.019681280136108398, 0.018564096450805666, 0.018365440368652345, 0.018299903869628906, 0.01823744010925293, 0.018172927856445312, 0.018231296539306642, 0.018198528289794923, 0.018082815170288084, 0.018117631912231445, 0.018147327423095702, 0.017915903091430666, 0.018671615600585938, 0.01824460792541504, 0.01827020835876465, 0.01824460792541504, 0.018207744598388673, 0.018486272811889647, 0.018309120178222657, 0.018336767196655272, 0.01823027229309082, 0.01802956771850586, 0.01779302406311035, 0.018183168411254884, 0.018251775741577148, 0.01819443130493164, 0.018282495498657226, 0.018242559432983398, 0.018241535186767577, 0.018153472900390624, 0.01823539161682129, 0.017892351150512697, 0.017894399642944335, 0.017709056854248048, 0.019330047607421876, 0.01842995262145996, 0.018176000595092775, 0.018151424407958985, 0.018100223541259765, 0.01824563217163086, 0.01819340705871582, 0.017844224929809572, 0.017709056854248048, 0.018131967544555663, 0.01814630317687988, 0.018156543731689453, 0.018104320526123048, 0.018167808532714845, 0.018144256591796876, 0.018126848220825196, 0.018185216903686522, 0.018158592224121094, 0.018142208099365235, 0.01823641586303711, 0.018257919311523436, 0.018325504302978517, 0.018274303436279296, 0.018125823974609375, 0.018177024841308592, 0.017673215866088866, 0.017482751846313475, 0.01724313545227051, 0.01744076728820801, 0.017349632263183593, 0.017325056076049804, 0.017475584030151366, 0.017514495849609374, 0.017475584030151366, 0.01739468765258789, 0.01740492820739746, 0.01739468765258789, 0.017497087478637697, 0.01845145606994629, 0.018315263748168945, 0.01807161521911621, 0.018229183197021485, 0.018242559432983398, 0.018124799728393554, 0.017938432693481447, 0.01842483139038086, 0.018327552795410155, 0.01859891128540039, 0.01809715270996094, 0.018123775482177733, 0.018181119918823242, 0.01813811111450195, 0.018152448654174806, 0.018167808532714845, 
0.018268159866333008, 0.018291711807250977, 0.0182108154296875, 0.01816166305541992, 0.018128896713256838, 0.018085887908935547, 0.018121728897094725, 0.018185216903686522, 0.01824665641784668, 0.018150400161743165, 0.01819443130493164, 0.01815449523925781, 0.01817087936401367, 0.01818623924255371, 0.018089984893798827, 0.01844633674621582, 0.019802143096923828, 0.019217376708984376, 0.018408447265625, 0.018189311981201172, 0.018143232345581056, 0.018173952102661133, 0.018115583419799804, 0.018167808532714845, 0.01810534477233887, 0.01818726348876953, 0.018127872467041017, 0.01824051284790039, 0.018085887908935547, 0.018231296539306642, 0.018122751235961913, 0.01783500862121582, 0.018067455291748045, 0.018299903869628906, 0.018284543991088868, 0.018151424407958985, 0.017316864013671874, 0.018018304824829103, 0.01820262336730957, 0.018107391357421874, 0.018147327423095702, 0.01826304054260254, 0.018141183853149414, 0.018225151062011717, 0.018116607666015624, 0.018131967544555663, 0.01824358367919922, 0.017472511291503907, 0.017258495330810548, 0.017146879196166993, 0.01739673614501953, 0.01742233657836914, 0.01742950439453125, 0.01740902328491211, 0.017514495849609374, 0.017533952713012696, 0.017494016647338868, 0.01760358428955078, 0.017469440460205078, 0.017510400772094727, 0.01765376091003418, 0.01741926383972168, 0.017481727600097655, 0.01742233657836914, 0.017485824584960938, 0.017473535537719728, 0.017537023544311522, 0.01745510482788086, 0.0180316162109375, 0.017876991271972655, 0.017534975051879884, 0.017565696716308594, 0.017473535537719728, 0.017482751846313475, 0.017492992401123047, 0.017498111724853514, 0.017315839767456053, 0.01726464080810547, 0.01741619110107422, 0.017532928466796875, 0.017475584030151366, 0.01742540740966797, 0.017487871170043946, 0.017356800079345702, 0.01741414451599121, 0.017408000946044923, 0.01740492820739746, 0.01740492820739746, 0.017452032089233398, 0.017509376525878906, 0.01743257522583008, 0.017346559524536134, 0.01742233657836914, 0.01741209602355957, 0.017986560821533205, 0.018150400161743165, 0.018010112762451173, 0.01798041534423828, 0.017869823455810546, 0.017337343215942384, 0.01740287971496582, 0.01744588851928711, 0.017369087219238282, 0.01741312026977539, 0.01741823959350586, 0.01738035202026367, 0.017475584030151366, 0.01740492820739746, 0.017469440460205078, 0.017485824584960938, 0.01760665512084961, 0.017503231048583985, 0.01741619110107422, 0.01737215995788574, 0.01742950439453125, 0.017369087219238282, 0.017354751586914064, 0.017161216735839844, 0.01742540740966797, 0.0174335994720459, 0.017415168762207032, 0.017373184204101562, 0.017119232177734374, 0.017123327255249024, 0.017374208450317383, 0.017415168762207032, 0.017330175399780275, 0.01744588851928711, 0.017327104568481445, 0.0173885440826416, 0.017377279281616212, 0.017369087219238282, 0.017464319229125978, 0.01740185546875, 0.017671167373657228, 0.01803264045715332, 0.01799577522277832, 0.017533952713012696, 0.017406976699829102, 0.01747865676879883, 0.01739366340637207, 0.017515520095825195, 0.017949695587158202, 0.017960960388183594, 0.018318336486816408, 0.018044927597045898, 0.018018304824829103, 0.01803775978088379, 0.018009088516235353, 0.018231296539306642, 0.018035711288452147, 0.018137088775634767, 0.018018304824829103, 0.01802342414855957, 0.01844633674621582, 0.018053119659423827, 0.01759539222717285, 0.017488895416259767, 0.017502208709716797, 0.017456127166748048, 0.01742848014831543, 0.017499135971069335, 0.017308671951293944, 0.01742848014831543, 
0.01742131233215332, 0.018281471252441405, 0.017533952713012696, 0.01746124839782715, 0.017466367721557616, 0.017469440460205078, 0.017754112243652344, 0.01765068817138672, 0.017777664184570312, 0.01756876754760742, 0.01741926383972168, 0.017505279541015627, 0.01740492820739746, 0.017369087219238282, 0.017399808883666993, 0.01735577583312988, 0.01744486427307129, 0.017415168762207032, 0.017475584030151366, 0.017819648742675782, 0.018280448913574218, 0.018587648391723634, 0.01823539161682129, 0.018229248046875, 0.017871871948242187, 0.018493440628051756, 0.01822719955444336, 0.017951744079589844, 0.018098175048828127, 0.017691648483276368, 0.017530879974365234, 0.018264064788818358, 0.01801113510131836, 0.017903615951538086, 0.0186562557220459, 0.01848422431945801, 0.018265087127685545, 0.01828659248352051, 0.018068479537963866, 0.018059263229370116, 0.018290687561035156, 0.01779916763305664, 0.017674240112304687, 0.01803264045715332, 0.018378751754760742, 0.01841152000427246, 0.017958911895751953, 0.018144256591796876, 0.018059263229370116, 0.01818623924255371, 0.01797427177429199, 0.017885183334350584, 0.017939456939697264, 0.0178155517578125, 0.017358848571777344, 0.017452032089233398, 0.01741004753112793, 0.01744486427307129, 0.017447935104370118, 0.017473535537719728, 0.01741107177734375, 0.017184768676757813, 0.0174202880859375, 0.01745408058166504, 0.017327104568481445, 0.01744895935058594, 0.017375232696533204, 0.017390592575073242, 0.017357824325561523, 0.01744895935058594, 0.017354751586914064, 0.017390592575073242, 0.017573888778686524, 0.017492992401123047, 0.01740390396118164, 0.01742950439453125, 0.017483776092529296, 0.01741926383972168, 0.01739468765258789, 0.017452032089233398, 0.01762611198425293, 0.017574911117553712, 0.017528831481933595, 0.01742540740966797, 0.017383424758911133, 0.01745408058166504, 0.017336320877075196, 0.01739366340637207, 0.01739263916015625, 0.017449983596801756, 0.01765888023376465, 0.017541120529174805, 0.01742233657836914, 0.017476608276367187, 0.01742131233215332, 0.017746944427490235, 0.018168832778930662, 0.017901567459106444, 0.01796403121948242, 0.01799884796142578, 0.01780121612548828, 0.01739776039123535, 0.017535999298095704, 0.01742438316345215, 0.017427455902099608, 0.017472511291503907, 0.0174202880859375, 0.017346559524536134, 0.017802240371704102, 0.018552831649780274, 0.018010112762451173, 0.017987583160400392, 0.0186746883392334, 0.018172927856445312, 0.01741414451599121, 0.017469440460205078, 0.017468416213989257, 0.01742950439453125, 0.017373184204101562, 0.017498111724853514, 0.01737932777404785, 0.017491968154907226, 0.01746227264404297, 0.01741926383972168, 0.01722470474243164, 0.017348608016967772, 0.01743257522583008, 0.017391616821289063, 0.017375232696533204, 0.01742438316345215, 0.017358848571777344, 0.017320959091186524, 0.01742336082458496, 0.0174202880859375, 0.01739673614501953, 0.01741414451599121, 0.01740902328491211, 0.017502208709716797, 0.017325056076049804, 0.01743155288696289, 0.01740287971496582, 0.01720524787902832, 0.017163263320922852, 0.01721958351135254, 0.017271808624267578, 0.017344512939453126, 0.017768447875976562, 0.01810534477233887, 0.018108415603637695, 0.01802137565612793, 0.018114559173583983, 0.018350080490112306, 0.018120704650878908, 0.018041856765747072, 0.01839820861816406, 0.01807257652282715, 0.018027519226074217, 0.018085887908935547, 0.01743974494934082, 0.01744179153442383, 0.01802649688720703, 0.017537023544311522, 0.017518592834472657, 0.0174653434753418, 0.017542144775390626, 
0.017895423889160156, 0.018671615600585938, 0.018297855377197265, 0.018469888687133788, 0.0192225284576416, 0.018526208877563476, 0.01822003173828125, 0.018103296279907227, 0.018120704650878908, 0.018110464096069336, 0.01938739204406738, 0.01828556823730469, 0.01800499153137207, 0.018160640716552736, 0.018052095413208007, 0.01809715270996094, 0.018122751235961913, 0.018130943298339842, 0.01798963165283203, 0.018182144165039063, 0.01775923156738281, 0.01756159973144531, 0.018787328720092773, 0.018028543472290038, 0.01860710334777832, 0.01802956771850586, 0.017999872207641602, 0.017678335189819337, 0.017391616821289063, 0.01802444839477539, 0.018082815170288084, 0.01797427177429199, 0.018051071166992186, 0.017943552017211914, 0.01799679946899414, 0.018118656158447266, 0.01820057678222656, 0.018041856765747072, 0.01801113510131836, 0.017932287216186525, 0.018160640716552736, 0.018068479537963866, 0.0180316162109375, 0.018110464096069336, 0.018033664703369142, 0.0192491512298584, 0.01826201629638672, 0.018217983245849608, 0.018130943298339842, 0.018043903350830077, 0.017747968673706056, 0.01801113510131836, 0.01800499153137207, 0.018104320526123048, 0.018100223541259765, 0.017836032867431642, 0.01804697608947754, 0.018149375915527344, 0.018584575653076172, 0.01846272087097168, 0.0181790714263916, 0.017339391708374022, 0.017467391967773437, 0.01737113571166992, 0.017364992141723632, 0.017690624237060547, 0.01805414390563965, 0.018077695846557617, 0.01802956771850586, 0.017941503524780272, 0.018010112762451173, 0.017935359954833984, 0.017458175659179686, 0.017328128814697266, 0.01719603157043457, 0.01716633605957031, 0.017504255294799806, 0.01762303924560547, 0.017331199645996095, 0.017357824325561523, 0.01739366340637207, 0.017663999557495116, 0.017993728637695314, 0.01706598472595215, 0.01723904037475586, 0.01726464080810547, 0.017582080841064454, 0.017353727340698243, 0.01716223907470703, 0.01741209602355957, 0.01745510482788086, 0.017399808883666993, 0.017588224411010742, 0.017467391967773437, 0.017555456161499023, 0.017573888778686524, 0.017408000946044923, 0.017547264099121093, 0.01805516815185547, 0.01841868782043457, 0.01804083251953125, 0.018053119659423827, 0.018126848220825196, 0.018273279190063475, 0.01804697608947754, 0.018128896713256838, 0.018068479537963866, 0.01806540870666504, 0.017954816818237306, 0.017949695587158202, 0.01742233657836914, 0.017572864532470703, 0.017970176696777345, 0.017266687393188478, 0.017557504653930665, 0.017771520614624024, 0.017508352279663086, 0.017290239334106446, 0.017246208190917968, 0.01729539108276367, 0.01787593650817871, 0.017971200942993162, 0.01768550491333008, 0.017710079193115236, 0.018025472640991212, 0.01780838394165039, 0.017648639678955077, 0.017727487564086913, 0.01804902458190918, 0.01807974433898926, 0.017932287216186525, 0.01784115219116211, 0.018489343643188477, 0.01841971206665039, 0.017741823196411134, 0.017697792053222656, 0.017923072814941408, 0.018144256591796876, 0.018139135360717772, 0.01805619239807129, 0.01806540870666504, 0.018094079971313477, 0.018108415603637695, 0.018075647354125975, 0.01804595184326172, 0.018550783157348632, 0.018546688079833985, 0.018139135360717772, 0.017385471343994142, 0.018311168670654295, 0.017885183334350584, 0.01739468765258789, 0.017909759521484374, 0.0178657283782959, 0.017452032089233398, 0.017398784637451172, 0.017953792572021485, 0.017984512329101563, 0.01800499153137207, 0.01797324752807617, 0.01801215934753418, 0.018035711288452147, 0.017704959869384765, 0.01759539222717285, 
0.017949695587158202, 0.017902591705322265, 0.018089984893798827, 0.017923072814941408, 0.017971200942993162, 0.017949695587158202, 0.01803878402709961, 0.017994752883911135, 0.017994752883911135, 0.017702911376953127, 0.01746227264404297, 0.01782476806640625, 0.017506303787231444, 0.01741004753112793, 0.01744486427307129, 0.017489919662475584, 0.017944576263427735, 0.017949695587158202, 0.017427455902099608, 0.017168384552001953, 0.01723904037475586, 0.01722470474243164, 0.01780121612548828, 0.01803878402709961, 0.017976320266723633, 0.018017280578613282, 0.01805619239807129, 0.018132991790771484, 0.01805516815185547, 0.01802444839477539, 0.01803264045715332, 0.01803878402709961, 0.01784320068359375, 0.017741823196411134, 0.017682432174682617, 0.01764659118652344, 0.017375232696533204, 0.01746124839782715, 0.017513471603393553, 0.017821695327758787, 0.018117631912231445, 0.018076671600341796, 0.017821695327758787, 0.017157119750976564, 0.01726361656188965, 0.017854463577270507, 0.017499135971069335]",tokens/s,56.11072224183164,,,2,64,1,,, -4bit-awq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,Qwen/Qwen1.5-72B,Qwen/Qwen1.5-72B,cuda,0,42,,,True,,,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run - report = scenario.run(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run - self.run_model_loading_tracking(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking - backend.load() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load - self.load_transformers_model() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model - self.load_transformers_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights - self.load_transformers_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained - self.pretrained_model = self.automodel_loader.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - 
File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4034, in from_pretrained - dispatch_model(model, **device_map_kwargs) - File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 494, in dispatch_model - model.to(device) - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 2905, in to - return super().to(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1174, in to - return self._apply(convert) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply - module._apply(fn) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply - module._apply(fn) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply - module._apply(fn) - [Previous line repeated 2 more times] - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 854, in _apply - self._buffers[key] = fn(buf) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1160, in convert - return t.to( -torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 96.00 MiB. GPU 0 has a total capacity of 22.18 GiB of which 68.50 MiB is free. Process 83685 has 22.11 GiB memory in use. Of the allocated memory 21.86 GiB is allocated by PyTorch, and 10.74 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) - -",qwen2,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,2,64,1,,, -4bit-awq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,EleutherAI/pythia-1.4b,EleutherAI/pythia-1.4b,cuda,0,42,,,True,,,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,gpt_neox,MB,1524.16256,2095.579136,0.0,1465.909248,1358.169088,s,1,8.3516474609375,8.3516474609375,0.0,8.3516474609375,8.3516474609375,8.3516474609375,8.3516474609375,[8.3516474609375],,kWh,1.805285382221832e-05,9.878727637215027e-06,3.0115579648049007e-05,5.804716110748235e-05,,MB,1674.56768,2122.842112,0.0,1474.297856,1356.544512,s,10,0.25788685035705566,0.025788685035705566,8.830682133442401e-05,0.025761712074279783,0.0258179874420166,0.025932193756103516,0.02602355880737305,"[0.02573516845703125, 0.025757152557373045, 0.025763776779174803, 0.025792608261108397, 0.025770656585693358, 0.02572297668457031, 0.02604640007019043, 0.0257488956451416, 0.025759647369384766, 
0.025789567947387695]",tokens/s,9926.834177297398,kWh,3.0397340033740344e-07,1.6656209996291933e-07,1.1605300629564452e-06,1.631065563256768e-06,tokens/kWh,156952611.6956585,MB,1680.625664,2124.939264,0.0,1474.297856,1409.970176,s,10,11.123126342773439,1.1123126342773437,0.010284379429998781,1.1133880615234375,1.1264655639648438,1.1268132263183592,1.1270913562011717,"[1.127160888671875, 1.114127685546875, 1.10178955078125, 1.118501708984375, 1.118333251953125, 1.1263883056640625, 1.10782861328125, 1.1126484375, 1.1020780029296875, 1.0942698974609375]",tokens/s,56.63875250408385,kWh,1.2961849200009057e-05,7.102646893159082e-06,2.4468212662235776e-05,4.453270875540391e-05,tokens/kWh,1414690.4996511163,,s,630,11.119244302749646,0.017649594131348628,0.00036808048624410776,0.017763840675354005,0.01791918067932129,0.018051071166992186,0.01884817329406739,"[0.01767731285095215, 0.017777664184570312, 0.018516992568969725, 0.019091455459594727, 0.018147327423095702, 0.017844224929809572, 0.017932287216186525, 0.01780121612548828, 0.01785856056213379, 0.017795072555541993, 0.017936384201049805, 0.017796096801757814, 0.01784115219116211, 0.017733631134033204, 0.01782374382019043, 0.01780019187927246, 0.017918975830078124, 0.017889280319213868, 0.017884159088134767, 0.01782579231262207, 0.017853439331054686, 0.017871871948242187, 0.0178288631439209, 0.017879039764404296, 0.017811456680297853, 0.01787392044067383, 0.01780121612548828, 0.01784217643737793, 0.017868799209594728, 0.017786880493164063, 0.017869823455810546, 0.01780531120300293, 0.01784832000732422, 0.017819648742675782, 0.017904640197753906, 0.017760255813598632, 0.017830911636352538, 0.01780531120300293, 0.018020351409912108, 0.017886207580566405, 0.017869823455810546, 0.01783500862121582, 0.018044927597045898, 0.01781862449645996, 0.0177838077545166, 0.017889280319213868, 0.017868799209594728, 0.017904640197753906, 0.01783500862121582, 0.017855487823486327, 0.017954816818237306, 0.017930240631103517, 0.01783500862121582, 0.01785958480834961, 0.01780121612548828, 0.0178288631439209, 0.01784115219116211, 0.017836032867431642, 0.017836032867431642, 0.01784115219116211, 0.017893375396728514, 0.017868799209594728, 0.01789030456542969, 0.0170383358001709, 0.017152000427246093, 0.017532928466796875, 0.017763328552246094, 0.01783705520629883, 0.017780736923217775, 0.01780633544921875, 0.01783705520629883, 0.01784217643737793, 0.01781350326538086, 0.017908735275268553, 0.017722368240356445, 0.01758515167236328, 0.017672191619873046, 0.017978368759155275, 0.0177838077545166, 0.01780735969543457, 0.017846271514892577, 0.017821695327758787, 0.01783193588256836, 0.017763328552246094, 0.017862655639648437, 0.01777459144592285, 0.01744076728820801, 0.016950271606445313, 0.017147903442382813, 0.017121280670166016, 0.017123327255249024, 0.017126432418823244, 0.017164255142211916, 0.017529855728149413, 0.017830911636352538, 0.017763328552246094, 0.017737728118896484, 0.017921024322509766, 0.01787392044067383, 0.017737728118896484, 0.017811456680297853, 0.01782579231262207, 0.017769472122192383, 0.017863679885864257, 0.017855487823486327, 0.017881088256835938, 0.01781350326538086, 0.017764352798461915, 0.017732608795166017, 0.017754112243652344, 0.017743871688842772, 0.017819648742675782, 0.017736703872680663, 0.01779814338684082, 0.018300928115844727, 0.018150400161743165, 0.017827840805053712, 0.017926143646240233, 0.017708032608032227, 0.017133567810058595, 0.017148927688598634, 0.017299455642700197, 0.017435647964477538, 0.0178657283782959, 
0.017769472122192383, 0.017786880493164063, 0.016982015609741212, 0.017113088607788086, 0.017075199127197266, 0.01707827186584473, 0.01718272018432617, 0.017163263320922852, 0.017087488174438475, 0.01719193649291992, 0.017130495071411133, 0.017060863494873048, 0.01714995193481445, 0.017127424240112304, 0.017144832611083984, 0.017060863494873048, 0.017096704483032226, 0.017097728729248047, 0.017068031311035157, 0.017092607498168946, 0.017184768676757813, 0.017143808364868163, 0.017103872299194335, 0.017111040115356444, 0.017201152801513672, 0.017898496627807618, 0.018053119659423827, 0.017757183074951173, 0.01781760025024414, 0.017864704132080078, 0.017888256072998047, 0.01784832000732422, 0.017819648742675782, 0.017914880752563478, 0.01780121612548828, 0.01783500862121582, 0.017847295761108398, 0.017909759521484374, 0.01781350326538086, 0.01785036849975586, 0.017803264617919923, 0.017812480926513673, 0.017871871948242187, 0.018033664703369142, 0.017810432434082032, 0.017864704132080078, 0.017947647094726564, 0.01782681655883789, 0.018051071166992186, 0.017786880493164063, 0.017872896194458008, 0.017796096801757814, 0.017912832260131836, 0.01784524726867676, 0.017661951065063478, 0.016963584899902344, 0.01701273536682129, 0.017120256423950195, 0.017192960739135742, 0.01720319938659668, 0.0172359676361084, 0.01717862319946289, 0.01722060775756836, 0.01721343994140625, 0.017466367721557616, 0.01765068817138672, 0.01777459144592285, 0.017868799209594728, 0.017748992919921876, 0.017622016906738282, 0.017690624237060547, 0.017722368240356445, 0.017689599990844726, 0.01769267272949219, 0.017710079193115236, 0.017712127685546874, 0.017671167373657228, 0.017716224670410157, 0.017708032608032227, 0.017804288864135744, 0.017912832260131836, 0.017723392486572266, 0.017332223892211913, 0.01719193649291992, 0.01720524787902832, 0.017131519317626954, 0.017140735626220704, 0.017120256423950195, 0.017523712158203125, 0.017802240371704102, 0.017721343994140625, 0.017779712677001954, 0.017698816299438477, 0.01778483200073242, 0.017632255554199217, 0.01780121612548828, 0.017796096801757814, 0.017870847702026366, 0.017780736923217775, 0.0177838077545166, 0.01770086479187012, 0.018349056243896485, 0.01789030456542969, 0.01781862449645996, 0.017755136489868165, 0.017747968673706056, 0.01781452751159668, 0.017743871688842772, 0.0177838077545166, 0.017689599990844726, 0.017829887390136717, 0.017756160736083985, 0.017726463317871095, 0.01776639938354492, 0.018151424407958985, 0.017876991271972655, 0.01775923156738281, 0.017822719573974608, 0.017735679626464843, 0.01780019187927246, 0.017779712677001954, 0.017888256072998047, 0.017720319747924804, 0.017906688690185548, 0.01774284744262695, 0.019124223709106446, 0.01803980827331543, 0.01787596893310547, 0.017663999557495116, 0.01738137626647949, 0.01716633605957031, 0.017172479629516603, 0.017294336318969726, 0.017795072555541993, 0.01777561569213867, 0.017879039764404296, 0.01797324752807617, 0.01828659248352051, 0.017897472381591797, 0.0178155517578125, 0.017743871688842772, 0.017697792053222656, 0.017881088256835938, 0.01787596893310547, 0.01785958480834961, 0.017741823196411134, 0.017768447875976562, 0.01780633544921875, 0.01774284744262695, 0.01777561569213867, 0.017698816299438477, 0.017733631134033204, 0.017765375137329103, 0.017740800857543947, 0.017705984115600586, 0.017819648742675782, 0.017932287216186525, 0.017693695068359376, 0.01783193588256836, 0.017741823196411134, 0.017847295761108398, 0.01742540740966797, 0.017123327255249024, 0.017487871170043946, 
0.017702911376953127, 0.017762304306030274, 0.017780736923217775, 0.017717248916625978, 0.017708032608032227, 0.017716224670410157, 0.017760255813598632, 0.01779916763305664, 0.019513343811035155, 0.018365440368652345, 0.01804902458190918, 0.017750015258789064, 0.017701887130737306, 0.017715200424194336, 0.017717248916625978, 0.01780121612548828, 0.017737728118896484, 0.017683456420898438, 0.01781862449645996, 0.01773465538024902, 0.017803264617919923, 0.017795072555541993, 0.017738752365112305, 0.017731584548950196, 0.017624063491821287, 0.017125375747680666, 0.017075199127197266, 0.01773465538024902, 0.017723392486572266, 0.01779916763305664, 0.01779199981689453, 0.017779712677001954, 0.01776639938354492, 0.017764352798461915, 0.018051071166992186, 0.018095104217529297, 0.017927167892456054, 0.017796096801757814, 0.017769472122192383, 0.017827840805053712, 0.017704959869384765, 0.017746944427490235, 0.017769472122192383, 0.017738752365112305, 0.01784115219116211, 0.01788211250305176, 0.01922764778137207, 0.019017728805541992, 0.019281951904296876, 0.018133983612060547, 0.017868799209594728, 0.017909759521484374, 0.017140735626220704, 0.017729536056518554, 0.01782476806640625, 0.017810432434082032, 0.01784320068359375, 0.017803264617919923, 0.018908159255981445, 0.018109439849853515, 0.017795072555541993, 0.017944576263427735, 0.01784320068359375, 0.01782067108154297, 0.017898496627807618, 0.01780838394165039, 0.0179814395904541, 0.017876991271972655, 0.017958911895751953, 0.017787904739379884, 0.01800294494628906, 0.017488895416259767, 0.017398784637451172, 0.017679359436035155, 0.01785036849975586, 0.018093055725097656, 0.01780531120300293, 0.017844224929809572, 0.017916927337646483, 0.017056768417358398, 0.017377279281616212, 0.01764352035522461, 0.017364992141723632, 0.01714995193481445, 0.018386943817138672, 0.018131967544555663, 0.017804288864135744, 0.0180633602142334, 0.01798860740661621, 0.017157119750976564, 0.017035263061523438, 0.01721343994140625, 0.01715510368347168, 0.01708540725708008, 0.017105920791625977, 0.017080320358276366, 0.017090560913085938, 0.017085439682006837, 0.017076223373413087, 0.017125375747680666, 0.017111040115356444, 0.017296384811401368, 0.017291263580322267, 0.017116159439086915, 0.017129472732543945, 0.017278976440429687, 0.017829887390136717, 0.017711103439331053, 0.01783705520629883, 0.017894399642944335, 0.017987583160400392, 0.017761280059814453, 0.017506303787231444, 0.017711103439331053, 0.017705984115600586, 0.017671167373657228, 0.017726463317871095, 0.0178657283782959, 0.017804288864135744, 0.01764556884765625, 0.017726463317871095, 0.017757183074951173, 0.017714176177978515, 0.017771520614624024, 0.017717248916625978, 0.017714176177978515, 0.017678335189819337, 0.017765375137329103, 0.017712127685546874, 0.017704959869384765, 0.017909759521484374, 0.017794048309326172, 0.017975296020507812, 0.017802240371704102, 0.01780940818786621, 0.017720319747924804, 0.017754112243652344, 0.017743871688842772, 0.017758207321166994, 0.017739776611328126, 0.017768447875976562, 0.01782374382019043, 0.017946624755859376, 0.01781760025024414, 0.01776742362976074, 0.017876991271972655, 0.017912832260131836, 0.017760255813598632, 0.01779097557067871, 0.01719500732421875, 0.01720524787902832, 0.017128448486328125, 0.01720729637145996, 0.017080320358276366, 0.01706598472595215, 0.017177600860595704, 0.017547264099121093, 0.018061311721801757, 0.018214912414550782, 0.017960960388183594, 0.017740800857543947, 0.01784012794494629, 0.017903615951538086, 
0.017894399642944335, 0.018085887908935547, 0.01783705520629883, 0.017764352798461915, 0.01783296012878418, 0.017893375396728514, 0.017764352798461915, 0.01778278350830078, 0.017821695327758787, 0.017827840805053712, 0.017353727340698243, 0.01722163200378418, 0.01721139144897461, 0.017903615951538086, 0.01780735969543457, 0.017680383682250975, 0.017209344863891602, 0.01737113571166992, 0.01787392044067383, 0.017846271514892577, 0.017802240371704102, 0.017787904739379884, 0.017810432434082032, 0.017777664184570312, 0.017771520614624024, 0.017693695068359376, 0.017771520614624024, 0.01778278350830078, 0.017785856246948242, 0.017770496368408203, 0.01770086479187012, 0.01769267272949219, 0.017804288864135744, 0.01802239990234375, 0.01787392044067383, 0.018150400161743165, 0.01745305633544922, 0.017122304916381836, 0.01723904037475586, 0.017104896545410156, 0.016827392578125, 0.017106943130493164, 0.017147903442382813, 0.017084415435791016, 0.017681407928466796, 0.017903615951538086, 0.017738752365112305, 0.017822719573974608, 0.017765375137329103, 0.01862451171875, 0.017754112243652344, 0.01721651268005371, 0.017128448486328125, 0.016974847793579103, 0.017055744171142577, 0.017500160217285156, 0.017917951583862304, 0.017870847702026366, 0.01717452812194824, 0.017068031311035157, 0.017112064361572265, 0.017105920791625977, 0.017106943130493164, 0.017138687133789063, 0.017111040115356444, 0.017172479629516603, 0.01704550361633301, 0.017096704483032226, 0.0170700798034668, 0.017129472732543945, 0.01745305633544922, 0.017844224929809572, 0.01779814338684082, 0.017735679626464843, 0.017763328552246094, 0.017697792053222656, 0.017737728118896484, 0.017804288864135744, 0.017490943908691405, 0.017052671432495118, 0.016884735107421875, 0.017128448486328125, 0.017142784118652343, 0.017150976181030272, 0.017145856857299805, 0.017560575485229494, 0.01780121612548828, 0.01781657600402832, 0.017924095153808595, 0.01782067108154297, 0.017854463577270507, 0.017838079452514647, 0.0177838077545166, 0.017781759262084963, 0.017900543212890627, 0.017318912506103516, 0.017106943130493164, 0.01720319938659668, 0.01719808006286621, 0.017120256423950195, 0.017101823806762697, 0.01718988800048828, 0.017090560913085938, 0.017090560913085938, 0.017167360305786132, 0.01723187255859375, 0.017744895935058593, 0.017937408447265626, 0.018507776260375978, 0.018701311111450195, 0.018082815170288084, 0.017905664443969727, 0.01782374382019043, 0.017938432693481447, 0.017797119140625, 0.017871871948242187, 0.016951295852661134, 0.017164287567138673, 0.01722060775756836, 0.017148927688598634, 0.017134592056274413, 0.017126399993896483, 0.017077247619628907, 0.017073152542114257, 0.017051647186279297, 0.017122304916381836, 0.017068031311035157, 0.01720729637145996, 0.017697792053222656, 0.017739776611328126, 0.017777664184570312, 0.01781760025024414, 0.017714176177978515, 0.017671167373657228, 0.017764352798461915, 0.017752063751220702, 0.017740800857543947, 0.017724416732788087, 0.017688575744628905, 0.01766912078857422, 0.017746944427490235, 0.017705984115600586, 0.0176680965423584, 0.017674240112304687, 0.017739776611328126, 0.017697792053222656, 0.017901567459106444, 0.017819648742675782, 0.017715200424194336, 0.01761075210571289, 0.017152000427246093, 0.017085439682006837, 0.017088512420654296, 0.017111040115356444, 0.017130495071411133, 0.01704140853881836, 0.017080320358276366, 0.016969728469848632, 0.016913408279418944, 0.016995328903198242, 0.01714995193481445, 0.017836032867431642, 0.01802649688720703, 0.01784320068359375, 
0.01782476806640625, 0.01722777557373047, 0.017157119750976564, 0.017060863494873048, 0.017100799560546876, 0.017353727340698243, 0.01695232009887695, 0.016885759353637696, 0.016905216217041014, 0.016941055297851563, 0.017154048919677735, 0.017120256423950195, 0.0172042236328125, 0.017107967376708985, 0.017116159439086915]",tokens/s,56.65852668100922,,,2,64,1,,, -4bit-awq-exllama-v2-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,facebook/xglm-7.5B,facebook/xglm-7.5B,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - self.load_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3704, in from_pretrained - config = cls._autoset_attn_implementation( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1481, in _autoset_attn_implementation - cls._check_and_enable_flash_attn_2( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1572, in _check_and_enable_flash_attn_2 - raise ValueError( -ValueError: XGLMForCausalLM does not support Flash Attention 2.0 yet. 
Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmpofhf0fwt/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,2,64,1,,, -4bit-awq-exllama-v2-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,huggyllama/llama-30b,huggyllama/llama-30b,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - self.load_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3907, in from_pretrained - hf_quantizer.postprocess_model(model) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/base.py"", line 195, in postprocess_model - return self._process_model_after_weight_loading(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/quantizer_awq.py"", line 107, in _process_model_after_weight_loading - model = post_init_awq_exllama_modules(model, self.quantization_config.exllama_config) - File ""/usr/local/lib/python3.10/dist-packages/transformers/integrations/awq.py"", line 466, in post_init_awq_exllama_modules - model = exllamav2_post_init( - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllamav2.py"", line 198, in exllamav2_post_init - submodule.post_init(scratch_space=model.scratch_spaces[device]) - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllamav2.py"", line 81, in post_init - self.q_handle = exlv2_ext.make_q_matrix( -NameError: name 'exlv2_ext' is not defined - 
-",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,2,64,1,,, -4bit-awq-exllama-v2-flash_attention_2,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,openai-community/gpt2-large,openai-community/gpt2-large,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.1,,0.30.1,,,,1.19.2,,,,0.11.1,,,,,,,,,,,,,,,,,,,,,,,,,,,MB,1273.511936,2645.03296,0.0,1998.585856,1692.285952,s,10,0.19512889289855956,0.019512889289855957,0.0006973531616019884,0.019118751525878906,0.02076168899536133,0.020845900917053222,0.020913270454406736,"[0.02074297523498535, 0.019069087982177733, 0.01902137565612793, 0.019066047668457032, 0.01907967948913574, 0.0189399356842041, 0.01915782356262207, 0.019729055404663087, 0.019392799377441407, 0.020930112838745116]",tokens/s,13119.533258105714,kWh,2.258438427837786e-07,1.2375136383443654e-07,6.813650022829518e-07,1.030960208901167e-06,tokens/kWh,248312202.3427593,MB,1273.806848,2645.03296,0.0,1998.585856,1740.085248,s,10,11.767407348632812,1.1767407348632815,0.01424671134639797,1.1756907958984375,1.1905447143554688,1.2001551696777344,1.2078435339355469,"[1.209765625, 1.1791375732421876, 1.183642578125, 1.159637939453125, 1.166791015625, 1.1595640869140624, 1.17467822265625, 1.169077880859375, 1.1884090576171875, 1.176703369140625]",tokens/s,53.537706423768526,kWh,1.3865471741266508e-05,7.59789077589712e-06,2.9375964599116e-05,5.083932711627962e-05,tokens/kWh,1239198.1478414636,,s,629,11.921181676864638,0.018952594080865855,0.002352548224385844,0.0184770565032959,0.01912931900024414,0.01946992607116699,0.03775201217651367,"[0.020113407135009767, 0.019819520950317384, 0.020000768661499024, 0.02002943992614746, 0.020170751571655272, 0.019966976165771484, 0.01986867141723633, 0.019780607223510743, 0.019711999893188475, 0.01991372871398926, 0.02002943992614746, 0.019809280395507813, 0.01985024070739746, 0.01979801559448242, 0.01944063949584961, 0.0192225284576416, 0.01923276710510254, 0.019565568923950196, 0.01946316719055176, 0.01927168083190918, 0.019129344940185547, 0.01903308868408203, 0.018916351318359375, 0.019993600845336915, 0.020050943374633787, 0.019573759078979493, 0.01898700714111328, 0.019013631820678712, 0.019124223709106446, 0.0190515193939209, 0.018741247177124023, 0.018388992309570314, 0.018288639068603514, 0.018325504302978517, 0.01842585563659668, 0.018329599380493163, 0.01836031913757324, 0.019708927154541016, 0.019175424575805664, 0.018696191787719727, 0.01881804847717285, 0.019137535095214844, 0.018902015686035157, 0.018919456481933595, 0.018997215270996095, 0.018655231475830078, 0.018349056243896485, 0.018293760299682618, 0.018259967803955078, 0.018803712844848632, 0.018983936309814452, 0.018994176864624023, 0.018919424057006837, 0.01900032043457031, 0.018935808181762694, 0.018883583068847656, 0.018942975997924806, 0.018905088424682616, 0.018944000244140623, 0.018937856674194335, 0.018896896362304686, 0.019002368927001953, 0.038024192810058595, 0.018856960296630858, 0.018953216552734374, 0.01899212837219238, 0.018933759689331055, 
0.018946048736572265, 0.018918399810791017, 0.018514944076538087, 0.018379776000976563, 0.01847603225708008, 0.018405376434326173, 0.01839411163330078, 0.01844428825378418, 0.018465791702270508, 0.018507776260375978, 0.01843404769897461, 0.01840025520324707, 0.018305023193359374, 0.018584575653076172, 0.018944000244140623, 0.01904128074645996, 0.018902015686035157, 0.018913280487060546, 0.018897920608520507, 0.01900441551208496, 0.018922496795654296, 0.018920448303222655, 0.018958335876464845, 0.01924198341369629, 0.018942975997924806, 0.018929664611816405, 0.01883750343322754, 0.018920448303222655, 0.018919424057006837, 0.01887539291381836, 0.018379776000976563, 0.018397184371948243, 0.019178495407104493, 0.018910207748413087, 0.018939903259277344, 0.01884569549560547, 0.018928640365600585, 0.018945024490356444, 0.018886655807495118, 0.01885798454284668, 0.019137535095214844, 0.019356672286987304, 0.018903039932250978, 0.01864806365966797, 0.01850060844421387, 0.018481151580810547, 0.018380800247192384, 0.018301952362060548, 0.018478080749511717, 0.018493440628051756, 0.01842483139038086, 0.0183767032623291, 0.018386943817138672, 0.018413568496704103, 0.018397184371948243, 0.018351104736328124, 0.01844633674621582, 0.01836851119995117, 0.03775590515136719, 0.018353151321411132, 0.01845043182373047, 0.018341888427734376, 0.018726911544799805, 0.018123775482177733, 0.018150400161743165, 0.018366464614868162, 0.01886310386657715, 0.01907711982727051, 0.018974720001220705, 0.01900444793701172, 0.01890505599975586, 0.018916351318359375, 0.018966527938842775, 0.019076095581054688, 0.018861055374145508, 0.01881497573852539, 0.019140607833862306, 0.019045408248901368, 0.018921440124511718, 0.018963455200195312, 0.018877439498901367, 0.01926144027709961, 0.019165184020996092, 0.018880512237548826, 0.018881536483764647, 0.018955263137817382, 0.018960384368896483, 0.018936832427978514, 0.018729984283447267, 0.018342912673950194, 0.01841459274291992, 0.018329599380493163, 0.01839206314086914, 0.018340864181518556, 0.01840025520324707, 0.018534400939941405, 0.018990079879760743, 0.01900339126586914, 0.018896896362304686, 0.018963455200195312, 0.018910207748413087, 0.01887846374511719, 0.018913280487060546, 0.01883340835571289, 0.018333696365356447, 0.018703359603881836, 0.018351104736328124, 0.01845452880859375, 0.0182794246673584, 0.018318336486816408, 0.01838591957092285, 0.018298879623413086, 0.018270240783691407, 0.019394527435302733, 0.01943961524963379, 0.020576255798339844, 0.019310592651367187, 0.018993152618408202, 0.01903206443786621, 0.018950143814086915, 0.019017728805541992, 0.0377446403503418, 0.018357248306274415, 0.01839820861816406, 0.018373632431030275, 0.018353151321411132, 0.0184453125, 0.018336767196655272, 0.01846784019470215, 0.018374656677246092, 0.018431039810180665, 0.018341823577880858, 0.018308095932006836, 0.018556928634643553, 0.01840127944946289, 0.018554880142211915, 0.0184770565032959, 0.01844428825378418, 0.018405376434326173, 0.018333696365356447, 0.01840947151184082, 0.01842278480529785, 0.01840127944946289, 0.018413568496704103, 0.018343967437744142, 0.018638816833496094, 0.018471935272216796, 0.01844223976135254, 0.018472959518432617, 0.018349056243896485, 0.018336767196655272, 0.01837772750854492, 0.018337791442871093, 0.01840742492675781, 0.01838083267211914, 0.018362335205078125, 0.018448383331298827, 0.01839820861816406, 0.01841459274291992, 0.018397184371948243, 0.018324480056762696, 0.018363391876220703, 0.018415615081787108, 0.018408447265625, 
0.018379776000976563, 0.018749439239501953, 0.01843404769897461, 0.018365440368652345, 0.018324480056762696, 0.018372608184814454, 0.018328575134277342, 0.01844223976135254, 0.018338815689086914, 0.018361343383789062, 0.01836031913757324, 0.01841152000427246, 0.018314239501953124, 0.018345983505249023, 0.01840332794189453, 0.018328575134277342, 0.018373632431030275, 0.01841663932800293, 0.018287616729736327, 0.018552831649780274, 0.037754878997802735, 0.018325504302978517, 0.01840025520324707, 0.018291711807250977, 0.018345983505249023, 0.018326528549194337, 0.01842995262145996, 0.018415615081787108, 0.01841971206665039, 0.01842278480529785, 0.018359296798706053, 0.01837772750854492, 0.018334720611572267, 0.018315263748168945, 0.01840947151184082, 0.018386943817138672, 0.0183767032623291, 0.018520063400268554, 0.018358272552490236, 0.018396160125732423, 0.018333696365356447, 0.01839206314086914, 0.018307071685791015, 0.018312192916870116, 0.018371583938598633, 0.018345983505249023, 0.01841663932800293, 0.018457599639892578, 0.01835212707519531, 0.018354175567626953, 0.01828659248352051, 0.018344959259033202, 0.018364416122436524, 0.018355199813842774, 0.018298879623413086, 0.01835212707519531, 0.018412544250488282, 0.018366464614868162, 0.018382848739624022, 0.018255872726440428, 0.018357248306274415, 0.01836953544616699, 0.01902284812927246, 0.020344831466674804, 0.01921331214904785, 0.018961408615112304, 0.018908159255981445, 0.018890752792358398, 0.018924543380737305, 0.019082239151000976, 0.01884364891052246, 0.018367488861083983, 0.019126272201538085, 0.018917375564575196, 0.018936832427978514, 0.01902284812927246, 0.0184268798828125, 0.018364416122436524, 0.018391040802001952, 0.01841049575805664, 0.018320383071899413, 0.018339839935302735, 0.018349056243896485, 0.0375838737487793, 0.018310144424438478, 0.01836031913757324, 0.018404352188110353, 0.018507776260375978, 0.018515968322753908, 0.018348031997680665, 0.018380800247192384, 0.01826201629638672, 0.018463775634765624, 0.018397151947021486, 0.018396160125732423, 0.018347007751464844, 0.018324480056762696, 0.01841459274291992, 0.018471935272216796, 0.018512895584106445, 0.01836031913757324, 0.018316287994384766, 0.018356224060058594, 0.018326528549194337, 0.01836953544616699, 0.018387968063354493, 0.018312192916870116, 0.018323455810546875, 0.018404352188110353, 0.01864806365966797, 0.018514944076538087, 0.018299903869628906, 0.018319360733032225, 0.018310144424438478, 0.018258943557739257, 0.018485248565673826, 0.018364416122436524, 0.018373632431030275, 0.018423807144165038, 0.018341888427734376, 0.018431999206542968, 0.018876415252685547, 0.018487295150756835, 0.01843302345275879, 0.018374656677246092, 0.0185743350982666, 0.01837772750854492, 0.018381824493408205, 0.018363391876220703, 0.01838387107849121, 0.01839923286437988, 0.018378751754760742, 0.01836031913757324, 0.01839411163330078, 0.018412544250488282, 0.018408447265625, 0.018473983764648438, 0.018364416122436524, 0.01840947151184082, 0.018310144424438478, 0.01839206314086914, 0.01851087951660156, 0.018360288619995117, 0.01839206314086914, 0.01836031913757324, 0.01843097686767578, 0.037787647247314454, 0.018329599380493163, 0.01837772750854492, 0.018396160125732423, 0.018449407577514648, 0.01841152000427246, 0.018397184371948243, 0.018374656677246092, 0.018320383071899413, 0.018408447265625, 0.018351104736328124, 0.01837772750854492, 0.018423807144165038, 0.018333696365356447, 0.01864908790588379, 0.018455551147460936, 0.018397184371948243, 0.018354175567626953, 
0.018366464614868162, 0.018356224060058594, 0.018347007751464844, 0.01841152000427246, 0.019151872634887695, 0.019474431991577147, 0.019180543899536134, 0.018968576431274413, 0.01903923225402832, 0.018941951751708985, 0.018888704299926756, 0.0189040641784668, 0.018905088424682616, 0.018907136917114258, 0.019160064697265625, 0.01887027168273926, 0.018877439498901367, 0.01868185615539551, 0.018921472549438476, 0.01904025650024414, 0.018936832427978514, 0.018332672119140626, 0.018308095932006836, 0.01839411163330078, 0.018347007751464844, 0.018311168670654295, 0.0183767032623291, 0.01841459274291992, 0.018522111892700196, 0.018931711196899414, 0.019349504470825195, 0.01902284812927246, 0.018991104125976564, 0.018930688858032226, 0.018989055633544923, 0.019001344680786132, 0.018495487213134765, 0.018301952362060548, 0.01869004821777344, 0.01825484848022461, 0.01839206314086914, 0.01840230369567871, 0.01839820861816406, 0.018663423538208008, 0.019091455459594727, 0.038542335510253906, 0.018413568496704103, 0.018481151580810547, 0.018983936309814452, 0.01899519920349121, 0.018893823623657227, 0.018886655807495118, 0.01880575942993164, 0.018921472549438476, 0.018318336486816408, 0.018509855270385744, 0.018419679641723634, 0.018356224060058594, 0.01843404769897461, 0.018962432861328125, 0.01904435157775879, 0.01921843147277832, 0.018980863571166993, 0.01903923225402832, 0.018388992309570314, 0.01842790412902832, 0.01837772750854492, 0.018372608184814454, 0.018331647872924805, 0.01839411163330078, 0.018319360733032225, 0.018305023193359374, 0.018293760299682618, 0.018367488861083983, 0.01842483139038086, 0.018390016555786134, 0.018357280731201173, 0.018384864807128906, 0.01839411163330078, 0.018364416122436524, 0.018327552795410155, 0.01839308738708496, 0.018328575134277342, 0.01826918411254883, 0.0184268798828125, 0.018395135879516602, 0.018295808792114256, 0.018373632431030275, 0.018151424407958985, 0.018207744598388673, 0.01836031913757324, 0.018295808792114256, 0.018780160903930664, 0.018881536483764647, 0.018906112670898437, 0.018965503692626954, 0.018974752426147462, 0.018904031753540038, 0.018936832427978514, 0.018956287384033203, 0.018965503692626954, 0.01840332794189453, 0.018310144424438478, 0.018336767196655272, 0.018404352188110353, 0.018361343383789062, 0.01839923286437988, 0.018463743209838866, 0.038046718597412106, 0.01843507194519043, 0.018305023193359374, 0.018233343124389647, 0.018344959259033202, 0.018366464614868162, 0.018495487213134765, 0.018343967437744142, 0.018420703887939455, 0.01840230369567871, 0.018654207229614257, 0.020091903686523437, 0.019406848907470704, 0.01906073570251465, 0.0190515193939209, 0.018997247695922852, 0.018948095321655273, 0.019203071594238282, 0.01926655960083008, 0.01898700714111328, 0.018949119567871094, 0.01903104019165039, 0.01902079963684082, 0.018894847869873048, 0.018984960556030273, 0.018909183502197266, 0.018982912063598634, 0.019183616638183593, 0.019070976257324217, 0.01901875114440918, 0.018957311630249024, 0.018975744247436522, 0.018861055374145508, 0.018933759689331055, 0.01906073570251465, 0.018932735443115235, 0.018926591873168946, 0.018968576431274413, 0.01884671974182129, 0.018888704299926756, 0.018856960296630858, 0.018906112670898437, 0.018761728286743166, 0.019104768753051758, 0.019098623275756836, 0.01904844856262207, 0.018964479446411133, 0.018893823623657227, 0.018338815689086914, 0.018323455810546875, 0.01824563217163086, 0.018487295150756835, 0.018811904907226562, 0.01941200065612793, 0.01912931251525879, 
0.01884364891052246, 0.01884569549560547, 0.018997247695922852, 0.018654207229614257, 0.018766847610473633, 0.01901875114440918, 0.018925567626953126, 0.018929664611816405, 0.04048076629638672, 0.019142656326293944, 0.018959360122680666, 0.018917375564575196, 0.019141632080078123, 0.018944000244140623, 0.01884876823425293, 0.018808832168579103, 0.018886655807495118, 0.018934783935546876, 0.01899929618835449, 0.018940927505493164, 0.018977792739868164, 0.018918399810791017, 0.01899929618835449, 0.018940927505493164, 0.018861055374145508, 0.018836511611938476, 0.018780128479003906, 0.018320383071899413, 0.01839308738708496, 0.01840127944946289, 0.01827737617492676, 0.018310144424438478, 0.01827840042114258, 0.018487295150756835, 0.018775039672851563, 0.018501632690429686, 0.01841663932800293, 0.018305023193359374, 0.019021823883056642, 0.018941951751708985, 0.018931711196899414, 0.01887027168273926, 0.018334720611572267, 0.018359296798706053, 0.018281471252441405, 0.018289663314819335, 0.018388992309570314, 0.018295808792114256, 0.01823539161682129, 0.01827020835876465, 0.018332672119140626, 0.01830297660827637, 0.01837264060974121, 0.018341888427734376, 0.018376672744750976, 0.01881292724609375, 0.0188221435546875, 0.01887948799133301, 0.018420736312866212, 0.018380800247192384, 0.018301952362060548, 0.018333696365356447, 0.018735103607177735, 0.019014656066894533, 0.01880985641479492, 0.018928640365600585, 0.018937856674194335, 0.018965503692626954, 0.018817024230957033, 0.01887027168273926, 0.018896896362304686]",tokens/s,52.763225748056264,,,2,64,1,,, -4bit-awq-exllama-v2-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,tiiuae/falcon-180B,tiiuae/falcon-180B,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 304, in hf_raise_for_status - response.raise_for_status() - File ""/usr/local/lib/python3.10/dist-packages/requests/models.py"", line 1024, in raise_for_status - raise HTTPError(http_error_msg, response=self) -requests.exceptions.HTTPError: 403 Client Error: Forbidden for url: https://huggingface.co/tiiuae/falcon-180B/resolve/main/config.json - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1722, in _get_metadata_or_catch_error - metadata = get_hf_file_metadata(url=url, proxies=proxies, 
timeout=etag_timeout, headers=headers) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1645, in get_hf_file_metadata - r = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 372, in _request_wrapper - response = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 396, in _request_wrapper - hf_raise_for_status(response) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 367, in hf_raise_for_status - raise HfHubHTTPError(message, response=response) from e -huggingface_hub.utils._errors.HfHubHTTPError: (Request ID: Root=1-66949957-036bce287e56b0fd0a500625;86efad4a-6dcc-416b-bfa4-fa4ce6b119a6) - -403 Forbidden: Please enable access to public gated repositories in your fine-grained token settings to view this repository.. -Cannot access content at: https://huggingface.co/tiiuae/falcon-180B/resolve/main/config.json. -If you are trying to create or update content,make sure you have a token with the `write` role. - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1221, in hf_hub_download - return _hf_hub_download_to_cache_dir( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1325, in _hf_hub_download_to_cache_dir - _raise_on_head_call_error(head_call_error, force_download, local_files_only) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1826, in _raise_on_head_call_error - raise LocalEntryNotFoundError( -huggingface_hub.utils._errors.LocalEntryNotFoundError: An error happened while trying to locate the file on the Hub and we cannot find the requested files in the local cache. Please check your connection and try again or make sure your Internet connection is on. 
- -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 445, in cached_file - raise EnvironmentError( -OSError: We couldn't connect to 'https://huggingface.co' to load this file, couldn't find it in the cached files and it looks like tiiuae/falcon-180B is not the path to a directory containing a file named config.json. -Checkout your internet connection or see how to run the library in offline mode at 'https://huggingface.co/docs/transformers/installation#offline-mode'. 
- -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,2,64,1,,, -4bit-awq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,Deci/DeciLM-7B,Deci/DeciLM-7B,cuda,0,42,,,True,,,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 613, in resolve_trust_remote_code - answer = input( -EOFError: EOF when reading a line - -During handling of the above exception, another exception occurred: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run - report = scenario.run(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run - self.run_model_loading_tracking(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking - backend.load() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load - self.load_transformers_model() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 149, in load_transformers_model - self.create_no_weights_model() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 269, in create_no_weights_model - meta_model = self.automodel_loader.from_config(self.pretrained_config) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 419, in from_config - trust_remote_code = resolve_trust_remote_code( - File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 626, in resolve_trust_remote_code - raise ValueError( -ValueError: The repository for Deci/DeciLM-7B contains custom code which must be executed to correctly load the model. You can inspect the repository content at https://hf.co/Deci/DeciLM-7B. -Please pass the argument `trust_remote_code=True` to allow custom code to be run. 
- -",deci,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,2,64,1,,, -4bit-awq-exllama-v2-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,facebook/opt-2.7b,facebook/opt-2.7b,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - self.load_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3907, in from_pretrained - hf_quantizer.postprocess_model(model) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/base.py"", line 195, in postprocess_model - return self._process_model_after_weight_loading(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/quantizer_awq.py"", line 107, in _process_model_after_weight_loading - model = post_init_awq_exllama_modules(model, self.quantization_config.exllama_config) - File ""/usr/local/lib/python3.10/dist-packages/transformers/integrations/awq.py"", line 466, in post_init_awq_exllama_modules - model = exllamav2_post_init( - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllamav2.py"", line 198, in exllamav2_post_init - submodule.post_init(scratch_space=model.scratch_spaces[device]) - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllamav2.py"", line 81, in post_init - self.q_handle = exlv2_ext.make_q_matrix( -NameError: name 'exlv2_ext' is not defined - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,2,64,1,,, 
-4bit-awq-exllama-v2-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,huggyllama/llama-7b,huggyllama/llama-7b,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - self.load_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3907, in from_pretrained - hf_quantizer.postprocess_model(model) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/base.py"", line 195, in postprocess_model - return self._process_model_after_weight_loading(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/quantizer_awq.py"", line 107, in _process_model_after_weight_loading - model = post_init_awq_exllama_modules(model, self.quantization_config.exllama_config) - File ""/usr/local/lib/python3.10/dist-packages/transformers/integrations/awq.py"", line 466, in post_init_awq_exllama_modules - model = exllamav2_post_init( - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllamav2.py"", line 198, in exllamav2_post_init - submodule.post_init(scratch_space=model.scratch_spaces[device]) - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllamav2.py"", line 81, in post_init - self.q_handle = exlv2_ext.make_q_matrix( -NameError: name 'exlv2_ext' is not defined - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,2,64,1,,, 
-4bit-awq-exllama-v2-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,facebook/opt-13b,facebook/opt-13b,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - self.load_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3907, in from_pretrained - hf_quantizer.postprocess_model(model) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/base.py"", line 195, in postprocess_model - return self._process_model_after_weight_loading(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/quantizer_awq.py"", line 107, in _process_model_after_weight_loading - model = post_init_awq_exllama_modules(model, self.quantization_config.exllama_config) - File ""/usr/local/lib/python3.10/dist-packages/transformers/integrations/awq.py"", line 466, in post_init_awq_exllama_modules - model = exllamav2_post_init( - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllamav2.py"", line 198, in exllamav2_post_init - submodule.post_init(scratch_space=model.scratch_spaces[device]) - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllamav2.py"", line 81, in post_init - self.q_handle = exlv2_ext.make_q_matrix( -NameError: name 'exlv2_ext' is not defined - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,2,64,1,,, 
-4bit-awq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,EleutherAI/pythia-70m,EleutherAI/pythia-70m,cuda,0,42,,,True,,,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,gpt_neox,MB,886.386688,793.247744,0.0,163.577856,154.631168,s,1,7.29906884765625,7.29906884765625,0.0,7.29906884765625,7.29906884765625,7.29906884765625,7.29906884765625,[7.29906884765625],,kWh,5.7218968333232736e-06,3.0940641421793545e-06,8.157228748029155e-06,1.6973189723531785e-05,,MB,1478.197248,849.870848,0.0,201.326592,187.147776,s,30,0.19149910402297973,0.006383303467432658,0.00018696562006823646,0.006445616006851196,0.0065314945220947265,0.006662159943580627,0.006854664301872254,"[0.006720479965209961, 0.0064707517623901365, 0.006501791954040527, 0.006130271911621093, 0.006445024013519287, 0.006909471988677978, 0.006496384143829346, 0.006446208000183105, 0.006362912178039551, 0.006195424079895019, 0.0062121920585632324, 0.006136864185333252, 0.006310336112976074, 0.006215424060821533, 0.006173215866088867, 0.006245247840881348, 0.006485407829284668, 0.006119135856628418, 0.006477727890014649, 0.006524896144866944, 0.006458559989929199, 0.006517600059509277, 0.006223840236663818, 0.006135424137115478, 0.0061760001182556154, 0.006590879917144775, 0.0064830718040466305, 0.0064719681739807125, 0.006455967903137207, 0.006406623840332032]",tokens/s,40104.62628106295,kWh,7.754699888696303e-08,4.2491613327109235e-08,1.351228555994792e-07,2.551614678135514e-07,tokens/kWh,1003286280.6192247,MB,1515.823104,849.870848,0.0,201.326592,187.150336,s,30,9.829113952636717,0.3276371317545573,0.0060182675284913174,0.327994873046875,0.3339585632324219,0.3344779800415039,0.33734441528320314,"[0.33850714111328123, 0.329207763671875, 0.33205035400390626, 0.3224430847167969, 0.3339035339355469, 0.3335452575683594, 0.332814208984375, 0.331987548828125, 0.3256690673828125, 0.31929391479492186, 0.3187181396484375, 0.32407537841796874, 0.3222847900390625, 0.3195601806640625, 0.3233250732421875, 0.32213357543945315, 0.32259808349609376, 0.32428533935546877, 0.33318328857421875, 0.33303814697265627, 0.333653564453125, 0.3196043395996094, 0.3192364807128906, 0.32039938354492187, 0.326781982421875, 0.3326269226074219, 0.33445382690429687, 0.33449774169921875, 0.3336434326171875, 0.3315924072265625]",tokens/s,192.285897702203,kWh,3.94677338671472e-06,2.1626440981180255e-06,6.094909906200862e-06,1.2204327391033607e-05,tokens/kWh,5162103.406557698,,s,1890,9.817088093757635,0.005194226504633666,0.0001277540581487776,0.005242879867553711,0.00531466236114502,0.005352447986602784,0.005591152749061585,"[0.005585919857025146, 0.005352447986602784, 0.005362688064575195, 0.005379072189331055, 0.005710847854614258, 0.00552345609664917, 0.005697535991668701, 0.005567488193511963, 0.005586944103240967, 0.005755904197692871, 0.005697535991668701, 0.005585919857025146, 0.00535756778717041, 0.0052899842262268066, 0.005281792163848877, 0.005352447986602784, 0.005288959980010987, 0.005276671886444092, 0.005338111877441406, 0.0052971520423889164, 
0.005296127796173096, 0.005354496002197265, 0.005278719902038574, 0.005319680213928223, 0.00590342378616333, 0.005410783767700196, 0.005310431957244873, 0.005279776096343994, 0.0052919998168945315, 0.005323775768280029, 0.00531763219833374, 0.0052715520858764645, 0.005446656227111817, 0.0052899842262268066, 0.0052715840339660645, 0.005276639938354492, 0.005251071929931641, 0.005242879867553711, 0.005303296089172363, 0.005323775768280029, 0.005286911964416504, 0.005299200057983398, 0.005303296089172363, 0.005276671886444092, 0.005268479824066162, 0.005323775768280029, 0.005274623870849609, 0.005310463905334473, 0.005380095958709717, 0.00531660795211792, 0.00530947208404541, 0.005299168109893799, 0.005272575855255127, 0.0053350400924682614, 0.005322751998901368, 0.0053678078651428224, 0.005339136123657226, 0.00531763219833374, 0.005276800155639649, 0.00529804801940918, 0.005312511920928955, 0.005288959980010987, 0.005273600101470947, 0.005313536167144775, 0.005294079780578613, 0.0052674560546875, 0.005261312007904053, 0.005284863948822022, 0.005262400150299072, 0.005259200096130371, 0.005243904113769531, 0.005082111835479736, 0.00506982421875, 0.00505241584777832, 0.005071872234344482, 0.005075967788696289, 0.005061632156372071, 0.005054463863372802, 0.0050954241752624516, 0.005067776203155518, 0.005064703941345215, 0.0050514240264892575, 0.005138400077819824, 0.004988927841186524, 0.0050432000160217285, 0.005083168029785156, 0.005076960086822509, 0.0050503678321838375, 0.00506982421875, 0.005248000144958496, 0.005264383792877197, 0.00516812801361084, 0.005192704200744629, 0.005190656185150146, 0.005167103767395019, 0.005165056228637695, 0.005187583923339844, 0.005228544235229492, 0.005257215976715088, 0.0053002238273620605, 0.005272575855255127, 0.005272575855255127, 0.005315584182739258, 0.005260287761688232, 0.005248000144958496, 0.005313536167144775, 0.005299200057983398, 0.005275648117065429, 0.0052930560111999515, 0.005261312007904053, 0.005269504070281982, 0.005298175811767578, 0.005292031764984131, 0.005277696132659912, 0.005244927883148193, 0.0052899842262268066, 0.005245952129364013, 0.005295104026794434, 0.005302271842956543, 0.005274623870849609, 0.0052705278396606445, 0.005591040134429932, 0.005470208168029785, 0.005631999969482422, 0.005291007995605469, 0.0051968002319335935, 0.005326848030090332, 0.005245952129364013, 0.005280767917633057, 0.00521727991104126, 0.00522547197341919, 0.005292031764984131, 0.005285888195037842, 0.0052705278396606445, 0.0052971520423889164, 0.005254144191741943, 0.005272575855255127, 0.0053002238273620605, 0.005258240222930908, 0.005277696132659912, 0.005276703834533691, 0.005303264141082764, 0.005280767917633057, 0.005349376201629639, 0.005272575855255127, 0.005287936210632324, 0.005321728229522705, 0.005275648117065429, 0.005272575855255127, 0.005321728229522705, 0.0053012480735778805, 0.0052674560546875, 0.005326879978179932, 0.005287903785705567, 0.005275648117065429, 0.0053637118339538575, 0.005282815933227539, 0.005262335777282715, 0.00530944013595581, 0.005274655818939209, 0.0052817602157592776, 0.005310463905334473, 0.005287936210632324, 0.005268479824066162, 0.005262335777282715, 0.00531763219833374, 0.005279744148254394, 0.005246975898742676, 0.005272575855255127, 0.005237760066986084, 0.005246975898742676, 0.0052899842262268066, 0.005241856098175048, 0.005256192207336426, 0.005311488151550293, 0.0052715520858764645, 0.0053012480735778805, 0.005236800193786621, 0.005243840217590332, 0.005275648117065429, 0.005296127796173096, 
0.0052674560546875, 0.005246975898742676, 0.0053309440612792965, 0.0052930560111999515, 0.005085184097290039, 0.004988927841186524, 0.00501043176651001, 0.004988927841186524, 0.005089280128479004, 0.005059584140777588, 0.0050728960037231445, 0.005102591991424561, 0.005054463863372802, 0.005056511878967285, 0.005054463863372802, 0.005085184097290039, 0.005093376159667969, 0.0050503678321838375, 0.005037055969238281, 0.005086207866668701, 0.00505452823638916, 0.005043136119842529, 0.005061632156372071, 0.005115903854370117, 0.005040128231048584, 0.0050462718009948735, 0.005083136081695557, 0.005054463863372802, 0.005044223785400391, 0.005056511878967285, 0.0050841598510742185, 0.005083136081695557, 0.005056511878967285, 0.005045248031616211, 0.0050800638198852536, 0.005053440093994141, 0.005054463863372802, 0.005070847988128662, 0.005082111835479736, 0.005057536125183106, 0.005037055969238281, 0.005073919773101807, 0.005070847988128662, 0.005045248031616211, 0.004987904071807861, 0.005051392078399658, 0.00504531192779541, 0.0050533761978149416, 0.005061632156372071, 0.005091328144073487, 0.005056511878967285, 0.005055488109588623, 0.005048319816589355, 0.0050421757698059086, 0.004997119903564453, 0.004977663993835449, 0.005137407779693603, 0.005212192058563232, 0.005303264141082764, 0.005269504070281982, 0.0052930560111999515, 0.005303296089172363, 0.005273600101470947, 0.005326848030090332, 0.0052971520423889164, 0.005269504070281982, 0.0052899842262268066, 0.005260287761688232, 0.005280767917633057, 0.005356544017791748, 0.005362688064575195, 0.005295104026794434, 0.0052705278396606445, 0.005284863948822022, 0.005287936210632324, 0.005254144191741943, 0.00530841588973999, 0.005253119945526123, 0.005268479824066162, 0.005314559936523438, 0.0052633600234985355, 0.005245952129364013, 0.00531660795211792, 0.005283840179443359, 0.0052715520858764645, 0.0053012480735778805, 0.005276671886444092, 0.00526643180847168, 0.005269504070281982, 0.005305344104766845, 0.005276671886444092, 0.005180416107177735, 0.0052971520423889164, 0.005265408039093018, 0.005268479824066162, 0.005323775768280029, 0.0052715520858764645, 0.005291007995605469, 0.005321728229522705, 0.005278719902038574, 0.005356544017791748, 0.005453824043273926, 0.005593088150024414, 0.005294079780578613, 0.005337088108062744, 0.005258240222930908, 0.005279744148254394, 0.005350399971008301, 0.005286911964416504, 0.005315584182739258, 0.005332992076873779, 0.005315584182739258, 0.005276671886444092, 0.005339136123657226, 0.005268479824066162, 0.005260287761688232, 0.0052971520423889164, 0.005291007995605469, 0.005261312007904053, 0.005310463905334473, 0.005253119945526123, 0.005260287761688232, 0.0052930560111999515, 0.0052715520858764645, 0.005238783836364746, 0.005299200057983398, 0.005295104026794434, 0.005260287761688232, 0.005243904113769531, 0.005346303939819336, 0.005298175811767578, 0.005261312007904053, 0.005302271842956543, 0.005261312007904053, 0.005559296131134033, 0.005412864208221436, 0.005258240222930908, 0.005243904113769531, 0.005294079780578613, 0.005255167961120606, 0.005260287761688232, 0.005287936210632324, 0.005265408039093018, 0.0052633600234985355, 0.005287936210632324, 0.0052971520423889164, 0.005294079780578613, 0.005306367874145508, 0.005340159893035889, 0.005273600101470947, 0.005312511920928955, 0.005296127796173096, 0.0052633600234985355, 0.005257215976715088, 0.005329919815063477, 0.005276671886444092, 0.005268479824066162, 0.005306367874145508, 0.005291007995605469, 0.005281792163848877, 
0.0053821439743042, 0.005448703765869141, 0.005313536167144775, 0.005359615802764893, 0.005319680213928223, 0.0052715520858764645, 0.005304384231567383, 0.005285823822021484, 0.00526643180847168, 0.005325823783874512, 0.0052633600234985355, 0.0052715520858764645, 0.005323775768280029, 0.005288991928100586, 0.005272543907165527, 0.0053012480735778805, 0.005259263992309571, 0.005307392120361328, 0.005311488151550293, 0.005265408039093018, 0.005214240074157715, 0.00518345594406128, 0.00515993595123291, 0.005149695873260498, 0.0051660799980163576, 0.005253119945526123, 0.005268479824066162, 0.005280767917633057, 0.0053012480735778805, 0.005257215976715088, 0.005265408039093018, 0.005287936210632324, 0.0052715520858764645, 0.005287936210632324, 0.0052899842262268066, 0.005286911964416504, 0.005288959980010987, 0.005275648117065429, 0.005336063861846924, 0.005279744148254394, 0.005273600101470947, 0.005312511920928955, 0.005278719902038574, 0.005273600101470947, 0.005288959980010987, 0.005277696132659912, 0.0052715840339660645, 0.0052674241065979, 0.00531763219833374, 0.005268479824066162, 0.005262335777282715, 0.0052930560111999515, 0.005261312007904053, 0.005239808082580567, 0.00530944013595581, 0.005254144191741943, 0.0053002238273620605, 0.00531660795211792, 0.005294079780578613, 0.005276671886444092, 0.00530944013595581, 0.005276671886444092, 0.0052633600234985355, 0.00531660795211792, 0.0052705278396606445, 0.005273600101470947, 0.005326848030090332, 0.005272575855255127, 0.005264383792877197, 0.005284863948822022, 0.005257215976715088, 0.005203968048095703, 0.005255167961120606, 0.0053012480735778805, 0.0052930560111999515, 0.0053002238273620605, 0.005298175811767578, 0.005275648117065429, 0.0052930560111999515, 0.005340159893035889, 0.00531660795211792, 0.005314559936523438, 0.005331967830657959, 0.005326848030090332, 0.005295104026794434, 0.005303296089172363, 0.005260287761688232, 0.005193727970123291, 0.005216256141662597, 0.005154816150665284, 0.005173247814178467, 0.005184512138366699, 0.005169151782989502, 0.0052070398330688475, 0.005292031764984131, 0.005315584182739258, 0.0052633600234985355, 0.005284863948822022, 0.005326848030090332, 0.005272575855255127, 0.0052674560546875, 0.005278719902038574, 0.005279744148254394, 0.005281792163848877, 0.005259263992309571, 0.005289023876190186, 0.005263296127319336, 0.005245952129364013, 0.0052715520858764645, 0.005255167961120606, 0.0052674560546875, 0.005276671886444092, 0.005272575855255127, 0.005274623870849609, 0.005295104026794434, 0.0052633600234985355, 0.005287936210632324, 0.0051968002319335935, 0.005244927883148193, 0.005269504070281982, 0.005256192207336426, 0.005295104026794434, 0.005260287761688232, 0.005246975898742676, 0.0052971520423889164, 0.005254144191741943, 0.0052705278396606445, 0.005291007995605469, 0.005276671886444092, 0.005274623870849609, 0.005296127796173096, 0.005282815933227539, 0.005281792163848877, 0.00535859203338623, 0.005264383792877197, 0.005237760066986084, 0.005274623870849609, 0.005272575855255127, 0.005262335777282715, 0.005246975898742676, 0.005279744148254394, 0.005246975898742676, 0.005252096176147461, 0.005299200057983398, 0.005317696094512939, 0.005267392158508301, 0.005294079780578613, 0.005248000144958496, 0.005258240222930908, 0.0052899842262268066, 0.005268479824066162, 0.005310463905334473, 0.005310463905334473, 0.005262432098388672, 0.005243807792663574, 0.005212160110473632, 0.005158912181854248, 0.005169151782989502, 0.005172224044799805, 0.005178368091583252, 0.005153791904449463, 
0.005234687805175781, 0.00531660795211792, 0.005254144191741943, 0.005208064079284668, 0.0055920639038085935, 0.005702655792236328, 0.005429247856140137, 0.005294079780578613, 0.005322751998901368, 0.005279776096343994, 0.0052674241065979, 0.005296127796173096, 0.00530841588973999, 0.005261312007904053, 0.00531763219833374, 0.005256192207336426, 0.005256192207336426, 0.0052971520423889164, 0.005254144191741943, 0.005275648117065429, 0.0052899842262268066, 0.0052715520858764645, 0.005250048160552978, 0.005294079780578613, 0.005244927883148193, 0.005286911964416504, 0.005570559978485107, 0.005123072147369385, 0.005067776203155518, 0.005063680171966553, 0.005096447944641113, 0.005085184097290039, 0.005088255882263183, 0.0050841598510742185, 0.005063680171966553, 0.005057536125183106, 0.005056511878967285, 0.0050800638198852536, 0.005064703941345215, 0.005056511878967285, 0.005058559894561767, 0.005112832069396973, 0.0050841598510742185, 0.005054463863372802, 0.005063680171966553, 0.0051066880226135255, 0.005066751956939697, 0.004988927841186524, 0.005014527797698975, 0.004994048118591309, 0.004988927841186524, 0.004925439834594727, 0.004983808040618896, 0.005037055969238281, 0.005038080215454102, 0.005066751956939697, 0.005087232112884522, 0.005075967788696289, 0.005048319816589355, 0.005090303897857666, 0.005089280128479004, 0.005055488109588623, 0.0050462718009948735, 0.005039103984832764, 0.005094431877136231, 0.005149695873260498, 0.005083136081695557, 0.00506982421875, 0.005038080215454102, 0.005091328144073487, 0.005066751956939697, 0.005061632156372071, 0.0050657281875610355, 0.005054463863372802, 0.005085184097290039, 0.005053440093994141, 0.005090303897857666, 0.005047296047210693, 0.005053440093994141, 0.0050503678321838375, 0.005096447944641113, 0.005037055969238281, 0.005044223785400391, 0.005039103984832764, 0.0051138558387756345, 0.005125120162963868, 0.005015552043914795, 0.004994048118591309, 0.005025792121887207, 0.004983808040618896, 0.0050206718444824215, 0.00506982421875, 0.005071872234344482, 0.005051392078399658, 0.005055488109588623, 0.005079040050506592, 0.005090303897857666, 0.005063680171966553, 0.005138432025909424, 0.005092383861541748, 0.005020639896392822, 0.005104640007019043, 0.0050135040283203125, 0.00501043176651001, 0.0049909758567810054, 0.005000192165374756, 0.004985856056213379, 0.005037055969238281, 0.004994048118591309, 0.005064703941345215, 0.005075967788696289, 0.005107711791992187, 0.00506060791015625, 0.005062655925750732, 0.0051036162376403805, 0.005075967788696289, 0.005051392078399658, 0.005059584140777588, 0.005123072147369385, 0.005062655925750732, 0.005062655925750732, 0.005059616088867187, 0.0051056318283081055, 0.005067776203155518, 0.005059584140777588, 0.005089280128479004, 0.005067776203155518, 0.005120031833648682, 0.005102591991424561, 0.005062655925750732, 0.005064703941345215, 0.0050462718009948735, 0.005025792121887207, 0.005051392078399658, 0.0050800638198852536, 0.0050462718009948735, 0.005049344062805176, 0.005051392078399658, 0.005074944019317627, 0.005082111835479736, 0.005057536125183106, 0.005047296047210693, 0.00506982421875, 0.005066751956939697, 0.005051392078399658, 0.005066751956939697, 0.005064703941345215, 0.00501145601272583, 0.004981760025024414, 0.005070847988128662, 0.00510975980758667, 0.005037055969238281, 0.005055488109588623, 0.005093376159667969, 0.005045248031616211, 0.005057536125183106, 0.00506060791015625, 0.005099520206451416, 0.0050432000160217285, 0.00505241584777832, 0.0050432000160217285, 
0.00511078405380249, 0.0050421757698059086, 0.005033984184265137, 0.005070847988128662, 0.0050657281875610355, 0.0050503678321838375, 0.0050657281875610355, 0.005085184097290039, 0.0050462718009948735, 0.0050432000160217285, 0.005038080215454102, 0.005116928100585938, 0.004999167919158935, 0.004986879825592041, 0.00497049617767334, 0.005018623828887939, 0.004976640224456787, 0.004997119903564453, 0.005024767875671386, 0.005071872234344482, 0.005033984184265137, 0.005044223785400391, 0.005054463863372802, 0.005093376159667969, 0.005058559894561767, 0.005059584140777588, 0.005082111835479736, 0.005074944019317627, 0.005044223785400391, 0.005051392078399658, 0.005037055969238281, 0.005047296047210693, 0.005132287979125977, 0.005025792121887207, 0.005008384227752686, 0.005035071849822998, 0.004965312004089355, 0.005026815891265869, 0.005066751956939697, 0.005126143932342529, 0.005059584140777588, 0.0050728960037231445, 0.005435391902923584, 0.005417984008789062, 0.005224448204040527, 0.005079040050506592, 0.00511897611618042, 0.005054463863372802, 0.005049344062805176, 0.0050503678321838375, 0.0050841598510742185, 0.005069888114929199, 0.005055424213409424, 0.0050954241752624516, 0.00511078405380249, 0.005049344062805176, 0.00506060791015625, 0.005092351913452148, 0.005098495960235596, 0.005082111835479736, 0.005063680171966553, 0.005146624088287354, 0.005040128231048584, 0.005058559894561767, 0.005079040050506592, 0.00510975980758667, 0.005133344173431397, 0.0050616002082824706, 0.005096447944641113, 0.005061632156372071, 0.005067776203155518, 0.005058559894561767, 0.005135359764099121, 0.005194752216339111, 0.005624832153320313, 0.005550079822540284, 0.005279744148254394, 0.005627903938293457, 0.005479423999786377, 0.005246975898742676, 0.005282815933227539, 0.0052367358207702636, 0.005216256141662597, 0.005108736038208008, 0.00505241584777832, 0.005139455795288086, 0.005216256141662597, 0.005121024131774903, 0.005079040050506592, 0.005055488109588623, 0.005054463863372802, 0.005096447944641113, 0.005073984146118164, 0.0050657281875610355, 0.005059584140777588, 0.005024767875671386, 0.005257215976715088, 0.005212160110473632, 0.005183487892150879, 0.005202943801879883, 0.005189631938934326, 0.005187583923339844, 0.005183487892150879, 0.005219327926635742, 0.005150720119476319, 0.00506879997253418, 0.005202943801879883, 0.00522547197341919, 0.005192704200744629, 0.005185535907745361, 0.005195775985717774, 0.005173247814178467, 0.005187583923339844, 0.005265408039093018, 0.00511897611618042, 0.0051066880226135255, 0.005107711791992187, 0.005107711791992187, 0.005088255882263183, 0.005061632156372071, 0.005097472190856934, 0.005059584140777588, 0.005074944019317627, 0.005062655925750732, 0.005134335994720459, 0.005063680171966553, 0.0050769920349121095, 0.005075967788696289, 0.005079040050506592, 0.005094399929046631, 0.005074944019317627, 0.005084256172180176, 0.0049939842224121095, 0.005084127902984619, 0.005063680171966553, 0.005128191947937012, 0.0050657281875610355, 0.0050769920349121095, 0.0050769920349121095, 0.005089280128479004, 0.005074944019317627, 0.005064703941345215, 0.005088319778442383, 0.005055424213409424, 0.005066751956939697, 0.005045248031616211, 0.005107711791992187, 0.005054463863372802, 0.005055488109588623, 0.005058559894561767, 0.005081088066101074, 0.005090303897857666, 0.005066751956939697, 0.005078015804290771, 0.00506879997253418, 0.00506060791015625, 0.0050646719932556155, 0.005061632156372071, 0.005031936168670655, 0.005067776203155518, 0.0050432000160217285, 
0.005073919773101807, 0.0050728960037231445, 0.005049344062805176, 0.005047296047210693, 0.005039103984832764, 0.005063680171966553, 0.005078015804290771, 0.005056511878967285, 0.005047296047210693, 0.005097472190856934, 0.0050432000160217285, 0.005081088066101074, 0.005053440093994141, 0.005074944019317627, 0.0050462718009948735, 0.005059584140777588, 0.005081088066101074, 0.005102591991424561, 0.00506060791015625, 0.004988927841186524, 0.00509446382522583, 0.00507692813873291, 0.00511897611618042, 0.005070847988128662, 0.00511897611618042, 0.005087232112884522, 0.005059584140777588, 0.005008384227752686, 0.005091328144073487, 0.005064703941345215, 0.005044223785400391, 0.005094399929046631, 0.005066751956939697, 0.005087232112884522, 0.005054463863372802, 0.005094431877136231, 0.0050953922271728515, 0.005070847988128662, 0.005062655925750732, 0.005117951869964599, 0.0050462718009948735, 0.005048319816589355, 0.00506879997253418, 0.005059584140777588, 0.005093376159667969, 0.0050432000160217285, 0.005079040050506592, 0.005064703941345215, 0.005055488109588623, 0.005056511878967285, 0.005130239963531494, 0.005064767837524414, 0.0050646400451660155, 0.0050728960037231445, 0.0050841598510742185, 0.005037055969238281, 0.0050094079971313476, 0.00499507188796997, 0.005048319816589355, 0.0050841598510742185, 0.005092351913452148, 0.005049344062805176, 0.0050657281875610355, 0.005095488071441651, 0.005052351951599121, 0.0050954241752624516, 0.005067872047424316, 0.005003168106079102, 0.004999167919158935, 0.005058591842651367, 0.0052592320442199705, 0.005096447944641113, 0.005064703941345215, 0.005071872234344482, 0.005102591991424561, 0.005178368091583252, 0.005055488109588623, 0.005037055969238281, 0.00501964807510376, 0.005420032024383545, 0.005452799797058106, 0.0052715520858764645, 0.005112832069396973, 0.005104640007019043, 0.005141503810882568, 0.005093376159667969, 0.005062655925750732, 0.005102591991424561, 0.005111807823181152, 0.005107711791992187, 0.0050728960037231445, 0.0051036162376403805, 0.005070847988128662, 0.005116960048675537, 0.005084127902984619, 0.005094399929046631, 0.005064703941345215, 0.005071872234344482, 0.005107776165008545, 0.005095359802246094, 0.00505350399017334, 0.005086143970489502, 0.005087232112884522, 0.0050800638198852536, 0.005101568222045898, 0.005307392120361328, 0.005153791904449463, 0.005085184097290039, 0.005071904182434082, 0.005149663925170899, 0.005264383792877197, 0.0052295680046081545, 0.0053012480735778805, 0.005259263992309571, 0.005184512138366699, 0.005224448204040527, 0.005054463863372802, 0.005142528057098389, 0.005220352172851562, 0.00522649621963501, 0.005181439876556396, 0.005081088066101074, 0.005062655925750732, 0.005073919773101807, 0.005114880084991455, 0.00506982421875, 0.005067776203155518, 0.0050503678321838375, 0.005089280128479004, 0.0050954241752624516, 0.005067776203155518, 0.005054463863372802, 0.005082111835479736, 0.005067776203155518, 0.005055488109588623, 0.005114880084991455, 0.005063680171966553, 0.00505241584777832, 0.005033984184265137, 0.005078015804290771, 0.005085184097290039, 0.005044223785400391, 0.005061632156372071, 0.005088255882263183, 0.005064703941345215, 0.005066751956939697, 0.005075967788696289, 0.005225503921508789, 0.005129183769226074, 0.005116928100585938, 0.005157887935638428, 0.005137472152709961, 0.005199808120727539, 0.005116928100585938, 0.005097472190856934, 0.005083136081695557, 0.005054463863372802, 0.005119008064270019, 0.0050616002082824706, 0.0050503678321838375, 0.005062655925750732, 
0.0051066880226135255, 0.005089280128479004, 0.005059584140777588, 0.004983808040618896, 0.005083136081695557, 0.0050657281875610355, 0.005054463863372802, 0.0051138558387756345, 0.0050728960037231445, 0.005067776203155518, 0.005071936130523681, 0.005101503849029541, 0.005260287761688232, 0.005269504070281982, 0.005327871799468994, 0.005286911964416504, 0.005163008213043213, 0.005204991817474365, 0.005188608169555664, 0.00516096019744873, 0.00517632007598877, 0.005175295829772949, 0.005178368091583252, 0.005260287761688232, 0.0052674560546875, 0.005291007995605469, 0.0052633600234985355, 0.005214208126068115, 0.005298175811767578, 0.005260287761688232, 0.005250048160552978, 0.005277696132659912, 0.005257215976715088, 0.005252096176147461, 0.005272575855255127, 0.005288959980010987, 0.005253119945526123, 0.005241856098175048, 0.005246975898742676, 0.005053440093994141, 0.005054463863372802, 0.0050657281875610355, 0.005055488109588623, 0.005045248031616211, 0.005054463863372802, 0.005085184097290039, 0.0050432000160217285, 0.005051392078399658, 0.005055488109588623, 0.005070847988128662, 0.005055488109588623, 0.004984831809997559, 0.005047296047210693, 0.005134335994720459, 0.005071872234344482, 0.0050462718009948735, 0.0050728960037231445, 0.005075967788696289, 0.00506060791015625, 0.00506060791015625, 0.005088255882263183, 0.005061632156372071, 0.0050503678321838375, 0.005064703941345215, 0.005083136081695557, 0.005081088066101074, 0.005047296047210693, 0.005059584140777588, 0.005088255882263183, 0.00506060791015625, 0.0050462718009948735, 0.005081088066101074, 0.005074944019317627, 0.005054463863372802, 0.005054463863372802, 0.0051476478576660155, 0.005057536125183106, 0.005070847988128662, 0.005054463863372802, 0.005085184097290039, 0.0051036162376403805, 0.005051392078399658, 0.005056511878967285, 0.005079040050506592, 0.005033984184265137, 0.005041152000427246, 0.005045248031616211, 0.005051392078399658, 0.005082111835479736, 0.005055488109588623, 0.005044288158416748, 0.005062655925750732, 0.005104576110839844, 0.005061632156372071, 0.0050432000160217285, 0.005079040050506592, 0.00506879997253418, 0.005067776203155518, 0.005031936168670655, 0.0050206718444824215, 0.005061632156372071, 0.005058559894561767, 0.005055488109588623, 0.005101600170135498, 0.005052383899688721, 0.00505241584777832, 0.0050432000160217285, 0.005081088066101074, 0.005058559894561767, 0.005059584140777588, 0.005082111835479736, 0.005051392078399658, 0.005087232112884522, 0.005032959938049316, 0.005075967788696289, 0.005031936168670655, 0.0053944320678710935, 0.005433343887329102, 0.00522649621963501, 0.005082111835479736, 0.005081088066101074, 0.005127168178558349, 0.005067776203155518, 0.005056511878967285, 0.005056511878967285, 0.0050841598510742185, 0.00505241584777832, 0.005079040050506592, 0.0050728960037231445, 0.005163008213043213, 0.00522547197341919, 0.005284927845001221, 0.0052848000526428225, 0.005257215976715088, 0.005219327926635742, 0.005312511920928955, 0.005246975898742676, 0.005276671886444092, 0.0052930560111999515, 0.005276671886444092, 0.005288959980010987, 0.005322751998901368, 0.0052674560546875, 0.005259263992309571, 0.005306431770324707, 0.0052510080337524415, 0.005259263992309571, 0.005227519989013672, 0.0052336640357971195, 0.005272575855255127, 0.0052674560546875, 0.005314559936523438, 0.005281792163848877, 0.0052715520858764645, 0.005292031764984131, 0.005284863948822022, 0.005268479824066162, 0.005262335777282715, 0.005310463905334473, 0.005285888195037842, 0.005268479824066162, 
0.005291007995605469, 0.005264383792877197, 0.005283840179443359, 0.005319680213928223, 0.005251071929931641, 0.005260287761688232, 0.0052930560111999515, 0.0052705278396606445, 0.005259263992309571, 0.005305344104766845, 0.005274623870849609, 0.005272575855255127, 0.0053043198585510255, 0.005273600101470947, 0.005288959980010987, 0.005350399971008301, 0.005283840179443359, 0.005250048160552978, 0.005184512138366699, 0.005198847770690918, 0.005275648117065429, 0.0052899842262268066, 0.005294079780578613, 0.005283840179443359, 0.005307392120361328, 0.005319680213928223, 0.005284863948822022, 0.005261312007904053, 0.005314559936523438, 0.005248000144958496, 0.005265408039093018, 0.005313536167144775, 0.005246975898742676, 0.0052008957862854, 0.005303296089172363, 0.005249023914337158, 0.005331967830657959, 0.005295104026794434, 0.005274623870849609, 0.0052674560546875, 0.005281792163848877, 0.005291007995605469, 0.005299200057983398, 0.005310463905334473, 0.005298175811767578, 0.005284927845001221, 0.005276607990264893, 0.005329919815063477, 0.005295104026794434, 0.005298175811767578, 0.005325823783874512, 0.0052899842262268066, 0.005283840179443359, 0.005259263992309571, 0.005280767917633057, 0.005250048160552978, 0.005241856098175048, 0.005286911964416504, 0.005272575855255127, 0.005255167961120606, 0.0052899842262268066, 0.005246975898742676, 0.005234687805175781, 0.005292031764984131, 0.005250048160552978, 0.005247007846832275, 0.00526639986038208, 0.005242879867553711, 0.005242879867553711, 0.005288959980010987, 0.005283840179443359, 0.005274623870849609, 0.005306367874145508, 0.0053678078651428224, 0.0052715520858764645, 0.005312511920928955, 0.005419007778167725, 0.005344255924224854, 0.005406847953796387, 0.0053359360694885255, 0.005319680213928223, 0.0053350400924682614, 0.005336063861846924, 0.005283840179443359, 0.005191679954528809, 0.005214208126068115, 0.005152768135070801, 0.0052674560546875, 0.00531660795211792, 0.00522547197341919, 0.005171199798583984, 0.00521830415725708, 0.0052930560111999515, 0.005276671886444092, 0.005311488151550293, 0.0052633600234985355, 0.005265408039093018, 0.0052674560546875, 0.005275648117065429, 0.005268479824066162, 0.005277696132659912, 0.005295167922973633, 0.0052551040649414064, 0.005282815933227539, 0.005285888195037842, 0.005286911964416504, 0.005288959980010987, 0.005409791946411133, 0.0053002238273620605, 0.005248000144958496, 0.005305344104766845, 0.005241856098175048, 0.005254144191741943, 0.005302303791046142, 0.005286911964416504, 0.005261312007904053, 0.0052674560546875, 0.005296127796173096, 0.005257215976715088, 0.005265439987182617, 0.005279712200164795, 0.005241856098175048, 0.005249023914337158, 0.005294079780578613, 0.005306367874145508, 0.0052008957862854, 0.005237760066986084, 0.005272672176361084, 0.005265312194824219, 0.005259263992309571, 0.005307392120361328, 0.005258240222930908, 0.005254144191741943, 0.00537395191192627, 0.005284863948822022, 0.0052899842262268066, 0.0053350400924682614, 0.0052633600234985355, 0.005274623870849609, 0.005296127796173096, 0.005262335777282715, 0.005258240222930908, 0.0052899842262268066, 0.005305344104766845, 0.005291007995605469, 0.005340159893035889, 0.005269504070281982, 0.0055552000999450684, 0.00567193603515625, 0.005458975791931152, 0.005309408187866211, 0.005329919815063477, 0.005292031764984131, 0.005276671886444092, 0.0052971520423889164, 0.005257215976715088, 0.005253119945526123, 0.005352447986602784, 0.005231616020202637, 0.0052408318519592285, 0.005297183990478516, 
0.0052592320442199705, 0.005251071929931641, 0.005295104026794434, 0.0052705278396606445, 0.005274623870849609, 0.0053043198585510255, 0.0052899842262268066, 0.005261312007904053, 0.005281792163848877, 0.005268479824066162, 0.005257215976715088, 0.005227519989013672, 0.005234687805175781, 0.005241856098175048, 0.0052336640357971195, 0.005278719902038574, 0.0053043198585510255, 0.005285888195037842, 0.005280767917633057, 0.005344255924224854, 0.005264383792877197, 0.005131264209747314, 0.005091328144073487, 0.0050503678321838375, 0.0050462718009948735, 0.005055488109588623, 0.005085184097290039, 0.005054463863372802, 0.005051392078399658, 0.005064703941345215, 0.005079040050506592, 0.005064703941345215, 0.005047296047210693, 0.005079040050506592, 0.005057536125183106, 0.005053440093994141, 0.005058559894561767, 0.005079040050506592, 0.004987904071807861, 0.004993023872375488, 0.005024767875671386, 0.005181439876556396, 0.005058559894561767, 0.0050503678321838375, 0.0049909758567810054, 0.005048319816589355, 0.004993023872375488, 0.0050022401809692385, 0.004975615978240967, 0.0050135040283203125, 0.005018623828887939, 0.005053440093994141, 0.0050769920349121095, 0.005091328144073487, 0.005048319816589355, 0.0050503678321838375, 0.005088255882263183, 0.005018623828887939, 0.00501145601272583, 0.004976704120635987, 0.00498579216003418, 0.005056511878967285, 0.00505244779586792, 0.005040095806121826, 0.0050841598510742185, 0.005064703941345215, 0.005078015804290771, 0.005056511878967285, 0.005099520206451416, 0.00506879997253418, 0.0050769920349121095, 0.00506879997253418, 0.0050657281875610355, 0.0050503678321838375, 0.004994048118591309, 0.004997119903564453, 0.005001215934753418, 0.004973567962646484, 0.004988927841186524, 0.005059584140777588, 0.005061632156372071, 0.005067776203155518, 0.0050728960037231445, 0.0050462718009948735, 0.0050728960037231445, 0.005062655925750732, 0.005057536125183106, 0.0050432000160217285, 0.005090303897857666, 0.005033984184265137, 0.0050432000160217285, 0.0050432000160217285, 0.005073919773101807, 0.005037055969238281, 0.005040128231048584, 0.0050228161811828615, 0.00507689619064331, 0.005035007953643799, 0.005039103984832764, 0.005040128231048584, 0.0050841598510742185, 0.00506166410446167, 0.005037024021148682, 0.0050769920349121095, 0.00506060791015625, 0.0050360321998596195, 0.00505241584777832, 0.005053440093994141, 0.0049797120094299315, 0.005039135932922364, 0.005044191837310791, 0.0050780482292175294, 0.005045216083526611, 0.005047296047210693, 0.005053440093994141, 0.005094399929046631, 0.005053440093994141, 0.005085184097290039, 0.005054463863372802, 0.005100543975830078, 0.005057536125183106, 0.005048319816589355, 0.005079040050506592, 0.0050432000160217285, 0.005068863868713379, 0.0051281280517578125, 0.005093376159667969, 0.005070847988128662, 0.00506060791015625, 0.005049344062805176, 0.005115903854370117, 0.005063680171966553, 0.005049344062805176, 0.00506879997253418, 0.005098495960235596, 0.00506060791015625, 0.00506879997253418, 0.005093376159667969, 0.005074944019317627, 0.005062655925750732, 0.0050503678321838375, 0.005092351913452148, 0.005063680171966553, 0.005063680171966553, 0.00505241584777832, 0.005054463863372802, 0.00506879997253418, 0.00506060791015625, 0.005057536125183106, 0.005082111835479736, 0.0050769920349121095, 0.00505241584777832, 0.005055488109588623, 0.005054463863372802, 0.0051333122253417966, 0.005056511878967285, 0.004993023872375488, 0.005032959938049316, 0.00505241584777832, 0.005054463863372802, 
0.005067776203155518, 0.005098495960235596, 0.005058559894561767, 0.005058559894561767, 0.005056511878967285, 0.0050841598510742185, 0.005074944019317627, 0.005049344062805176, 0.0050503678321838375, 0.005091360092163086, 0.00506774377822876, 0.0050462718009948735, 0.005102591991424561, 0.005073919773101807, 0.005049344062805176, 0.005086207866668701, 0.005105663776397705, 0.005062655925750732, 0.005054463863372802, 0.005062655925750732, 0.0051476478576660155, 0.005062655925750732, 0.005054463863372802, 0.005058559894561767, 0.005088255882263183, 0.005333024024963379, 0.005401567935943603, 0.005276671886444092, 0.005096447944641113, 0.005074944019317627, 0.005100543975830078, 0.005075967788696289, 0.005054463863372802, 0.005061632156372071, 0.005091328144073487, 0.00505241584777832, 0.005097472190856934, 0.005047296047210693, 0.005074944019317627, 0.0050432000160217285, 0.0050462718009948735, 0.005031936168670655, 0.005078015804290771, 0.0050432000160217285, 0.005047296047210693, 0.004998144149780274, 0.005053440093994141, 0.005067776203155518, 0.005044223785400391, 0.005059584140777588, 0.0050462718009948735, 0.00506879997253418, 0.005064703941345215, 0.0050432000160217285, 0.005061632156372071, 0.005071872234344482, 0.005057536125183106, 0.005054463863372802, 0.0050852479934692385, 0.005035967826843262, 0.005039103984832764, 0.005045248031616211, 0.005082111835479736, 0.0050360321998596195, 0.005057600021362305, 0.005051328182220459, 0.005087232112884522, 0.00506982421875, 0.005047296047210693, 0.005082111835479736, 0.00505241584777832, 0.0050360321998596195, 0.005101568222045898, 0.0051363840103149415, 0.005484543800354004, 0.00532480001449585, 0.005315584182739258, 0.0052899842262268066, 0.005283840179443359, 0.005277696132659912, 0.005269504070281982, 0.005259263992309571, 0.005310463905334473, 0.005275648117065429, 0.005264383792877197, 0.005279744148254394, 0.005252096176147461, 0.005279744148254394, 0.005250048160552978, 0.005338111877441406, 0.005254144191741943, 0.005264383792877197, 0.005284863948822022, 0.005182464122772217, 0.005222400188446045, 0.005272575855255127, 0.005260287761688232, 0.005260320186614991, 0.0052817602157592776, 0.005279744148254394, 0.005275648117065429, 0.0053043198585510255, 0.005287936210632324, 0.005283872127532959, 0.005294047832489014, 0.005296127796173096, 0.005281792163848877, 0.005276703834533691, 0.005440512180328369, 0.005603328227996827, 0.005484543800354004, 0.005437439918518067, 0.0053043198585510255, 0.005283840179443359, 0.005344287872314453, 0.005265376091003418, 0.005243904113769531, 0.00530134391784668, 0.0052540478706359865, 0.005245952129364013, 0.005286911964416504, 0.005254144191741943, 0.005255167961120606, 0.005283840179443359, 0.005244927883148193, 0.005244991779327393, 0.0053779840469360355, 0.005480447769165039, 0.0051968002319335935, 0.005314559936523438, 0.005281792163848877, 0.005284863948822022, 0.005287936210632324, 0.005250048160552978, 0.005246975898742676, 0.0052674560546875, 0.005294079780578613, 0.005275648117065429, 0.005269567966461181, 0.005231552124023438, 0.005281792163848877, 0.005295104026794434, 0.005325823783874512, 0.0052633600234985355, 0.005250048160552978, 0.005315584182739258, 0.005269504070281982, 0.005255167961120606, 0.005329919815063477, 0.005242879867553711, 0.005259263992309571, 0.005295104026794434, 0.005245952129364013, 0.005251071929931641, 0.005265408039093018, 0.005164031982421875, 0.00515174388885498, 0.005144576072692871, 0.00517632007598877, 0.0051476478576660155, 0.005164031982421875, 
0.005157887935638428, 0.005154816150665284, 0.005155839920043945, 0.005248000144958496, 0.005294079780578613, 0.005251071929931641, 0.005237760066986084, 0.005278719902038574, 0.005252096176147461, 0.005262335777282715, 0.005259200096130371, 0.0053012480735778805, 0.005328896045684814, 0.005256192207336426, 0.005280767917633057, 0.005265408039093018, 0.005256192207336426, 0.005292031764984131, 0.0052633600234985355, 0.005273632049560547, 0.005303264141082764, 0.005451776027679443, 0.0058716158866882326, 0.005327871799468994, 0.005225567817687988, 0.005206943988800049, 0.005287936210632324, 0.0052633600234985355, 0.005273600101470947, 0.005294079780578613, 0.005251071929931641, 0.00525216007232666, 0.005308351993560791, 0.005279744148254394, 0.005268479824066162, 0.005279744148254394, 0.005269504070281982, 0.005245952129364013, 0.005241856098175048, 0.005285920143127442, 0.005274591922760009, 0.005278719902038574, 0.00531660795211792, 0.0053002238273620605, 0.005274623870849609, 0.005295104026794434, 0.0052930560111999515, 0.005252096176147461, 0.00531763219833374, 0.005277696132659912, 0.005281792163848877, 0.005307392120361328, 0.005273600101470947, 0.005307392120361328, 0.005313536167144775, 0.005265408039093018, 0.00526643180847168, 0.005552127838134766, 0.005489664077758789, 0.005596159934997558, 0.005431295871734619, 0.00530025577545166, 0.005288928031921387, 0.005256192207336426, 0.005190656185150146, 0.0051660799980163576, 0.005201920032501221, 0.0052971520423889164, 0.005295104026794434, 0.00531763219833374, 0.0052899842262268066, 0.005273600101470947, 0.005237760066986084, 0.005294079780578613, 0.005256192207336426, 0.005284863948822022, 0.005303296089172363, 0.005291007995605469, 0.005280767917633057, 0.0053002238273620605, 0.005276671886444092, 0.005273600101470947, 0.005321728229522705, 0.0052705278396606445, 0.005261312007904053, 0.005264383792877197, 0.005306367874145508, 0.005305344104766845, 0.005292031764984131, 0.005314559936523438, 0.00525216007232666, 0.00526636791229248, 0.005296127796173096, 0.005254144191741943, 0.005294079780578613, 0.00530841588973999, 0.0052644162178039555, 0.005260255813598633, 0.005362688064575195, 0.005433343887329102, 0.005302271842956543, 0.005410816192626953, 0.005390336036682129, 0.005413887977600097, 0.005751808166503906, 0.0053944320678710935, 0.005341184139251709, 0.005323775768280029, 0.005268479824066162, 0.005318655967712403, 0.005274623870849609, 0.005269504070281982, 0.005234687805175781, 0.005316671848297119, 0.0052581758499145505, 0.005261312007904053, 0.005396480083465576, 0.0052336640357971195, 0.005198847770690918, 0.005303296089172363, 0.005261312007904053, 0.0052930560111999515, 0.005388288021087646, 0.005279744148254394, 0.005362688064575195, 0.005305344104766845, 0.005255167961120606, 0.005251071929931641, 0.005278719902038574, 0.005238783836364746, 0.005261312007904053, 0.0052633600234985355, 0.005274623870849609, 0.005252096176147461, 0.005241856098175048, 0.005328896045684814, 0.005253119945526123, 0.005261312007904053, 0.005239808082580567, 0.005277696132659912, 0.005241856098175048, 0.005718080043792725, 0.005363647937774658, 0.005278719902038574, 0.005314559936523438, 0.005287936210632324, 0.0052674560546875, 0.005298175811767578, 0.005275648117065429, 0.005261312007904053, 0.005254144191741943, 0.0052971520423889164, 0.0052633600234985355, 0.005372928142547607, 0.005303296089172363, 0.005253119945526123, 0.005243904113769531, 0.005292031764984131, 0.0052705278396606445, 0.005241856098175048, 0.005318655967712403, 
0.005327871799468994, 0.005268479824066162, 0.005290143966674805, 0.005197663784027099, 0.0052930560111999515, 0.005346303939819336, 0.005276671886444092, 0.005255167961120606, 0.005280767917633057, 0.005256192207336426, 0.005237760066986084, 0.00535756778717041, 0.005262335777282715, 0.005265408039093018, 0.005285888195037842, 0.005354496002197265, 0.0053012480735778805, 0.005255167961120606, 0.005311488151550293, 0.005275648117065429, 0.00526643180847168, 0.005326848030090332, 0.005258240222930908, 0.005259263992309571, 0.005292031764984131, 0.005253119945526123, 0.005268479824066162, 0.005398528099060058, 0.005260287761688232, 0.005250048160552978, 0.005393407821655274, 0.005259263992309571, 0.005246975898742676, 0.005294079780578613, 0.005258240222930908, 0.005260287761688232, 0.005282815933227539, 0.0052633600234985355, 0.005234687805175781, 0.005149695873260498, 0.005121024131774903, 0.005097472190856934, 0.005049344062805176, 0.0050728960037231445, 0.005082111835479736, 0.005063680171966553, 0.005064703941345215, 0.005057536125183106, 0.00530841588973999, 0.005198880195617676, 0.0051988158226013185, 0.005292031764984131, 0.005265408039093018, 0.005285888195037842, 0.005279744148254394, 0.005272575855255127, 0.005261312007904053, 0.005248000144958496, 0.005282815933227539, 0.005238783836364746, 0.005252096176147461, 0.00531660795211792, 0.005261312007904053, 0.005222400188446045, 0.005272575855255127, 0.005223423957824707, 0.005257215976715088, 0.005372928142547607, 0.005278719902038574, 0.005264383792877197, 0.005282815933227539, 0.005273663997650146, 0.00523360013961792, 0.005294079780578613, 0.005244927883148193, 0.0052930560111999515, 0.005276671886444092, 0.005262335777282715, 0.005251071929931641, 0.005259263992309571, 0.005282815933227539, 0.005282815933227539, 0.005250048160552978, 0.005275648117065429, 0.005302271842956543, 0.005279744148254394, 0.005368832111358642, 0.005465087890625, 0.005772287845611572, 0.005636096000671387, 0.005450751781463623, 0.005352447986602784, 0.005281792163848877, 0.0052633600234985355, 0.005349376201629639, 0.005268479824066162, 0.005129248142242432, 0.005066720008850098, 0.00515993595123291, 0.00515993595123291, 0.005268479824066162]",tokens/s,192.52144647676027,,,2,64,1,,, -4bit-awq-exllama-v2-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,meta-llama/Llama-2-13b-hf,meta-llama/Llama-2-13b-hf,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File 
""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - self.load_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3907, in from_pretrained - hf_quantizer.postprocess_model(model) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/base.py"", line 195, in postprocess_model - return self._process_model_after_weight_loading(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/quantizer_awq.py"", line 107, in _process_model_after_weight_loading - model = post_init_awq_exllama_modules(model, self.quantization_config.exllama_config) - File ""/usr/local/lib/python3.10/dist-packages/transformers/integrations/awq.py"", line 466, in post_init_awq_exllama_modules - model = exllamav2_post_init( - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllamav2.py"", line 198, in exllamav2_post_init - submodule.post_init(scratch_space=model.scratch_spaces[device]) - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllamav2.py"", line 81, in post_init - self.q_handle = exlv2_ext.make_q_matrix( -NameError: name 'exlv2_ext' is not defined - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,2,64,1,,, -4bit-awq-exllama-v2-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,x,x,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 304, in hf_raise_for_status - response.raise_for_status() - File ""/usr/local/lib/python3.10/dist-packages/requests/models.py"", line 1024, in raise_for_status - raise HTTPError(http_error_msg, response=self) 
-requests.exceptions.HTTPError: 404 Client Error: Not Found for url: https://huggingface.co/x/resolve/main/config.json - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1221, in hf_hub_download - return _hf_hub_download_to_cache_dir( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1325, in _hf_hub_download_to_cache_dir - _raise_on_head_call_error(head_call_error, force_download, local_files_only) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1823, in _raise_on_head_call_error - raise head_call_error - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1722, in _get_metadata_or_catch_error - metadata = get_hf_file_metadata(url=url, proxies=proxies, timeout=etag_timeout, headers=headers) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1645, in get_hf_file_metadata - r = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 372, in _request_wrapper - response = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 396, in _request_wrapper - hf_raise_for_status(response) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status - raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-669492f3-6d830d51078b4a221e736b98;24108ea5-a9ba-4ee4-9e16-16d6447f0112) - -Repository Not Found for url: https://huggingface.co/x/resolve/main/config.json. -Please make sure you specified the correct `repo_id` and `repo_type`. -If you are trying to access a private or gated repo, make sure you are authenticated. 
- -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 425, in cached_file - raise EnvironmentError( -OSError: x is not a local folder and is not a valid model identifier listed on 'https://huggingface.co/models' -If this is a private repository, make sure to pass a token having permission to this repo either by logging in with `huggingface-cli login` or by passing `token=` - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,2,64,1,,, -4bit-awq-exllama-v2-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,/,/,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", 
line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 373, in cached_file - raise EnvironmentError( -OSError: / does not appear to have a file named config.json. Checkout 'https://huggingface.co///tree/None' for available files. - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,2,64,1,,, -4bit-awq-exllama-v2-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,togethercomputer/RedPajama-INCITE-Base-7B-v0.1,togethercomputer/RedPajama-INCITE-Base-7B-v0.1,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - self.load_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3907, in from_pretrained - hf_quantizer.postprocess_model(model) - File 
""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/base.py"", line 195, in postprocess_model - return self._process_model_after_weight_loading(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/quantizer_awq.py"", line 107, in _process_model_after_weight_loading - model = post_init_awq_exllama_modules(model, self.quantization_config.exllama_config) - File ""/usr/local/lib/python3.10/dist-packages/transformers/integrations/awq.py"", line 466, in post_init_awq_exllama_modules - model = exllamav2_post_init( - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllamav2.py"", line 198, in exllamav2_post_init - submodule.post_init(scratch_space=model.scratch_spaces[device]) - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllamav2.py"", line 81, in post_init - self.q_handle = exlv2_ext.make_q_matrix( -NameError: name 'exlv2_ext' is not defined - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,2,64,1,,, -4bit-awq-exllama-v2-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,facebook/xglm-564M,facebook/xglm-564M,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - self.load_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3704, in from_pretrained - config = cls._autoset_attn_implementation( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1481, in _autoset_attn_implementation - cls._check_and_enable_flash_attn_2( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1572, in 
_check_and_enable_flash_attn_2 - raise ValueError( -ValueError: XGLMForCausalLM does not support Flash Attention 2.0 yet. Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmps7jy47cp/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,2,64,1,,, -4bit-awq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,EleutherAI/gpt-neo-125m,EleutherAI/gpt-neo-125m,cuda,0,42,,,True,,,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,gpt_neo,MB,924.79488,849.870848,0.0,220.20096,205.438976,s,1,7.4467490234375,7.4467490234375,0.0,7.4467490234375,7.4467490234375,7.4467490234375,7.4467490234375,[7.4467490234375],,kWh,7.848097623618364e-06,4.284297164879504e-06,9.912785707999472e-06,2.204518049649734e-05,,MB,1494.704128,908.591104,0.0,260.046848,226.388992,s,22,0.1935845108032227,0.00879929594560103,0.00021540354590178412,0.008712591648101807,0.009005116558074951,0.009275012922286987,0.009402381038665772,"[0.009289152145385742, 0.0087457275390625, 0.008697471618652343, 0.008653120040893554, 0.009432479858398438, 0.008641535758972169, 0.008828800201416015, 0.008617888450622559, 0.008864800453186035, 0.008993856430053711, 0.008680831909179688, 0.008547264099121094, 0.008646847724914551, 0.00872771167755127, 0.008857376098632812, 0.00868188762664795, 0.008657376289367675, 0.008916799545288085, 0.008634559631347656, 0.008667327880859376, 0.008795328140258789, 0.009006367683410645]",tokens/s,29093.23672969316,kWh,1.0375291623134544e-07,5.685130194258933e-08,2.1893442546443182e-07,3.7953864363836656e-07,tokens/kWh,674503121.85845,MB,1531.936768,921.174016,0.0,272.62976,226.391552,s,22,10.16187680053711,0.4619034909335049,0.0035253084170831835,0.4619976196289063,0.4664013244628906,0.4669015731811524,0.4677183413696289,"[0.4661012878417969, 0.4607333984375, 0.4618383483886719, 0.4669261474609375, 0.46792892456054686, 0.4595680236816406, 0.4539393615722656, 0.46407333374023435, 0.46643466186523436, 0.4591684265136719, 0.456758544921875, 0.45940338134765624, 0.45742828369140626, 0.4633062438964844, 0.46527008056640623, 0.46215689086914064, 0.4640723876953125, 0.46295172119140626, 0.4639628601074219, 0.4584295959472656, 0.4610428771972656, 0.46038201904296877]",tokens/s,136.39212787216064,kWh,5.467215727707771e-06,2.9949784877401206e-06,8.747714161989917e-06,1.7209908377437808e-05,tokens/kWh,3660681.8943088045,,s,1386,10.152310792446121,0.007324899561649449,0.00016668050844541085,0.007277567863464355,0.0074997758865356446,0.007578367829322815,0.00792017939090729,"[0.007379968166351319, 0.00748960018157959, 0.007487423896789551, 0.00744755220413208, 0.007477248191833496, 0.007437312126159668, 0.007436287879943848, 0.00723967981338501, 0.007259136199951172, 0.007206912040710449, 0.0072540159225463864, 0.007225344181060791, 0.007202816009521484, 0.007261184215545655, 0.007268352031707764, 
0.007233535766601563, 0.0072325119972229, 0.007214079856872559, 0.007358463764190673, 0.007428095817565918, 0.007434239864349365, 0.007452672004699707, 0.007238656044006348, 0.007275519847869873, 0.007217152118682861, 0.007240767955780029, 0.007251904010772705, 0.007265279769897461, 0.00724889612197876, 0.007263232231140137, 0.00724889612197876, 0.007414783954620361, 0.007746560096740723, 0.0075632638931274416, 0.008068096160888672, 0.008167424201965333, 0.007635968208312988, 0.007520256042480469, 0.007481344223022461, 0.0074997758865356446, 0.0076308479309082035, 0.007547904014587403, 0.007460864067077637, 0.007482367992401123, 0.007444479942321777, 0.007708672046661377, 0.007542784214019775, 0.007436287879943848, 0.0072427520751953125, 0.00734822416305542, 0.007521279811859131, 0.007461887836456299, 0.0074065918922424315, 0.0073359360694885255, 0.007221248149871826, 0.007228415966033935, 0.00719155216217041, 0.007219200134277344, 0.00733081579208374, 0.0074700798988342285, 0.0072427520751953125, 0.007239776134490967, 0.007194528102874756, 0.007208960056304932, 0.007286784172058106, 0.007247871875762939, 0.00729804801940918, 0.007234560012817383, 0.007193600177764893, 0.007178239822387696, 0.00724070405960083, 0.007414783954620361, 0.007372799873352051, 0.007236608028411865, 0.007222271919250488, 0.007229440212249756, 0.007258111953735351, 0.007213088035583496, 0.007223264217376709, 0.007216127872467041, 0.007219200134277344, 0.007228415966033935, 0.0072468481063842774, 0.007233535766601563, 0.007218175888061523, 0.007580671787261963, 0.007359488010406494, 0.007456768035888672, 0.007566368103027344, 0.007408607959747315, 0.007236608028411865, 0.00719155216217041, 0.007217152118682861, 0.007375872135162354, 0.00728166389465332, 0.007258111953735351, 0.007250944137573242, 0.007187456130981445, 0.007316480159759522, 0.007514111995697022, 0.00742195177078247, 0.0072724480628967286, 0.007270400047302246, 0.007331840038299561, 0.0072202239036560055, 0.0072499198913574215, 0.007200767993927002, 0.007453695774078369, 0.007309311866760254, 0.007230463981628418, 0.007540736198425293, 0.007398399829864502, 0.0074332160949707035, 0.00729702377319336, 0.007198719978332519, 0.007194623947143554, 0.007218175888061523, 0.007206912040710449, 0.007711743831634522, 0.007517183780670166, 0.0073359360694885255, 0.007565311908721924, 0.0074332160949707035, 0.007258111953735351, 0.007209983825683594, 0.007203839778900147, 0.007102464199066162, 0.007214079856872559, 0.0072120318412780765, 0.007200767993927002, 0.007258111953735351, 0.007270400047302246, 0.00793497610092163, 0.007469056129455566, 0.007477248191833496, 0.007413760185241699, 0.007422976016998291, 0.007416831970214844, 0.007455743789672851, 0.007516160011291504, 0.007425024032592774, 0.00728985595703125, 0.007205887794494629, 0.007173120021820068, 0.007196671962738037, 0.007188479900360107, 0.007370751857757568, 0.007198719978332519, 0.007217152118682861, 0.007241727828979493, 0.0072120318412780765, 0.007256063938140869, 0.007425024032592774, 0.007468031883239746, 0.007370751857757568, 0.007211008071899414, 0.007198719978332519, 0.007206912040710449, 0.0071833600997924804, 0.007196671962738037, 0.007211008071899414, 0.007241727828979493, 0.007231488227844239, 0.007172095775604248, 0.00781004810333252, 0.007546879768371582, 0.007498752117156982, 0.007537792205810547, 0.0073183999061584475, 0.007209983825683594, 0.007050240039825439, 0.007087103843688965, 0.007098368167877197, 0.00785920000076294, 0.0074414081573486324, 0.007611392021179199, 
0.007464960098266602, 0.007426047801971435, 0.007425024032592774, 0.007461887836456299, 0.007542784214019775, 0.007223296165466309, 0.00724070405960083, 0.007251967906951904, 0.007223296165466309, 0.007262207984924316, 0.00723967981338501, 0.0071157760620117185, 0.007018496036529541, 0.007069695949554444, 0.0072130560874938965, 0.00725708818435669, 0.007468031883239746, 0.007516160011291504, 0.007219200134277344, 0.007195648193359375, 0.007198719978332519, 0.007286784172058106, 0.007230463981628418, 0.007205887794494629, 0.007198719978332519, 0.007452672004699707, 0.007427072048187256, 0.007442431926727295, 0.007355391979217529, 0.007451648235321045, 0.007392255783081054, 0.007408639907836914, 0.007414783954620361, 0.007367680072784424, 0.007300159931182861, 0.007250879764556885, 0.007214079856872559, 0.007156735897064209, 0.007312384128570557, 0.007434239864349365, 0.00742195177078247, 0.007417856216430664, 0.007415840148925781, 0.007422944068908691, 0.007394303798675537, 0.007455743789672851, 0.007336959838867187, 0.007172095775604248, 0.007215104103088379, 0.007142399787902832, 0.00719974422454834, 0.007231488227844239, 0.007391232013702393, 0.007407616138458252, 0.007552000045776367, 0.007418879985809326, 0.007444479942321777, 0.0074926080703735356, 0.007649280071258545, 0.007431168079376221, 0.007420928001403809, 0.007456768035888672, 0.007449600219726562, 0.0074291200637817386, 0.00745472002029419, 0.007453695774078369, 0.007575551986694336, 0.0075684161186218265, 0.007730144023895264, 0.007464960098266602, 0.007480319976806641, 0.007419904232025146, 0.007502848148345947, 0.0075008001327514645, 0.008104960441589355, 0.008313856124877929, 0.007544832229614258, 0.007424032211303711, 0.00750486421585083, 0.007558144092559814, 0.007493631839752197, 0.007508992195129394, 0.007532544136047363, 0.007451648235321045, 0.007464960098266602, 0.007443456172943115, 0.007391232013702393, 0.007475200176239013, 0.00722431993484497, 0.007344128131866455, 0.007473152160644531, 0.007292928218841553, 0.0072468481063842774, 0.007174143791198731, 0.007219200134277344, 0.007278592109680176, 0.0072130560874938965, 0.007176191806793213, 0.007223296165466309, 0.007221248149871826, 0.0072202239036560055, 0.007209983825683594, 0.007184383869171143, 0.007197696208953858, 0.007308288097381592, 0.007211008071899414, 0.007186431884765625, 0.0071823358535766605, 0.007259136199951172, 0.007299071788787842, 0.008217599868774414, 0.008910847663879394, 0.008027135848999023, 0.008506367683410645, 0.00757862377166748, 0.00743833589553833, 0.007451648235321045, 0.007452672004699707, 0.00743833589553833, 0.007217152118682861, 0.007181312084197998, 0.007202816009521484, 0.007480319976806641, 0.0074301438331604, 0.0076605439186096195, 0.007400447845458984, 0.007368703842163086, 0.007309311866760254, 0.007174143791198731, 0.0071823358535766605, 0.007174143791198731, 0.007201824188232422, 0.007659488201141358, 0.007568384170532226, 0.007461887836456299, 0.007366655826568603, 0.007484416007995606, 0.007501823902130127, 0.007321599960327148, 0.0070594558715820314, 0.007168000221252442, 0.007180287837982178, 0.007407616138458252, 0.007305215835571289, 0.007419904232025146, 0.007200767993927002, 0.007181312084197998, 0.007192575931549072, 0.007264256000518799, 0.007329792022705078, 0.007401472091674805, 0.007159808158874512, 0.007225344181060791, 0.007211008071899414, 0.007299071788787842, 0.007216127872467041, 0.007194623947143554, 0.007193600177764893, 0.007301119804382325, 0.007214079856872559, 0.0072130560874938965, 
0.007236639976501465, 0.007223296165466309, 0.007214047908782959, 0.007475200176239013, 0.007231488227844239, 0.007385087966918945, 0.007418879985809326, 0.0074332160949707035, 0.007357439994812012, 0.007235583782196045, 0.007373824119567871, 0.007445504188537597, 0.0073697280883789065, 0.007170048236846924, 0.007205887794494629, 0.007286784172058106, 0.007221248149871826, 0.007321599960327148, 0.007383039951324463, 0.007385087966918945, 0.007473152160644531, 0.0072540159225463864, 0.007207935810089112, 0.007196671962738037, 0.007147520065307617, 0.007411712169647216, 0.007426047801971435, 0.007308288097381592, 0.007411712169647216, 0.007404543876647949, 0.007473152160644531, 0.007391232013702393, 0.0074035201072692874, 0.007285759925842285, 0.007231488227844239, 0.007201791763305664, 0.007157760143280029, 0.007200767993927002, 0.007247871875762939, 0.007303167819976806, 0.007296000003814697, 0.007337984085083008, 0.0074332160949707035, 0.007432191848754883, 0.007291903972625732, 0.007203839778900147, 0.007192575931549072, 0.007219200134277344, 0.007184383869171143, 0.007169023990631103, 0.007166975975036621, 0.007121920108795166, 0.0072202239036560055, 0.0072120318412780765, 0.0071823358535766605, 0.007352320194244385, 0.007373824119567871, 0.007158783912658692, 0.007062528133392334, 0.007052320003509522, 0.007174111843109131, 0.007065599918365479, 0.007225344181060791, 0.007245823860168457, 0.007184383869171143, 0.0071833600997924804, 0.007237631797790528, 0.007158783912658692, 0.007197696208953858, 0.007102464199066162, 0.00710041618347168, 0.007070752143859863, 0.007080927848815918, 0.007058432102203369, 0.007128064155578613, 0.007171072006225586, 0.007245823860168457, 0.0071823358535766605, 0.007234560012817383, 0.007223296165466309, 0.007193600177764893, 0.007186431884765625, 0.00745472002029419, 0.0071823358535766605, 0.007221248149871826, 0.007196671962738037, 0.007106560230255127, 0.007203839778900147, 0.007172095775604248, 0.007164927959442138, 0.007185408115386963, 0.007227392196655274, 0.007193600177764893, 0.007160831928253173, 0.0071905279159545895, 0.0071905279159545895, 0.0071833600997924804, 0.007193600177764893, 0.007221248149871826, 0.007202816009521484, 0.0072120318412780765, 0.007189504146575928, 0.00719155216217041, 0.007236608028411865, 0.007050240039825439, 0.007222271919250488, 0.007277567863464355, 0.007324672222137451, 0.00719974422454834, 0.007449600219726562, 0.007428095817565918, 0.0074700798988342285, 0.007437312126159668, 0.00753766393661499, 0.007451648235321045, 0.00744652795791626, 0.007417856216430664, 0.007371776103973389, 0.00744755220413208, 0.007351295948028564, 0.007398399829864502, 0.007267327785491944, 0.007401472091674805, 0.007464960098266602, 0.0074106879234313965, 0.007451648235321045, 0.007443456172943115, 0.007219200134277344, 0.007290880203247071, 0.0071905279159545895, 0.007206912040710449, 0.007196671962738037, 0.007200767993927002, 0.007245823860168457, 0.0073431038856506346, 0.007383039951324463, 0.007611392021179199, 0.0073820161819458, 0.007394303798675537, 0.007425024032592774, 0.007413760185241699, 0.007144447803497315, 0.007164927959442138, 0.007188479900360107, 0.007204864025115967, 0.007260159969329834, 0.007136288166046142, 0.007300064086914062, 0.007449600219726562, 0.007401472091674805, 0.007428095817565918, 0.007352320194244385, 0.007456799983978271, 0.007428063869476319, 0.007393280029296875, 0.00727347183227539, 0.007428095817565918, 0.007422976016998291, 0.0074291200637817386, 0.007428095817565918, 0.007451648235321045, 
0.007386112213134765, 0.00743833589553833, 0.007356416225433349, 0.0074065918922424315, 0.007408639907836914, 0.007678976058959961, 0.007287807941436767, 0.007411712169647216, 0.007360511779785156, 0.0074332160949707035, 0.007419904232025146, 0.007432191848754883, 0.00758784008026123, 0.0075335679054260255, 0.00744755220413208, 0.00744652795791626, 0.007392255783081054, 0.007384064197540283, 0.007265279769897461, 0.0074414081573486324, 0.007412735939025879, 0.007628799915313721, 0.007301119804382325, 0.007188479900360107, 0.007187456130981445, 0.007344128131866455, 0.00742195177078247, 0.007544832229614258, 0.007407616138458252, 0.0073697280883789065, 0.007394303798675537, 0.007489535808563232, 0.007392255783081054, 0.007799808025360107, 0.007442431926727295, 0.007395328044891358, 0.007479296207427978, 0.007740416049957275, 0.007418879985809326, 0.007602176189422607, 0.007392255783081054, 0.007318528175354004, 0.007304192066192627, 0.007277567863464355, 0.007550975799560547, 0.007398399829864502, 0.007451648235321045, 0.0074291200637817386, 0.007388160228729248, 0.007426047801971435, 0.0074741759300231934, 0.007425024032592774, 0.007502848148345947, 0.007352320194244385, 0.007222271919250488, 0.007373824119567871, 0.0072837119102478025, 0.007284736156463623, 0.007570432186126709, 0.007228415966033935, 0.007176191806793213, 0.007192575931549072, 0.007198719978332519, 0.007255040168762207, 0.007203839778900147, 0.007201856136322022, 0.007419839859008789, 0.00738918399810791, 0.007479296207427978, 0.007409664154052734, 0.007581696033477783, 0.0074403839111328125, 0.007422976016998291, 0.007451648235321045, 0.00742195177078247, 0.00744755220413208, 0.007458816051483155, 0.007373824119567871, 0.007519231796264648, 0.007458816051483155, 0.007413760185241699, 0.0074967041015625, 0.007458816051483155, 0.007222271919250488, 0.007168000221252442, 0.007225344181060791, 0.007192575931549072, 0.007196767807006836, 0.007200672149658203, 0.007161856174468994, 0.007193600177764893, 0.00724070405960083, 0.007123968124389648, 0.007144447803497315, 0.00714137601852417, 0.007328767776489258, 0.007432191848754883, 0.007267327785491944, 0.00719155216217041, 0.0071905279159545895, 0.007266304016113281, 0.0072120318412780765, 0.007217152118682861, 0.00719974422454834, 0.007176191806793213, 0.007131135940551757, 0.007161856174468994, 0.007168000221252442, 0.007193600177764893, 0.007214079856872559, 0.007186431884765625, 0.0071905279159545895, 0.0072326078414916995, 0.0071924800872802735, 0.007200767993927002, 0.007197696208953858, 0.007229440212249756, 0.007221248149871826, 0.007219200134277344, 0.0072325119972229, 0.0071792640686035155, 0.007185408115386963, 0.007216127872467041, 0.007355391979217529, 0.0073697280883789065, 0.007480319976806641, 0.0074065918922424315, 0.007432191848754883, 0.0074301438331604, 0.007264256000518799, 0.007196671962738037, 0.007184383869171143, 0.0070563840866088865, 0.007198719978332519, 0.007195648193359375, 0.007237631797790528, 0.007197696208953858, 0.007256063938140869, 0.007163904190063477, 0.007189504146575928, 0.007223296165466309, 0.007221248149871826, 0.007184383869171143, 0.007181312084197998, 0.0071823358535766605, 0.007168000221252442, 0.007309311866760254, 0.007633920192718506, 0.007435264110565186, 0.007363584041595459, 0.00742195177078247, 0.007200767993927002, 0.0071823358535766605, 0.007209983825683594, 0.007514111995697022, 0.007268352031707764, 0.007195648193359375, 0.0072120318412780765, 0.0071833600997924804, 0.007170048236846924, 0.007074816226959229, 
0.007174143791198731, 0.0071792640686035155, 0.007331840038299561, 0.007458816051483155, 0.007385087966918945, 0.00722431993484497, 0.007255040168762207, 0.007275519847869873, 0.007180287837982178, 0.007163904190063477, 0.007173120021820068, 0.007283840179443359, 0.007201663970947266, 0.007187456130981445, 0.007170048236846924, 0.007204864025115967, 0.007398399829864502, 0.007746560096740723, 0.00729804801940918, 0.007200767993927002, 0.00714035177230835, 0.007206912040710449, 0.007237631797790528, 0.007193600177764893, 0.007261184215545655, 0.0071198720932006835, 0.007202816009521484, 0.007223328113555908, 0.007230432033538819, 0.007221248149871826, 0.007159808158874512, 0.0072130560874938965, 0.007189536094665527, 0.007223264217376709, 0.007072768211364746, 0.007502848148345947, 0.007263232231140137, 0.007237631797790528, 0.00734822416305542, 0.0072724480628967286, 0.007780352115631104, 0.008133631706237793, 0.007625728130340576, 0.007486464023590088, 0.007437312126159668, 0.007313439846038819, 0.007240672111511231, 0.007226367950439453, 0.007336959838867187, 0.007267327785491944, 0.007196671962738037, 0.007193600177764893, 0.007367680072784424, 0.007269375801086426, 0.007235583782196045, 0.007204864025115967, 0.00729807996749878, 0.0072007360458374026, 0.007202816009521484, 0.00719974422454834, 0.007218175888061523, 0.007258111953735351, 0.0076277761459350585, 0.007508992195129394, 0.007270400047302246, 0.00719155216217041, 0.007193600177764893, 0.00706774377822876, 0.007143328189849854, 0.007181312084197998, 0.007204864025115967, 0.007278592109680176, 0.007166975975036621, 0.007158783912658692, 0.0072540159225463864, 0.007358463764190673, 0.007328767776489258, 0.007175168037414551, 0.007205887794494629, 0.00722431993484497, 0.007428095817565918, 0.007450623989105225, 0.007233535766601563, 0.007200767993927002, 0.00719974422454834, 0.007162879943847656, 0.007211071968078613, 0.007199679851531983, 0.007267327785491944, 0.00723967981338501, 0.007290880203247071, 0.0072120318412780765, 0.007200767993927002, 0.007170048236846924, 0.007203839778900147, 0.007177216053009033, 0.007196671962738037, 0.007217152118682861, 0.007225344181060791, 0.007223296165466309, 0.007139328002929687, 0.007285759925842285, 0.007412735939025879, 0.0073134078979492185, 0.007319551944732666, 0.007284736156463623, 0.007331840038299561, 0.0072120318412780765, 0.007211008071899414, 0.007201791763305664, 0.007326720237731933, 0.0071495680809020995, 0.007184383869171143, 0.0072130560874938965, 0.0071905279159545895, 0.007187456130981445, 0.0072120318412780765, 0.007211008071899414, 0.007173120021820068, 0.007181312084197998, 0.007285759925842285, 0.007639039993286132, 0.007456768035888672, 0.007448575973510742, 0.007390207767486572, 0.0072130560874938965, 0.00724070405960083, 0.007176191806793213, 0.007207935810089112, 0.0072468481063842774, 0.00733081579208374, 0.007218175888061523, 0.007157760143280029, 0.007234560012817383, 0.007412735939025879, 0.007221248149871826, 0.007126016139984131, 0.007175168037414551, 0.007193600177764893, 0.007192575931549072, 0.007208960056304932, 0.007223296165466309, 0.007319551944732666, 0.007278592109680176, 0.007292928218841553, 0.007159808158874512, 0.007194623947143554, 0.007319551944732666, 0.007157760143280029, 0.007186431884765625, 0.007217152118682861, 0.007193600177764893, 0.007236608028411865, 0.007165952205657959, 0.007401472091674805, 0.007309311866760254, 0.007288832187652588, 0.007256063938140869, 0.007282688140869141, 0.007341055870056152, 0.0070860800743103025, 
0.007245823860168457, 0.007229440212249756, 0.0072468481063842774, 0.007251967906951904, 0.007219200134277344, 0.007216127872467041, 0.00723967981338501, 0.007259136199951172, 0.007184383869171143, 0.0072837119102478025, 0.007260159969329834, 0.007258111953735351, 0.007244800090789795, 0.007271423816680909, 0.007238656044006348, 0.007184383869171143, 0.007192575931549072, 0.007226367950439453, 0.007288832187652588, 0.007288832187652588, 0.0072202239036560055, 0.007296000003814697, 0.00727347183227539, 0.007435264110565186, 0.007408639907836914, 0.0074741759300231934, 0.0072724480628967286, 0.007610367774963379, 0.00775270414352417, 0.007541759967803955, 0.0074967041015625, 0.007570464134216309, 0.007505887985229492, 0.0074783039093017575, 0.007469024181365967, 0.0073062400817871095, 0.00760319995880127, 0.007676928043365478, 0.0075038719177246095, 0.00744755220413208, 0.007469056129455566, 0.007501823902130127, 0.007218175888061523, 0.007279615879058838, 0.007476223945617676, 0.007400447845458984, 0.007497727870941162, 0.00754585599899292, 0.0075049281120300295, 0.007414752006530761, 0.007312384128570557, 0.007221248149871826, 0.007201791763305664, 0.007174143791198731, 0.007228415966033935, 0.007400447845458984, 0.007379968166351319, 0.00725708818435669, 0.007180287837982178, 0.007227392196655274, 0.007166975975036621, 0.007541759967803955, 0.00734822416305542, 0.0073062400817871095, 0.00724070405960083, 0.007221248149871826, 0.007354368209838867, 0.0072540159225463864, 0.007459839820861816, 0.007379968166351319, 0.007244800090789795, 0.007320608139038086, 0.007407584190368652, 0.007420928001403809, 0.007491583824157715, 0.007422976016998291, 0.00740556812286377, 0.007222271919250488, 0.007193600177764893, 0.007193600177764893, 0.007390207767486572, 0.007398399829864502, 0.007444479942321777, 0.007411712169647216, 0.007435264110565186, 0.007400447845458984, 0.00725708818435669, 0.007243775844573975, 0.007428095817565918, 0.007300096035003662, 0.007379968166351319, 0.007724031925201416, 0.0074414081573486324, 0.007540736198425293, 0.00753868818283081, 0.007416831970214844, 0.007386112213134765, 0.007357439994812012, 0.0074291200637817386, 0.007395328044891358, 0.0074711041450500485, 0.007550975799560547, 0.008156160354614257, 0.007799808025360107, 0.007414783954620361, 0.007286784172058106, 0.007225344181060791, 0.007204864025115967, 0.00733900785446167, 0.007275519847869873, 0.007423999786376953, 0.007355391979217529, 0.007227392196655274, 0.007170048236846924, 0.007186431884765625, 0.007464960098266602, 0.007385087966918945, 0.007270400047302246, 0.007177248001098633, 0.0072724161148071285, 0.0071495680809020995, 0.007364607810974121, 0.007491583824157715, 0.007583744049072265, 0.007393280029296875, 0.007060480117797851, 0.0072120318412780765, 0.007357439994812012, 0.007303167819976806, 0.007331840038299561, 0.0073062400817871095, 0.00719155216217041, 0.007221248149871826, 0.0071792640686035155, 0.007209983825683594, 0.007449600219726562, 0.007442431926727295, 0.007271423816680909, 0.007234560012817383, 0.007189504146575928, 0.00724070405960083, 0.007379968166351319, 0.007287807941436767, 0.007416831970214844, 0.00723967981338501, 0.007176224231719971, 0.007216095924377441, 0.007178239822387696, 0.007219200134277344, 0.007202816009521484, 0.007463935852050781, 0.00739737606048584, 0.007336959838867187, 0.007219200134277344, 0.007386112213134765, 0.007505919933319092, 0.007458816051483155, 0.007375872135162354, 0.007414783954620361, 0.007337984085083008, 0.0074414081573486324, 
0.00722431993484497, 0.007326720237731933, 0.007393280029296875, 0.007258111953735351, 0.007310336112976074, 0.007217152118682861, 0.007196671962738037, 0.007284736156463623, 0.00739737606048584, 0.007600128173828125, 0.007566336154937744, 0.0074967041015625, 0.007528448104858398, 0.007264256000518799, 0.007416831970214844, 0.007905280113220215, 0.007572512149810791, 0.0073400321006774905, 0.007224287986755371, 0.0072325119972229, 0.007498752117156982, 0.007448575973510742, 0.007395328044891358, 0.007229440212249756, 0.007176224231719971, 0.007179232120513916, 0.00719155216217041, 0.008018943786621094, 0.00791756820678711, 0.007451648235321045, 0.007184383869171143, 0.007153664112091064, 0.0072120318412780765, 0.007384064197540283, 0.007404543876647949, 0.007360511779785156, 0.007174143791198731, 0.007204895973205567, 0.00723964786529541, 0.0071833600997924804, 0.007228415966033935, 0.007394303798675537, 0.007374847888946533, 0.007218175888061523, 0.007316480159759522, 0.0074106879234313965, 0.007414783954620361, 0.007483391761779785, 0.007435264110565186, 0.007532544136047363, 0.0073400321006774905, 0.0072468481063842774, 0.007208960056304932, 0.007221248149871826, 0.007219200134277344, 0.007184383869171143, 0.007391232013702393, 0.0073820161819458, 0.007476223945617676, 0.007285759925842285, 0.007310336112976074, 0.0074301438331604, 0.007458816051483155, 0.007353343963623047, 0.007394303798675537, 0.007366655826568603, 0.007555071830749512, 0.00739737606048584, 0.007373824119567871, 0.0071833600997924804, 0.007184383869171143, 0.007173120021820068, 0.007208960056304932, 0.0072120318412780765, 0.007341055870056152, 0.007638016223907471, 0.007611392021179199, 0.007356416225433349, 0.007634943962097168, 0.007551008224487304, 0.007422944068908691, 0.00740556812286377, 0.007510015964508057, 0.007360511779785156, 0.007362559795379638, 0.007225344181060791, 0.007136256217956543, 0.007334911823272705, 0.0071905279159545895, 0.007300096035003662, 0.007333888053894043, 0.007641088008880615, 0.007404543876647949, 0.0075008001327514645, 0.007673855781555176, 0.007396351814270019, 0.007269375801086426, 0.0072120318412780765, 0.007226367950439453, 0.007227392196655274, 0.007508992195129394, 0.0074997758865356446, 0.00739737606048584, 0.007342080116271973, 0.007236608028411865, 0.007236608028411865, 0.007153664112091064, 0.007195648193359375, 0.007342080116271973, 0.007360511779785156, 0.007201791763305664, 0.007201791763305664, 0.007219200134277344, 0.0072468481063842774, 0.007221248149871826, 0.007175168037414551, 0.007095295906066895, 0.007169023990631103, 0.007189504146575928, 0.007196671962738037, 0.007305215835571289, 0.0072427520751953125, 0.007620607852935791, 0.0076277761459350585, 0.007375872135162354, 0.007187456130981445, 0.007178239822387696, 0.007394303798675537, 0.007432191848754883, 0.007409664154052734, 0.0074035201072692874, 0.007460864067077637, 0.007577600002288819, 0.007526400089263916, 0.007473152160644531, 0.007588863849639893, 0.007360511779785156, 0.007331840038299561, 0.007205887794494629, 0.007366655826568603, 0.007356416225433349, 0.007364607810974121, 0.007361536026000977, 0.00769536018371582, 0.007320576190948487, 0.007203839778900147, 0.007205887794494629, 0.00724070405960083, 0.007444479942321777, 0.0074629120826721195, 0.00729702377319336, 0.007243775844573975, 0.007188479900360107, 0.007332863807678222, 0.0074741759300231934, 0.007493631839752197, 0.007259136199951172, 0.007192575931549072, 0.007180287837982178, 0.007200767993927002, 0.007185408115386963, 
0.007188479900360107, 0.007408639907836914, 0.0074967041015625, 0.007280640125274658, 0.007209983825683594, 0.007189504146575928, 0.0071792640686035155, 0.007310336112976074, 0.007560192108154297, 0.0074106879234313965, 0.007452672004699707, 0.00745472002029419, 0.007734272003173828, 0.007875584125518798, 0.007423999786376953, 0.007462944030761719, 0.0075447998046875, 0.007419904232025146, 0.007519231796264648, 0.0074629120826721195, 0.00743833589553833, 0.0075038719177246095, 0.00742195177078247, 0.007426047801971435, 0.007285759925842285, 0.007207935810089112, 0.007197696208953858, 0.007180287837982178, 0.00743833589553833, 0.007428095817565918, 0.007209983825683594, 0.007193600177764893, 0.007193600177764893, 0.00719974422454834, 0.007402495861053467, 0.007512063980102539, 0.007413792133331299, 0.00750486421585083, 0.007519231796264648, 0.007426047801971435, 0.007477248191833496, 0.007416831970214844, 0.0073431038856506346, 0.007229440212249756, 0.007197696208953858, 0.007202816009521484, 0.007456768035888672, 0.007423999786376953, 0.007423999786376953, 0.007364607810974121, 0.007218207836151123, 0.007222239971160889, 0.007147520065307617, 0.007205887794494629, 0.007206912040710449, 0.00694374418258667, 0.00719155216217041, 0.007164927959442138, 0.007160831928253173, 0.007321599960327148, 0.007201791763305664, 0.007247871875762939, 0.007198719978332519, 0.007185408115386963, 0.0072427520751953125, 0.007207935810089112, 0.0071905279159545895, 0.007384064197540283, 0.0073697280883789065, 0.007461887836456299, 0.0073431038856506346, 0.007203839778900147, 0.007150591850280762, 0.007205887794494629, 0.007158783912658692, 0.007292928218841553, 0.007366655826568603, 0.007214079856872559, 0.007202816009521484, 0.007158783912658692, 0.007192575931549072, 0.007216127872467041, 0.007404543876647949, 0.007333888053894043, 0.0071823358535766605, 0.007159840106964111, 0.007210976123809815, 0.007245823860168457, 0.007228415966033935, 0.007326720237731933, 0.007088128089904785, 0.007166975975036621, 0.007205887794494629, 0.007209983825683594, 0.007172095775604248, 0.007274496078491211, 0.0074700798988342285, 0.007411712169647216, 0.007270400047302246, 0.007215104103088379, 0.007172095775604248, 0.007309311866760254, 0.007432191848754883, 0.007341055870056152, 0.007374847888946533, 0.007391232013702393, 0.007482367992401123, 0.007280640125274658, 0.007245823860168457, 0.007395328044891358, 0.007400447845458984, 0.007331840038299561, 0.007147520065307617, 0.0072468481063842774, 0.007409664154052734, 0.007436287879943848, 0.007431168079376221, 0.007413760185241699, 0.007154687881469727, 0.0071833600997924804, 0.007162879943847656, 0.007319551944732666, 0.00743833589553833, 0.007197696208953858, 0.0072120318412780765, 0.007258111953735351, 0.007300096035003662, 0.007437312126159668, 0.007415808200836181, 0.007271423816680909, 0.007176224231719971, 0.007195615768432617, 0.007184383869171143, 0.007178239822387696, 0.007230463981628418, 0.007207935810089112, 0.007588863849639893, 0.007377920150756836, 0.0074035201072692874, 0.0072468481063842774, 0.0073820161819458, 0.007548927783966064, 0.007465983867645264, 0.007334911823272705, 0.007189504146575928, 0.007215104103088379, 0.007194623947143554, 0.007206912040710449, 0.007188479900360107, 0.0073175039291381834, 0.00749567985534668, 0.007211008071899414, 0.007216127872467041, 0.007193600177764893, 0.0071485438346862796, 0.007384064197540283, 0.007233535766601563, 0.007150591850280762, 0.007226367950439453, 0.007163904190063477, 0.0071823358535766605, 
0.007180287837982178, 0.00719155216217041, 0.007611392021179199, 0.0074711041450500485, 0.007350272178649903, 0.007420928001403809, 0.007286784172058106, 0.007391232013702393, 0.007829504013061523, 0.007303167819976806, 0.007486464023590088, 0.00724070405960083, 0.007275519847869873, 0.007365632057189942, 0.007762944221496582, 0.0074291200637817386, 0.007387135982513428, 0.007238656044006348, 0.007151616096496582, 0.007411712169647216, 0.007356416225433349, 0.007419904232025146, 0.007215104103088379, 0.007176191806793213, 0.007417856216430664, 0.007402495861053467, 0.0072468481063842774, 0.007280640125274658, 0.007364607810974121, 0.007194623947143554, 0.007161856174468994, 0.007163904190063477, 0.00727347183227539, 0.00719974422454834, 0.007211008071899414, 0.007244832038879394, 0.007163936138153076, 0.0071699838638305665, 0.007206912040710449, 0.007193600177764893, 0.0071526398658752445, 0.007155712127685547, 0.007352320194244385, 0.007907328128814697, 0.007311359882354736, 0.007189504146575928, 0.007152671813964844, 0.007307231903076172, 0.007209983825683594, 0.007334911823272705, 0.007193600177764893, 0.007201791763305664, 0.007171072006225586, 0.007193600177764893, 0.00724889612197876, 0.007196671962738037, 0.0072120318412780765, 0.007155712127685547, 0.007152671813964844, 0.007197663784027099, 0.007168000221252442, 0.007184383869171143, 0.007150591850280762, 0.007204864025115967, 0.007105535984039306, 0.007145472049713135, 0.0071905279159545895, 0.007291903972625732, 0.007634943962097168, 0.00744652795791626, 0.007971839904785156, 0.008236031532287597, 0.007668799877166748, 0.007477183818817138, 0.007444479942321777, 0.007416831970214844, 0.007431168079376221, 0.007398399829864502, 0.0074301438331604, 0.007325695991516113, 0.007163904190063477, 0.007192575931549072, 0.007218175888061523]",tokens/s,136.52064326392158,,,2,64,1,,, -4bit-awq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,EleutherAI/gpt-j-6b,EleutherAI/gpt-j-6b,cuda,0,42,,,True,,,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,gptj,MB,3756.355584,5604.114432,0.0,4974.444544,4685.071872,s,1,10.4989814453125,10.4989814453125,0.0,10.4989814453125,10.4989814453125,10.4989814453125,10.4989814453125,[10.4989814453125],,kWh,4.405354306387923e-05,2.4126449702457116e-05,7.846117387999563e-05,0.00014664116664633197,,MB,1954.013184,5641.863168,0.0,4993.318912,4233.626624,s,10,1.119364288330078,0.11193642883300783,0.0002043803694134326,0.11195820617675781,0.11219218292236327,0.11220625228881835,0.11221750778198242,"[0.11183289337158203, 0.11222032165527343, 0.11166476440429687, 0.11187715148925781, 0.11218905639648437, 0.1120459213256836, 0.11212345886230468, 0.1116539535522461, 0.1120392608642578, 
0.11171750640869141]",tokens/s,2287.0123932746965,kWh,1.3210388742593114e-06,7.237893119738252e-07,4.8965100900444985e-06,6.941338276277635e-06,tokens/kWh,36880496.211356334,MB,1962.663936,5643.96032,0.0,4993.318912,4342.312448,s,10,25.113387695312497,2.5113387695312497,0.01388286165313518,2.5093922119140624,2.531296337890625,2.532910009765625,2.534200947265625,"[2.509003662109375, 2.530937744140625, 2.5065712890625, 2.50978076171875, 2.534523681640625, 2.51360302734375, 2.503455810546875, 2.50239208984375, 2.48372021484375, 2.5193994140625]",tokens/s,25.086221247545655,kWh,3.062846728310212e-05,1.6785763974168442e-05,6.660882489195545e-05,0.00011402305614922603,tokens/kWh,552519.8335111248,,s,630,25.10607566452026,0.03985091375320677,0.000668783085371352,0.03963852691650391,0.04064808807373047,0.04079984493255615,0.04178579364776612,"[0.039378944396972655, 0.03920383834838867, 0.039512065887451174, 0.04003839874267578, 0.039387134552001955, 0.03939430236816406, 0.03941273498535156, 0.03932262420654297, 0.039809024810791016, 0.0393175048828125, 0.03973222351074219, 0.039392257690429686, 0.03968511962890625, 0.039411712646484375, 0.03943423843383789, 0.0395417594909668, 0.039360511779785154, 0.03962777709960937, 0.03974041748046875, 0.04120883178710937, 0.041671710968017577, 0.039648223876953125, 0.03931238555908203, 0.03931238555908203, 0.0403394546508789, 0.040052734375, 0.03941791915893555, 0.039575489044189456, 0.03948134231567383, 0.039964672088623046, 0.0393809928894043, 0.03952844619750977, 0.039378944396972655, 0.03951718521118164, 0.03997081756591797, 0.040755199432373046, 0.03998515319824219, 0.03949465560913086, 0.039572479248046875, 0.03978140640258789, 0.04025238418579102, 0.039272449493408204, 0.03938611221313477, 0.03930521774291992, 0.039408641815185545, 0.03983359909057617, 0.039403518676757815, 0.03960422515869141, 0.03931340789794922, 0.03928575897216797, 0.03985715103149414, 0.04006406402587891, 0.03931027221679687, 0.04028518295288086, 0.04057907104492187, 0.04067327880859375, 0.04054425430297852, 0.040553470611572266, 0.04058726501464844, 0.040521728515625, 0.04096307373046875, 0.040509441375732425, 0.04064051055908203, 0.04055756759643555, 0.040515583038330076, 0.04048998260498047, 0.04047564697265625, 0.040687614440917966, 0.04070809555053711, 0.04050431823730469, 0.0406036491394043, 0.040659969329833984, 0.04053913497924805, 0.04054732894897461, 0.04065894317626953, 0.04070195388793945, 0.04062617492675781, 0.040591358184814456, 0.04034048080444336, 0.040630271911621094, 0.040583168029785156, 0.040612865447998046, 0.04074700927734375, 0.04059033584594727, 0.040567806243896484, 0.04060671997070313, 0.04067635345458984, 0.04048281478881836, 0.040613887786865234, 0.04064255905151367, 0.04266086578369141, 0.040858623504638675, 0.03990323257446289, 0.039512065887451174, 0.04021657562255859, 0.03959500885009765, 0.03938508987426758, 0.03936665725708008, 0.03934822463989258, 0.040390655517578124, 0.03937177658081055, 0.03930828857421875, 0.03932057571411133, 0.03941068649291992, 0.04026265716552734, 0.03945574569702148, 0.0393359375, 0.03982643127441406, 0.03950284957885742, 0.039967742919921875, 0.04086579132080078, 0.04067020797729492, 0.040948734283447266, 0.0395417594909668, 0.03988889694213867, 0.03945676803588867, 0.039570430755615234, 0.03937279891967774, 0.03949772644042969, 0.039728126525878905, 0.03971891021728516, 0.039367679595947266, 0.03937177658081055, 0.03957555389404297, 0.03973324966430664, 0.04033126449584961, 0.03942092895507812, 0.03976396942138672, 
0.03978342437744141, 0.039212032318115236, 0.039390209197998044, 0.03935641479492188, 0.03948441696166992, 0.039806976318359374, 0.03946188735961914, 0.0392437744140625, 0.03932672119140625, 0.03925299072265625, 0.040097793579101565, 0.03938611221313477, 0.0394967041015625, 0.03945574569702148, 0.04292812728881836, 0.04198297500610351, 0.04050431823730469, 0.039384063720703126, 0.03934515380859375, 0.03935027313232422, 0.03916287994384766, 0.039226367950439454, 0.039201793670654295, 0.03918745422363281, 0.03937177658081055, 0.039332862854003905, 0.03990425491333008, 0.04106547164916992, 0.04060569763183594, 0.03942911911010742, 0.04035583877563476, 0.03958784103393555, 0.03928268814086914, 0.03934207916259766, 0.03926220703125, 0.04044800186157226, 0.03950592041015625, 0.04015718460083008, 0.039831550598144534, 0.03953561782836914, 0.040318977355957034, 0.03952640151977539, 0.03923251342773437, 0.03983871841430664, 0.04003942489624023, 0.039608318328857424, 0.03935539245605469, 0.039943168640136716, 0.039288833618164064, 0.03931340789794922, 0.03919462585449219, 0.03978956985473633, 0.04050739288330078, 0.039449600219726565, 0.03923353576660156, 0.04076236724853516, 0.04040806579589844, 0.04037734222412109, 0.03996160125732422, 0.04055039978027344, 0.04041523361206055, 0.03905023956298828, 0.03913216018676758, 0.039190528869628906, 0.03924070358276367, 0.03915673446655273, 0.03968102264404297, 0.03954278564453125, 0.03894476699829102, 0.03922022247314453, 0.0390010871887207, 0.03925299072265625, 0.03923353576660156, 0.03891712188720703, 0.03920588684082031, 0.039209983825683595, 0.038836223602294925, 0.03902054214477539, 0.03917619323730469, 0.03915673446655273, 0.039037952423095705, 0.03914854431152344, 0.03916799926757813, 0.041490432739257815, 0.04093132781982422, 0.03926015853881836, 0.03920896148681641, 0.03901030349731445, 0.039430145263671876, 0.03921100616455078, 0.03954687881469727, 0.04044902420043945, 0.04010291290283203, 0.038983680725097655, 0.03914956665039063, 0.0392171516418457, 0.03962879943847656, 0.04050022506713867, 0.04049203109741211, 0.04055551910400391, 0.04048179244995117, 0.0405401611328125, 0.04050227355957031, 0.04121702575683594, 0.04049612808227539, 0.04057292938232422, 0.04045107269287109, 0.040525825500488284, 0.040321025848388675, 0.04046438217163086, 0.04048179244995117, 0.04051865768432617, 0.0395335693359375, 0.04044902420043945, 0.040474624633789064, 0.04042342376708984, 0.04048691177368164, 0.04009369659423828, 0.040166400909423826, 0.040343551635742186, 0.040231937408447264, 0.040581119537353515, 0.040515583038330076, 0.040400894165039065, 0.04020019149780273, 0.04044800186157226, 0.04053504180908203, 0.04056576156616211, 0.04045318222045898, 0.04064352035522461, 0.04054323196411133, 0.04040806579589844, 0.04051660919189453, 0.04051148986816406, 0.04043574523925781, 0.040693729400634766, 0.04050739288330078, 0.04055244827270508, 0.04064665603637695, 0.04062515258789062, 0.040425472259521485, 0.04053401565551758, 0.04055756759643555, 0.03998310470581055, 0.03920281600952148, 0.03987353515625, 0.03919257736206055, 0.03922227096557617, 0.03927961730957031, 0.03942399978637695, 0.0399441909790039, 0.039228416442871096, 0.03932876968383789, 0.03911782455444336, 0.03934207916259766, 0.03923251342773437, 0.04024422454833984, 0.03929087829589844, 0.03930931091308594, 0.03916799926757813, 0.03934310531616211, 0.040166400909423826, 0.04039168167114258, 0.03974041748046875, 0.04066304016113281, 0.040325119018554685, 0.04072857666015625, 0.040600574493408204, 
0.04058931350708008, 0.04062412643432617, 0.040659969329833984, 0.04098559951782227, 0.04070297622680664, 0.04045721435546875, 0.04067942428588867, 0.0407562255859375, 0.04061798477172852, 0.040529918670654294, 0.040578048706054685, 0.04040703964233398, 0.04057702255249023, 0.040613887786865234, 0.04062822341918945, 0.04059648132324219, 0.040420352935791014, 0.04056063842773437, 0.04057907104492187, 0.03992166519165039, 0.04062003326416016, 0.04044287872314453, 0.039482368469238284, 0.03926528167724609, 0.03935232162475586, 0.03935232162475586, 0.0397916145324707, 0.0404213752746582, 0.04006707382202149, 0.04050022506713867, 0.039298046112060545, 0.03978854370117187, 0.03930828857421875, 0.039610366821289066, 0.04173926544189453, 0.0406640625, 0.0400445442199707, 0.04011008071899414, 0.03912396621704101, 0.03922022247314453, 0.03930112075805664, 0.03921612930297851, 0.040323070526123043, 0.040016895294189454, 0.03933900833129883, 0.039122943878173826, 0.03934105682373047, 0.040226814270019534, 0.03920896148681641, 0.03916799926757813, 0.04041523361206055, 0.03951923370361328, 0.040174591064453126, 0.03990630340576172, 0.03931545639038086, 0.039362560272216796, 0.03925503921508789, 0.0393994255065918, 0.0399738883972168, 0.04008652877807617, 0.03947622299194336, 0.039411712646484375, 0.03954585647583008, 0.03981619262695312, 0.03974041748046875, 0.040134654998779294, 0.03924684906005859, 0.03927552032470703, 0.040834049224853515, 0.040651775360107424, 0.04111257553100586, 0.039390209197998044, 0.03928473663330078, 0.039605247497558595, 0.04064767837524414, 0.04041830444335937, 0.03952537536621094, 0.039600128173828124, 0.04060774230957031, 0.040643585205078124, 0.040591358184814456, 0.04231782531738281, 0.03951103973388672, 0.03929702377319336, 0.04015923309326172, 0.040321025848388675, 0.04153753662109375, 0.041804798126220705, 0.039927806854248044, 0.03965030288696289, 0.03964723205566406, 0.039367679595947266, 0.03927654266357422, 0.03940966415405273, 0.039316478729248046, 0.03937177658081055, 0.03978854370117187, 0.04000153732299805, 0.04170547103881836, 0.04038553619384765, 0.039398399353027344, 0.03962879943847656, 0.04062515258789062, 0.03910246276855469, 0.039272449493408204, 0.03892531204223633, 0.039218177795410154, 0.03976192092895508, 0.04018380737304687, 0.03928985595703125, 0.03920896148681641, 0.03983871841430664, 0.04009574508666992, 0.03917926406860352, 0.039196670532226564, 0.04009881591796875, 0.03947724914550781, 0.03912089538574219, 0.040591358184814456, 0.0406743049621582, 0.039163902282714845, 0.03923763275146484, 0.03940454483032227, 0.039452671051025394, 0.03910041427612305, 0.03919257736206055, 0.039728126525878905, 0.04061183929443359, 0.04050636672973633, 0.039567359924316405, 0.039051265716552735, 0.039180286407470705, 0.03919564819335938, 0.03924787139892578, 0.03925196838378906, 0.03912499237060547, 0.040218624114990234, 0.03971788787841797, 0.03921100616455078, 0.040360958099365234, 0.0405401611328125, 0.040542209625244144, 0.03925196838378906, 0.03908198547363281, 0.039293952941894535, 0.040687614440917966, 0.04067327880859375, 0.039357440948486325, 0.03922431945800781, 0.040493057250976565, 0.03956121444702149, 0.03990118408203125, 0.04020326232910156, 0.039137279510498044, 0.04194713592529297, 0.04078694534301758, 0.04044083023071289, 0.039174144744873046, 0.03987148666381836, 0.04009164810180664, 0.03920383834838867, 0.0393256950378418, 0.04010700988769531, 0.03984281539916992, 0.03926220703125, 0.039051265716552735, 0.03916595077514649, 
0.04016025543212891, 0.039191551208496093, 0.039177215576171875, 0.039597057342529295, 0.04048179244995117, 0.040397823333740236, 0.03944140625, 0.040809471130371096, 0.04038246536254883, 0.03926937484741211, 0.03922431945800781, 0.03925503921508789, 0.039844863891601565, 0.040412158966064454, 0.03968511962890625, 0.03908095932006836, 0.03919462585449219, 0.03908403015136719, 0.039221248626708984, 0.03969638442993164, 0.040793087005615236, 0.040235008239746094, 0.039185409545898435, 0.03920896148681641, 0.039046142578125, 0.03977011108398437, 0.04001279830932617, 0.0391649284362793, 0.039858177185058595, 0.040052734375, 0.03984588623046875, 0.039226367950439454, 0.039591934204101564, 0.039810047149658204, 0.03970764923095703, 0.03915980911254883, 0.03913318252563477, 0.03912089538574219, 0.03924889755249023, 0.039137279510498044, 0.03912499237060547, 0.03921408081054688, 0.03922739028930664, 0.040248321533203124, 0.03956326293945313, 0.0392437744140625, 0.039155712127685545, 0.039229438781738284, 0.03917824172973633, 0.03924588775634766, 0.039214015960693356, 0.039370750427246096, 0.039204864501953124, 0.03893862533569336, 0.039283710479736327, 0.03986841583251953, 0.03928268814086914, 0.039172096252441405, 0.03915059280395508, 0.039018497467041016, 0.04000153732299805, 0.03944243240356445, 0.039430145263671876, 0.03916799926757813, 0.03913318252563477, 0.0392437744140625, 0.039142398834228515, 0.03921408081054688, 0.03963596725463867, 0.03934310531616211, 0.03918950271606445, 0.03967385482788086, 0.04037017440795899, 0.04135424041748047, 0.03964108657836914, 0.039209983825683595, 0.03990016174316406, 0.04007219314575195, 0.039021568298339845, 0.03918438339233398, 0.039378944396972655, 0.03940966415405273, 0.03909939193725586, 0.03910451126098633, 0.03924070358276367, 0.039054336547851565, 0.039316478729248046, 0.03909427261352539, 0.03908915328979492, 0.03943526458740235, 0.04049203109741211, 0.03910553741455078, 0.03933184051513672, 0.03910246276855469, 0.039629825592041014, 0.039139328002929685, 0.03924684906005859, 0.03926835250854492, 0.03919462585449219, 0.03914956665039063, 0.03913420867919922, 0.03930316925048828, 0.04017868804931641, 0.04017561721801758, 0.03928780746459961, 0.039213054656982424, 0.039823360443115234, 0.04150271987915039, 0.04089548873901367, 0.03948748779296875, 0.04023295974731445, 0.039258113861083986, 0.039406593322753904, 0.039174144744873046, 0.039087104797363284, 0.039223297119140625, 0.03916697692871094, 0.040035327911376956, 0.0392540168762207, 0.0397127685546875, 0.039964672088623046, 0.03988787078857422, 0.0392171516418457, 0.03913113784790039, 0.04036505508422852, 0.040493057250976565, 0.03937279891967774, 0.039021568298339845, 0.039344127655029294, 0.039330814361572264, 0.03928678512573242, 0.04025241470336914, 0.040474624633789064, 0.039959552764892575, 0.039946239471435545, 0.04072140884399414, 0.03960934448242188, 0.03924889755249023, 0.0392355842590332, 0.03903180694580078, 0.04056371307373047, 0.04080537414550781, 0.040774654388427735, 0.04101529693603516, 0.04099993515014649, 0.04094566345214844, 0.040583168029785156, 0.03975680160522461, 0.03916799926757813, 0.04071321487426758, 0.04075724792480469, 0.04050124740600586, 0.04053811264038086, 0.040637439727783206, 0.04069068908691406, 0.040584190368652344, 0.04043571090698242, 0.03930931091308594, 0.03893964767456055, 0.03905023956298828, 0.0391536636352539, 0.03902566528320312, 0.0397946891784668, 0.04060979080200195, 0.04252774429321289, 0.04127948760986328, 
0.040741886138916016]",tokens/s,25.093527495828898,,,2,64,1,,, -4bit-awq-exllama-v2-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,stabilityai/stablelm-2-12b,stabilityai/stablelm-2-12b,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - self.load_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3907, in from_pretrained - hf_quantizer.postprocess_model(model) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/base.py"", line 195, in postprocess_model - return self._process_model_after_weight_loading(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/quantizer_awq.py"", line 107, in _process_model_after_weight_loading - model = post_init_awq_exllama_modules(model, self.quantization_config.exllama_config) - File ""/usr/local/lib/python3.10/dist-packages/transformers/integrations/awq.py"", line 466, in post_init_awq_exllama_modules - model = exllamav2_post_init( - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllamav2.py"", line 198, in exllamav2_post_init - submodule.post_init(scratch_space=model.scratch_spaces[device]) - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllamav2.py"", line 81, in post_init - self.q_handle = exlv2_ext.make_q_matrix( -NameError: name 'exlv2_ext' is not defined - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,2,64,1,,, 
-4bit-awq-exllama-v2-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,google/gemma-2b,google/gemma-2b,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 304, in hf_raise_for_status - response.raise_for_status() - File ""/usr/local/lib/python3.10/dist-packages/requests/models.py"", line 1024, in raise_for_status - raise HTTPError(http_error_msg, response=self) -requests.exceptions.HTTPError: 403 Client Error: Forbidden for url: https://huggingface.co/google/gemma-2b/resolve/main/config.json - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1722, in _get_metadata_or_catch_error - metadata = get_hf_file_metadata(url=url, proxies=proxies, timeout=etag_timeout, headers=headers) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1645, in get_hf_file_metadata - r = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 372, in _request_wrapper - response = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 396, in _request_wrapper - hf_raise_for_status(response) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 367, in hf_raise_for_status - raise HfHubHTTPError(message, response=response) from e -huggingface_hub.utils._errors.HfHubHTTPError: (Request ID: Root=1-6694813f-54a5b0753305d8757cf57460;bee32b92-e86e-4d7e-a4fa-b8dbc2356f6a) - -403 Forbidden: Please enable access to public gated repositories in your fine-grained token settings to view this repository.. -Cannot access content at: https://huggingface.co/google/gemma-2b/resolve/main/config.json. -If you are trying to create or update content,make sure you have a token with the `write` role. 
- -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1221, in hf_hub_download - return _hf_hub_download_to_cache_dir( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1325, in _hf_hub_download_to_cache_dir - _raise_on_head_call_error(head_call_error, force_download, local_files_only) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1826, in _raise_on_head_call_error - raise LocalEntryNotFoundError( -huggingface_hub.utils._errors.LocalEntryNotFoundError: An error happened while trying to locate the file on the Hub and we cannot find the requested files in the local cache. Please check your connection and try again or make sure your Internet connection is on. - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 445, in cached_file - raise EnvironmentError( -OSError: We couldn't connect to 'https://huggingface.co' to load this file, couldn't find it in the cached files and it looks like google/gemma-2b is not the path to a directory containing a file named config.json. -Checkout your internet connection or see how to run the library in offline mode at 'https://huggingface.co/docs/transformers/installation#offline-mode'. 
- -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,2,64,1,,, -4bit-awq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,EleutherAI/polyglot-ko-12.8b,EleutherAI/polyglot-ko-12.8b,cuda,0,42,,,True,,,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,gpt_neox,MB,7351.259136,9907.470336,0.0,9277.800448,8679.633408,s,1,12.271779296875,12.271779296875,0.0,12.271779296875,12.271779296875,12.271779296875,12.271779296875,[12.271779296875],,kWh,6.486267154514155e-05,3.550603500086324e-05,0.00012799176906003185,0.00022836047560603664,,MB,1752.805376,9926.344704,0.0,9277.800448,8206.444544,s,10,1.9052067871093752,0.19052067871093753,3.8356069268934654e-05,0.19052210998535157,0.19056241455078124,0.19057672424316408,0.19058817199707032,"[0.19051551818847656, 0.19051840209960938, 0.1905361633300781, 0.19059103393554688, 0.19045286560058594, 0.19053907775878906, 0.19049845886230468, 0.19047021484375, 0.19052581787109374, 0.19055923461914062]",tokens/s,1343.6861643161017,kWh,2.2530987011531727e-06,1.234588033536411e-06,8.740174707017988e-06,1.2227861441707572e-05,tokens/kWh,20935794.96467132,MB,1758.150656,9926.344704,0.0,9277.800448,8483.156992,s,10,18.42956396484375,1.8429563964843747,0.023816935922790182,1.8540309448242187,1.860294140625,1.8633376831054687,1.8657725170898438,"[1.8596177978515624, 1.8583907470703125, 1.8550015869140626, 1.783762451171875, 1.853060302734375, 1.825242919921875, 1.825933837890625, 1.8663812255859376, 1.8588675537109376, 1.8433055419921875]",tokens/s,34.184205399638785,kWh,2.173952263690158e-05,1.1911195387386497e-05,6.598227395978501e-05,9.963299198407307e-05,tokens/kWh,632320.6675362206,,s,630,18.42712157440186,0.02924939932444739,0.0007145361362447388,0.029451775550842284,0.029931929969787597,0.030271795463562012,0.03128338994979859,"[0.028836864471435547, 0.028413951873779295, 0.028276735305786133, 0.028206079483032227, 0.028283903121948242, 0.028770303726196288, 0.029535232543945314, 0.029569023132324217, 0.029468671798706055, 0.029656063079833983, 0.029640703201293944, 0.0301527042388916, 0.03000217628479004, 0.029543424606323244, 0.02955366325378418, 0.02939187240600586, 0.029410303115844725, 0.02953727912902832, 0.029549568176269532, 0.02952191925048828, 0.029586431503295898, 0.029429759979248047, 0.029488128662109377, 0.029378623962402345, 0.029592512130737304, 0.029639680862426757, 0.029732864379882814, 0.02939801597595215, 0.029472768783569334, 0.02918707275390625, 0.030150720596313477, 0.031549375534057615, 0.031092735290527345, 0.029688831329345702, 0.02953727912902832, 0.029460479736328125, 0.02939084815979004, 0.029378559112548826, 0.029451263427734374, 0.02937651252746582, 0.029313024520874024, 0.029470720291137696, 0.029494272232055665, 0.029615104675292967, 0.02958131217956543, 0.029455360412597657, 0.029879295349121093, 0.030063615798950196, 0.030209024429321288, 0.02954649543762207, 0.029473823547363283, 0.02938876724243164, 0.0293621768951416, 0.029428735733032226, 0.029387775421142577, 
0.02952191925048828, 0.029405183792114258, 0.029456384658813478, 0.029861888885498046, 0.029628416061401368, 0.0294072322845459, 0.029551616668701174, 0.029566976547241212, 0.028412927627563478, 0.02831257629394531, 0.028144639968872072, 0.028224512100219725, 0.028241920471191406, 0.02823379135131836, 0.02827052879333496, 0.028294143676757814, 0.029683712005615235, 0.029636608123779298, 0.02953830337524414, 0.029419519424438476, 0.029914112091064454, 0.030533632278442382, 0.029649919509887695, 0.02942361640930176, 0.029483007431030273, 0.029714431762695313, 0.02977382469177246, 0.029486080169677735, 0.029449216842651366, 0.02938265609741211, 0.02953727912902832, 0.029484031677246093, 0.029475839614868164, 0.029328384399414063, 0.029507583618164062, 0.02933964729309082, 0.029313024520874024, 0.030279680252075194, 0.03015475273132324, 0.029527040481567384, 0.02939289665222168, 0.029495296478271486, 0.029643808364868164, 0.029528032302856444, 0.02935203170776367, 0.02936310386657715, 0.02952396774291992, 0.029438976287841798, 0.029854719161987304, 0.030867456436157226, 0.03032678413391113, 0.029650943756103516, 0.02976358413696289, 0.029466623306274413, 0.029715456008911133, 0.030274560928344726, 0.030092287063598632, 0.02950553512573242, 0.02971238327026367, 0.029446144104003907, 0.029627391815185547, 0.029471744537353517, 0.03056025505065918, 0.02976972770690918, 0.029467647552490234, 0.02957414436340332, 0.029612096786499023, 0.02955564880371094, 0.02956492805480957, 0.029524991989135742, 0.029831167221069335, 0.02833510398864746, 0.028379135131835938, 0.029412351608276367, 0.029426687240600585, 0.029345792770385744, 0.029485055923461914, 0.029791231155395507, 0.029466623306274413, 0.028298240661621094, 0.028189823150634764, 0.02814246368408203, 0.028229631423950196, 0.028273664474487304, 0.02916761589050293, 0.028626943588256838, 0.029180927276611326, 0.029570047378540038, 0.029645824432373048, 0.029378559112548826, 0.029434879302978514, 0.02938275146484375, 0.029344671249389647, 0.029683712005615235, 0.029088768005371093, 0.028283903121948242, 0.028261375427246094, 0.028308479309082032, 0.02894745635986328, 0.02972163200378418, 0.02980963134765625, 0.02955571174621582, 0.029455360412597657, 0.02955062484741211, 0.029461471557617188, 0.029605920791625977, 0.029508575439453125, 0.02937446403503418, 0.02953625679016113, 0.03246284866333008, 0.03302809524536133, 0.030085119247436523, 0.029674495697021484, 0.029667327880859375, 0.029731840133666993, 0.029703168869018554, 0.029557760238647462, 0.029593599319458007, 0.02959052848815918, 0.030501888275146483, 0.02996428871154785, 0.029715456008911133, 0.029476863861083984, 0.02949017524719238, 0.02962944030761719, 0.029693952560424806, 0.029534208297729493, 0.02957926368713379, 0.029460479736328125, 0.029618175506591796, 0.028446720123291015, 0.029808639526367187, 0.030126079559326172, 0.029944831848144532, 0.02818252754211426, 0.028279808044433592, 0.02833305549621582, 0.02778316879272461, 0.02836172866821289, 0.028382207870483397, 0.02834841537475586, 0.028214271545410157, 0.028243967056274414, 0.02811903953552246, 0.028314624786376953, 0.028201984405517577, 0.028222463607788087, 0.028424192428588867, 0.029260799407958983, 0.028520448684692383, 0.028309503555297853, 0.028245023727416992, 0.02823779106140137, 0.0283371524810791, 0.028103679656982423, 0.028276735305786133, 0.02834841537475586, 0.028298240661621094, 0.028279808044433592, 0.028297216415405273, 0.028480607986450194, 0.02835446357727051, 0.02876006317138672, 0.028440576553344726, 
0.028387327194213868, 0.028291072845458985, 0.028224512100219725, 0.02818662452697754, 0.02837811279296875, 0.027872255325317383, 0.028017663955688478, 0.028242944717407226, 0.028284927368164063, 0.028239871978759764, 0.028449792861938477, 0.028329984664916992, 0.02831155204772949, 0.028314624786376953, 0.028078079223632812, 0.02818764877319336, 0.02831564712524414, 0.028253183364868165, 0.028232704162597655, 0.028290079116821288, 0.028735456466674806, 0.028794879913330077, 0.028272640228271483, 0.02820812797546387, 0.028342271804809572, 0.028275711059570312, 0.028210176467895507, 0.028214271545410157, 0.028410879135131836, 0.028251136779785156, 0.028292095184326172, 0.028391424179077147, 0.028276735305786133, 0.028256256103515624, 0.028057600021362306, 0.028219392776489258, 0.031037439346313478, 0.03148595237731933, 0.030073856353759764, 0.029741056442260744, 0.029414400100708008, 0.029414400100708008, 0.02935807991027832, 0.029489152908325194, 0.029445119857788086, 0.029440031051635743, 0.029512672424316405, 0.02958233642578125, 0.029473791122436522, 0.030055423736572266, 0.030136320114135744, 0.03035238456726074, 0.030086143493652344, 0.03042815971374512, 0.029456384658813478, 0.02841804885864258, 0.02837606430053711, 0.02860748863220215, 0.029642751693725586, 0.030092287063598632, 0.029476863861083984, 0.029473791122436522, 0.029620223999023438, 0.02952707290649414, 0.029479904174804686, 0.029503488540649415, 0.029401119232177735, 0.029699039459228516, 0.029411327362060546, 0.02953113555908203, 0.02943283271789551, 0.02955366325378418, 0.029463552474975587, 0.029502464294433595, 0.029673471450805664, 0.029665279388427734, 0.02940620803833008, 0.029425664901733397, 0.029487104415893556, 0.029557760238647462, 0.02953625679016113, 0.029508607864379883, 0.029843456268310548, 0.029813760757446288, 0.02995199966430664, 0.030040063858032227, 0.029456384658813478, 0.029619199752807617, 0.028572671890258788, 0.02833919906616211, 0.028301311492919923, 0.028511295318603514, 0.028391359329223632, 0.02844060707092285, 0.02827670478820801, 0.028271615982055662, 0.02810982322692871, 0.028322816848754883, 0.028200960159301756, 0.02814259147644043, 0.028238847732543947, 0.028305408477783203, 0.028268543243408203, 0.028297216415405273, 0.028029951095581054, 0.027644927978515626, 0.027826175689697266, 0.028242944717407226, 0.02812928009033203, 0.02832896041870117, 0.028263423919677736, 0.028224512100219725, 0.028294143676757814, 0.028807167053222657, 0.029320192337036134, 0.02870783996582031, 0.028205055236816406, 0.028260351181030274, 0.02839347267150879, 0.028358655929565428, 0.028283903121948242, 0.02834124755859375, 0.031453184127807614, 0.030427135467529298, 0.02977791976928711, 0.029511680603027345, 0.029594623565673828, 0.02942361640930176, 0.029338623046875, 0.029475839614868164, 0.029520896911621092, 0.02929977607727051, 0.029355968475341797, 0.029700096130371095, 0.029689855575561523, 0.029445119857788086, 0.029419519424438476, 0.029397024154663085, 0.029535200119018553, 0.029298688888549803, 0.02937548828125, 0.029452287673950195, 0.030402559280395508, 0.029863935470581054, 0.029430784225463868, 0.029517824172973633, 0.029543424606323244, 0.029485055923461914, 0.02933452796936035, 0.030298112869262695, 0.030128128051757814, 0.02950655937194824, 0.029082624435424805, 0.02838937568664551, 0.028404735565185548, 0.028451839447021485, 0.028465152740478516, 0.02834636878967285, 0.02830745506286621, 0.02835148811340332, 0.028266496658325195, 0.028244991302490235, 0.02816307258605957, 
0.028251136779785156, 0.02830745506286621, 0.028282880783081055, 0.028274688720703125, 0.028080127716064454, 0.028229631423950196, 0.028338176727294922, 0.02833919906616211, 0.028270591735839845, 0.02827369689941406, 0.028279775619506835, 0.028618751525878908, 0.02840985679626465, 0.02830438423156738, 0.028270591735839845, 0.028515327453613282, 0.029112319946289062, 0.028617727279663087, 0.028445695877075194, 0.02837401580810547, 0.028325887680053712, 0.02955571174621582, 0.029706239700317383, 0.029501440048217774, 0.029396991729736328, 0.02976358413696289, 0.029499391555786132, 0.029484031677246093, 0.029462528228759766, 0.029445119857788086, 0.029512704849243163, 0.029411327362060546, 0.029329408645629884, 0.029903871536254883, 0.03127398490905762, 0.030337024688720703, 0.029713407516479492, 0.029504512786865233, 0.029352960586547853, 0.029403135299682616, 0.029425664901733397, 0.02938368034362793, 0.029496320724487303, 0.02954444885253906, 0.029443071365356444, 0.03035852813720703, 0.02973593521118164, 0.0293570556640625, 0.02940108871459961, 0.029446144104003907, 0.029124607086181642, 0.029890560150146486, 0.028841983795166014, 0.028256256103515624, 0.0283371524810791, 0.02831257629394531, 0.02830335998535156, 0.028254207611083985, 0.028273664474487304, 0.02920243263244629, 0.028469247817993162, 0.028898303985595702, 0.02934681510925293, 0.029524991989135742, 0.029410367965698243, 0.029504447937011718, 0.029417472839355467, 0.029360128402709962, 0.02935603141784668, 0.0295546875, 0.029427711486816405, 0.029418495178222655, 0.03129651260375976, 0.029889535903930665, 0.029379583358764647, 0.02938982391357422, 0.02944000053405762, 0.02958233642578125, 0.029496320724487303, 0.029543424606323244, 0.0303503360748291, 0.02988140869140625, 0.029941696166992188, 0.02993561553955078, 0.02962227249145508, 0.029937664031982423, 0.029455360412597657, 0.029453311920166016, 0.029485055923461914, 0.029496320724487303, 0.02944819259643555, 0.029435935974121093, 0.02942880058288574, 0.029581216812133788, 0.029500415802001953, 0.029468671798706055, 0.029848575592041016, 0.029675519943237305, 0.029502464294433595, 0.030342144012451173, 0.029690879821777344, 0.029502464294433595, 0.02954649543762207, 0.02940006446838379, 0.02983526420593262, 0.02976051139831543, 0.029475839614868164, 0.02993152046203613, 0.029512704849243163, 0.03040153694152832, 0.029674495697021484, 0.02959769630432129, 0.02958950424194336, 0.02949017524719238, 0.029816831588745117, 0.030377023696899413, 0.029982656478881837, 0.029848575592041016, 0.029499391555786132, 0.02935603141784668, 0.029446144104003907, 0.029702144622802733, 0.02825823974609375, 0.02878156852722168, 0.029609983444213867, 0.029436927795410156, 0.029377536773681642, 0.029541439056396484, 0.0295402889251709, 0.029312063217163085, 0.029421503067016602, 0.02938368034362793, 0.029560831069946288, 0.02953932762145996, 0.029792255401611328, 0.029527040481567384, 0.029670400619506834, 0.029444095611572265, 0.02933145523071289, 0.02953113555908203, 0.02955264091491699, 0.02977689552307129, 0.029507583618164062, 0.030233600616455077, 0.030456832885742188, 0.02968780708312988, 0.029430784225463868, 0.029503488540649415, 0.02958438491821289, 0.029421567916870117, 0.029484031677246093, 0.029740032196044923, 0.02958028793334961, 0.02949017524719238, 0.029500415802001953, 0.02936524772644043, 0.02954854393005371, 0.02975948715209961, 0.029656063079833983, 0.029503488540649415, 0.029449216842651366, 0.029913087844848633, 0.029894655227661132, 0.02940928077697754, 
0.029557760238647462, 0.029458431243896483, 0.029405183792114258, 0.029507583618164062, 0.029702144622802733, 0.029494272232055665, 0.029470720291137696, 0.02961622428894043, 0.029429664611816408, 0.029608959197998046, 0.029541376113891602, 0.029500415802001953, 0.02960588836669922, 0.029826047897338868, 0.03019366455078125, 0.030121984481811522, 0.02877952003479004, 0.028407808303833007, 0.028657663345336915, 0.028702720642089844, 0.029526016235351563, 0.030736383438110353, 0.029814783096313476, 0.02953932762145996, 0.029273151397705078, 0.02907436752319336, 0.029455360412597657, 0.029363199234008788, 0.029435903549194335, 0.029516799926757813, 0.02936729621887207, 0.029916160583496092, 0.02983628845214844, 0.029417472839355467, 0.029510719299316406, 0.0312872314453125, 0.030268415451049805, 0.02953830337524414, 0.029386751174926756, 0.029458431243896483, 0.029543424606323244, 0.030118911743164063, 0.029865983963012696, 0.030439424514770507, 0.02960383987426758, 0.0295731201171875, 0.029509632110595704, 0.029535232543945314, 0.0294072322845459, 0.029410303115844725, 0.029490272521972657, 0.02952899169921875, 0.02934272003173828, 0.028875776290893555, 0.028321792602539062, 0.028511232376098632, 0.028505088806152344, 0.02831257629394531, 0.028293119430541993, 0.028306432723999023, 0.028342271804809572, 0.02815897560119629, 0.02837606430053711, 0.028286975860595705, 0.02873139190673828, 0.028444671630859376, 0.028246015548706056, 0.02833612823486328, 0.028366847991943358, 0.028270591735839845, 0.028223487854003908, 0.0283637752532959, 0.028220415115356445, 0.028231679916381838, 0.028299264907836914, 0.02909491157531738, 0.02992639923095703, 0.029453311920166016, 0.029846527099609374, 0.03022233581542969, 0.02999603271484375, 0.031068159103393556, 0.030070783615112305, 0.029791231155395507]",tokens/s,34.188736285061914,,,2,64,1,,, -4bit-awq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,Qwen/Qwen-72B,Qwen/Qwen-72B,cuda,0,42,,,True,,,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 613, in resolve_trust_remote_code - answer = input( -EOFError: EOF when reading a line - -During handling of the above exception, another exception occurred: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run 
- report = scenario.run(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run - self.run_model_loading_tracking(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking - backend.load() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load - self.load_transformers_model() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 149, in load_transformers_model - self.create_no_weights_model() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 269, in create_no_weights_model - meta_model = self.automodel_loader.from_config(self.pretrained_config) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 419, in from_config - trust_remote_code = resolve_trust_remote_code( - File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 626, in resolve_trust_remote_code - raise ValueError( -ValueError: The repository for Qwen/Qwen-72B contains custom code which must be executed to correctly load the model. You can inspect the repository content at https://hf.co/Qwen/Qwen-72B. -Please pass the argument `trust_remote_code=True` to allow custom code to be run. - -",qwen,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,2,64,1,,, -4bit-awq-exllama-v2-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,i,i,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 304, in hf_raise_for_status - response.raise_for_status() - File ""/usr/local/lib/python3.10/dist-packages/requests/models.py"", line 1024, in raise_for_status - raise HTTPError(http_error_msg, response=self) -requests.exceptions.HTTPError: 404 Client Error: Not Found for url: https://huggingface.co/i/resolve/main/config.json - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1221, in hf_hub_download - return 
_hf_hub_download_to_cache_dir( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1325, in _hf_hub_download_to_cache_dir - _raise_on_head_call_error(head_call_error, force_download, local_files_only) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1823, in _raise_on_head_call_error - raise head_call_error - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1722, in _get_metadata_or_catch_error - metadata = get_hf_file_metadata(url=url, proxies=proxies, timeout=etag_timeout, headers=headers) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1645, in get_hf_file_metadata - r = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 372, in _request_wrapper - response = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 396, in _request_wrapper - hf_raise_for_status(response) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status - raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-66949046-5d40a048084f7d2976b0e9c1;0d95bdcf-a14b-4cc3-a305-520fea4c1f87) - -Repository Not Found for url: https://huggingface.co/i/resolve/main/config.json. -Please make sure you specified the correct `repo_id` and `repo_type`. -If you are trying to access a private or gated repo, make sure you are authenticated. - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 425, in cached_file - raise EnvironmentError( -OSError: i is not a local folder and is not a valid model identifier listed on 'https://huggingface.co/models' -If this is a private repository, make sure to pass a token having permission to this repo either by 
logging in with `huggingface-cli login` or by passing `token=` - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,2,64,1,,, -4bit-awq-exllama-v2-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,stabilityai/stablelm-3b-4e1t,stabilityai/stablelm-3b-4e1t,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - self.load_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3907, in from_pretrained - hf_quantizer.postprocess_model(model) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/base.py"", line 195, in postprocess_model - return self._process_model_after_weight_loading(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/quantizer_awq.py"", line 107, in _process_model_after_weight_loading - model = post_init_awq_exllama_modules(model, self.quantization_config.exllama_config) - File ""/usr/local/lib/python3.10/dist-packages/transformers/integrations/awq.py"", line 466, in post_init_awq_exllama_modules - model = exllamav2_post_init( - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllamav2.py"", line 198, in exllamav2_post_init - submodule.post_init(scratch_space=model.scratch_spaces[device]) - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllamav2.py"", line 81, in post_init - self.q_handle = exlv2_ext.make_q_matrix( -NameError: name 'exlv2_ext' is not defined - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,2,64,1,,, 
-4bit-awq-exllama-v2-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,tiiuae/falcon-rw-1b,tiiuae/falcon-rw-1b,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 613, in resolve_trust_remote_code - answer = input( -EOFError: EOF when reading a line - -During handling of the above exception, another exception occurred: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 231, in load_model_with_no_weights - self.create_no_weights_model() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 213, in create_no_weights_model - meta_model = self.automodel_class.from_config(self.pretrained_config) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 419, in from_config - trust_remote_code = resolve_trust_remote_code( - File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 626, in resolve_trust_remote_code - raise ValueError( -ValueError: The repository for tiiuae/falcon-rw-1b contains custom code which must be executed to correctly load the model. You can inspect the repository content at https://hf.co/tiiuae/falcon-rw-1b. -Please pass the argument `trust_remote_code=True` to allow custom code to be run. 
- -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,2,64,1,,, -4bit-awq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,Qwen/Qwen-14B,Qwen/Qwen-14B,cuda,0,42,,,True,,,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 613, in resolve_trust_remote_code - answer = input( -EOFError: EOF when reading a line - -During handling of the above exception, another exception occurred: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run - report = scenario.run(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run - self.run_model_loading_tracking(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking - backend.load() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load - self.load_transformers_model() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 149, in load_transformers_model - self.create_no_weights_model() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 269, in create_no_weights_model - meta_model = self.automodel_loader.from_config(self.pretrained_config) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 419, in from_config - trust_remote_code = resolve_trust_remote_code( - File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 626, in resolve_trust_remote_code - raise ValueError( -ValueError: The repository for Qwen/Qwen-14B contains custom code which must be executed to correctly load the model. You can inspect the repository content at https://hf.co/Qwen/Qwen-14B. -Please pass the argument `trust_remote_code=True` to allow custom code to be run. 
- -",qwen,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,2,64,1,,, -4bit-awq-exllama-v2-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,stabilityai/stablelm-2-1_6b,stabilityai/stablelm-2-1_6b,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - self.load_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3907, in from_pretrained - hf_quantizer.postprocess_model(model) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/base.py"", line 195, in postprocess_model - return self._process_model_after_weight_loading(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/quantizer_awq.py"", line 107, in _process_model_after_weight_loading - model = post_init_awq_exllama_modules(model, self.quantization_config.exllama_config) - File ""/usr/local/lib/python3.10/dist-packages/transformers/integrations/awq.py"", line 466, in post_init_awq_exllama_modules - model = exllamav2_post_init( - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllamav2.py"", line 198, in exllamav2_post_init - submodule.post_init(scratch_space=model.scratch_spaces[device]) - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllamav2.py"", line 81, in post_init - self.q_handle = exlv2_ext.make_q_matrix( -NameError: name 'exlv2_ext' is not defined - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,2,64,1,,, 
-4bit-awq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,EleutherAI/pythia-6.7b,EleutherAI/pythia-6.7b,cuda,0,42,,,True,,,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,gpt_neox,MB,4215.250944,6159.859712,0.0,5530.189824,5138.859008,s,1,10.990646484375,10.990646484375,0.0,10.990646484375,10.990646484375,10.990646484375,10.990646484375,[10.990646484375],,kWh,4.2736360538880386e-05,2.3407499316776583e-05,8.003617513996364e-05,0.0001461800349956206,,MB,1581.105152,6182.928384,0.0,5534.384128,4844.878336,s,10,1.0449887313842774,0.10449887313842773,4.037788535407605e-05,0.10449299240112304,0.10455071716308595,0.10455187072753906,0.10455279357910156,"[0.10443628692626954, 0.10446288299560547, 0.10450012969970703, 0.10447942352294921, 0.10452015686035156, 0.10445286560058593, 0.10448585510253906, 0.10454764556884766, 0.10455302429199219, 0.1045504608154297]",tokens/s,2449.787182497954,kWh,1.2362520120298373e-06,6.772695751808138e-07,5.204486223770709e-06,7.11800781098136e-06,tokens/kWh,35965119.28591228,MB,1617.956864,6185.025536,0.0,5534.384128,5015.843328,s,10,15.150910278320314,1.515091027832031,0.023377937912931742,1.5091336059570313,1.5431247436523436,1.5452247253417968,1.5469047106933593,"[1.495157470703125, 1.490927978515625, 1.4853792724609376, 1.4942926025390626, 1.5193092041015626, 1.54732470703125, 1.5426580810546875, 1.5388231201171876, 1.538079833984375, 1.4989580078125]",tokens/s,41.58166000768135,kWh,1.7881578620468918e-05,9.79935040602676e-06,4.66797775058306e-05,7.436070653232627e-05,tokens/kWh,847221.6434981355,,s,630,15.148687337875382,0.024045535456945023,0.0005615339926653315,0.02409062385559082,0.024694374275207518,0.024824780654907227,0.025565133285522464,"[0.02386332893371582, 0.024011743545532226, 0.02369331169128418, 0.024262655258178712, 0.025230335235595702, 0.024014848709106446, 0.023767040252685546, 0.023826431274414063, 0.023542783737182618, 0.023560192108154295, 0.023622655868530275, 0.023627775192260742, 0.024040447235107423, 0.024995840072631836, 0.02513100814819336, 0.024196096420288086, 0.023623680114746092, 0.02353152084350586, 0.023609344482421874, 0.023573503494262696, 0.023661567687988282, 0.023538688659667968, 0.023568384170532225, 0.023384063720703126, 0.02351923179626465, 0.023568384170532225, 0.023550975799560548, 0.023556095123291015, 0.023538688659667968, 0.023463935852050782, 0.02350592041015625, 0.02350694465637207, 0.02350387191772461, 0.02353152084350586, 0.023459840774536132, 0.023640064239501952, 0.023599103927612306, 0.02351820755004883, 0.023560192108154295, 0.023615488052368162, 0.02350387191772461, 0.023625728607177734, 0.023557119369506836, 0.023588863372802735, 0.023566335678100587, 0.023661567687988282, 0.023582719802856447, 0.02371788787841797, 0.023945215225219727, 0.023621631622314454, 0.023508991241455078, 0.023456768035888673, 0.02353561592102051, 0.023578624725341796, 0.023579647064208984, 0.023564287185668945, 0.02508083152770996, 0.023762943267822266, 0.023605247497558594, 0.023564287185668945, 0.02351513671875, 
0.02369331169128418, 0.023607295989990236, 0.023555072784423828, 0.023426048278808592, 0.023415807723999024, 0.0235100154876709, 0.023141376495361327, 0.023262208938598632, 0.023340032577514647, 0.023574527740478517, 0.023575551986694337, 0.023459840774536132, 0.02349260711669922, 0.023556095123291015, 0.023846912384033202, 0.02374963188171387, 0.023480319976806642, 0.023559167861938478, 0.023476224899291992, 0.023969791412353517, 0.025776128768920898, 0.02393087959289551, 0.0235284481048584, 0.02353459167480469, 0.0231147518157959, 0.023743488311767577, 0.023517183303833008, 0.02375372886657715, 0.02350694465637207, 0.023597055435180665, 0.023578624725341796, 0.023569408416748046, 0.023390207290649414, 0.02366771125793457, 0.024135679244995118, 0.023756799697875978, 0.02350182342529297, 0.023559167861938478, 0.023830528259277343, 0.024000511169433594, 0.023567359924316408, 0.023608320236206053, 0.023544832229614256, 0.023464960098266603, 0.023419904708862304, 0.02323967933654785, 0.025417728424072264, 0.02471219253540039, 0.024004608154296874, 0.023653375625610353, 0.023565311431884766, 0.023597055435180665, 0.023573503494262696, 0.023521280288696288, 0.02347315216064453, 0.023544832229614256, 0.02354380798339844, 0.023591936111450194, 0.023524351119995117, 0.023548927307128906, 0.02353152084350586, 0.023196672439575194, 0.02428313636779785, 0.023682048797607422, 0.02350796890258789, 0.02404351997375488, 0.02408755111694336, 0.023797760009765623, 0.02408550453186035, 0.023855104446411132, 0.024303615570068358, 0.024224767684936522, 0.024035327911376952, 0.024186880111694335, 0.02371174430847168, 0.023425024032592775, 0.023841791152954102, 0.023579647064208984, 0.02369536018371582, 0.023611391067504883, 0.02325299263000488, 0.02309529685974121, 0.02350694465637207, 0.023529472351074218, 0.023440383911132814, 0.02350182342529297, 0.02312499237060547, 0.023129087448120117, 0.023068672180175782, 0.023373823165893554, 0.023190528869628906, 0.023117824554443358, 0.022987775802612305, 0.023045120239257814, 0.023104511260986327, 0.023145471572875977, 0.023144447326660156, 0.02327961540222168, 0.023435264587402343, 0.02346598434448242, 0.02426982307434082, 0.023451648712158202, 0.023384063720703126, 0.02349465560913086, 0.02351820755004883, 0.023556095123291015, 0.02352639961242676, 0.023573503494262696, 0.023631872177124022, 0.023589887619018556, 0.02315673637390137, 0.023441408157348635, 0.023576576232910155, 0.02409779167175293, 0.02405580711364746, 0.023536640167236327, 0.023591936111450194, 0.02415718460083008, 0.02371993637084961, 0.023618560791015625, 0.023540735244750977, 0.024169471740722655, 0.023516159057617187, 0.023425024032592775, 0.023517183303833008, 0.023582719802856447, 0.02350284767150879, 0.02353561592102051, 0.023191551208496093, 0.024261632919311524, 0.023727104187011717, 0.023254016876220703, 0.023393280029296876, 0.023586816787719726, 0.02354380798339844, 0.023751680374145507, 0.023579647064208984, 0.023388160705566406, 0.023579647064208984, 0.023794687271118165, 0.023976959228515626, 0.023953407287597657, 0.023756799697875978, 0.023817216873168946, 0.02342911911010742, 0.023202816009521485, 0.023228416442871092, 0.023224319458007812, 0.023628799438476563, 0.023790592193603514, 0.023212032318115236, 0.02612735939025879, 0.02476032066345215, 0.0235100154876709, 0.023710720062255858, 0.023805952072143553, 0.024203264236450195, 0.023837696075439452, 0.023636991500854493, 0.02367897605895996, 0.023575551986694337, 0.023516159057617187, 0.02347929573059082, 0.023579647064208984, 
0.023200767517089844, 0.024590335845947265, 0.023863296508789062, 0.023568384170532225, 0.023254016876220703, 0.02326425552368164, 0.02325503921508789, 0.023582719802856447, 0.023399423599243165, 0.023564287185668945, 0.023568384170532225, 0.02348953628540039, 0.02335436820983887, 0.0236810245513916, 0.02343731117248535, 0.023581695556640626, 0.02368511962890625, 0.024135679244995118, 0.02406809616088867, 0.023757823944091795, 0.024390655517578123, 0.024559616088867187, 0.024163328170776367, 0.024231935501098634, 0.02350796890258789, 0.023360511779785157, 0.023864320755004883, 0.023962623596191408, 0.023595008850097656, 0.02352639961242676, 0.02409574317932129, 0.024704000473022462, 0.024689664840698244, 0.024371200561523438, 0.02425753593444824, 0.02429439926147461, 0.024233983993530273, 0.024284160614013672, 0.023773183822631837, 0.023867391586303712, 0.024162303924560546, 0.024052736282348632, 0.024057855606079103, 0.02428006362915039, 0.024829952239990235, 0.024210432052612304, 0.024349695205688478, 0.024210432052612304, 0.02408345603942871, 0.024181760787963868, 0.023938047409057618, 0.02405990409851074, 0.024177663803100585, 0.024009727478027345, 0.024203264236450195, 0.02386636734008789, 0.024013824462890625, 0.02410700798034668, 0.024284160614013672, 0.02431488037109375, 0.024319999694824217, 0.024300544738769532, 0.02425651168823242, 0.024089599609375, 0.023808000564575195, 0.02424115180969238, 0.025399295806884766, 0.02510950469970703, 0.024345600128173828, 0.023992319107055664, 0.023394304275512694, 0.02365132713317871, 0.023612415313720703, 0.023768064498901367, 0.024811519622802734, 0.02488934326171875, 0.023777280807495117, 0.023568384170532225, 0.023538688659667968, 0.023541759490966797, 0.023662591934204103, 0.025578496932983398, 0.02472243118286133, 0.023820287704467775, 0.023582719802856447, 0.023829504013061522, 0.02407526397705078, 0.023226367950439454, 0.023402496337890624, 0.023723007202148438, 0.024730623245239256, 0.02714726448059082, 0.025661439895629884, 0.024830976486206056, 0.024828927993774414, 0.024786943435668944, 0.02490982437133789, 0.024603647232055666, 0.02427289581298828, 0.024232959747314452, 0.024160255432128908, 0.024205312728881836, 0.024396799087524415, 0.02453913688659668, 0.024406015396118166, 0.02466815948486328, 0.02427289581298828, 0.024308736801147462, 0.024420352935791017, 0.02450432014465332, 0.024630271911621093, 0.024667135238647463, 0.024810495376586913, 0.024912895202636717, 0.02429439926147461, 0.024173568725585938, 0.024582143783569335, 0.02427187156677246, 0.024243200302124023, 0.024648704528808595, 0.024761344909667967, 0.02469068717956543, 0.02487603187561035, 0.024761344909667967, 0.024656896591186524, 0.024621055603027343, 0.02427801513671875, 0.024453119277954103, 0.024758272171020508, 0.024387584686279298, 0.02450841522216797, 0.024179712295532226, 0.024221696853637696, 0.02413465690612793, 0.02449510383605957, 0.024613887786865234, 0.0247459831237793, 0.024656896591186524, 0.02470604705810547, 0.02447667121887207, 0.02428313636779785, 0.02411724853515625, 0.024224767684936522, 0.024632320404052735, 0.024574975967407226, 0.02437222480773926, 0.024615936279296875, 0.024230911254882814, 0.024245248794555665, 0.024171520233154296, 0.024219648361206055, 0.024600576400756836, 0.024705024719238283, 0.024171520233154296, 0.024129535675048826, 0.024186880111694335, 0.024200191497802736, 0.024178688049316405, 0.024375295639038085, 0.02465996742248535, 0.02467020797729492, 0.024564735412597655, 0.024673280715942384, 0.024633344650268556, 
0.024515584945678712, 0.02468454360961914, 0.024671232223510742, 0.024598527908325195, 0.024608768463134766, 0.024623104095458984, 0.024612863540649413, 0.024526847839355468, 0.0243056640625, 0.024432640075683593, 0.024384511947631835, 0.02414489555358887, 0.024535039901733398, 0.024607744216918945, 0.024054784774780274, 0.02469990348815918, 0.024597503662109374, 0.02429030418395996, 0.0241213436126709, 0.024637439727783202, 0.02458624076843262, 0.02467532730102539, 0.024582143783569335, 0.02467430305480957, 0.02469375991821289, 0.02471219253540039, 0.02467430305480957, 0.024600576400756836, 0.02456985664367676, 0.024679424285888672, 0.02450022315979004, 0.024591360092163086, 0.024242176055908202, 0.024549375534057616, 0.024630271911621093, 0.024236032485961914, 0.02473369598388672, 0.024376319885253905, 0.024151039123535157, 0.0245166072845459, 0.02465996742248535, 0.024662015914916992, 0.024657920837402345, 0.02466815948486328, 0.024219648361206055, 0.024224767684936522, 0.024242176055908202, 0.024164352416992187, 0.024028160095214843, 0.024619007110595705, 0.024689664840698244, 0.024525823593139647, 0.023658496856689453, 0.024938495635986328, 0.02430668830871582, 0.024610815048217775, 0.024440832138061523, 0.024649728775024415, 0.024715263366699217, 0.024526847839355468, 0.024165376663208008, 0.024215551376342775, 0.02409676742553711, 0.02411622428894043, 0.024122367858886717, 0.024667135238647463, 0.024707071304321288, 0.024680448532104493, 0.024263679504394533, 0.02430771255493164, 0.024612863540649413, 0.02453196716308594, 0.024252416610717774, 0.024151039123535157, 0.02411622428894043, 0.024645631790161132, 0.024591360092163086, 0.024491008758544923, 0.02469273567199707, 0.024235008239746093, 0.024139776229858398, 0.024147968292236328, 0.024638463973999023, 0.024626176834106447, 0.02414182472229004, 0.02433228874206543, 0.02457088088989258, 0.024242176055908202, 0.02411315155029297, 0.024473600387573242, 0.024732671737670898, 0.02455244827270508, 0.024762367248535155, 0.024724479675292968, 0.024261632919311524, 0.0241213436126709, 0.024154111862182616, 0.024181760787963868, 0.02452992057800293, 0.024170495986938476, 0.024168447494506837, 0.024207359313964845, 0.024185855865478514, 0.024130559921264647, 0.024136735916137696, 0.02419094467163086, 0.024649728775024415, 0.025333759307861328, 0.025428991317749023, 0.024837120056152344, 0.024781824111938477, 0.024211456298828125, 0.024169471740722655, 0.02470809555053711, 0.024327167510986326, 0.023414783477783203, 0.02365235137939453, 0.02354380798339844, 0.02332057571411133, 0.02332569694519043, 0.02328780746459961, 0.023640064239501952, 0.024263679504394533, 0.024195072174072265, 0.02429644775390625, 0.02430463981628418, 0.02409164810180664, 0.02406399917602539, 0.02449715232849121, 0.024564735412597655, 0.024819711685180663, 0.025011199951171875, 0.02467635154724121, 0.024542207717895507, 0.024679424285888672, 0.024557567596435546, 0.02465996742248535, 0.024550399780273437, 0.02409779167175293, 0.024444927215576173, 0.02450534439086914, 0.024638463973999023, 0.024612863540649413, 0.0247193603515625, 0.024654848098754883, 0.024620031356811522, 0.024846336364746095, 0.02467532730102539, 0.02491187286376953, 0.02470195198059082, 0.02466815948486328, 0.024679424285888672, 0.02413465690612793, 0.02411212730407715, 0.024171520233154296, 0.024186880111694335, 0.02452992057800293, 0.02429132843017578, 0.024268800735473633, 0.024740863800048828, 0.024634368896484377, 0.024653823852539062, 0.024657920837402345, 0.02469273567199707, 
0.024713216781616212, 0.024603647232055666, 0.024664064407348633, 0.0245166072845459, 0.024329216003417968, 0.0241530876159668, 0.024705024719238283, 0.02515456008911133, 0.024927232742309572, 0.024681472778320314, 0.02456268882751465, 0.024250368118286132, 0.024645631790161132, 0.024142847061157227, 0.025792512893676758, 0.024772607803344726, 0.0245032958984375, 0.024573951721191405, 0.02433843231201172, 0.024535039901733398, 0.0239237117767334, 0.023617536544799804, 0.02352742385864258, 0.023444480895996093, 0.02328371238708496, 0.023404544830322265, 0.023630847930908205, 0.023740415573120118, 0.023553024291992186, 0.023400447845458985, 0.023513088226318358, 0.02312396812438965, 0.023109632492065428, 0.023220224380493162, 0.023147552490234376, 0.02352227210998535, 0.023175167083740233, 0.023206911087036132, 0.023597055435180665, 0.02349056053161621, 0.023532543182373047, 0.02351411247253418, 0.023290880203247072, 0.023211008071899415, 0.023335935592651368, 0.02553241539001465, 0.025846784591674804, 0.023835647583007814, 0.02373324775695801, 0.023556095123291015, 0.023632896423339843, 0.023602176666259765, 0.023533567428588868, 0.023559167861938478, 0.02354380798339844, 0.02347724723815918, 0.023546880722045898, 0.023587839126586914, 0.023578624725341796, 0.023411712646484374, 0.023227392196655275, 0.023584768295288085, 0.023553024291992186, 0.02364825630187988, 0.023331840515136718, 0.023394304275512694, 0.023583744049072267, 0.023638015747070314, 0.023542783737182618, 0.023802879333496094, 0.024576000213623047, 0.02430259132385254, 0.024138751983642577, 0.02431385612487793, 0.024247295379638673, 0.025260032653808592, 0.024573951721191405]",tokens/s,41.5877617610371,,,2,64,1,,, -4bit-awq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,01-ai/Yi-34B,01-ai/Yi-34B,cuda,0,42,,,True,,,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,llama,MB,17675.65312,22507.159552,0.0,21877.489664,21024.863232,s,1,19.672849609375,19.672849609375,0.0,19.672849609375,19.672849609375,19.672849609375,19.672849609375,[19.672849609375],,kWh,0.0001527158016499963,8.368189489156028e-05,0.00033219137686396616,0.0005685890734055227,,MB,4732.08832,22588.94848,0.0,21940.404224,19728.708096,s,10,5.2001083984375,0.52001083984375,0.0002612213321407145,0.5199537048339844,0.5203627502441407,0.5204325958251953,0.520488472290039,"[0.5197767333984376, 0.5202744140625, 0.5199697875976562, 0.52050244140625, 0.5200132446289063, 0.5197109375, 0.519846923828125, 0.5197290649414062, 0.5199376220703125, 0.5203472290039063]",tokens/s,492.2974299476555,kWh,6.145990360868044e-06,3.3677220301130146e-06,2.4088380381800945e-05,3.3602092772782005e-05,tokens/kWh,7618573.0969519345,MB,4740.308992,22616.211456,0.0,21965.570048,19728.710656,s,10,35.26120947265625,3.5261209472656247,0.008651624965183962,3.5296944580078127,3.53474453125,3.537799389648437,3.5402432763671876,"[3.529432861328125, 3.5299560546875, 3.534065673828125, 3.540854248046875, 3.530581298828125, 3.51314111328125, 3.516546142578125, 3.514847900390625, 
3.530099609375, 3.5216845703125]",tokens/s,17.86665884187953,kWh,4.160499797718762e-05,2.2802414192348797e-05,0.00013918480579219739,0.00020359221796173384,tokens/kWh,309442.0829574201,,s,630,35.25862912750245,0.055966077980162614,0.00042827902735830556,0.05578035354614258,0.056548453903198244,0.05676770687103271,0.057255547561645505,"[0.05610086441040039, 0.05565951919555664, 0.05569331359863281, 0.05572710418701172, 0.055795711517333986, 0.05564313507080078, 0.05621657562255859, 0.05617049789428711, 0.05568204879760742, 0.055667713165283204, 0.05569126510620117, 0.05568307113647461, 0.055777278900146485, 0.055739391326904295, 0.05568307113647461, 0.05572915267944336, 0.055752704620361325, 0.055624702453613284, 0.05560627365112305, 0.05627699279785156, 0.056592384338378904, 0.056648704528808595, 0.056569854736328126, 0.05647359848022461, 0.05651148986816406, 0.056548351287841796, 0.056513534545898435, 0.056493057250976565, 0.05639782333374024, 0.05652073669433594, 0.06018761444091797, 0.05686067199707031, 0.05655449676513672, 0.05638655853271484, 0.055531520843505856, 0.05563904190063477, 0.05555199813842773, 0.05573529434204102, 0.0556492805480957, 0.055695358276367186, 0.055646209716796874, 0.05571891021728516, 0.05657497787475586, 0.055962623596191405, 0.05568204879760742, 0.05577523040771484, 0.05568716812133789, 0.05571686553955078, 0.05573427200317383, 0.055894016265869144, 0.05667532730102539, 0.055965694427490234, 0.05569228744506836, 0.05570150375366211, 0.05567897415161133, 0.05581414413452149, 0.05573324966430664, 0.05572198486328125, 0.055741439819335936, 0.05568921661376953, 0.055757823944091796, 0.05593395233154297, 0.05570969772338867, 0.05705215835571289, 0.056438785552978515, 0.05573836898803711, 0.05576396942138672, 0.05568716812133789, 0.0561899528503418, 0.05711667251586914, 0.056010753631591796, 0.055812095642089846, 0.05569843292236328, 0.055757823944091796, 0.055795711517333986, 0.05578035354614258, 0.05576704025268555, 0.05578137588500977, 0.05576806259155274, 0.055757823944091796, 0.055702529907226565, 0.05634969711303711, 0.05651865768432617, 0.055766014099121096, 0.05571686553955078, 0.05567180633544922, 0.056164352416992185, 0.05650431823730469, 0.05636710357666016, 0.05561753463745117, 0.05542502212524414, 0.05554483032226563, 0.05568307113647461, 0.055769088745117185, 0.05637529754638672, 0.0557844467163086, 0.05560115051269531, 0.05565951919555664, 0.055815166473388675, 0.05615820693969727, 0.055669761657714846, 0.05574348831176758, 0.05566361618041992, 0.05578137588500977, 0.05654937744140625, 0.05611212921142578, 0.05575884628295898, 0.05575680160522461, 0.055690238952636716, 0.05578956985473633, 0.05600665664672851, 0.05737472152709961, 0.05586841583251953, 0.0566927375793457, 0.05628422546386719, 0.0558007698059082, 0.05655347061157227, 0.05698559951782227, 0.055926784515380856, 0.055725055694580077, 0.05572403335571289, 0.055798782348632815, 0.05669478225708008, 0.05658316802978516, 0.05647052764892578, 0.056584190368652344, 0.05638246536254883, 0.05611008071899414, 0.056389633178710936, 0.056264705657958984, 0.05648588943481445, 0.0565667839050293, 0.056030208587646485, 0.05562777709960937, 0.05593907165527344, 0.05567692947387695, 0.05574655914306641, 0.055700481414794924, 0.05575167846679688, 0.056235008239746094, 0.05576806259155274, 0.05575167846679688, 0.05638662338256836, 0.056538047790527346, 0.05650739288330078, 0.05616128158569336, 0.05632819366455078, 0.05664051055908203, 0.05759590530395508, 0.05660774230957031, 0.05608755111694336, 
0.05586739349365234, 0.05573529434204102, 0.05585203170776367, 0.05639987182617188, 0.05597798538208008, 0.05652070236206055, 0.0558766098022461, 0.05569228744506836, 0.05574758529663086, 0.05583564758300781, 0.05600255966186524, 0.05574655914306641, 0.05568716812133789, 0.05596672058105469, 0.05654732894897461, 0.05677260971069336, 0.05690777587890625, 0.05651251220703125, 0.05591756820678711, 0.055755775451660154, 0.05570457458496094, 0.05565030288696289, 0.05570256042480469, 0.056255455017089846, 0.055894016265869144, 0.056425472259521485, 0.05570969772338867, 0.05653094482421875, 0.05613875198364258, 0.055657470703125, 0.05566463851928711, 0.05562060928344727, 0.055725055694580077, 0.056556575775146486, 0.05656470489501953, 0.05616128158569336, 0.05564211273193359, 0.0556124153137207, 0.056941566467285154, 0.05624319839477539, 0.056033279418945314, 0.05596057510375976, 0.05620633697509766, 0.055818241119384764, 0.0562083854675293, 0.0556943359375, 0.056360958099365234, 0.0565852165222168, 0.05648896026611328, 0.056357887268066405, 0.05712793731689453, 0.057306110382080076, 0.0567347183227539, 0.05634048080444336, 0.0568903694152832, 0.0557209587097168, 0.0556492805480957, 0.05575475311279297, 0.05629337692260742, 0.05676547241210937, 0.05658723068237305, 0.05629132843017578, 0.055736320495605465, 0.05563187026977539, 0.055586814880371094, 0.05660671997070312, 0.05639372634887695, 0.055812095642089846, 0.055828479766845705, 0.055547904968261716, 0.05570969772338867, 0.056403968811035154, 0.056564735412597655, 0.05677260971069336, 0.05692211151123047, 0.05584691238403321, 0.056627201080322265, 0.05778742218017578, 0.057076702117919924, 0.056771583557128906, 0.05570457458496094, 0.05563904190063477, 0.057145343780517575, 0.05586022567749024, 0.05571891021728516, 0.05596672058105469, 0.05581011199951172, 0.05603628921508789, 0.05595340728759766, 0.05653811264038086, 0.05570150375366211, 0.05579776000976563, 0.05640499114990234, 0.0564664306640625, 0.05569126510620117, 0.05584281539916992, 0.05583359909057617, 0.05567692947387695, 0.05560934448242188, 0.055632896423339843, 0.05558272171020508, 0.056268798828125, 0.05570867156982422, 0.055877632141113284, 0.05574348831176758, 0.05580083084106445, 0.05562777709960937, 0.055548927307128904, 0.05555916976928711, 0.055651329040527345, 0.05677363204956055, 0.05674803161621094, 0.05646233749389649, 0.05626572799682617, 0.0558919677734375, 0.056025089263916014, 0.057804798126220705, 0.0572334098815918, 0.05602406311035156, 0.05566668701171875, 0.055670783996582034, 0.0558766098022461, 0.055769088745117185, 0.055766014099121096, 0.055739391326904295, 0.055672832489013675, 0.05569638442993164, 0.05566156768798828, 0.05569126510620117, 0.055670783996582034, 0.0558653450012207, 0.05567488098144531, 0.0556954231262207, 0.055747520446777346, 0.05678182220458984, 0.05646438217163086, 0.05626367950439453, 0.055742462158203124, 0.05559500885009765, 0.056769535064697264, 0.05576704025268555, 0.05572710418701172, 0.05578137588500977, 0.05573324966430664, 0.055700481414794924, 0.05723955154418945, 0.056718334197998044, 0.05654118347167969, 0.05612646484375, 0.055672832489013675, 0.05586131286621094, 0.056263614654541015, 0.05655756759643555, 0.05662105560302735, 0.05639372634887695, 0.05645107269287109, 0.05653708648681641, 0.055725055694580077, 0.05583257675170898, 0.055769088745117185, 0.055725055694580077, 0.055731201171875, 0.05570457458496094, 0.0556492805480957, 0.056269824981689455, 0.05572608184814453, 0.05573529434204102, 0.05561139297485351, 
0.055585792541503906, 0.055700481414794924, 0.055618560791015625, 0.05563596725463867, 0.05565849685668945, 0.05568921661376953, 0.055782398223876956, 0.05569740676879883, 0.05565542221069336, 0.05570867156982422, 0.055793663024902344, 0.05575680160522461, 0.05570563125610352, 0.0555898551940918, 0.055634944915771485, 0.05573529434204102, 0.05567488098144531, 0.05573017501831055, 0.055613441467285155, 0.0556943359375, 0.05564128112792969, 0.05604230499267578, 0.056497150421142575, 0.055995391845703124, 0.05580595016479492, 0.05578137588500977, 0.05567795181274414, 0.05565849685668945, 0.05573222351074219, 0.05576499176025391, 0.05574041748046875, 0.05567795181274414, 0.05568000030517578, 0.056005630493164066, 0.055836673736572265, 0.0557946891784668, 0.05573017501831055, 0.055810047149658204, 0.05574655914306641, 0.055634944915771485, 0.055801856994628904, 0.0558131217956543, 0.05578035354614258, 0.05563699340820313, 0.05570457458496094, 0.05568617630004883, 0.055971809387207035, 0.05592268753051758, 0.05571072006225586, 0.055695358276367186, 0.05575680160522461, 0.055769088745117185, 0.05583257675170898, 0.055803905487060546, 0.05579673767089844, 0.055702529907226565, 0.055731201171875, 0.055723007202148435, 0.055785472869873044, 0.05620326232910156, 0.05693747329711914, 0.05615513610839844, 0.05588684844970703, 0.05578342437744141, 0.05570867156982422, 0.05581721496582031, 0.056066047668457034, 0.05587865447998047, 0.05589606475830078, 0.05581721496582031, 0.05562265777587891, 0.055657470703125, 0.05571993637084961, 0.05571481704711914, 0.05573427200317383, 0.05588787078857422, 0.05569945526123047, 0.05569228744506836, 0.055940097808837894, 0.055779327392578126, 0.055711742401123046, 0.055777278900146485, 0.05571891021728516, 0.05569126510620117, 0.05575884628295898, 0.05572403335571289, 0.055696449279785155, 0.05574342346191406, 0.05570355224609375, 0.05560934448242188, 0.055736320495605465, 0.055788543701171874, 0.05568819046020508, 0.055578624725341794, 0.05560729598999024, 0.05597491073608398, 0.0558551025390625, 0.0557762565612793, 0.05569843292236328, 0.05565951919555664, 0.055809024810791016, 0.05566361618041992, 0.056330238342285156, 0.056253440856933595, 0.05586227035522461, 0.05597695922851562, 0.055662593841552734, 0.0556492805480957, 0.05573734283447265, 0.05584896087646484, 0.056492191314697265, 0.05570339202880859, 0.055608318328857424, 0.055623680114746096, 0.055621631622314455, 0.05558169555664062, 0.055672832489013675, 0.05565849685668945, 0.055798782348632815, 0.055777278900146485, 0.05602918243408203, 0.055826431274414064, 0.0566640625, 0.05580799865722656, 0.055841854095458984, 0.05563590240478516, 0.05566361618041992, 0.05573017501831055, 0.05595750427246094, 0.055690238952636716, 0.05555916976928711, 0.05571481704711914, 0.05567795181274414, 0.05560115051269531, 0.055605247497558595, 0.05579980850219726, 0.05581414413452149, 0.05567795181274414, 0.05572710418701172, 0.05567180633544922, 0.05562265777587891, 0.055686145782470706, 0.055618560791015625, 0.05568000030517578, 0.055624702453613284, 0.05586841583251953, 0.055684097290039064, 0.055725055694580077, 0.05568511962890625, 0.05610496139526367, 0.05581414413452149, 0.05563904190063477, 0.05565849685668945, 0.05585715103149414, 0.05583257675170898, 0.055790592193603515, 0.05582131195068359, 0.05571379089355469, 0.05571583938598633, 0.05583564758300781, 0.05576704025268555, 0.05580492782592773, 0.05571072006225586, 0.05584793472290039, 0.055752704620361325, 0.055711742401123046, 0.055731201171875, 
0.05574860763549805, 0.0559381103515625, 0.05570246505737304, 0.0558653450012207, 0.05598617553710938, 0.0558551025390625, 0.05579776000976563, 0.0557209587097168, 0.05575987243652344, 0.055769088745117185, 0.0559554557800293, 0.055793663024902344, 0.05562879943847656, 0.05569638442993164, 0.05563187026977539, 0.056782848358154295, 0.05608243179321289, 0.05572608184814453, 0.056371200561523435, 0.05573222351074219, 0.055605247497558595, 0.055597057342529295, 0.05554278564453125, 0.05572915267944336, 0.055861248016357425, 0.05589299011230469, 0.055962623596191405, 0.05588275146484375, 0.05566156768798828, 0.05559091186523438, 0.05589299011230469, 0.05588889694213867, 0.055772159576416014, 0.05566668701171875, 0.05574041748046875, 0.05563699340820313, 0.055656448364257816, 0.05571686553955078, 0.05566054534912109, 0.05567180633544922, 0.0556492805480957, 0.05614182281494141, 0.057262081146240235, 0.05637535858154297, 0.0562677116394043, 0.05622272109985352, 0.05625241470336914, 0.05630054473876953, 0.05631590270996094, 0.056318977355957034, 0.056231937408447265, 0.05633740615844727, 0.056097793579101565, 0.05649612808227539, 0.05720070266723633, 0.05638547134399414, 0.056256511688232425, 0.056180736541748044, 0.056272895812988284, 0.05635583877563476, 0.05600665664672851, 0.055731201171875, 0.055907329559326174, 0.05570867156982422, 0.055616512298583984, 0.055634944915771485, 0.05598822402954102, 0.05573734283447265, 0.05592575836181641, 0.05568819046020508, 0.0556759033203125, 0.05590835189819336, 0.05604761505126953, 0.0563240966796875, 0.05615718460083008, 0.05631488037109375, 0.05623603057861328, 0.05623807907104492, 0.05635996627807617, 0.05626057434082031, 0.0566927375793457, 0.05682995223999023, 0.05629849624633789, 0.05616844940185547, 0.056288257598876956, 0.056251392364501954, 0.057178112030029295, 0.056400894165039066, 0.05594112014770508, 0.05583257675170898, 0.05581107330322266, 0.0557250862121582, 0.056040416717529296, 0.05586022567749024, 0.05707468795776367, 0.05655654525756836, 0.0557844467163086, 0.05574348831176758, 0.05574041748046875, 0.05570764923095703, 0.05614182281494141, 0.05632921600341797, 0.056139774322509765, 0.05577523040771484, 0.05616128158569336, 0.05623295974731445, 0.05612646484375, 0.05606911849975586, 0.055690238952636716, 0.05564211273193359, 0.056010753631591796, 0.05580083084106445, 0.05569331359863281, 0.0556431999206543, 0.05575673675537109, 0.05569126510620117, 0.05581721496582031, 0.055600128173828124, 0.05560627365112305, 0.0556124153137207, 0.05558169555664062, 0.0557127685546875, 0.05575372695922851, 0.05572710418701172, 0.05577318572998047, 0.055744510650634765, 0.05564211273193359, 0.05577523040771484, 0.05573836898803711, 0.05573324966430664, 0.05566156768798828, 0.05568819046020508, 0.05566873550415039, 0.05565647888183594, 0.05569020843505859, 0.05565039825439453, 0.05567068862915039, 0.05565849685668945, 0.055618560791015625, 0.055672832489013675, 0.05555916976928711, 0.05562879943847656, 0.05584896087646484, 0.0557946891784668]",tokens/s,17.86796638410956,,,2,64,1,,, 
-4bit-awq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,EleutherAI/gpt-neo-1.3B,EleutherAI/gpt-neo-1.3B,cuda,0,42,,,True,,,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,gpt_neo,MB,1550.159872,1804.075008,0.0,1174.40512,1147.036672,s,1,8.4729375,8.4729375,0.0,8.4729375,8.4729375,8.4729375,8.4729375,[8.4729375],,kWh,1.9532451124306113e-05,1.0689575772187266e-05,3.1026691487995306e-05,6.124871838448869e-05,,MB,1648.672768,1879.57248,0.0,1231.028224,1064.7808,s,12,0.31659408187866217,0.026382840156555176,2.7796273416128276e-05,0.02638267230987549,0.026417984199523927,0.026431709384918213,0.02644215700149536,"[0.026335296630859376, 0.026389568328857423, 0.026378751754760742, 0.026421024322509767, 0.026444768905639647, 0.02637343978881836, 0.02638809585571289, 0.026369184494018556, 0.026386592864990236, 0.026351999282836915, 0.026364736557006836, 0.026390623092651368]",tokens/s,9703.276769328162,kWh,3.1078060159302734e-07,1.702669554709439e-07,1.1465268723097672e-06,1.6275744293737387e-06,tokens/kWh,157289273.76826888,MB,1679.355904,1879.57248,0.0,1231.028224,1118.109696,s,12,10.410047424316405,0.8675039520263672,0.015910609083034696,0.8733244323730469,0.8786638427734376,0.8790870635986329,0.8794431939697266,"[0.8787228393554688, 0.87716357421875, 0.87038916015625, 0.8781328735351562, 0.8741294555664062, 0.8760210571289062, 0.868830078125, 0.8725194091796875, 0.8313599243164063, 0.8689221801757813, 0.8343246459960938, 0.8795322265625]",tokens/s,72.62214754508133,kWh,1.0040586320166714e-05,5.501757186153379e-06,1.923131350402092e-05,3.477365701034101e-05,tokens/kWh,1811716.2650239815,,s,756,10.405540869712818,0.013763942949355595,0.0003870008958008143,0.013918208122253419,0.01403647994995117,0.01409996771812439,0.014817331171035768,"[0.013065216064453124, 0.013197312355041504, 0.013881343841552735, 0.01397555160522461, 0.013897727966308594, 0.01386086368560791, 0.01395404815673828, 0.01386086368560791, 0.013741056442260742, 0.013883392333984374, 0.013832192420959472, 0.013873151779174805, 0.013883392333984374, 0.013829119682312012, 0.01387110424041748, 0.013867008209228515, 0.014025728225708007, 0.014844927787780762, 0.015055871963500977, 0.014461952209472656, 0.014194687843322755, 0.013924351692199707, 0.014064640045166015, 0.013906944274902343, 0.01397862434387207, 0.013997056007385255, 0.013873151779174805, 0.013816831588745117, 0.013853695869445801, 0.01395404815673828, 0.01384447956085205, 0.013925375938415528, 0.01389363193511963, 0.013875200271606445, 0.013937664031982423, 0.013945856094360352, 0.0140697603225708, 0.013997056007385255, 0.013920255661010742, 0.014027775764465332, 0.014026752471923828, 0.01409331226348877, 0.013904895782470703, 0.013981696128845214, 0.013780991554260253, 0.013945856094360352, 0.013913087844848633, 0.013937664031982423, 0.01390182399749756, 0.013990912437438965, 0.013910016059875489, 0.013904895782470703, 0.013919232368469238, 0.014082048416137695, 0.013914112091064454, 0.013957119941711426, 0.013912063598632812, 0.013940735816955567, 
0.013889535903930664, 0.013881343841552735, 0.013919232368469238, 0.013959168434143066, 0.013821951866149903, 0.01305907154083252, 0.013188096046447753, 0.013279232025146484, 0.013200384140014648, 0.013369343757629394, 0.015410176277160645, 0.015145983695983887, 0.014173184394836426, 0.013974559783935548, 0.014880736351013184, 0.014179327964782714, 0.013989888191223144, 0.01394380760192871, 0.013898752212524413, 0.01387827205657959, 0.013808639526367187, 0.0138854398727417, 0.013827072143554688, 0.013974528312683105, 0.013834239959716797, 0.013915136337280273, 0.013890560150146485, 0.0139683837890625, 0.013926400184631347, 0.013907967567443847, 0.01387007999420166, 0.013964287757873535, 0.013908991813659668, 0.013921279907226563, 0.01388646411895752, 0.01388646411895752, 0.01388748836517334, 0.013934592247009277, 0.013890560150146485, 0.013940735816955567, 0.013896703720092773, 0.013937664031982423, 0.013868032455444336, 0.013939711570739746, 0.013896703720092773, 0.013872127532958984, 0.013915136337280273, 0.013848575592041015, 0.013859840393066406, 0.013838335990905762, 0.013998080253601074, 0.013819904327392578, 0.013973504066467286, 0.013971455574035644, 0.013937664031982423, 0.01384447956085205, 0.013819904327392578, 0.01380355167388916, 0.013858783721923828, 0.013881343841552735, 0.013884415626525879, 0.013938688278198242, 0.013876223564147949, 0.013904895782470703, 0.01387110424041748, 0.013912063598632812, 0.01397657585144043, 0.014014464378356933, 0.013064191818237305, 0.013224960327148438, 0.013153280258178711, 0.013244416236877441, 0.013189120292663574, 0.01305292797088623, 0.013201408386230469, 0.01317683219909668, 0.013103103637695313, 0.013164544105529785, 0.013313023567199708, 0.013230079650878907, 0.013197312355041504, 0.013964287757873535, 0.013928447723388672, 0.014021632194519042, 0.013925375938415528, 0.014096384048461913, 0.013935615539550781, 0.014042112350463867, 0.014029824256896972, 0.014008319854736329, 0.014092288017272948, 0.014115839958190919, 0.014012415885925293, 0.014010368347167968, 0.013964320182800293, 0.01400111961364746, 0.013965312004089356, 0.013923328399658203, 0.013979647636413574, 0.01397555160522461, 0.013961215972900391, 0.013993984222412109, 0.013988863945007325, 0.013981696128845214, 0.013948927879333496, 0.013940735816955567, 0.013902848243713378, 0.013972479820251465, 0.013940735816955567, 0.013971455574035644, 0.01388748836517334, 0.014011391639709473, 0.013963264465332031, 0.01407795238494873, 0.013876223564147949, 0.014034943580627441, 0.013987839698791504, 0.013935615539550781, 0.013922304153442382, 0.013946880340576171, 0.013892607688903809, 0.013933568000793458, 0.0138854398727417, 0.013920255661010742, 0.01407487964630127, 0.014046208381652832, 0.013924351692199707, 0.013907967567443847, 0.01396019172668457, 0.01398681640625, 0.013926400184631347, 0.013037599563598633, 0.013230048179626464, 0.013191167831420898, 0.013175807952880859, 0.013184000015258789, 0.013225983619689942, 0.013187071800231934, 0.013172736167907715, 0.013133824348449707, 0.013189120292663574, 0.014359552383422852, 0.01580134391784668, 0.014415871620178223, 0.013859840393066406, 0.013950976371765136, 0.013949952125549316, 0.013946880340576171, 0.013897727966308594, 0.013950976371765136, 0.014014464378356933, 0.014020607948303223, 0.01396735954284668, 0.01400115203857422, 0.013955072402954101, 0.014166015625, 0.013948927879333496, 0.01397555160522461, 0.01396224021911621, 0.01399500846862793, 0.013918208122253419, 0.014047231674194336, 0.013791232109069825, 
0.013958144187927245, 0.013926400184631347, 0.01397043228149414, 0.013904895782470703, 0.01397043228149414, 0.013935615539550781, 0.01397555160522461, 0.013953023910522461, 0.013963264465332031, 0.013935680389404298, 0.013972415924072265, 0.014554112434387208, 0.014653440475463866, 0.015133695602416992, 0.01428377628326416, 0.014053376197814941, 0.014054400444030762, 0.013972479820251465, 0.014163968086242675, 0.014079999923706055, 0.013953023910522461, 0.014018560409545898, 0.01397657585144043, 0.013987839698791504, 0.013935615539550781, 0.01407692813873291, 0.01400115203857422, 0.013949952125549316, 0.01389363193511963, 0.014040063858032227, 0.013884415626525879, 0.013082624435424805, 0.013247488021850586, 0.0130764799118042, 0.013196288108825683, 0.013229056358337403, 0.013199359893798827, 0.013189120292663574, 0.013170687675476075, 0.013867008209228515, 0.013950976371765136, 0.013950976371765136, 0.013918208122253419, 0.013985792160034179, 0.013930496215820312, 0.013928447723388672, 0.013927424430847168, 0.014003199577331543, 0.013933568000793458, 0.013999103546142578, 0.014035967826843262, 0.013857791900634766, 0.013935615539550781, 0.013931520462036133, 0.013931520462036133, 0.014089216232299804, 0.01419059181213379, 0.014027775764465332, 0.014002176284790039, 0.013971455574035644, 0.013927424430847168, 0.014095359802246094, 0.01417420768737793, 0.014036992073059081, 0.014015487670898438, 0.014073856353759765, 0.01397760009765625, 0.01399500846862793, 0.013930496215820312, 0.013928447723388672, 0.01396019172668457, 0.014008319854736329, 0.014123007774353028, 0.01407795238494873, 0.013997056007385255, 0.013935615539550781, 0.01396735954284668, 0.013957119941711426, 0.013890560150146485, 0.014022656440734863, 0.013868032455444336, 0.013917183876037598, 0.013910016059875489, 0.013950976371765136, 0.014002176284790039, 0.01389568042755127, 0.013997056007385255, 0.013814784049987794, 0.013922304153442382, 0.013926400184631347, 0.0139683837890625, 0.013906944274902343, 0.013923328399658203, 0.013873151779174805, 0.013074432373046875, 0.013223936080932617, 0.013150208473205567, 0.013187071800231934, 0.013192192077636718, 0.013495295524597169, 0.013934592247009277, 0.014027775764465332, 0.013915136337280273, 0.013938688278198242, 0.013908991813659668, 0.014034943580627441, 0.013936639785766602, 0.014026752471923828, 0.013903871536254882, 0.013926400184631347, 0.013888511657714844, 0.013915136337280273, 0.013858816146850587, 0.013939711570739746, 0.01386393642425537, 0.013908991813659668, 0.013979711532592774, 0.013941696166992187, 0.013875200271606445, 0.013949952125549316, 0.013912063598632812, 0.014022656440734863, 0.013829119682312012, 0.013959199905395507, 0.013985759735107422, 0.013921279907226563, 0.014094335556030273, 0.014252063751220703, 0.013958111763000488, 0.013882368087768555, 0.013940735816955567, 0.013915136337280273, 0.01479475212097168, 0.014270463943481445, 0.014016511917114258, 0.014015487670898438, 0.013966336250305175, 0.013983743667602539, 0.013948927879333496, 0.013899776458740234, 0.013849599838256836, 0.013945856094360352, 0.013902848243713378, 0.013942784309387207, 0.013912063598632812, 0.013942784309387207, 0.01399500846862793, 0.013938688278198242, 0.014023679733276367, 0.014008383750915527, 0.013945792198181152, 0.013961215972900391, 0.01396224021911621, 0.013964320182800293, 0.013969375610351562, 0.01389363193511963, 0.013925375938415528, 0.013108223915100097, 0.013221887588500977, 0.01319321632385254, 0.013224960327148438, 0.013159423828125, 
0.013180928230285644, 0.013248512268066406, 0.013192192077636718, 0.013163519859313964, 0.013232128143310547, 0.01313587188720703, 0.013279232025146484, 0.013205504417419434, 0.014019583702087402, 0.01396224021911621, 0.013935615539550781, 0.013926400184631347, 0.013894656181335448, 0.013991935729980469, 0.013961215972900391, 0.01387929630279541, 0.013922304153442382, 0.013938688278198242, 0.013948927879333496, 0.013949952125549316, 0.013966336250305175, 0.01387827205657959, 0.013953023910522461, 0.013935615539550781, 0.013940735816955567, 0.013894656181335448, 0.013907967567443847, 0.013840383529663085, 0.014011391639709473, 0.013883392333984374, 0.01387007999420166, 0.013853695869445801, 0.013915136337280273, 0.01390182399749756, 0.014096384048461913, 0.01399500846862793, 0.013959168434143066, 0.013990912437438965, 0.013999103546142578, 0.013882368087768555, 0.013903871536254882, 0.014039039611816406, 0.014042112350463867, 0.014000127792358399, 0.013919232368469238, 0.013946880340576171, 0.013945856094360352, 0.013919232368469238, 0.013957119941711426, 0.013880319595336914, 0.013930496215820312, 0.013939711570739746, 0.013906944274902343, 0.013956095695495606, 0.01387724781036377, 0.013949952125549316, 0.013892607688903809, 0.013896703720092773, 0.013113344192504883, 0.013194239616394043, 0.01316044807434082, 0.013223936080932617, 0.013186047554016114, 0.013253631591796875, 0.013194239616394043, 0.013104127883911134, 0.013139967918395995, 0.01325055980682373, 0.014641152381896973, 0.014030847549438476, 0.013882368087768555, 0.013902848243713378, 0.013849599838256836, 0.013910016059875489, 0.013925375938415528, 0.013916159629821777, 0.013990912437438965, 0.013903871536254882, 0.013959168434143066, 0.01388646411895752, 0.01395199966430664, 0.013950976371765136, 0.013947903633117676, 0.013888511657714844, 0.014063615798950196, 0.013949952125549316, 0.013956095695495606, 0.013945856094360352, 0.013953023910522461, 0.013905920028686524, 0.013937664031982423, 0.014104576110839843, 0.014054400444030762, 0.013924351692199707, 0.014051327705383301, 0.01406156826019287, 0.013991935729980469, 0.013932543754577637, 0.013955072402954101, 0.013921279907226563, 0.014017536163330077, 0.013867008209228515, 0.013936639785766602, 0.013874176025390626, 0.013964287757873535, 0.014052351951599122, 0.014076959609985352, 0.013952992439270019, 0.014106623649597168, 0.013953023910522461, 0.013941760063171387, 0.01409331226348877, 0.014003199577331543, 0.013868032455444336, 0.014045184135437011, 0.013905920028686524, 0.013874176025390626, 0.01387827205657959, 0.013967424392700196, 0.013791168212890625, 0.013896703720092773, 0.01307545566558838, 0.013240320205688477, 0.013180928230285644, 0.013167615890502929, 0.013195263862609862, 0.013143039703369141, 0.013195263862609862, 0.013173760414123535, 0.013148223876953125, 0.01317471981048584, 0.013140992164611816, 0.013180928230285644, 0.013184000015258789, 0.013189120292663574, 0.013275135993957519, 0.013073408126831054, 0.01317683219909668, 0.01318297576904297, 0.013248512268066406, 0.013299712181091309, 0.013164544105529785, 0.013240320205688477, 0.013097984313964844, 0.013034496307373047, 0.013288448333740235, 0.013214719772338868, 0.013195263862609862, 0.013203455924987792, 0.0133887996673584, 0.013279232025146484, 0.013278207778930663, 0.01326796817779541, 0.013179903984069824, 0.013255680084228515, 0.013275135993957519, 0.013188159942626954, 0.013231040000915528, 0.013221887588500977, 0.013170687675476075, 0.013228032112121582, 0.013188096046447753, 
0.01325772762298584, 0.013123583793640137, 0.013001728057861327, 0.01307852840423584, 0.013273088455200196, 0.013245439529418946, 0.013227007865905761, 0.013017087936401368, 0.012997664451599122, 0.012988384246826173, 0.013239295959472656, 0.013214719772338868, 0.013026304244995117, 0.013109248161315918, 0.01316147232055664, 0.0132925443649292, 0.013337599754333495, 0.01326591968536377, 0.013203455924987792, 0.01326796817779541, 0.013248512268066406, 0.013203455924987792, 0.01315225601196289, 0.013336576461791993, 0.013233152389526368, 0.013240320205688477, 0.013245439529418946, 0.013230079650878907, 0.01325977611541748, 0.013109248161315918, 0.013148159980773925, 0.013205504417419434, 0.013146112442016602, 0.013227007865905761, 0.013317119598388672, 0.013204480171203613, 0.01316966438293457, 0.013463552474975587, 0.013760512351989745, 0.013888511657714844, 0.014512127876281738, 0.014248959541320801, 0.013971455574035644, 0.013946880340576171, 0.013907967567443847, 0.014007295608520508, 0.014044159889221192, 0.014082048416137695, 0.01388748836517334, 0.014044159889221192, 0.01399500846862793, 0.014003199577331543, 0.013982720375061035, 0.013948927879333496, 0.013882368087768555, 0.013997056007385255, 0.013935615539550781, 0.01397862434387207, 0.013929471969604493, 0.014035967826843262, 0.014020607948303223, 0.01387929630279541, 0.013919232368469238, 0.013931520462036133, 0.013955072402954101, 0.01399295997619629, 0.013891584396362304, 0.01389363193511963, 0.014033920288085937, 0.013937664031982423, 0.013929471969604493, 0.01396224021911621, 0.014000127792358399, 0.013957119941711426, 0.014010368347167968, 0.013920255661010742, 0.013959168434143066, 0.013940735816955567, 0.014114815711975098, 0.014002176284790039, 0.013913087844848633, 0.014009344100952148, 0.013955072402954101, 0.013927424430847168, 0.013818880081176758, 0.013218815803527833, 0.013288448333740235, 0.013217791557312012, 0.01324953556060791, 0.013248512268066406, 0.013232128143310547, 0.013476863861083984, 0.013301759719848634, 0.013180928230285644, 0.013379584312438965, 0.01318297576904297, 0.013302783966064453, 0.013322239875793456, 0.013213695526123047, 0.013164544105529785, 0.013173760414123535, 0.013091903686523437, 0.01315014362335205, 0.013211647987365722, 0.013195263862609862, 0.013124608039855956, 0.013156352043151855, 0.013167615890502929, 0.013140992164611816, 0.013208576202392578, 0.01315225601196289, 0.013172736167907715, 0.013222911834716796, 0.013165568351745606, 0.013204480171203613, 0.013085696220397949, 0.012951552391052246, 0.013200384140014648, 0.013364224433898926, 0.013213695526123047, 0.013042688369750977, 0.013224960327148438, 0.013342720031738281, 0.013585408210754395, 0.013385727882385253, 0.013178879737854005, 0.013229056358337403, 0.013227007865905761, 0.013201408386230469, 0.013220864295959473, 0.013493247985839844, 0.013260800361633301, 0.013307904243469238, 0.013277183532714844, 0.013263872146606445, 0.013253631591796875, 0.013200415611267089, 0.01334166431427002, 0.013355008125305176, 0.013170687675476075, 0.01325772762298584, 0.013272064208984375, 0.013218815803527833, 0.013322239875793456, 0.013215744018554687, 0.013304832458496094, 0.013253631591796875, 0.013244416236877441, 0.01325158405303955, 0.01334988784790039, 0.013238271713256837, 0.013254655838012695, 0.013626432418823243, 0.013979583740234376, 0.013917183876037598, 0.014079999923706055, 0.013900799751281738, 0.014376959800720214, 0.014775296211242676, 0.014215167999267577, 0.014009344100952148, 0.013991935729980469, 
0.013963264465332031, 0.014112768173217773, 0.014098431587219238, 0.014031871795654297, 0.014010368347167968, 0.014022656440734863, 0.01397760009765625, 0.013902848243713378, 0.013948927879333496, 0.01396735954284668, 0.013961215972900391, 0.01389568042755127, 0.013948960304260254, 0.013956064224243165, 0.013936639785766602, 0.013929471969604493, 0.014136320114135742, 0.013947903633117676, 0.013983743667602539, 0.013998080253601074, 0.013961215972900391, 0.013917183876037598, 0.013931520462036133, 0.013961215972900391, 0.013932543754577637, 0.01389363193511963, 0.013906944274902343, 0.01394489574432373, 0.013947839736938477, 0.013937664031982423, 0.013932543754577637, 0.013933568000793458, 0.013919232368469238, 0.013947903633117676, 0.01386393642425537, 0.01395199966430664, 0.013920255661010742, 0.013940735816955567, 0.01541222381591797, 0.014158847808837891, 0.014017536163330077, 0.01386188793182373, 0.013890560150146485, 0.013833215713500976, 0.013999103546142578, 0.013916159629821777, 0.013926400184631347, 0.013811712265014648, 0.013856767654418945]",tokens/s,72.65359960292616,,,2,64,1,,, -4bit-awq-exllama-v2-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,tiiuae/falcon-7b,tiiuae/falcon-7b,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 613, in resolve_trust_remote_code - answer = input( -EOFError: EOF when reading a line - -During handling of the above exception, another exception occurred: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 231, in load_model_with_no_weights - self.create_no_weights_model() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 213, in create_no_weights_model - meta_model = self.automodel_class.from_config(self.pretrained_config) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 419, in from_config - trust_remote_code = resolve_trust_remote_code( - File 
""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 626, in resolve_trust_remote_code - raise ValueError( -ValueError: The repository for tiiuae/falcon-7b contains custom code which must be executed to correctly load the model. You can inspect the repository content at https://hf.co/tiiuae/falcon-7b. -Please pass the argument `trust_remote_code=True` to allow custom code to be run. - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,2,64,1,,, -4bit-awq-exllama-v2-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,huggyllama/llama-65b,huggyllama/llama-65b,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - self.load_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3904, in from_pretrained - dispatch_model(model, **device_map_kwargs) - File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 489, in dispatch_model - model.to(device) - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 2796, in to - return super().to(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1173, in to - return self._apply(convert) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 779, in _apply - module._apply(fn) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 779, in _apply - module._apply(fn) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 779, in _apply - module._apply(fn) - [Previous line repeated 2 more times] - 
File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 853, in _apply - self._buffers[key] = fn(buf) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1159, in convert - return t.to( -torch.cuda.OutOfMemoryError: CUDA out of memory. Tried to allocate 86.00 MiB. GPU - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,2,64,1,,, -4bit-awq-exllama-v2-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,1,1,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 304, in hf_raise_for_status - response.raise_for_status() - File ""/usr/local/lib/python3.10/dist-packages/requests/models.py"", line 1024, in raise_for_status - raise HTTPError(http_error_msg, response=self) -requests.exceptions.HTTPError: 404 Client Error: Not Found for url: https://huggingface.co/1/resolve/main/config.json - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1221, in hf_hub_download - return _hf_hub_download_to_cache_dir( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1325, in _hf_hub_download_to_cache_dir - _raise_on_head_call_error(head_call_error, force_download, local_files_only) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1823, in _raise_on_head_call_error - raise head_call_error - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1722, in _get_metadata_or_catch_error - metadata = get_hf_file_metadata(url=url, proxies=proxies, timeout=etag_timeout, headers=headers) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1645, in get_hf_file_metadata - r = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 372, in 
_request_wrapper - response = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 396, in _request_wrapper - hf_raise_for_status(response) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status - raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-669495e6-24c5fec26b2eb15737096d10;cc3c1055-4df3-49a1-9aee-62dd0699dcde) - -Repository Not Found for url: https://huggingface.co/1/resolve/main/config.json. -Please make sure you specified the correct `repo_id` and `repo_type`. -If you are trying to access a private or gated repo, make sure you are authenticated. - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 425, in cached_file - raise EnvironmentError( -OSError: 1 is not a local folder and is not a valid model identifier listed on 'https://huggingface.co/models' -If this is a private repository, make sure to pass a token having permission to this repo either by logging in with `huggingface-cli login` or by passing `token=` - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,2,64,1,,, -4bit-awq-exllama-v2-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,internlm/internlm-20b,internlm/internlm-20b,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - 
benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 613, in resolve_trust_remote_code - answer = input( -EOFError: EOF when reading a line - -During handling of the above exception, another exception occurred: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 231, in load_model_with_no_weights - self.create_no_weights_model() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 213, in create_no_weights_model - meta_model = self.automodel_class.from_config(self.pretrained_config) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 419, in from_config - trust_remote_code = resolve_trust_remote_code( - File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 626, in resolve_trust_remote_code - raise ValueError( -ValueError: The repository for internlm/internlm-20b contains custom code which must be executed to correctly load the model. You can inspect the repository content at https://hf.co/internlm/internlm-20b. -Please pass the argument `trust_remote_code=True` to allow custom code to be run. 
- -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,2,64,1,,, -4bit-awq-exllama-v2-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,internlm/internlm2-20b,internlm/internlm2-20b,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 613, in resolve_trust_remote_code - answer = input( -EOFError: EOF when reading a line - -During handling of the above exception, another exception occurred: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 231, in load_model_with_no_weights - self.create_no_weights_model() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 213, in create_no_weights_model - meta_model = self.automodel_class.from_config(self.pretrained_config) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 419, in from_config - trust_remote_code = resolve_trust_remote_code( - File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 626, in resolve_trust_remote_code - raise ValueError( -ValueError: The repository for internlm/internlm2-20b contains custom code which must be executed to correctly load the model. You can inspect the repository content at https://hf.co/internlm/internlm2-20b. -Please pass the argument `trust_remote_code=True` to allow custom code to be run. 
- -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,2,64,1,,, -4bit-awq-exllama-v2-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,facebook/opt-30b,facebook/opt-30b,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - self.load_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3907, in from_pretrained - hf_quantizer.postprocess_model(model) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/base.py"", line 195, in postprocess_model - return self._process_model_after_weight_loading(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/quantizer_awq.py"", line 107, in _process_model_after_weight_loading - model = post_init_awq_exllama_modules(model, self.quantization_config.exllama_config) - File ""/usr/local/lib/python3.10/dist-packages/transformers/integrations/awq.py"", line 466, in post_init_awq_exllama_modules - model = exllamav2_post_init( - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllamav2.py"", line 198, in exllamav2_post_init - submodule.post_init(scratch_space=model.scratch_spaces[device]) - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllamav2.py"", line 81, in post_init - self.q_handle = exlv2_ext.make_q_matrix( -NameError: name 'exlv2_ext' is not defined - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,2,64,1,,, 
-4bit-awq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,Qwen/Qwen1.5-MoE-A2.7B,Qwen/Qwen1.5-MoE-A2.7B,cuda,0,42,,,True,,,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run - report = scenario.run(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run - self.run_model_loading_tracking(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking - backend.load() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load - self.load_transformers_model() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model - self.load_transformers_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights - self.load_transformers_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained - self.pretrained_model = self.automodel_loader.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4037, in from_pretrained - hf_quantizer.postprocess_model(model) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/base.py"", line 195, in postprocess_model - return self._process_model_after_weight_loading(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/quantizer_awq.py"", line 107, in _process_model_after_weight_loading - model = post_init_awq_exllama_modules(model, self.quantization_config.exllama_config) - File ""/usr/local/lib/python3.10/dist-packages/transformers/integrations/awq.py"", line 466, in post_init_awq_exllama_modules - model = exllamav2_post_init( - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllamav2.py"", line 200, in exllamav2_post_init - submodule.post_init(scratch_space=model.scratch_spaces[device]) - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllamav2.py"", line 83, in post_init - self.q_handle = 
exlv2_ext.make_q_matrix( -RuntimeError: q_weight and gptq_scales have incompatible shapes - -",qwen2_moe,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,2,64,1,,, -4bit-awq-exllama-v2-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,mistralai/Mixtral-8x7B-v0.1,mistralai/Mixtral-8x7B-v0.1,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - self.load_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3904, in from_pretrained - dispatch_model(model, **device_map_kwargs) - File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 489, in dispatch_model - model.to(device) - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 2796, in to - return super().to(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1173, in to - return self._apply(convert) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 779, in _apply - module._apply(fn) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 779, in _apply - module._apply(fn) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 779, in _apply - module._apply(fn) - [Previous line repeated 2 more times] - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 853, in _apply - self._buffers[key] = fn(buf) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1159, in convert - return t.to( -torch.cuda.OutOfMemoryError: CUDA out of memory. 
Tried to allocate 20.00 MiB. GPU - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,2,64,1,,, -4bit-awq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,EleutherAI/pythia-410m,EleutherAI/pythia-410m,cuda,0,42,,,True,,,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,gpt_neox,MB,1074.417664,1200.095232,0.0,570.425344,536.326656,s,1,7.79876123046875,7.79876123046875,0.0,7.79876123046875,7.79876123046875,7.79876123046875,7.79876123046875,[7.79876123046875],,kWh,1.1852933221520188e-05,6.480465323765408e-06,1.6701957806009027e-05,3.503535635129462e-05,,MB,1550.671872,1273.495552,0.0,624.951296,594.377728,s,10,0.19878761482238766,0.01987876148223877,0.0002603248762522551,0.019843456268310546,0.020185266494750977,0.0202972412109375,0.02038682098388672,"[0.020033279418945314, 0.020409215927124025, 0.019675968170166015, 0.020160383224487304, 0.019996383666992187, 0.019657888412475587, 0.01965180778503418, 0.019515775680541993, 0.019885984420776368, 0.019800928115844725]",tokens/s,12878.065880951903,kWh,2.404429127653597e-07,1.317035410593254e-07,5.436821196789241e-07,9.158285735036092e-07,tokens/kWh,279528295.3671582,MB,1587.904512,1290.272768,0.0,639.63136,607.71072,s,10,10.986668090820311,1.0986668090820313,0.008148997990733285,1.0988784790039063,1.1074724731445313,1.108330780029297,1.1090174255371095,"[1.0974532470703124, 1.1091890869140626, 1.1072786865234374, 1.1003037109375, 1.09528076171875, 1.10728173828125, 1.09462939453125, 1.088001220703125, 1.103834228515625, 1.083416015625]",tokens/s,57.34222557668632,kWh,1.3286257495428257e-05,7.2804568972817314e-06,2.0915946159118178e-05,4.148266055182817e-05,tokens/kWh,1518706.8322507474,,s,630,10.982118457794193,0.01743193405999078,0.00036867316736935283,0.01732096004486084,0.017897164916992187,0.018075289726257323,0.018787133312225344,"[0.017691648483276368, 0.017544191360473634, 0.017508352279663086, 0.01739366340637207, 0.017179647445678712, 0.017172479629516603, 0.017335296630859375, 0.017520639419555666, 0.017712127685546874, 0.017770496368408203, 0.01807360076904297, 0.017525760650634766, 0.01741619110107422, 0.01745408058166504, 0.018121728897094725, 0.018127872467041017, 0.01764352035522461, 0.017513471603393553, 0.017625087738037108, 0.01763532829284668, 0.017473535537719728, 0.017689599990844726, 0.01763839912414551, 0.017709056854248048, 0.01719398307800293, 0.017810432434082032, 0.017909759521484374, 0.017238016128540038, 0.017238016128540038, 0.017640447616577147, 0.017556480407714844, 0.017542144775390626, 0.017366016387939453, 0.01756876754760742, 0.017625087738037108, 0.017406976699829102, 0.017246208190917968, 0.017101823806762697, 0.01725132751464844, 0.01721036720275879, 0.017126399993896483, 0.01716223907470703, 0.01740185546875, 0.017315839767456053, 0.017123327255249024, 0.017154048919677735, 0.017155071258544922, 0.01719705581665039, 0.017348608016967772, 0.01723289680480957, 0.017238016128540038, 0.017209344863891602, 0.01722163200378418, 0.017101823806762697, 
0.017171455383300782, 0.017156095504760743, 0.017159168243408202, 0.01720012855529785, 0.017368064880371094, 0.01714995193481445, 0.017075199127197266, 0.017183744430541992, 0.01714691162109375, 0.01723187255859375, 0.017184768676757813, 0.017284095764160155, 0.01723494338989258, 0.01715814399719238, 0.017142784118652343, 0.01718681526184082, 0.017543167114257813, 0.017903615951538086, 0.017755199432373046, 0.018297792434692383, 0.01839923286437988, 0.01785651206970215, 0.017458175659179686, 0.017977344512939454, 0.017447935104370118, 0.017286144256591796, 0.017695743560791014, 0.017358848571777344, 0.017368064880371094, 0.0174335994720459, 0.01757900810241699, 0.017501184463500977, 0.01741209602355957, 0.017278976440429687, 0.017531904220581054, 0.01740492820739746, 0.017133567810058595, 0.01740185546875, 0.017728511810302734, 0.01759129524230957, 0.017675264358520508, 0.017666048049926757, 0.017313791275024415, 0.017442815780639647, 0.01749504089355469, 0.018757631301879883, 0.019345407485961915, 0.018076671600341796, 0.018041856765747072, 0.017722368240356445, 0.017847295761108398, 0.017693695068359376, 0.01738137626647949, 0.017201152801513672, 0.01742131233215332, 0.01716223907470703, 0.017559551239013673, 0.01764556884765625, 0.01719603157043457, 0.017914880752563478, 0.01794047927856445, 0.01768550491333008, 0.01759334373474121, 0.01720627212524414, 0.017331199645996095, 0.017551359176635743, 0.01762713623046875, 0.017794048309326172, 0.017795072555541993, 0.01761587142944336, 0.01765990447998047, 0.01761689567565918, 0.017270784378051757, 0.01741107177734375, 0.017325056076049804, 0.017469440460205078, 0.01785139274597168, 0.01777561569213867, 0.018790399551391602, 0.017949695587158202, 0.017484800338745117, 0.017473535537719728, 0.017304576873779298, 0.017176576614379883, 0.01745715141296387, 0.01717043113708496, 0.01720729637145996, 0.017286144256591796, 0.017819648742675782, 0.01761075210571289, 0.017777664184570312, 0.01725542449951172, 0.01716633605957031, 0.017574911117553712, 0.017707008361816406, 0.017625087738037108, 0.017819648742675782, 0.018144256591796876, 0.017738752365112305, 0.017707008361816406, 0.01883750343322754, 0.017924095153808595, 0.017391616821289063, 0.017350656509399414, 0.017271808624267578, 0.017323007583618166, 0.017361919403076173, 0.017687551498413084, 0.017736703872680663, 0.017289215087890625, 0.017740800857543947, 0.017716224670410157, 0.017702911376953127, 0.017728511810302734, 0.01740595245361328, 0.018140159606933593, 0.017757183074951173, 0.018009088516235353, 0.017666048049926757, 0.017690624237060547, 0.01765478324890137, 0.017728511810302734, 0.017757183074951173, 0.017698816299438477, 0.01767628860473633, 0.017313791275024415, 0.016969728469848632, 0.017168384552001953, 0.01736396789550781, 0.017369087219238282, 0.017335296630859375, 0.017276927947998046, 0.01702911949157715, 0.017257503509521484, 0.017148895263671873, 0.01781760025024414, 0.017171455383300782, 0.01720832061767578, 0.017633279800415038, 0.017929216384887696, 0.018124799728393554, 0.017289215087890625, 0.017131519317626954, 0.017119232177734374, 0.017127424240112304, 0.017119232177734374, 0.017277952194213866, 0.017771520614624024, 0.017299455642700197, 0.01742336082458496, 0.01718988800048828, 0.017698816299438477, 0.017386495590209963, 0.017089536666870117, 0.017171455383300782, 0.01718272018432617, 0.018280448913574218, 0.017725439071655275, 0.01717251205444336, 0.017132511138916017, 0.016974847793579103, 0.017138687133789063, 0.0171059513092041, 0.017351648330688477, 
0.017331199645996095, 0.017144832611083984, 0.017338367462158204, 0.017107967376708985, 0.017516544342041016, 0.017582080841064454, 0.017131519317626954, 0.017683456420898438, 0.017933311462402343, 0.01743667221069336, 0.0176629753112793, 0.017260543823242186, 0.018718719482421875, 0.018214912414550782, 0.018779136657714843, 0.017847295761108398, 0.017937408447265626, 0.01781760025024414, 0.017691648483276368, 0.017573888778686524, 0.017488895416259767, 0.017063936233520507, 0.017121280670166016, 0.017107967376708985, 0.017289215087890625, 0.01704652786254883, 0.017100799560546876, 0.017133567810058595, 0.01761996841430664, 0.01764761543273926, 0.017724416732788087, 0.017269760131835937, 0.01719193649291992, 0.017294336318969726, 0.017265695571899414, 0.017164255142211916, 0.01721343994140625, 0.017157119750976564, 0.017122304916381836, 0.01719705581665039, 0.017159168243408202, 0.01765273666381836, 0.017344512939453126, 0.017110015869140623, 0.017306623458862306, 0.017164287567138673, 0.017103872299194335, 0.017134592056274413, 0.017663999557495116, 0.017753087997436523, 0.01740287971496582, 0.01723904037475586, 0.017064960479736328, 0.01741414451599121, 0.01721855926513672, 0.017171455383300782, 0.017364992141723632, 0.01717043113708496, 0.017329151153564454, 0.017171455383300782, 0.017163263320922852, 0.017612800598144532, 0.017175552368164062, 0.017119232177734374, 0.017694751739501954, 0.017702880859375, 0.017588224411010742, 0.017442815780639647, 0.017142784118652343, 0.01718169593811035, 0.017698816299438477, 0.017752063751220702, 0.017613824844360353, 0.01768448066711426, 0.017313791275024415, 0.016862207412719727, 0.017373184204101562, 0.017179647445678712, 0.017758207321166994, 0.01785651206970215, 0.01761075210571289, 0.017132543563842775, 0.017476608276367187, 0.01723391914367676, 0.017894399642944335, 0.01723187255859375, 0.01718988800048828, 0.01722470474243164, 0.01881088066101074, 0.018017280578613282, 0.01755340766906738, 0.017146879196166993, 0.01722163200378418, 0.017339391708374022, 0.01721651268005371, 0.017097728729248047, 0.017443840026855468, 0.017378303527832033, 0.01760767936706543, 0.01765273666381836, 0.017735679626464843, 0.01744179153442383, 0.017089536666870117, 0.017132543563842775, 0.017468416213989257, 0.017754112243652344, 0.017738752365112305, 0.0177838077545166, 0.017321983337402345, 0.01699635124206543, 0.017490943908691405, 0.017157119750976564, 0.017763328552246094, 0.017362943649291994, 0.018324480056762696, 0.019725311279296876, 0.01802956771850586, 0.017879039764404296, 0.017565696716308594, 0.017520639419555666, 0.017739776611328126, 0.017861631393432616, 0.017442815780639647, 0.01716633605957031, 0.017357824325561523, 0.017291263580322267, 0.017533952713012696, 0.017469440460205078, 0.017266687393188478, 0.017290239334106446, 0.017375232696533204, 0.017464319229125978, 0.01739776039123535, 0.01742336082458496, 0.017512447357177736, 0.017534975051879884, 0.01739776039123535, 0.01718988800048828, 0.017503231048583985, 0.01760051155090332, 0.017971200942993162, 0.017571840286254883, 0.017273855209350587, 0.01740595245361328, 0.01763532829284668, 0.018126848220825196, 0.018562047958374024, 0.017781759262084963, 0.0174202880859375, 0.017435647964477538, 0.01779302406311035, 0.017617919921875, 0.017649663925170898, 0.0171909122467041, 0.017492992401123047, 0.017829887390136717, 0.017543167114257813, 0.017534975051879884, 0.017352703094482422, 0.016913408279418944, 0.01823744010925293, 0.01899929618835449, 0.01842790412902832, 0.017992704391479493, 
0.017522687911987304, 0.01703731155395508, 0.017299455642700197, 0.017285120010375975, 0.016930816650390625, 0.01716531181335449, 0.017164287567138673, 0.017313791275024415, 0.017434623718261717, 0.017714176177978515, 0.018298879623413086, 0.018177024841308592, 0.01745305633544922, 0.01762611198425293, 0.01719603157043457, 0.017605632781982423, 0.01741004753112793, 0.017514495849609374, 0.017367040634155274, 0.017099775314331055, 0.0172359676361084, 0.017914880752563478, 0.01744486427307129, 0.017260543823242186, 0.01719705581665039, 0.017313791275024415, 0.01703014373779297, 0.01740390396118164, 0.01703424072265625, 0.01680486488342285, 0.01703628730773926, 0.017129472732543945, 0.018001920700073244, 0.01721651268005371, 0.016953344345092772, 0.017093631744384767, 0.017129472732543945, 0.017076223373413087, 0.017505279541015627, 0.017555456161499023, 0.017889280319213868, 0.017329151153564454, 0.01701580810546875, 0.01723187255859375, 0.016954368591308593, 0.017163263320922852, 0.017172479629516603, 0.01721343994140625, 0.017090560913085938, 0.016887807846069337, 0.01695232009887695, 0.01717350387573242, 0.017142784118652343, 0.017105920791625977, 0.017110015869140623, 0.01756159973144531, 0.017294336318969726, 0.017141759872436522, 0.017152000427246093, 0.01703628730773926, 0.017124351501464845, 0.017146879196166993, 0.017054719924926756, 0.01680384063720703, 0.016837631225585938, 0.01705062484741211, 0.017119232177734374, 0.017169408798217774, 0.017228799819946287, 0.017238016128540038, 0.017138687133789063, 0.017187839508056642, 0.016937984466552734, 0.017310720443725586, 0.01702400016784668, 0.017111040115356444, 0.017150976181030272, 0.01725542449951172, 0.017114112854003907, 0.017157119750976564, 0.016867328643798828, 0.016969728469848632, 0.017092607498168946, 0.017112064361572265, 0.017134592056274413, 0.017114112854003907, 0.01716223907470703, 0.017083391189575196, 0.017115135192871094, 0.017122304916381836, 0.017968128204345703, 0.017912832260131836, 0.01797222328186035, 0.018892799377441406, 0.017953792572021485, 0.017335296630859375, 0.0172359676361084, 0.01718169593811035, 0.01742438316345215, 0.017879039764404296, 0.017334272384643554, 0.017703935623168944, 0.01765273666381836, 0.017294336318969726, 0.017093631744384767, 0.017118207931518553, 0.017080320358276366, 0.017115135192871094, 0.017137664794921875, 0.017086463928222655, 0.017447935104370118, 0.01738751983642578, 0.017086463928222655, 0.017159168243408202, 0.017107967376708985, 0.017105920791625977, 0.017167360305786132, 0.017468416213989257, 0.017315839767456053, 0.01744895935058594, 0.01722368049621582, 0.017291263580322267, 0.017119232177734374, 0.017426431655883787, 0.01699737548828125, 0.01785651206970215, 0.017477632522583008, 0.017324031829833983, 0.017575935363769533, 0.017911808013916015, 0.01846272087097168, 0.017522687911987304, 0.017342464447021484, 0.01739776039123535, 0.01725644874572754, 0.017333248138427734, 0.01726361656188965, 0.017896448135375977, 0.017487871170043946, 0.017315839767456053, 0.017233983993530273, 0.017469375610351563, 0.01740595245361328, 0.017837087631225587, 0.017119199752807616, 0.017294336318969726, 0.0176312313079834, 0.017836032867431642, 0.017274879455566407, 0.017116159439086915, 0.017152000427246093, 0.017140735626220704, 0.017489919662475584, 0.01743257522583008, 0.017308671951293944, 0.017314815521240236, 0.01697587203979492, 0.01773686408996582, 0.017546079635620118, 0.01724825668334961, 0.017298431396484376, 0.01719910430908203, 0.017184768676757813, 
0.01722470474243164, 0.0172677116394043, 0.01725951957702637, 0.01721241569519043, 0.017512447357177736, 0.017319936752319336, 0.01720832061767578, 0.017246208190917968, 0.0172410888671875, 0.017161216735839844, 0.017270784378051757, 0.01865830421447754, 0.018331647872924805, 0.018052095413208007, 0.017949695587158202, 0.018068479537963866, 0.017951744079589844, 0.018106367111206053, 0.01799065589904785, 0.01820876884460449, 0.017978368759155275, 0.017934335708618163, 0.01690729522705078, 0.017183712005615234, 0.017169408798217774, 0.017154048919677735, 0.017133567810058595, 0.017143808364868163, 0.017762304306030274, 0.017477632522583008, 0.017131519317626954, 0.01716633605957031, 0.017135616302490234, 0.017132543563842775, 0.017142784118652343, 0.017129472732543945, 0.017080320358276366, 0.017072128295898437, 0.017167360305786132, 0.017117183685302736, 0.017172479629516603, 0.017141759872436522, 0.017116159439086915, 0.017185792922973633, 0.01724825668334961, 0.017164287567138673, 0.017157119750976564, 0.017129472732543945, 0.017260543823242186, 0.017089536666870117, 0.017164287567138673, 0.017130495071411133, 0.017686527252197267, 0.01760972785949707, 0.01721036720275879, 0.017099775314331055, 0.017108991622924806, 0.01718988800048828, 0.017187839508056642, 0.01716531181335449, 0.017260543823242186, 0.017307680130004884, 0.017161184310913086, 0.017160192489624023, 0.017150976181030272, 0.017175552368164062, 0.017063936233520507, 0.01719398307800293, 0.01720524787902832, 0.01720832061767578, 0.017171455383300782, 0.017154048919677735, 0.017142784118652343, 0.01718272018432617, 0.01719603157043457, 0.017140735626220704, 0.017184768676757813, 0.017201152801513672, 0.017254400253295898, 0.017088512420654296, 0.017118207931518553, 0.017139711380004884, 0.017257471084594727, 0.017135616302490234, 0.017247264862060546]",tokens/s,57.36598111021818,,,2,64,1,,, -4bit-awq-exllama-v2-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,databricks/dbrx-base,databricks/dbrx-base,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 304, in hf_raise_for_status - response.raise_for_status() - File ""/usr/local/lib/python3.10/dist-packages/requests/models.py"", line 1024, in raise_for_status - raise HTTPError(http_error_msg, response=self) -requests.exceptions.HTTPError: 403 Client Error: Forbidden for url: https://huggingface.co/databricks/dbrx-base/resolve/main/config.json - 
-The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1722, in _get_metadata_or_catch_error - metadata = get_hf_file_metadata(url=url, proxies=proxies, timeout=etag_timeout, headers=headers) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1645, in get_hf_file_metadata - r = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 372, in _request_wrapper - response = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 396, in _request_wrapper - hf_raise_for_status(response) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 367, in hf_raise_for_status - raise HfHubHTTPError(message, response=response) from e -huggingface_hub.utils._errors.HfHubHTTPError: (Request ID: Root=1-66947b68-3a4fa94d278a651d52fcd067;77ecf3ab-74ef-418c-8af1-abfdac7d9dc5) - -403 Forbidden: Please enable access to public gated repositories in your fine-grained token settings to view this repository.. -Cannot access content at: https://huggingface.co/databricks/dbrx-base/resolve/main/config.json. -If you are trying to create or update content,make sure you have a token with the `write` role. - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1221, in hf_hub_download - return _hf_hub_download_to_cache_dir( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1325, in _hf_hub_download_to_cache_dir - _raise_on_head_call_error(head_call_error, force_download, local_files_only) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1826, in _raise_on_head_call_error - raise LocalEntryNotFoundError( -huggingface_hub.utils._errors.LocalEntryNotFoundError: An error happened while trying to locate the file on the Hub and we cannot find the requested files in the local cache. Please check your connection and try again or make sure your Internet connection is on. 
- -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 445, in cached_file - raise EnvironmentError( -OSError: We couldn't connect to 'https://huggingface.co' to load this file, couldn't find it in the cached files and it looks like databricks/dbrx-base is not the path to a directory containing a file named config.json. -Checkout your internet connection or see how to run the library in offline mode at 'https://huggingface.co/docs/transformers/installation#offline-mode'. 
- -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,2,64,1,,, -4bit-awq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,Qwen/Qwen1.5-7B,Qwen/Qwen1.5-7B,cuda,0,42,,,True,,,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,qwen2,MB,4946.989056,8236.040192,0.0,7606.370304,6988.678144,s,1,11.663908203125,11.663908203125,0.0,11.663908203125,11.663908203125,11.663908203125,11.663908203125,[11.663908203125],,kWh,5.630064533334007e-05,3.083235221696591e-05,9.837341203211558e-05,0.00018550640958242158,,MB,2546.507776,8257.011712,0.0,7608.467456,6915.138048,s,10,1.207145851135254,0.1207145851135254,0.0003263453466187748,0.1205823860168457,0.12106293640136719,0.12123412399291993,0.12137107406616211,"[0.1210064926147461, 0.1208414077758789, 0.12054137420654297, 0.12140531158447265, 0.12056339263916016, 0.12038050842285156, 0.12042950439453125, 0.12102489471435547, 0.1203515853881836, 0.12060137939453125]",tokens/s,2120.7047993351107,kWh,1.426756477451013e-06,7.817753881423987e-07,6.033615937999734e-06,8.242147803593146e-06,tokens/kWh,31059865.231778223,MB,2550.771712,8261.206016,0.0,7610.564608,6915.140608,s,10,19.7747529296875,1.9774752929687502,0.0145158508621153,1.9751402587890623,1.999521240234375,2.0001826171875,2.00071171875,"[1.9664244384765626, 1.97063720703125, 1.999374267578125, 2.000843994140625, 1.9833507080078125, 1.9690186767578124, 1.95729736328125, 1.960449951171875, 1.9877130126953124, 1.979643310546875]",tokens/s,31.858805125913438,kWh,2.318020092741042e-05,1.270338657767249e-05,5.712079569660305e-05,9.300438320168597e-05,tokens/kWh,677387.4287557014,,s,630,19.7728143119812,0.0313854195428273,0.0005576319821256677,0.03117209529876709,0.032197734069824215,0.03247877101898194,0.033238447227478034,"[0.031493120193481446, 0.031139839172363282, 0.031084543228149415, 0.03118387222290039, 0.03133030319213867, 0.03116646385192871, 0.031145984649658204, 0.031137792587280274, 0.03117465591430664, 0.031194112777709962, 0.031089664459228516, 0.031214591979980468, 0.031301631927490234, 0.03189452743530274, 0.031045631408691408, 0.031031295776367186, 0.031139839172363282, 0.031045631408691408, 0.03199488067626953, 0.0309616641998291, 0.03103436851501465, 0.031067136764526368, 0.031086591720581053, 0.030701568603515625, 0.03141324806213379, 0.03161907196044922, 0.0318023681640625, 0.031121408462524414, 0.031062015533447264, 0.03118489646911621, 0.03212287902832031, 0.03220172882080078, 0.031138816833496095, 0.031164415359497072, 0.031071231842041015, 0.030894079208374024, 0.031076351165771485, 0.031099903106689454, 0.031107072830200196, 0.030825471878051756, 0.030881792068481444, 0.031096832275390625, 0.0307589111328125, 0.03130982398986817, 0.031172607421875, 0.03124224090576172, 0.031088640213012695, 0.031074304580688477, 0.031263744354248044, 0.030950399398803712, 0.030650367736816408, 0.032285694122314454, 0.030926847457885744, 0.031047679901123046, 0.03100876808166504, 0.031188991546630858, 0.03098931121826172, 0.0313384952545166, 
0.031212543487548827, 0.031286272048950195, 0.030954496383666992, 0.031156223297119142, 0.031783935546875, 0.03137228775024414, 0.03099033546447754, 0.031038463592529295, 0.03097907257080078, 0.031072256088256835, 0.0314071044921875, 0.03128934478759766, 0.030942207336425782, 0.03100364875793457, 0.031007743835449218, 0.031178752899169923, 0.031410175323486327, 0.031422464370727536, 0.031212543487548827, 0.031070207595825194, 0.031093759536743162, 0.03125862312316895, 0.030983167648315428, 0.031067136764526368, 0.031094783782958983, 0.03083776092529297, 0.03232665634155273, 0.032702465057373044, 0.032527359008789065, 0.031263744354248044, 0.03153510475158691, 0.03220172882080078, 0.0321976318359375, 0.03103436851501465, 0.03168972778320313, 0.030946304321289062, 0.031909887313842776, 0.031735807418823245, 0.031939584732055666, 0.031084543228149415, 0.031205375671386718, 0.031109119415283205, 0.03154022407531738, 0.03116543960571289, 0.03101900863647461, 0.031045631408691408, 0.030905344009399413, 0.031014911651611327, 0.03101081657409668, 0.031123455047607423, 0.031129600524902344, 0.031226879119873048, 0.031119359970092773, 0.03141222381591797, 0.031113216400146484, 0.031045631408691408, 0.031058944702148438, 0.031091712951660157, 0.03073023986816406, 0.031148031234741212, 0.031357952117919925, 0.03120332717895508, 0.030893056869506837, 0.03113471984863281, 0.03153305625915527, 0.031076383590698243, 0.03121353530883789, 0.03099033546447754, 0.031120384216308594, 0.030938112258911132, 0.03179417610168457, 0.03140505599975586, 0.031114240646362305, 0.031239168167114258, 0.031123455047607423, 0.030993408203125, 0.031077375411987306, 0.031101951599121092, 0.031070207595825194, 0.03117158317565918, 0.03131494331359863, 0.03199897575378418, 0.03238604736328125, 0.03191910362243652, 0.0313753604888916, 0.03202969741821289, 0.0321710090637207, 0.0322344970703125, 0.032092159271240234, 0.032595966339111326, 0.03187404823303223, 0.03156889533996582, 0.031089664459228516, 0.031144960403442383, 0.03162009620666504, 0.03221401596069336, 0.032123905181884765, 0.03282124710083008, 0.03259084701538086, 0.03075379180908203, 0.032008190155029294, 0.031764480590820314, 0.0323583984375, 0.03115724754333496, 0.03118387222290039, 0.0313753604888916, 0.03211264038085938, 0.03200105667114258, 0.031388639450073245, 0.03112652778625488, 0.03146649551391602, 0.031066112518310547, 0.031118335723876952, 0.031308799743652346, 0.031040512084960937, 0.031120384216308594, 0.03232460784912109, 0.03247923278808594, 0.03262464141845703, 0.03260006332397461, 0.03288883209228516, 0.0324136962890625, 0.03266764831542969, 0.03215769577026367, 0.03180441665649414, 0.03183103942871094, 0.03160678482055664, 0.03175526428222656, 0.032132095336914065, 0.03219558334350586, 0.03204198455810547, 0.031524864196777344, 0.031784959793090824, 0.03143577575683594, 0.030983167648315428, 0.03197952079772949, 0.03204608154296875, 0.03169484710693359, 0.03174195289611816, 0.032105472564697264, 0.03221196746826172, 0.03181875228881836, 0.032105472564697264, 0.03235942459106445, 0.032763904571533206, 0.031336448669433595, 0.03196211242675781, 0.03257241439819336, 0.03240345764160156, 0.03199590492248535, 0.0313384952545166, 0.03198975944519043, 0.031088640213012695, 0.031180799484252928, 0.03214131164550781, 0.03078758430480957, 0.030891008377075195, 0.03218022537231445, 0.031089664459228516, 0.03213926315307617, 0.03202867126464844, 0.03278745651245117, 0.032979969024658204, 0.03243724822998047, 0.03213107299804688, 0.03136716842651367, 
0.03156991958618164, 0.03164057540893555, 0.030980096817016602, 0.03139276885986328, 0.031083520889282228, 0.031021055221557618, 0.03135078430175781, 0.032069633483886716, 0.03232563018798828, 0.03233484649658203, 0.0322426872253418, 0.03159552001953125, 0.030932992935180665, 0.03216691207885742, 0.03245568084716797, 0.03136511993408203, 0.0315043830871582, 0.031079423904418944, 0.031254528045654296, 0.03121049690246582, 0.03153408050537109, 0.032912384033203124, 0.03201740646362305, 0.031180799484252928, 0.03159244728088379, 0.031091712951660157, 0.03206246566772461, 0.031304704666137696, 0.032478206634521486, 0.032130046844482424, 0.03232460784912109, 0.03255091094970703, 0.03164057540893555, 0.03122585678100586, 0.03177984046936035, 0.03197542381286621, 0.03260927963256836, 0.03224576187133789, 0.03233280181884766, 0.03162931251525879, 0.03097395133972168, 0.03210956954956055, 0.031164415359497072, 0.03117363166809082, 0.03171737670898438, 0.03155558395385742, 0.03139891242980957, 0.031230976104736328, 0.03098931121826172, 0.030809087753295897, 0.031160320281982422, 0.03127193641662598, 0.031243263244628908, 0.030774272918701173, 0.032113662719726564, 0.03341823959350586, 0.03167129516601563, 0.030818304061889647, 0.031100927352905275, 0.031081472396850586, 0.031082496643066407, 0.031063039779663085, 0.031065088272094726, 0.03100569534301758, 0.031111167907714843, 0.031154176712036134, 0.03265024185180664, 0.032350208282470705, 0.03240959930419922, 0.031488000869750975, 0.030924800872802735, 0.031887359619140625, 0.031092735290527345, 0.031162368774414063, 0.03118387222290039, 0.031112192153930664, 0.03105177688598633, 0.032307201385498044, 0.03189555168151856, 0.03123302459716797, 0.03103027153015137, 0.03114188766479492, 0.030857215881347655, 0.030996480941772462, 0.030721023559570314, 0.031025152206420898, 0.031244287490844725, 0.031156223297119142, 0.031078399658203124, 0.031070207595825194, 0.030905344009399413, 0.031111167907714843, 0.03079680061340332, 0.03178700828552246, 0.03208703994750976, 0.03174399948120117, 0.031308799743652346, 0.031341567993164066, 0.03198054313659668, 0.03209625625610352, 0.031119359970092773, 0.03128319931030273, 0.031098880767822266, 0.030653440475463867, 0.031682559967041016, 0.032043006896972655, 0.031212543487548827, 0.031099903106689454, 0.031096832275390625, 0.031194112777709962, 0.03116748809814453, 0.03102003288269043, 0.031039487838745116, 0.030906368255615234, 0.031120384216308594, 0.031244287490844725, 0.031152128219604492, 0.0322949104309082, 0.031094783782958983, 0.03060736083984375, 0.031065088272094726, 0.03119513511657715, 0.03341209411621094, 0.03239014434814453, 0.031122432708740235, 0.031108095169067384, 0.031109119415283205, 0.03096985626220703, 0.031037439346313478, 0.030689279556274415, 0.030891008377075195, 0.031046655654907225, 0.03094528007507324, 0.03268505477905274, 0.03136000061035156, 0.030839807510375978, 0.031164415359497072, 0.03155251121520996, 0.030993408203125, 0.031098880767822266, 0.031139839172363282, 0.03099852752685547, 0.03124224090576172, 0.030955520629882813, 0.031143936157226562, 0.03081216049194336, 0.03094425582885742, 0.030884864807128907, 0.031038463592529295, 0.030705663681030275, 0.03074355125427246, 0.030689279556274415, 0.031339519500732424, 0.031132671356201173, 0.03099750328063965, 0.031062015533447264, 0.031127552032470703, 0.030950399398803712, 0.03120639991760254, 0.03076198387145996, 0.03078860855102539, 0.03101388740539551, 0.031122432708740235, 0.031320064544677735, 0.03105075263977051, 
0.03105996894836426, 0.031058944702148438, 0.031188991546630858, 0.03100160026550293, 0.03102207946777344, 0.030934015274047853, 0.03083263969421387, 0.031140863418579103, 0.03124019241333008, 0.031244287490844725, 0.031285247802734374, 0.031032320022583007, 0.03096883201599121, 0.030734336853027344, 0.030681087493896485, 0.030653440475463867, 0.031185920715332032, 0.031094783782958983, 0.03119615936279297, 0.031037439346313478, 0.031053823471069338, 0.03094937515258789, 0.031177728652954102, 0.03091967964172363, 0.03100569534301758, 0.030966783523559572, 0.031204351425170897, 0.031156223297119142, 0.03113369560241699, 0.03134771156311035, 0.03127910423278808, 0.031067136764526368, 0.03115827178955078, 0.03117670440673828, 0.03151974487304687, 0.031095808029174804, 0.030711807250976563, 0.03105075263977051, 0.03114188766479492, 0.030822399139404297, 0.03105075263977051, 0.03114188766479492, 0.03135897636413574, 0.031152128219604492, 0.031068159103393556, 0.031058944702148438, 0.03115007972717285, 0.030870527267456056, 0.031066112518310547, 0.031062015533447264, 0.03112550354003906, 0.031054847717285155, 0.03142758369445801, 0.03115827178955078, 0.031118335723876952, 0.031064064025878906, 0.032176128387451174, 0.031307775497436525, 0.03126681518554687, 0.03097907257080078, 0.031073280334472656, 0.03117363166809082, 0.03201433563232422, 0.03224883270263672, 0.031665151596069335, 0.031076351165771485, 0.030843904495239258, 0.03077836799621582, 0.031014911651611327, 0.03075584030151367, 0.030705663681030275, 0.03099238395690918, 0.031120447158813475, 0.031135679244995117, 0.030859264373779297, 0.03057766342163086, 0.03073843193054199, 0.031036415100097657, 0.031015935897827147, 0.030718975067138672, 0.030943231582641603, 0.03105177688598633, 0.031073280334472656, 0.030902271270751954, 0.0318474235534668, 0.030958591461181642, 0.03099852752685547, 0.03101081657409668, 0.031099903106689454, 0.03117158317565918, 0.031029247283935548, 0.030880767822265624, 0.03153311920166016, 0.031119295120239258, 0.031045631408691408, 0.030993408203125, 0.0310435848236084, 0.03077939224243164, 0.030856224060058595, 0.03058684730529785, 0.030629888534545898, 0.03146751976013184, 0.03222323226928711, 0.032173057556152344, 0.031119359970092773, 0.030905344009399413, 0.030865407943725585, 0.031318016052246093, 0.03105996894836426, 0.031131647109985353, 0.03102207946777344, 0.03104256057739258, 0.031065088272094726, 0.03074662399291992, 0.030521343231201172, 0.03077324867248535, 0.0313436164855957, 0.03146240043640137, 0.030825471878051756, 0.03138764762878418, 0.031268863677978515, 0.031336448669433595, 0.03128319931030273, 0.03167436790466309, 0.03128319931030273, 0.031049760818481445, 0.031167455673217773, 0.031113216400146484, 0.031063039779663085, 0.03122585678100586, 0.03147878456115723, 0.03372544097900391, 0.03291033554077148, 0.03234815979003906, 0.031848447799682614, 0.031302656173706055, 0.03135385513305664, 0.031285247802734374, 0.03118387222290039, 0.031032320022583007, 0.030850048065185546, 0.031987712860107424, 0.03341209411621094, 0.031252479553222655, 0.03172966384887695, 0.03075993537902832, 0.031492095947265625, 0.03134873580932617, 0.031666175842285156, 0.03211161422729492, 0.03191193580627441, 0.031099903106689454, 0.031604736328125, 0.031953920364379884, 0.03304755020141602, 0.0318791675567627, 0.03142758369445801, 0.03162112045288086, 0.031194112777709962, 0.030901248931884766, 0.03210956954956055, 0.031263744354248044, 0.032683006286621095, 0.03166924858093262, 0.031097856521606446, 
0.03096985626220703, 0.030839807510375978, 0.03146137619018555, 0.032107521057128906, 0.031229951858520507, 0.032198654174804685, 0.03218227386474609, 0.03204095840454101, 0.03073023986816406, 0.030868480682373047, 0.03075993537902832, 0.03201331329345703, 0.0313118724822998, 0.032161792755126956, 0.03132928085327148, 0.03129855918884277, 0.030938112258911132, 0.031671327590942384, 0.03162928009033203, 0.0321341438293457, 0.03179929542541504, 0.030943231582641603, 0.030552064895629883, 0.030864383697509764, 0.03153203201293946, 0.030715904235839843, 0.03316428756713867, 0.033001472473144534, 0.03215564727783203, 0.03121971130371094, 0.030652416229248046, 0.031161344528198243, 0.03120742416381836, 0.03151872062683105, 0.03151872062683105, 0.031086591720581053, 0.03326873779296875, 0.03334656143188477, 0.03121766471862793, 0.03124838447570801, 0.03120128059387207, 0.03116646385192871, 0.03138252830505371, 0.03300454330444336, 0.03218636703491211, 0.03207167816162109, 0.03140812873840332, 0.031474687576293944, 0.03117977523803711, 0.03117670440673828, 0.03101081657409668, 0.03119206428527832, 0.031254528045654296, 0.03116646385192871, 0.030664703369140626, 0.03113471984863281, 0.031115264892578126, 0.03336191940307617, 0.03131699180603027, 0.03096883201599121, 0.030909439086914063, 0.031162368774414063, 0.030895103454589845, 0.03116543960571289, 0.03091967964172363, 0.031226879119873048, 0.0314337272644043, 0.030809087753295897, 0.03125555229187012, 0.03202764892578125, 0.030680063247680665, 0.030724096298217773, 0.031114240646362305, 0.030718975067138672, 0.030737407684326173, 0.030878719329833985]",tokens/s,31.86192870977683,,,2,64,1,,, -4bit-awq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,Qwen/Qwen2-beta-14B,Qwen/Qwen2-beta-14B,cuda,0,42,,,True,,,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,,qwen2,MB,8229.507072,12519.473152,0.0,11882.463232,11315.947008,s,1,13.88078515625,13.88078515625,0.0,13.88078515625,13.88078515625,13.88078515625,13.88078515625,[13.88078515625],,kWh,8.203572408403059e-05,4.4935786515998695e-05,0.0001605473506600119,0.0002875188612600412,,MB,3973.255168,12540.444672,0.0,11884.560384,11070.3104,s,10,2.1136905975341795,0.21136905975341796,0.00014870709404169244,0.21136196899414061,0.21151019897460938,0.2115747024536133,0.2116263052368164,"[0.21138473510742187, 0.2111017303466797, 0.21133888244628907, 0.21149586486816407, 0.21163920593261717, 0.21147821044921875, 0.21145289611816406, 0.2112085723876953, 0.21133920288085936, 0.21125129699707032]",tokens/s,1211.1517186983197,kWh,2.501212984591995e-06,1.3704762044229838e-06,1.0575239941666587e-05,1.4446929130681564e-05,tokens/kWh,17720028.78150221,MB,3981.635584,12544.638976,0.0,11886.657536,11070.31296,s,10,25.5754326171875,2.5575432617187497,0.01007682678839056,2.5578111572265625,2.57296142578125,2.5744368896484375,2.5756172607421877,"[2.559084228515625, 2.5502470703125, 2.543716552734375, 2.554602783203125, 2.575912353515625, 2.559779296875, 2.55686865234375, 2.572633544921875, 2.558753662109375, 
2.54383447265625]",tokens/s,24.6330144021345,kWh,3.0343719519713393e-05,1.662693028897109e-05,8.626061530473277e-05,0.0001332312651134173,tokens/kWh,472861.9813552718,,s,630,25.573351436615,0.04059262132796031,0.0005761048865541318,0.04046745681762695,0.04124989280700684,0.04151316509246826,0.04259772552490234,"[0.04024729537963867, 0.04017561721801758, 0.04010905456542969, 0.040204288482666016, 0.0401448974609375, 0.040233985900878906, 0.04019507217407227, 0.0405852165222168, 0.04071219253540039, 0.041371646881103515, 0.04122623825073242, 0.041076736450195314, 0.04012543869018555, 0.04012543869018555, 0.04070809555053711, 0.04095795059204101, 0.04092006301879883, 0.04097433471679687, 0.041640960693359375, 0.04225331115722656, 0.041247745513916016, 0.04102656173706055, 0.04096307373046875, 0.04255846405029297, 0.04122214508056641, 0.04100403213500976, 0.04015411376953125, 0.040174591064453126, 0.04009574508666992, 0.04016537475585937, 0.040114177703857425, 0.04020019149780273, 0.04037222290039062, 0.041199615478515625, 0.041578495025634765, 0.04017766571044922, 0.040052734375, 0.040079360961914064, 0.04011212921142578, 0.04013363265991211, 0.04007628631591797, 0.04008755111694336, 0.0400711669921875, 0.0401868782043457, 0.0411514892578125, 0.04168806457519531, 0.0401899528503418, 0.04013260650634766, 0.04007321548461914, 0.04005376052856445, 0.0415467529296875, 0.041082878112792966, 0.040883201599121094, 0.041008129119873046, 0.04090982437133789, 0.04097945785522461, 0.04086067199707031, 0.04081356811523437, 0.04018175888061523, 0.040084480285644535, 0.04000358581542969, 0.04011724853515625, 0.040081409454345705, 0.040032257080078126, 0.04044287872314453, 0.04035891342163086, 0.04023807907104492, 0.04044287872314453, 0.04252057647705078, 0.04022169494628906, 0.040032257080078126, 0.04003123092651367, 0.039997440338134765, 0.04025241470336914, 0.04079513549804688, 0.04086374282836914, 0.041016319274902346, 0.04124979019165039, 0.0415283203125, 0.04092211151123047, 0.04094976043701172, 0.04090675354003906, 0.04079411315917969, 0.04084531021118164, 0.040114177703857425, 0.040196094512939456, 0.04016230392456055, 0.040081409454345705, 0.04017868804931641, 0.04015411376953125, 0.04026572799682617, 0.04052377700805664, 0.04020019149780273, 0.040089599609375, 0.040130561828613284, 0.04015513610839844, 0.04015513610839844, 0.04011008071899414, 0.04067327880859375, 0.04102143859863281, 0.041059326171875, 0.041078784942626956, 0.0413757438659668, 0.04107263946533203, 0.04221952056884765, 0.04145151901245117, 0.04019404983520508, 0.04014182281494141, 0.04009062576293945, 0.040068096160888675, 0.04010598373413086, 0.040065025329589846, 0.04008550262451172, 0.04017766571044922, 0.040948734283447266, 0.040079360961914064, 0.040403968811035154, 0.040185855865478515, 0.040164352416992184, 0.040187904357910156, 0.040010753631591796, 0.040002559661865236, 0.04012748718261719, 0.039994369506835936, 0.040046592712402344, 0.04005887985229492, 0.04020633697509766, 0.04003430557250977, 0.04002816009521484, 0.040130561828613284, 0.04005990219116211, 0.03997081756591797, 0.040068096160888675, 0.04053504180908203, 0.04097740936279297, 0.040850433349609375, 0.040089599609375, 0.04068454360961914, 0.040207359313964845, 0.04009267044067383, 0.04006707382202149, 0.04003839874267578, 0.040048641204833986, 0.04007526397705078, 0.0400445442199707, 0.04005683135986328, 0.04090777587890625, 0.040030208587646485, 0.040101886749267575, 0.04001177597045898, 0.04008243179321289, 0.04005785751342773, 0.04000665664672851, 
0.039967742919921875, 0.04007628631591797, 0.039977985382080077, 0.03995340728759766, 0.03996057510375976, 0.040531967163085936, 0.040041473388671874, 0.039959552764892575, 0.04082688140869141, 0.040220672607421876, 0.04071731185913086, 0.043053054809570314, 0.041289726257324216, 0.0409989128112793, 0.04093030548095703, 0.04007731246948242, 0.0400445442199707, 0.04049817657470703, 0.04093849563598633, 0.040925182342529294, 0.04048691177368164, 0.040046592712402344, 0.040089599609375, 0.040018943786621096, 0.039977985382080077, 0.04010291290283203, 0.04000153732299805, 0.04003839874267578, 0.040033279418945314, 0.04003635025024414, 0.041011199951171876, 0.04086476898193359, 0.04084735870361328, 0.04089344024658203, 0.04110028839111328, 0.04149452972412109, 0.04277862548828125, 0.04192768096923828, 0.04175360107421875, 0.04118732833862305, 0.04017356872558594, 0.040084480285644535, 0.04013772964477539, 0.04010700988769531, 0.04009062576293945, 0.04012953567504883, 0.040033279418945314, 0.04010291290283203, 0.04011008071899414, 0.04082483291625977, 0.04099174499511719, 0.04011008071899414, 0.0401448974609375, 0.04008243179321289, 0.04007628631591797, 0.04046745681762695, 0.04031078338623047, 0.04011008071899414, 0.040048641204833986, 0.04009881591796875, 0.04006911849975586, 0.040231937408447264, 0.04100403213500976, 0.041046016693115236, 0.040166400909423826, 0.04011929702758789, 0.04007219314575195, 0.04029849624633789, 0.04015923309326172, 0.04008038330078125, 0.04006707382202149, 0.04008652877807617, 0.040880126953125, 0.04164710235595703, 0.04125183868408203, 0.04090572738647461, 0.04090060806274414, 0.04092313766479492, 0.04074496078491211, 0.0408166389465332, 0.04052070236206055, 0.04049407958984375, 0.0401797103881836, 0.04012953567504883, 0.04015513610839844, 0.04013568115234375, 0.04044083023071289, 0.0417781753540039, 0.040921089172363284, 0.04051148986816406, 0.04090777587890625, 0.04022272109985352, 0.04029439926147461, 0.040697856903076174, 0.04090060806274414, 0.040081409454345705, 0.040052734375, 0.04109209442138672, 0.0415283203125, 0.04066304016113281, 0.04096921539306641, 0.041404415130615234, 0.04113510513305664, 0.04081151962280274, 0.040046592712402344, 0.040242176055908206, 0.04061183929443359, 0.04123955154418945, 0.04087603378295898, 0.04092006301879883, 0.04080230331420898, 0.04099993515014649, 0.040850433349609375, 0.04007321548461914, 0.04027807998657226, 0.04101625442504883, 0.04050431823730469, 0.04053708648681641, 0.041030654907226564, 0.04075417709350586, 0.04011724853515625, 0.04014080047607422, 0.04080332946777344, 0.04101529693603516, 0.04097536087036133, 0.040976383209228515, 0.04129177474975586, 0.0406036491394043, 0.04022272109985352, 0.040166400909423826, 0.04069887924194336, 0.04151091384887695, 0.04007219314575195, 0.04046745681762695, 0.04085657501220703, 0.041169921875, 0.041106433868408204, 0.04110233688354492, 0.040815616607666014, 0.040908798217773434, 0.04091289520263672, 0.041247745513916016, 0.04044083023071289, 0.04012748718261719, 0.04007424163818359, 0.04095078277587891, 0.04098355102539063, 0.04368076705932617, 0.041588737487792966, 0.04111667251586914, 0.040973312377929685, 0.04099686431884766, 0.04159590530395508, 0.0413306884765625, 0.041040897369384766, 0.04120576095581055, 0.041322494506835936, 0.04110540771484375, 0.04096716690063477, 0.04097228622436523, 0.04118527984619141, 0.04109823989868164, 0.04092211151123047, 0.040525825500488284, 0.04010905456542969, 0.04015513610839844, 0.04051148986816406, 0.04012134552001953, 
0.04012953567504883, 0.04018175888061523, 0.041164798736572264, 0.04091904067993164, 0.04045414352416992, 0.040553470611572266, 0.040943614959716795, 0.041312255859375, 0.04088422393798828, 0.040871936798095705, 0.04094464111328125, 0.041043968200683595, 0.04095897674560547, 0.040956928253173826, 0.040610816955566405, 0.04007731246948242, 0.04010598373413086, 0.04105830383300781, 0.04100505447387695, 0.04010086441040039, 0.04007628631591797, 0.04006707382202149, 0.04007321548461914, 0.04002304077148437, 0.04019507217407227, 0.041017345428466793, 0.04084735870361328, 0.040825855255126955, 0.04044902420043945, 0.041316352844238284, 0.04064972686767578, 0.04072345733642578, 0.0414832649230957, 0.04086886215209961, 0.040089599609375, 0.04010291290283203, 0.04069683074951172, 0.04089344024658203, 0.04093337631225586, 0.04022886276245117, 0.041025535583496094, 0.0406824951171875, 0.04089548873901367, 0.04087603378295898, 0.040910846710205076, 0.04004044723510742, 0.03997491073608399, 0.0403691520690918, 0.040787967681884765, 0.040776702880859376, 0.040771583557128906, 0.04095590209960937, 0.0407347183227539, 0.03997491073608399, 0.03994214248657227, 0.04123648071289063, 0.04141363143920898, 0.04039372634887695, 0.04038246536254883, 0.04007628631591797, 0.04002816009521484, 0.04097945785522461, 0.04057395172119141, 0.039948287963867186, 0.03996979141235352, 0.04064051055908203, 0.040825855255126955, 0.04325580978393555, 0.04386304092407227, 0.041306110382080076, 0.04095180892944336, 0.04005887985229492, 0.04019302368164063, 0.04093644714355469, 0.04039884948730469, 0.041524223327636715, 0.041299968719482424, 0.04088115310668945, 0.040925182342529294, 0.04103577423095703, 0.04087603378295898, 0.040956928253173826, 0.04083609771728516, 0.04048588943481445, 0.040025089263916014, 0.04008652877807617, 0.040120319366455076, 0.040908798217773434, 0.040837120056152344, 0.04008755111694336, 0.03998310470581055, 0.040018943786621096, 0.04005068969726563, 0.04011315155029297, 0.0400261116027832, 0.04002099227905274, 0.040022014617919925, 0.04047872161865235, 0.04187750244140625, 0.041565185546875, 0.04171059036254883, 0.04135424041748047, 0.04081356811523437, 0.04010496139526367, 0.04019507217407227, 0.04010086441040039, 0.04009881591796875, 0.04004761505126953, 0.040008705139160154, 0.04006399917602539, 0.039977985382080077, 0.04038348770141602, 0.04092006301879883, 0.04088524627685547, 0.039975936889648435, 0.03999129486083984, 0.04003123092651367, 0.040081409454345705, 0.040068096160888675, 0.04002918243408203, 0.0412119026184082, 0.04151500701904297, 0.042613761901855465, 0.04130303955078125, 0.04055756759643555, 0.04091187286376953, 0.04091801452636719, 0.04091289520263672, 0.04094259262084961, 0.042052608489990234, 0.04092620849609375, 0.04094771194458008, 0.04098559951782227, 0.04089651107788086, 0.040869888305664064, 0.04015923309326172, 0.04087910461425781, 0.04003942489624023, 0.039977985382080077, 0.04019302368164063, 0.04104191970825195, 0.04089651107788086, 0.04008345413208008, 0.0408279037475586, 0.04150067138671875, 0.041011199951171876, 0.041388031005859374, 0.04137472152709961, 0.04111974334716797, 0.0409354248046875, 0.040033279418945314, 0.04106854248046875, 0.04006604766845703, 0.039975936889648435, 0.041093120574951174, 0.040954879760742184, 0.04089548873901367, 0.040103935241699216, 0.04148121643066406, 0.041059326171875, 0.040185855865478515, 0.04015923309326172, 0.04080025482177734, 0.04109209442138672, 0.041032703399658206, 0.041025535583496094, 0.04114227294921875, 
0.04096614456176758, 0.040531967163085936, 0.04152115249633789, 0.04100198364257813, 0.04030361557006836, 0.040635391235351564, 0.0404213752746582, 0.04014694213867188, 0.04013363265991211, 0.04007731246948242, 0.0405401611328125, 0.040959999084472655, 0.04088422393798828, 0.04100096130371094, 0.04110233688354492, 0.041032703399658206, 0.040397823333740236, 0.041022464752197264, 0.040941566467285154, 0.040822784423828126, 0.0411146240234375, 0.04134502410888672, 0.04012543869018555, 0.04009369659423828, 0.04085657501220703, 0.04152524948120117, 0.04129075241088867, 0.04098355102539063, 0.04106137466430664, 0.0409804801940918, 0.040400894165039065, 0.04117913436889648, 0.041040897369384766, 0.041032703399658206, 0.04094259262084961, 0.04051865768432617, 0.04009983825683594, 0.04046438217163086, 0.043150337219238284, 0.04115353775024414, 0.040880126953125, 0.04044595336914063, 0.04008038330078125, 0.0399554557800293, 0.040427520751953126, 0.04081459045410156, 0.041133056640625, 0.04083814239501953, 0.04087398529052735, 0.040776702880859376, 0.041420799255371094, 0.04020019149780273, 0.040052734375, 0.03999846267700195, 0.040041473388671874, 0.040005630493164065, 0.040048641204833986, 0.04084940719604492, 0.04085145568847656, 0.04007833480834961, 0.040150016784667966, 0.04007731246948242, 0.04012543869018555, 0.04034969711303711, 0.04011315155029297, 0.040079360961914064, 0.04001792144775391, 0.04003430557250977, 0.040046592712402344, 0.04049100875854492, 0.04146688079833984, 0.04088934326171875, 0.04087398529052735, 0.040801280975341796, 0.040049663543701174, 0.040002559661865236, 0.04047769546508789, 0.04014694213867188, 0.040035327911376956, 0.04017663955688477, 0.040188926696777344, 0.040041473388671874, 0.040046592712402344, 0.04005376052856445, 0.04005068969726563, 0.039992321014404295, 0.040089599609375, 0.04008345413208008, 0.04013363265991211, 0.040840190887451173, 0.041262081146240234, 0.04019302368164063, 0.04011008071899414, 0.04009267044067383, 0.04013363265991211, 0.04009062576293945, 0.04036505508422852, 0.04037222290039062, 0.04017868804931641, 0.0401715202331543, 0.04010700988769531, 0.040671230316162106, 0.040948734283447266, 0.040158206939697266, 0.04013363265991211, 0.04012748718261719, 0.04009062576293945, 0.0401162223815918, 0.040130561828613284, 0.04009983825683594, 0.04009062576293945, 0.04012134552001953, 0.04007628631591797, 0.04020940780639649, 0.04000358581542969, 0.040089599609375, 0.04011110305786133, 0.04005887985229492, 0.04011520004272461, 0.04010291290283203, 0.04010598373413086, 0.04006604766845703, 0.04041932678222656, 0.04127231979370117, 0.040716289520263675, 0.040304641723632816, 0.04086272048950195, 0.04092211151123047, 0.04090265655517578, 0.04095795059204101, 0.04086374282836914, 0.04051046371459961, 0.041839614868164066, 0.041250816345214845, 0.04021350479125976, 0.0403394546508789, 0.04010700988769531, 0.0400261116027832, 0.04049203109741211, 0.041366527557373044, 0.04103987121582031, 0.0408166389465332]",tokens/s,24.635019057298408,,,2,64,1,,, -4bit-awq-exllama-v2-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,a,a,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 
7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 304, in hf_raise_for_status - response.raise_for_status() - File ""/usr/local/lib/python3.10/dist-packages/requests/models.py"", line 1024, in raise_for_status - raise HTTPError(http_error_msg, response=self) -requests.exceptions.HTTPError: 404 Client Error: Not Found for url: https://huggingface.co/a/resolve/main/config.json - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1221, in hf_hub_download - return _hf_hub_download_to_cache_dir( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1325, in _hf_hub_download_to_cache_dir - _raise_on_head_call_error(head_call_error, force_download, local_files_only) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1823, in _raise_on_head_call_error - raise head_call_error - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1722, in _get_metadata_or_catch_error - metadata = get_hf_file_metadata(url=url, proxies=proxies, timeout=etag_timeout, headers=headers) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1645, in get_hf_file_metadata - r = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 372, in _request_wrapper - response = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 396, in _request_wrapper - hf_raise_for_status(response) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status - raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-66949194-415dc2620a2075d5164260f2;7fb73872-7101-4af2-8b80-b131a0c919cb) - -Repository Not Found for url: https://huggingface.co/a/resolve/main/config.json. -Please make sure you specified the correct `repo_id` and `repo_type`. -If you are trying to access a private or gated repo, make sure you are authenticated. 
- -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 425, in cached_file - raise EnvironmentError( -OSError: a is not a local folder and is not a valid model identifier listed on 'https://huggingface.co/models' -If this is a private repository, make sure to pass a token having permission to this repo either by logging in with `huggingface-cli login` or by passing `token=` - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,2,64,1,,, -4bit-awq-exllama-v2-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,-,-,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 106, in _inner_fn - validate_repo_id(arg_value) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 160, in validate_repo_id - raise HFValidationError( 
-huggingface_hub.errors.HFValidationError: Repo id must use alphanumeric chars or '-', '_', '.', '--' and '..' are forbidden, '-' and '.' cannot start or end the name, max length is 96: '-'. - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 466, in cached_file - raise EnvironmentError( -OSError: Incorrect path_or_model_id: '-'. Please provide either the path to a local folder or the repo_id of a model on the Hub. 
- -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,2,64,1,,, -4bit-awq-exllama-v2-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,facebook/opt-350m,facebook/opt-350m,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - self.load_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3907, in from_pretrained - hf_quantizer.postprocess_model(model) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/base.py"", line 195, in postprocess_model - return self._process_model_after_weight_loading(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/quantizer_awq.py"", line 107, in _process_model_after_weight_loading - model = post_init_awq_exllama_modules(model, self.quantization_config.exllama_config) - File ""/usr/local/lib/python3.10/dist-packages/transformers/integrations/awq.py"", line 466, in post_init_awq_exllama_modules - model = exllamav2_post_init( - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllamav2.py"", line 198, in exllamav2_post_init - submodule.post_init(scratch_space=model.scratch_spaces[device]) - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllamav2.py"", line 81, in post_init - self.q_handle = exlv2_ext.make_q_matrix( -NameError: name 'exlv2_ext' is not defined - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,2,64,1,,, 
-4bit-awq-exllama-v2-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,m,m,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 304, in hf_raise_for_status - response.raise_for_status() - File ""/usr/local/lib/python3.10/dist-packages/requests/models.py"", line 1024, in raise_for_status - raise HTTPError(http_error_msg, response=self) -requests.exceptions.HTTPError: 404 Client Error: Not Found for url: https://huggingface.co/m/resolve/main/config.json - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1221, in hf_hub_download - return _hf_hub_download_to_cache_dir( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1325, in _hf_hub_download_to_cache_dir - _raise_on_head_call_error(head_call_error, force_download, local_files_only) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1823, in _raise_on_head_call_error - raise head_call_error - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1722, in _get_metadata_or_catch_error - metadata = get_hf_file_metadata(url=url, proxies=proxies, timeout=etag_timeout, headers=headers) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1645, in get_hf_file_metadata - r = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 372, in _request_wrapper - response = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 396, in _request_wrapper - hf_raise_for_status(response) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status - raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 
Client Error. (Request ID: Root=1-66948c74-02b0fbca7e23d6547042e369;b3b8e0f4-923e-4889-ab00-5654001a7a3c) - -Repository Not Found for url: https://huggingface.co/m/resolve/main/config.json. -Please make sure you specified the correct `repo_id` and `repo_type`. -If you are trying to access a private or gated repo, make sure you are authenticated. - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 425, in cached_file - raise EnvironmentError( -OSError: m is not a local folder and is not a valid model identifier listed on 'https://huggingface.co/models' -If this is a private repository, make sure to pass a token having permission to this repo either by logging in with `huggingface-cli login` or by passing `token=` - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,2,64,1,,, -4bit-awq-exllama-v2-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,stabilityai/stablelm-base-alpha-7b,stabilityai/stablelm-base-alpha-7b,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File 
""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - self.load_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3907, in from_pretrained - hf_quantizer.postprocess_model(model) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/base.py"", line 195, in postprocess_model - return self._process_model_after_weight_loading(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/quantizer_awq.py"", line 107, in _process_model_after_weight_loading - model = post_init_awq_exllama_modules(model, self.quantization_config.exllama_config) - File ""/usr/local/lib/python3.10/dist-packages/transformers/integrations/awq.py"", line 466, in post_init_awq_exllama_modules - model = exllamav2_post_init( - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllamav2.py"", line 198, in exllamav2_post_init - submodule.post_init(scratch_space=model.scratch_spaces[device]) - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllamav2.py"", line 81, in post_init - self.q_handle = exlv2_ext.make_q_matrix( -NameError: name 'exlv2_ext' is not defined - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,2,64,1,,, -4bit-awq-exllama-v2-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,M,M,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 304, in hf_raise_for_status - response.raise_for_status() - File ""/usr/local/lib/python3.10/dist-packages/requests/models.py"", line 1024, in raise_for_status - raise HTTPError(http_error_msg, response=self) 
-requests.exceptions.HTTPError: 404 Client Error: Not Found for url: https://huggingface.co/M/resolve/main/config.json - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1221, in hf_hub_download - return _hf_hub_download_to_cache_dir( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1325, in _hf_hub_download_to_cache_dir - _raise_on_head_call_error(head_call_error, force_download, local_files_only) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1823, in _raise_on_head_call_error - raise head_call_error - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1722, in _get_metadata_or_catch_error - metadata = get_hf_file_metadata(url=url, proxies=proxies, timeout=etag_timeout, headers=headers) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1645, in get_hf_file_metadata - r = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 372, in _request_wrapper - response = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 396, in _request_wrapper - hf_raise_for_status(response) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status - raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-66948fed-0850d74b4401a0ae539cacb0;9ba5308e-6267-424e-8240-0ef43ceea49e) - -Repository Not Found for url: https://huggingface.co/M/resolve/main/config.json. -Please make sure you specified the correct `repo_id` and `repo_type`. -If you are trying to access a private or gated repo, make sure you are authenticated. 
- -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 425, in cached_file - raise EnvironmentError( -OSError: M is not a local folder and is not a valid model identifier listed on 'https://huggingface.co/models' -If this is a private repository, make sure to pass a token having permission to this repo either by logging in with `huggingface-cli login` or by passing `token=` - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,2,64,1,,, -4bit-awq-exllama-v2-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,8,8,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 304, in hf_raise_for_status - response.raise_for_status() - File ""/usr/local/lib/python3.10/dist-packages/requests/models.py"", line 1024, in raise_for_status - raise HTTPError(http_error_msg, response=self) -requests.exceptions.HTTPError: 404 Client Error: Not Found for url: https://huggingface.co/8/resolve/main/config.json - -The 
above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1221, in hf_hub_download - return _hf_hub_download_to_cache_dir( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1325, in _hf_hub_download_to_cache_dir - _raise_on_head_call_error(head_call_error, force_download, local_files_only) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1823, in _raise_on_head_call_error - raise head_call_error - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1722, in _get_metadata_or_catch_error - metadata = get_hf_file_metadata(url=url, proxies=proxies, timeout=etag_timeout, headers=headers) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1645, in get_hf_file_metadata - r = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 372, in _request_wrapper - response = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 396, in _request_wrapper - hf_raise_for_status(response) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status - raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-6694929d-6304a28710de5a83725b7e52;34eab1af-aab7-43af-9579-36df82c8b663) - -Repository Not Found for url: https://huggingface.co/8/resolve/main/config.json. -Please make sure you specified the correct `repo_id` and `repo_type`. -If you are trying to access a private or gated repo, make sure you are authenticated. 
- -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 425, in cached_file - raise EnvironmentError( -OSError: 8 is not a local folder and is not a valid model identifier listed on 'https://huggingface.co/models' -If this is a private repository, make sure to pass a token having permission to this repo either by logging in with `huggingface-cli login` or by passing `token=` - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,2,64,1,,, -4bit-awq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,EleutherAI/gpt-neox-20b,EleutherAI/gpt-neox-20b,cuda,0,42,,,True,,,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,gpt_neox,MB,11039.133696,15131.475968,0.0,14501.80608,13634.065408,s,1,14.776548828125,14.776548828125,0.0,14.776548828125,14.776548828125,14.776548828125,14.776548828125,[14.776548828125],,kWh,9.235084860347155e-05,5.059989296664129e-05,0.00019848349211998295,0.0003414342336900958,,MB,2074.288128,15150.350336,0.0,14501.80608,12898.830848,s,10,3.3686259460449217,0.3368625946044922,0.00011232239645778162,0.3368625030517578,0.3369219421386719,0.3370396606445312,0.33713383544921877,"[0.33672259521484377, 0.3371573791503906, 0.33687030029296877, 0.33687405395507813, 0.3367579345703125, 0.3368144226074219, 0.3368547058105469, 0.3368957824707031, 0.33679489135742186, 
0.3368838806152344]",tokens/s,759.9537737354533,kWh,3.984347269930511e-06,2.1823687631950657e-06,1.586795713879874e-05,2.2034673171924314e-05,tokens/kWh,11618052.966003817,MB,2085.298176,15152.447488,0.0,14501.80608,13243.67104,s,10,23.006421142578127,2.300642114257813,0.006445033814341154,2.2998525390625,2.3112770751953127,2.311795520019531,2.3122102758789063,"[2.302720947265625, 2.30058154296875, 2.29779248046875, 2.2939384765625, 2.29107470703125, 2.29912353515625, 2.31231396484375, 2.30118994140625, 2.296523681640625, 2.311161865234375]",tokens/s,27.383659374732346,kWh,2.757268436645823e-05,1.511190531829729e-05,9.593657674920597e-05,0.00013862116643396152,tokens/kWh,454476.0487931178,,s,630,22.993287155151368,0.03649728119865296,0.00048166708026236313,0.03633817672729492,0.03708948402404785,0.03747676010131836,0.03848630363464356,"[0.03689164733886719, 0.03629260635375976, 0.03624857711791992, 0.03637145614624023, 0.0363765754699707, 0.03644825744628906, 0.03621478271484375, 0.03623731231689453, 0.03672678375244141, 0.03800064086914062, 0.03746099090576172, 0.037149696350097655, 0.03618099212646484, 0.03623116683959961, 0.03599359893798828, 0.03625164794921875, 0.03623731231689453, 0.03627212905883789, 0.03638476943969727, 0.03648819351196289, 0.03630694580078125, 0.03704934310913086, 0.037422080993652344, 0.03626803207397461, 0.03625881576538086, 0.03624755096435547, 0.03624652862548828, 0.03748966217041016, 0.036536319732666016, 0.036261886596679685, 0.0362762222290039, 0.036291584014892575, 0.036311038970947264, 0.0363059196472168, 0.03616972732543945, 0.036124671936035156, 0.036201473236083984, 0.03638784027099609, 0.03642265701293945, 0.036316158294677735, 0.036168704986572264, 0.03629056167602539, 0.03609292984008789, 0.036326400756835936, 0.03685683059692383, 0.03639603042602539, 0.03660083389282227, 0.03663052749633789, 0.0364400634765625, 0.03657318496704102, 0.036997119903564454, 0.036316158294677735, 0.03628646469116211, 0.03643699264526367, 0.03637145614624023, 0.03622604751586914, 0.036208641052246096, 0.03640217590332031, 0.03626803207397461, 0.03659775924682617, 0.03908505630493164, 0.036969470977783206, 0.03853619384765625, 0.036397056579589845, 0.03631513595581055, 0.03640217590332031, 0.036291584014892575, 0.03619123077392578, 0.03671142578125, 0.03697356796264648, 0.036669441223144535, 0.036212734222412106, 0.03785932922363281, 0.03834265518188477, 0.03699302291870117, 0.03631513595581055, 0.03638272094726563, 0.036334590911865236, 0.03662131118774414, 0.036770816802978515, 0.03630284881591797, 0.036106239318847655, 0.036245502471923825, 0.03694694519042969, 0.036787200927734375, 0.03622809600830078, 0.03617996978759765, 0.03750092697143555, 0.03637452697753906, 0.03647590255737305, 0.03630080032348633, 0.0362690544128418, 0.036310016632080076, 0.036380672454833986, 0.036377601623535157, 0.0374128646850586, 0.03650457763671875, 0.03692748641967773, 0.036365310668945314, 0.03627212905883789, 0.03635200119018555, 0.03627315139770508, 0.03626803207397461, 0.036294654846191404, 0.036596736907958984, 0.03625881576538086, 0.03630387115478516, 0.036154369354248046, 0.03595980834960937, 0.03625881576538086, 0.0362977294921875, 0.03634688186645508, 0.03624959945678711, 0.03628134536743164, 0.03628339385986328, 0.03609600067138672, 0.036359169006347655, 0.03624038314819336, 0.03635200119018555, 0.036413440704345705, 0.03643084716796875, 0.036342784881591796, 0.03681382369995117, 0.036490238189697266, 0.03639603042602539, 0.0369244155883789, 0.036468734741210936, 
0.036364288330078126, 0.039465984344482424, 0.03764633560180664, 0.03673190307617188, 0.036278270721435545, 0.036063232421875, 0.03609804916381836, 0.03623526382446289, 0.03619839859008789, 0.03616358566284179, 0.03767193603515625, 0.03780710220336914, 0.036674560546875, 0.03608575820922852, 0.0362342414855957, 0.036201473236083984, 0.03623833465576172, 0.036773887634277344, 0.03680665588378906, 0.03637247848510742, 0.03621376037597656, 0.0361420783996582, 0.03628236770629883, 0.0361451530456543, 0.036275199890136715, 0.03623731231689453, 0.036377601623535157, 0.03609804916381836, 0.03640934371948242, 0.03632128143310547, 0.036393985748291016, 0.03634483337402344, 0.03582361602783203, 0.03591884613037109, 0.03627110290527344, 0.03622604751586914, 0.03632128143310547, 0.03640524673461914, 0.03641856002807617, 0.03618611145019531, 0.03628543853759766, 0.03609702301025391, 0.036424705505371094, 0.036242431640625, 0.03629875183105469, 0.03649740982055664, 0.03836415863037109, 0.03642265701293945, 0.03615948867797852, 0.03625369644165039, 0.03644416046142578, 0.0362977294921875, 0.0363612174987793, 0.036311038970947264, 0.03650252914428711, 0.036193279266357424, 0.03639603042602539, 0.03627724838256836, 0.036435966491699216, 0.036209663391113284, 0.03624755096435547, 0.03634483337402344, 0.036416511535644534, 0.036279296875, 0.03670937728881836, 0.03700428771972656, 0.03633356857299805, 0.03626393508911133, 0.03634995269775391, 0.03638784027099609, 0.03618918228149414, 0.036332542419433594, 0.03629260635375976, 0.036178943634033206, 0.03611852645874023, 0.03632230377197266, 0.036329471588134765, 0.036310016632080076, 0.03613798522949219, 0.03637350463867187, 0.036209663391113284, 0.036357120513916014, 0.03629056167602539, 0.036503551483154296, 0.03629260635375976, 0.03635609436035156, 0.036485118865966795, 0.036168704986572264, 0.03687936019897461, 0.03694079971313476, 0.03624857711791992, 0.036572158813476564, 0.036393985748291016, 0.0369879035949707, 0.036291584014892575, 0.0363612174987793, 0.03643084716796875, 0.03642572784423828, 0.03625267028808594, 0.03632025527954102, 0.03625881576538086, 0.036212734222412106, 0.03619942474365234, 0.03627212905883789, 0.03618304061889648, 0.03628236770629883, 0.03625164794921875, 0.03630182266235352, 0.03604787063598633, 0.036337665557861325, 0.03626291275024414, 0.036468734741210936, 0.03633561706542969, 0.03634483337402344, 0.036805633544921876, 0.036752384185791014, 0.0362608642578125, 0.03635200119018555, 0.036354049682617184, 0.03641753768920898, 0.03625471878051758, 0.037098495483398435, 0.036674560546875, 0.03648409652709961, 0.03623219299316406, 0.03684864044189453, 0.03622707366943359, 0.03634175872802734, 0.036278270721435545, 0.03633049774169922, 0.03636019134521484, 0.036318206787109376, 0.036380672454833986, 0.03623116683959961, 0.036329471588134765, 0.036375553131103515, 0.03711795043945312, 0.03678105545043946, 0.03626700973510742, 0.036326400756835936, 0.036631553649902344, 0.03698688125610351, 0.03642777633666992, 0.036362239837646484, 0.03637247848510742, 0.03631718444824219, 0.036324352264404294, 0.03620044708251953, 0.03659366226196289, 0.03632025527954102, 0.03625267028808594, 0.03633561706542969, 0.03639807891845703, 0.03631513595581055, 0.03624345779418945, 0.036354049682617184, 0.03645951843261719, 0.03641548919677735, 0.03637145614624023, 0.03631513595581055, 0.03641548919677735, 0.03622604751586914, 0.03672371292114258, 0.036514816284179685, 0.0368353271484375, 0.03664384078979492, 0.036377601623535157, 0.03628236770629883, 
0.03630694580078125, 0.03706265640258789, 0.0365291519165039, 0.03628134536743164, 0.03664384078979492, 0.03585331344604492, 0.035757057189941405, 0.036514816284179685, 0.03642675018310547, 0.03575603103637695, 0.0359106559753418, 0.036261886596679685, 0.03627315139770508, 0.036162559509277346, 0.0355860481262207, 0.0356495361328125, 0.03590655899047852, 0.03590041732788086, 0.03621478271484375, 0.03605401611328125, 0.03720601654052735, 0.03663872146606445, 0.03632230377197266, 0.03736166381835938, 0.036378623962402344, 0.03617587280273438, 0.03641446304321289, 0.03637247848510742, 0.0361973762512207, 0.03678617477416992, 0.036429824829101565, 0.036347904205322266, 0.036111358642578126, 0.0363694076538086, 0.03639807891845703, 0.03621785736083984, 0.03632230377197266, 0.036490238189697266, 0.03633356857299805, 0.03609804916381836, 0.036278270721435545, 0.03632332611083984, 0.03697868728637695, 0.03648921585083008, 0.03630694580078125, 0.03623628616333008, 0.03667660903930664, 0.03630387115478516, 0.036465663909912106, 0.036977664947509765, 0.03617279815673828, 0.03620044708251953, 0.036171775817871094, 0.03601408004760742, 0.03622195053100586, 0.03635302352905274, 0.0362608642578125, 0.03634380722045898, 0.036222976684570314, 0.03627110290527344, 0.03623219299316406, 0.036466686248779294, 0.03632128143310547, 0.036354049682617184, 0.03632230377197266, 0.036705280303955076, 0.03615948867797852, 0.03611340713500977, 0.0363765754699707, 0.036324352264404294, 0.036413440704345705, 0.0363581428527832, 0.036241409301757815, 0.036326400756835936, 0.03617587280273438, 0.03628134536743164, 0.0362158088684082, 0.03695001602172852, 0.036552703857421875, 0.036670463562011715, 0.038539264678955076, 0.03804569625854492, 0.03731148910522461, 0.03830169677734375, 0.03634483337402344, 0.036055038452148434, 0.03623628616333008, 0.03625267028808594, 0.03638579177856445, 0.03625471878051758, 0.03638579177856445, 0.036327423095703124, 0.03608063888549805, 0.03619123077392578, 0.03623014450073242, 0.03638272094726563, 0.037031936645507815, 0.036603904724121096, 0.03650457763671875, 0.036225025177001956, 0.03630387115478516, 0.036291584014892575, 0.0363612174987793, 0.03635507202148437, 0.036446208953857424, 0.03620556640625, 0.03621683120727539, 0.036239360809326174, 0.03618406295776367, 0.03623731231689453, 0.037628929138183595, 0.03801497650146484, 0.03727462387084961, 0.03644825744628906, 0.03726847839355469, 0.03735244750976562, 0.036375553131103515, 0.03620761489868164, 0.03644313430786133, 0.03613491058349609, 0.0362608642578125, 0.03632844924926758, 0.036377601623535157, 0.03801702499389648, 0.03896627044677734, 0.037359615325927735, 0.036860927581787106, 0.037323776245117186, 0.03692031860351563, 0.036342784881591796, 0.03644825744628906, 0.03644927978515625, 0.03709132766723633, 0.03622092819213867, 0.036446208953857424, 0.03638681411743164, 0.03731148910522461, 0.03631513595581055, 0.03659571075439453, 0.03652505493164063, 0.036908031463623044, 0.03655475234985352, 0.03733299255371094, 0.037114879608154294, 0.037617664337158206, 0.037048320770263675, 0.037179393768310545, 0.03627212905883789, 0.03698995208740234, 0.03631411361694336, 0.03640934371948242, 0.036195327758789066, 0.03656294250488281, 0.03655987167358398, 0.03630284881591797, 0.03709235382080078, 0.036560897827148435, 0.03644723129272461, 0.03613081741333008, 0.03626803207397461, 0.03751833724975586, 0.037751808166503906, 0.03668070220947266, 0.036378623962402344, 0.0364769287109375, 0.03620044708251953, 0.036367359161376955, 
0.037163009643554686, 0.03632332611083984, 0.03629363250732422, 0.036503551483154296, 0.03691110229492187, 0.036857856750488284, 0.0364031982421875, 0.03628543853759766, 0.036350975036621096, 0.03630489730834961, 0.03631513595581055, 0.0362874870300293, 0.03643904113769531, 0.036348926544189454, 0.036377601623535157, 0.03631718444824219, 0.03665100860595703, 0.03633356857299805, 0.036296703338623046, 0.03602534484863281, 0.036450302124023434, 0.03653529739379883, 0.03651891326904297, 0.03629260635375976, 0.03632332611083984, 0.03624448013305664, 0.036329471588134765, 0.03620761489868164, 0.036329471588134765, 0.036176895141601564, 0.036898815155029296, 0.03651686477661133, 0.03724288177490234, 0.03708927917480469, 0.03644518280029297, 0.036203521728515625, 0.03620249557495117, 0.03618918228149414, 0.036337665557861325, 0.036754432678222655, 0.03672883224487305, 0.0362690544128418, 0.037098495483398435, 0.036275199890136715, 0.03618406295776367, 0.03611443328857422, 0.036171775817871094, 0.036208641052246096, 0.03623731231689453, 0.03618918228149414, 0.03614617538452149, 0.036348926544189454, 0.03697971343994141, 0.036294654846191404, 0.036311038970947264, 0.036212734222412106, 0.03631411361694336, 0.03617587280273438, 0.03664691162109375, 0.03646976089477539, 0.03626291275024414, 0.036416511535644534, 0.0362762222290039, 0.03629260635375976, 0.036101119995117184, 0.036347904205322266, 0.03632844924926758, 0.03635507202148437, 0.03631923294067383, 0.03650457763671875, 0.036495361328125, 0.0362239990234375, 0.03627008056640625, 0.03634995269775391, 0.03627008056640625, 0.03619430541992188, 0.03618406295776367, 0.036370433807373044, 0.03619123077392578, 0.036490238189697266, 0.03678412628173828, 0.03712716674804688, 0.03633356857299805, 0.036209663391113284, 0.036171775817871094, 0.03624755096435547, 0.03632332611083984, 0.03621068954467774, 0.036299774169921875, 0.03637145614624023, 0.03623219299316406, 0.036241409301757815, 0.036187137603759766, 0.03631411361694336, 0.03779379272460937, 0.038133758544921875, 0.03756851196289063, 0.037384193420410154, 0.03668070220947266, 0.036144126892089845, 0.03633152008056641, 0.03638579177856445, 0.036212734222412106, 0.03639910507202149, 0.0364031982421875, 0.03789311981201172, 0.0375470085144043, 0.036490238189697266, 0.036318206787109376, 0.03627110290527344, 0.036239360809326174, 0.03622195053100586, 0.03646156692504883, 0.03622604751586914, 0.03626598358154297, 0.03623014450073242, 0.03646361541748047, 0.03604172897338867, 0.036348926544189454, 0.03626291275024414, 0.03619123077392578, 0.036348926544189454, 0.03632230377197266, 0.03629568099975586, 0.036334590911865236, 0.036490238189697266, 0.03624448013305664, 0.03649740982055664, 0.03684249496459961, 0.03631206512451172, 0.0362342414855957, 0.036345855712890625, 0.036345855712890625, 0.03621887969970703, 0.03630284881591797, 0.03723263931274414, 0.03670220947265625, 0.036149246215820316, 0.03605299377441406, 0.03679334259033203, 0.03672678375244141, 0.03654246520996094, 0.03771084976196289, 0.036278270721435545, 0.036452350616455076, 0.03711385726928711, 0.036278270721435545, 0.03633868789672851, 0.037324798583984374, 0.039277568817138675, 0.037951488494873044, 0.03711385726928711, 0.0363694076538086, 0.03665203094482422, 0.03693772888183594, 0.03659468841552734, 0.036908031463623044, 0.03682406234741211, 0.03714559936523437, 0.03726131057739258, 0.03761459350585938, 0.03915161514282227, 0.036634624481201174, 0.0361451530456543, 0.03621683120727539, 0.035945472717285154, 0.036483070373535154, 
0.03618815994262695]",tokens/s,27.399301185122468,,,2,64,1,,, -4bit-awq-exllama-v2-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,s,s,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 304, in hf_raise_for_status - response.raise_for_status() - File ""/usr/local/lib/python3.10/dist-packages/requests/models.py"", line 1024, in raise_for_status - raise HTTPError(http_error_msg, response=self) -requests.exceptions.HTTPError: 404 Client Error: Not Found for url: https://huggingface.co/s/resolve/main/config.json - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1221, in hf_hub_download - return _hf_hub_download_to_cache_dir( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1325, in _hf_hub_download_to_cache_dir - _raise_on_head_call_error(head_call_error, force_download, local_files_only) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1823, in _raise_on_head_call_error - raise head_call_error - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1722, in _get_metadata_or_catch_error - metadata = get_hf_file_metadata(url=url, proxies=proxies, timeout=etag_timeout, headers=headers) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1645, in get_hf_file_metadata - r = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 372, in _request_wrapper - response = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 396, in _request_wrapper - hf_raise_for_status(response) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status - raise RepositoryNotFoundError(message, response) from e 
-huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-66948d16-35fd6b2d61e9f87045c55932;d36d9f9e-ee85-417b-9445-b62706b71319) - -Repository Not Found for url: https://huggingface.co/s/resolve/main/config.json. -Please make sure you specified the correct `repo_id` and `repo_type`. -If you are trying to access a private or gated repo, make sure you are authenticated. - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 425, in cached_file - raise EnvironmentError( -OSError: s is not a local folder and is not a valid model identifier listed on 'https://huggingface.co/models' -If this is a private repository, make sure to pass a token having permission to this repo either by logging in with `huggingface-cli login` or by passing `token=` - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,2,64,1,,, -4bit-awq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,EleutherAI/pythia-12b,EleutherAI/pythia-12b,cuda,0,42,,,True,,,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,gpt_neox,MB,6682.693632,9622.257664,0.0,8992.587776,8404.320768,s,1,11.7803134765625,11.7803134765625,0.0,11.7803134765625,11.7803134765625,11.7803134765625,11.7803134765625,[11.7803134765625],,kWh,5.875740629514389e-05,3.218795053613041e-05,0.00011799092772601938,0.0002089362845572937,,MB,1698.422784,9641.132032,0.0,8992.587776,7880.275968,s,10,1.7112000122070312,0.17112000122070312,4.2560805179689245e-05,0.17111248016357422,0.1711771499633789,0.17117932662963867,0.17118106796264648,"[0.17102391052246094, 
0.17110707092285157, 0.17118150329589843, 0.17115032958984375, 0.1711131591796875, 0.17111180114746094, 0.1710972137451172, 0.17117666625976563, 0.1711100769042969, 0.17112828063964844]",tokens/s,1496.0261697860929,kWh,2.0234614559559383e-06,1.1087630032473751e-06,8.057384035356354e-06,1.118960849455967e-05,tokens/kWh,22878369.705648404,MB,1709.592576,9643.229184,0.0,8992.587776,8125.490176,s,10,16.7796669921875,1.67796669921875,0.013461243747165482,1.6729420776367188,1.6973265869140626,1.704133935546875,1.7095798144531251,"[1.6958138427734375, 1.7109412841796876, 1.6769683837890625, 1.67071484375, 1.670564697265625, 1.672662841796875, 1.665101318359375, 1.6732213134765626, 1.6700264892578125, 1.6736519775390626]",tokens/s,37.54544117552057,kWh,1.9693416342861916e-05,1.079238234515285e-05,6.235400562704399e-05,9.283980431505877e-05,tokens/kWh,678588.2463324118,,s,630,16.77732659530638,0.026630677135406983,0.00045983000770795937,0.026414079666137694,0.027227546119689944,0.02738288631439209,0.028225013294219975,"[0.02656153678894043, 0.02650214385986328, 0.026513408660888672, 0.026405887603759767, 0.026798080444335938, 0.026438655853271483, 0.026406911849975585, 0.02630860710144043, 0.02635468864440918, 0.026302463531494142, 0.02632806396484375, 0.026460159301757814, 0.026801151275634767, 0.027158527374267577, 0.026596351623535155, 0.026360832214355468, 0.02655436706542969, 0.026448896408081055, 0.02676633644104004, 0.02697318458557129, 0.027243520736694334, 0.026437631607055666, 0.02637004852294922, 0.026351615905761717, 0.026414079666137694, 0.026402816772460938, 0.02671513557434082, 0.02714419174194336, 0.02705510330200195, 0.027196416854858397, 0.02721177673339844, 0.027174911499023437, 0.027196416854858397, 0.02710425567626953, 0.027090944290161133, 0.02732646369934082, 0.02729267120361328, 0.026874879837036132, 0.027192319869995117, 0.027219968795776366, 0.02738483238220215, 0.027249664306640626, 0.027240447998046875, 0.027261951446533202, 0.027198463439941405, 0.027225088119506836, 0.027255807876586914, 0.02716262435913086, 0.02736025619506836, 0.027232255935668945, 0.027281408309936524, 0.027209728240966798, 0.02720358467102051, 0.0271646728515625, 0.027115520477294923, 0.02714726448059082, 0.027388927459716796, 0.027185152053833008, 0.02706329536437988, 0.027030527114868166, 0.026960895538330077, 0.027191295623779296, 0.02701312065124512, 0.0288143367767334, 0.027410432815551757, 0.0271779842376709, 0.027267072677612306, 0.027217920303344727, 0.027047935485839843, 0.027181055068969725, 0.027037696838378908, 0.027106304168701172, 0.027099136352539063, 0.02726911926269531, 0.027056127548217773, 0.026841087341308592, 0.027593727111816405, 0.027243520736694334, 0.027045888900756834, 0.027049983978271484, 0.02712063980102539, 0.027056127548217773, 0.027073535919189453, 0.026933248519897462, 0.02715238380432129, 0.027054079055786134, 0.027028480529785157, 0.027694080352783205, 0.02735206413269043, 0.0271779842376709, 0.027022335052490236, 0.02709503936767578, 0.027075584411621095, 0.027060224533081056, 0.027265024185180665, 0.027036672592163087, 0.02707967948913574, 0.0271646728515625, 0.027018239974975586, 0.02715238380432129, 0.02709401512145996, 0.02714112091064453, 0.027174911499023437, 0.0271779842376709, 0.02721177673339844, 0.02711244773864746, 0.027204608917236327, 0.027066368103027344, 0.02711347198486328, 0.027197439193725585, 0.027148288726806642, 0.027247615814208984, 0.02730700874328613, 0.02731622314453125, 0.0285296630859375, 0.028727296829223634, 0.027218944549560548, 
0.02672435188293457, 0.026506240844726563, 0.026529792785644532, 0.026610687255859376, 0.026514432907104493, 0.026435583114624024, 0.026380287170410157, 0.02669260787963867, 0.02725273513793945, 0.026401792526245117, 0.026395647048950196, 0.026355712890625, 0.026438655853271483, 0.026283008575439453, 0.02632499122619629, 0.02627276802062988, 0.026595327377319337, 0.026299392700195313, 0.026403839111328126, 0.026295295715332033, 0.02637107276916504, 0.02630348777770996, 0.026332160949707032, 0.02632601547241211, 0.0263874568939209, 0.026306560516357422, 0.02628812789916992, 0.026278911590576173, 0.026306560516357422, 0.026204160690307617, 0.02630348777770996, 0.026198015213012696, 0.026245119094848633, 0.026245119094848633, 0.02630348777770996, 0.026302463531494142, 0.026373119354248048, 0.026284032821655274, 0.02714112091064453, 0.029101055145263673, 0.02752409553527832, 0.02675814437866211, 0.026300416946411134, 0.02656153678894043, 0.026589183807373046, 0.027299840927124022, 0.027043840408325196, 0.027732992172241212, 0.02775654411315918, 0.02634035110473633, 0.027108352661132814, 0.028480512619018555, 0.026444799423217775, 0.026770431518554686, 0.026624000549316407, 0.0269752311706543, 0.026231807708740236, 0.026187776565551758, 0.02627276802062988, 0.02687283134460449, 0.026926080703735353, 0.02730700874328613, 0.026380287170410157, 0.026292224884033204, 0.026275840759277344, 0.02690355110168457, 0.02690662384033203, 0.026236928939819337, 0.026704896926879884, 0.026822656631469727, 0.026196992874145508, 0.026237951278686524, 0.026828800201416016, 0.02629324722290039, 0.02637107276916504, 0.026398719787597655, 0.026746879577636717, 0.02636288070678711, 0.026270719528198243, 0.026372095108032227, 0.026464256286621093, 0.026283008575439453, 0.026335231781005858, 0.026251264572143555, 0.02631782341003418, 0.026351615905761717, 0.026277887344360353, 0.02633318328857422, 0.02653388786315918, 0.026343423843383788, 0.026223615646362306, 0.026267648696899414, 0.026275840759277344, 0.026532863616943358, 0.026358783721923826, 0.026264575958251952, 0.02632294464111328, 0.026430463790893553, 0.026270719528198243, 0.026273792266845702, 0.026290176391601562, 0.026359807968139647, 0.02695577621459961, 0.026391551971435546, 0.0263055362701416, 0.02630860710144043, 0.026380287170410157, 0.02735923194885254, 0.02688102340698242, 0.026428415298461915, 0.026281984329223632, 0.02631679916381836, 0.02695680046081543, 0.026537984848022462, 0.026198015213012696, 0.026394624710083008, 0.02613248062133789, 0.026391551971435546, 0.026300416946411134, 0.026451967239379884, 0.027007999420166014, 0.02776166343688965, 0.0272988166809082, 0.026632192611694337, 0.027198463439941405, 0.027217920303344727, 0.027201536178588868, 0.026864639282226564, 0.026433536529541016, 0.026390527725219725, 0.02657689666748047, 0.02652569580078125, 0.02634035110473633, 0.02655232048034668, 0.02648678398132324, 0.026445823669433592, 0.026458112716674805, 0.026649599075317384, 0.027399168014526368, 0.02711756706237793, 0.026639360427856446, 0.026396671295166017, 0.026186752319335937, 0.026161151885986327, 0.02617651176452637, 0.027267072677612306, 0.026424320220947265, 0.026236928939819337, 0.02618060874938965, 0.026242048263549804, 0.026212352752685547, 0.026231807708740236, 0.026267648696899414, 0.026258432388305664, 0.026235904693603516, 0.026247167587280275, 0.026221567153930665, 0.026163200378417968, 0.02634444808959961, 0.02627276802062988, 0.026233856201171874, 0.026290176391601562, 0.026440704345703125, 0.02633830451965332, 
0.02656358337402344, 0.027073535919189453, 0.026368000030517577, 0.02617241668701172, 0.027074560165405274, 0.026636287689208983, 0.02628915214538574, 0.026211328506469726, 0.026263551712036134, 0.02618880081176758, 0.026254335403442384, 0.02627686309814453, 0.02717695999145508, 0.026985471725463867, 0.02633625602722168, 0.026831872940063478, 0.02637107276916504, 0.026300416946411134, 0.026399744033813476, 0.02668339157104492, 0.02650111961364746, 0.02700595283508301, 0.02834636878967285, 0.027716608047485353, 0.02727628707885742, 0.026606592178344726, 0.02638643264770508, 0.026444799423217775, 0.02633113670349121, 0.02636288070678711, 0.02633011245727539, 0.026225664138793944, 0.02631782341003418, 0.02628812789916992, 0.026427391052246094, 0.02632294464111328, 0.026421247482299806, 0.02628505516052246, 0.02634649658203125, 0.026263551712036134, 0.02628812789916992, 0.026299392700195313, 0.02627993583679199, 0.027379711151123046, 0.02652876853942871, 0.02633625602722168, 0.026373119354248048, 0.026395647048950196, 0.02631987190246582, 0.02637107276916504, 0.026393600463867187, 0.026389503479003908, 0.026315776824951172, 0.02632499122619629, 0.02632806396484375, 0.026419200897216798, 0.02636288070678711, 0.026402816772460938, 0.02637414360046387, 0.026355712890625, 0.026402816772460938, 0.026438655853271483, 0.026290176391601562, 0.026452991485595705, 0.02636288070678711, 0.026403839111328126, 0.02631372833251953, 0.027487232208251954, 0.02655948829650879, 0.026431488037109374, 0.026402816772460938, 0.026414079666137694, 0.026201087951660155, 0.026689535140991212, 0.027271167755126953, 0.02710937690734863, 0.027114496231079102, 0.026810367584228514, 0.02629324722290039, 0.02634854316711426, 0.02634547233581543, 0.027270143508911132, 0.02695884895324707, 0.026483711242675782, 0.02694963264465332, 0.02771865653991699, 0.0271329288482666, 0.02718720054626465, 0.027084800720214845, 0.026397695541381837, 0.02631167984008789, 0.026307584762573243, 0.026720256805419923, 0.026369024276733398, 0.026380287170410157, 0.026398719787597655, 0.02631270408630371, 0.026685440063476562, 0.02630963134765625, 0.02629631996154785, 0.026268672943115235, 0.02633318328857422, 0.02616422462463379, 0.025783296585083007, 0.026287103652954103, 0.026290176391601562, 0.026212352752685547, 0.026207231521606447, 0.026216447830200194, 0.025967615127563477, 0.025894912719726562, 0.026233856201171874, 0.02693734359741211, 0.02688102340698242, 0.026327039718627928, 0.026275840759277344, 0.026247167587280275, 0.026246143341064454, 0.026239999771118162, 0.026984447479248046, 0.026382335662841795, 0.026248191833496092, 0.02613555145263672, 0.026235904693603516, 0.027617279052734374, 0.028260351181030274, 0.027509759902954102, 0.027227136611938478, 0.026372095108032227, 0.026195968627929687, 0.02629734420776367, 0.02613248062133789, 0.026187776565551758, 0.026245119094848633, 0.026302463531494142, 0.026277887344360353, 0.026229759216308594, 0.02632806396484375, 0.02634239959716797, 0.02631167984008789, 0.02633830451965332, 0.0259102725982666, 0.027231231689453125, 0.02693939208984375, 0.02633932876586914, 0.026266624450683593, 0.026302463531494142, 0.02630963134765625, 0.026284032821655274, 0.026302463531494142, 0.027133951187133788, 0.026323968887329102, 0.02634239959716797, 0.026497024536132813, 0.026373119354248048, 0.026397695541381837, 0.026403839111328126, 0.026373119354248048, 0.02637926483154297, 0.02629324722290039, 0.026448896408081055, 0.02631884765625, 0.026430463790893553, 0.026234880447387695, 0.02631270408630371, 
0.02632294464111328, 0.026248191833496092, 0.026373119354248048, 0.026405887603759767, 0.026508287429809572, 0.026377216339111328, 0.026397695541381837, 0.026306560516357422, 0.027886592864990234, 0.02813849639892578, 0.026825727462768553, 0.026424320220947265, 0.026403839111328126, 0.02633932876586914, 0.026482688903808595, 0.026364927291870118, 0.026602495193481446, 0.026368000030517577, 0.026392576217651367, 0.026407936096191405, 0.026417152404785156, 0.026335231781005858, 0.026659839630126952, 0.026438655853271483, 0.026497024536132813, 0.026364927291870118, 0.026420223236083985, 0.02631679916381836, 0.026413055419921876, 0.026412031173706055, 0.026419200897216798, 0.02636288070678711, 0.02647039985656738, 0.026411008834838868, 0.026497024536132813, 0.026652671813964843, 0.02634547233581543, 0.026415103912353514, 0.026529792785644532, 0.026468351364135743, 0.026562559127807618, 0.026645503997802734, 0.026472448348999023, 0.026431488037109374, 0.026541055679321288, 0.026429439544677736, 0.026455039978027343, 0.026405887603759767, 0.026451967239379884, 0.026400768280029296, 0.026472448348999023, 0.0263874568939209, 0.026506240844726563, 0.026452991485595705, 0.02738380813598633, 0.028111871719360353, 0.027601919174194335, 0.027099136352539063, 0.026669055938720702, 0.026621952056884765, 0.027890687942504884, 0.027231231689453125, 0.026343423843383788, 0.026391551971435546, 0.026245119094848633, 0.02655436706542969, 0.02653593635559082, 0.026401792526245117, 0.02636595153808594, 0.026562559127807618, 0.026427391052246094, 0.026792959213256837, 0.026418176651000977, 0.0271646728515625, 0.027069440841674806, 0.027219968795776366, 0.02634137535095215, 0.02631270408630371, 0.026411008834838868, 0.026262527465820314, 0.026267648696899414, 0.02633830451965332, 0.026241024017333983, 0.026277887344360353, 0.026257408142089843, 0.026402816772460938, 0.026209280014038085, 0.026298368453979492, 0.026398719787597655, 0.02629324722290039, 0.026274816513061523, 0.026421247482299806, 0.027048959732055664, 0.02631270408630371, 0.026231807708740236, 0.026274816513061523, 0.026262527465820314, 0.026290176391601562, 0.026299392700195313, 0.026291200637817383, 0.026256383895874022, 0.026286079406738282, 0.02673766326904297, 0.02681548881530762, 0.02637004852294922, 0.02630143928527832, 0.02628096008300781, 0.026403839111328126, 0.026423295974731444, 0.02672947120666504, 0.02654207992553711, 0.026251264572143555, 0.02654412841796875, 0.02630451202392578, 0.026598400115966796, 0.02632089614868164, 0.027675647735595704, 0.027114496231079102, 0.02628505516052246, 0.02630451202392578, 0.02631884765625, 0.026816511154174806, 0.026368000030517577, 0.026274816513061523, 0.026392576217651367, 0.027122688293457032, 0.026529792785644532, 0.026705919265747072, 0.02692198371887207, 0.027018239974975586, 0.02793471908569336, 0.027472896575927733, 0.026792959213256837, 0.026364927291870118, 0.026436607360839845, 0.026388479232788087, 0.0259368953704834, 0.026359807968139647, 0.02616524887084961, 0.026436607360839845, 0.027381759643554687, 0.02666803169250488, 0.027022335052490236, 0.027107328414916993, 0.026830848693847657, 0.02634547233581543, 0.0263505916595459, 0.026406911849975585, 0.02638643264770508, 0.026382335662841795, 0.026398719787597655, 0.026413055419921876, 0.026214399337768556, 0.026219520568847656, 0.026449920654296875, 0.02615705680847168, 0.026059776306152343, 0.02633932876586914, 0.026622976303100586, 0.026655744552612305, 0.026076160430908202, 0.026746879577636717, 0.027090944290161133, 
0.02712883186340332, 0.027073535919189453, 0.026776575088500978, 0.02631884765625, 0.026263551712036134, 0.02627276802062988, 0.026223615646362306, 0.026306560516357422, 0.02615705680847168, 0.026286079406738282, 0.026017791748046876, 0.026239999771118162, 0.026253311157226563, 0.02635775947570801, 0.02636390495300293, 0.026607616424560547, 0.026549247741699217, 0.027410432815551757, 0.027181055068969725, 0.02678169631958008, 0.02608332824707031]",tokens/s,37.55067867464939,,,2,64,1,,, -4bit-awq-exllama-v2-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,2,2,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 304, in hf_raise_for_status - response.raise_for_status() - File ""/usr/local/lib/python3.10/dist-packages/requests/models.py"", line 1024, in raise_for_status - raise HTTPError(http_error_msg, response=self) -requests.exceptions.HTTPError: 404 Client Error: Not Found for url: https://huggingface.co/2/resolve/main/config.json - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1221, in hf_hub_download - return _hf_hub_download_to_cache_dir( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1325, in _hf_hub_download_to_cache_dir - _raise_on_head_call_error(head_call_error, force_download, local_files_only) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1823, in _raise_on_head_call_error - raise head_call_error - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1722, in _get_metadata_or_catch_error - metadata = get_hf_file_metadata(url=url, proxies=proxies, timeout=etag_timeout, headers=headers) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1645, in get_hf_file_metadata - r = _request_wrapper( - File 
""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 372, in _request_wrapper - response = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 396, in _request_wrapper - hf_raise_for_status(response) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status - raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-669493a7-189dd79b2f9f56bc4f380cc6;51481734-098e-4522-acd5-252a20c74930) - -Repository Not Found for url: https://huggingface.co/2/resolve/main/config.json. -Please make sure you specified the correct `repo_id` and `repo_type`. -If you are trying to access a private or gated repo, make sure you are authenticated. - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 425, in cached_file - raise EnvironmentError( -OSError: 2 is not a local folder and is not a valid model identifier listed on 'https://huggingface.co/models' -If this is a private repository, make sure to pass a token having permission to this repo either by logging in with `huggingface-cli login` or by passing `token=` - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,2,64,1,,, -4bit-awq-exllama-v2-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,tiiuae/falcon-40b,tiiuae/falcon-40b,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File 
""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 613, in resolve_trust_remote_code - answer = input( -EOFError: EOF when reading a line - -During handling of the above exception, another exception occurred: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 231, in load_model_with_no_weights - self.create_no_weights_model() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 213, in create_no_weights_model - meta_model = self.automodel_class.from_config(self.pretrained_config) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 419, in from_config - trust_remote_code = resolve_trust_remote_code( - File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 626, in resolve_trust_remote_code - raise ValueError( -ValueError: The repository for tiiuae/falcon-40b contains custom code which must be executed to correctly load the model. You can inspect the repository content at https://hf.co/tiiuae/falcon-40b. -Please pass the argument `trust_remote_code=True` to allow custom code to be run. 
- -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,2,64,1,,, -4bit-awq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,EleutherAI/pythia-160m,EleutherAI/pythia-160m,cuda,0,42,,,True,,,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,gpt_neox,MB,917.757952,931.659776,0.0,301.989888,282.769408,s,1,7.71605078125,7.71605078125,0.0,7.71605078125,7.71605078125,7.71605078125,7.71605078125,[7.71605078125],,kWh,7.475975285424536e-06,4.081437317748653e-06,9.660007728007347e-06,2.1217420331180537e-05,,MB,1484.013568,990.380032,0.0,341.835776,318.94528,s,17,0.1845952653884888,0.010858545022852282,0.00020383798595829513,0.010691231727600097,0.011107264328002929,0.01112632999420166,0.011168293495178222,"[0.011103296279907226, 0.010681728363037109, 0.01109440040588379, 0.010691136360168456, 0.011074208259582519, 0.010651071548461915, 0.010680224418640137, 0.011113216400146484, 0.010679295539855957, 0.011178784370422364, 0.010691231727600097, 0.010658080101013184, 0.011056991577148437, 0.010802016258239747, 0.011063136100769044, 0.010687935829162597, 0.010688511848449708]",tokens/s,23575.902615059094,kWh,1.2834443096951873e-07,7.032661370038722e-08,2.526380353832618e-07,4.5130908005316776e-07,tokens/kWh,567238753.472102,MB,1521.307648,1015.545856,0.0,367.0016,318.94784,s,17,9.948715026855469,0.5852185309914981,0.00673385407085712,0.585127197265625,0.5950670532226563,0.59615439453125,0.5975187695312499,"[0.57706201171875, 0.5946264038085938, 0.5852131958007812, 0.5837857666015625, 0.5852271728515624, 0.5771373901367187, 0.5871404418945313, 0.59572802734375, 0.5778384399414063, 0.5820794067382813, 0.5750896606445313, 0.5940531005859375, 0.585127197265625, 0.5827232666015625, 0.5808173217773438, 0.59785986328125, 0.5872063598632813]",tokens/s,107.6520934722678,kWh,6.798061860389144e-06,3.7250573425967086e-06,1.0636923145396091e-05,2.1160042348381946e-05,tokens/kWh,2977309.7313681627,,s,1071,9.94159101867676,0.009282531296616953,0.00021056255678291236,0.00918835163116455,0.009502719879150391,0.009553919792175293,0.009977446269989008,"[0.009220095634460449, 0.009171968460083007, 0.009115648269653321, 0.009251839637756347, 0.009152511596679687, 0.009115648269653321, 0.009132032394409179, 0.009116671562194823, 0.009162752151489258, 0.009138175964355469, 0.00908902359008789, 0.009150464057922364, 0.009168895721435547, 0.009081855773925781, 0.009057279586791991, 0.009154560089111329, 0.009150495529174804, 0.009087967872619628, 0.009132032394409179, 0.009133055686950683, 0.009084927558898925, 0.009091072082519532, 0.009118720054626465, 0.009395199775695801, 0.00910643196105957, 0.009208831787109375, 0.00912179183959961, 0.009086976051330567, 0.00912179183959961, 0.009178112030029297, 0.009203712463378906, 0.009323519706726074, 0.009172991752624511, 0.009146368026733399, 0.009120767593383788, 0.009149439811706543, 0.009126912117004395, 0.009149439811706543, 0.009261055946350098, 0.009149439811706543, 0.009273344039916993, 0.009046015739440917, 
0.00914739227294922, 0.009309184074401856, 0.009133055686950683, 0.009174015998840332, 0.009206784248352052, 0.009128959655761718, 0.0091146240234375, 0.00913920021057129, 0.009175040245056153, 0.00911359977722168, 0.009110527992248535, 0.009157631874084473, 0.009104384422302245, 0.009115679740905762, 0.009131999969482421, 0.009228287696838379, 0.009125887870788574, 0.00910643196105957, 0.009289728164672852, 0.00912281608581543, 0.009092096328735352, 0.010057727813720703, 0.0095283203125, 0.009455615997314454, 0.009566207885742188, 0.009207807540893554, 0.00931123161315918, 0.009445376396179199, 0.009537535667419434, 0.009504768371582031, 0.009470975875854493, 0.009466879844665528, 0.00950374412536621, 0.009456640243530273, 0.009551872253417968, 0.009498623847961426, 0.009494527816772461, 0.009507840156555175, 0.009538559913635255, 0.009556991577148437, 0.009506815910339356, 0.009437184333801269, 0.009474047660827637, 0.009479167938232422, 0.009466879844665528, 0.009400320053100587, 0.009329664230346679, 0.009333760261535644, 0.00932147216796875, 0.009574399948120118, 0.009467904090881347, 0.009486335754394531, 0.009476096153259277, 0.009455615997314454, 0.009489407539367676, 0.009469951629638672, 0.00950169563293457, 0.009527296066284179, 0.009467904090881347, 0.009431039810180664, 0.009455615997314454, 0.009452544212341308, 0.00951910400390625, 0.009467904090881347, 0.009408512115478516, 0.009495552062988282, 0.009448448181152343, 0.009553919792175293, 0.009370623588562011, 0.009268223762512207, 0.009384991645812989, 0.009475040435791015, 0.009358336448669433, 0.00933683204650879, 0.00930303955078125, 0.00910643196105957, 0.009102335929870605, 0.009169919967651367, 0.0090316801071167, 0.009140224456787109, 0.009141247749328613, 0.009552895545959473, 0.00942796802520752, 0.009475071907043458, 0.009350144386291503, 0.009201663970947266, 0.009160703659057617, 0.009166848182678223, 0.00902143955230713, 0.009000960350036622, 0.00899071979522705, 0.009052160263061524, 0.008985600471496581, 0.009117695808410644, 0.00919654369354248, 0.009148415565490722, 0.00912281608581543, 0.009120767593383788, 0.009207807540893554, 0.009174015998840332, 0.00912384033203125, 0.009352191925048828, 0.009533439636230469, 0.009455615997314454, 0.00939417552947998, 0.009422847747802734, 0.009468928337097168, 0.00950374412536621, 0.009459712028503419, 0.009470975875854493, 0.009472000122070312, 0.009480192184448242, 0.009567232131958007, 0.00940544033050537, 0.009476096153259277, 0.009483263969421387, 0.009452544212341308, 0.009423871994018555, 0.009444416046142578, 0.009454527854919433, 0.009473024368286133, 0.009437184333801269, 0.009464832305908203, 0.009581567764282227, 0.009473024368286133, 0.009449472427368164, 0.009480192184448242, 0.009115648269653321, 0.009094143867492676, 0.009132032394409179, 0.009171072006225587, 0.008984448432922364, 0.00910540771484375, 0.009506815910339356, 0.009351167678833008, 0.0091146240234375, 0.009150464057922364, 0.00914128017425537, 0.009135071754455566, 0.009183232307434081, 0.00922214412689209, 0.009244671821594238, 0.00912281608581543, 0.009185279846191406, 0.009164799690246582, 0.009252863883972168, 0.009203712463378906, 0.009157631874084473, 0.009099264144897461, 0.00909004783630371, 0.009118720054626465, 0.009135104179382325, 0.009099264144897461, 0.009086976051330567, 0.009245696067810059, 0.00908902359008789, 0.009119808197021484, 0.00901420783996582, 0.00920576000213623, 0.009111552238464356, 0.009117695808410644, 0.009141247749328613, 0.009140224456787109, 
0.009119744300842286, 0.009220095634460449, 0.009127936363220214, 0.009092096328735352, 0.009102335929870605, 0.009152511596679687, 0.00911359977722168, 0.009112575531005859, 0.00912281608581543, 0.009110527992248535, 0.009194496154785157, 0.009084927558898925, 0.009146368026733399, 0.009125887870788574, 0.00910540771484375, 0.009124863624572753, 0.009125887870788574, 0.009136128425598144, 0.009127936363220214, 0.009119744300842286, 0.009091072082519532, 0.009120767593383788, 0.009101311683654785, 0.009192447662353515, 0.009505791664123535, 0.009445376396179199, 0.009479167938232422, 0.009451519966125489, 0.009668607711791993, 0.00943616008758545, 0.009515007972717286, 0.009516032218933105, 0.009420831680297852, 0.009426912307739259, 0.009463808059692384, 0.009423871994018555, 0.009461759567260742, 0.00960307216644287, 0.009539584159851074, 0.009500672340393066, 0.009467904090881347, 0.009461759567260742, 0.00952627182006836, 0.009451519966125489, 0.009485312461853027, 0.009607168197631836, 0.009460736274719238, 0.009308159828186035, 0.009466879844665528, 0.009389056205749511, 0.009497599601745605, 0.00943616008758545, 0.009461759567260742, 0.009490431785583496, 0.009448448181152343, 0.00941875171661377, 0.00949350357055664, 0.009442303657531738, 0.009515007972717286, 0.009459712028503419, 0.009492480278015136, 0.009347071647644043, 0.009152511596679687, 0.009144319534301757, 0.009096192359924317, 0.009462783813476563, 0.009532416343688965, 0.009458687782287598, 0.00973414421081543, 0.009499648094177245, 0.00949350357055664, 0.009448448181152343, 0.009457663536071777, 0.009490431785583496, 0.009368576049804688, 0.009472000122070312, 0.00945462417602539, 0.009317343711853027, 0.009154560089111329, 0.009144384384155274, 0.009091008186340331, 0.009133055686950683, 0.009219072341918945, 0.009102335929870605, 0.009075712203979493, 0.009112575531005859, 0.00912281608581543, 0.009234432220458985, 0.009129983901977539, 0.009179136276245118, 0.009082880020141602, 0.009091072082519532, 0.009156607627868652, 0.009125920295715332, 0.009153504371643067, 0.009125887870788574, 0.009158656120300293, 0.009232383728027344, 0.009107456207275391, 0.009169919967651367, 0.009145343780517578, 0.008996864318847657, 0.009220095634460449, 0.009166848182678223, 0.009164799690246582, 0.009143296241760255, 0.009186304092407227, 0.009168895721435547, 0.009128959655761718, 0.009115648269653321, 0.00908083152770996, 0.00913100814819336, 0.00912384033203125, 0.009132032394409179, 0.009168895721435547, 0.009218048095703125, 0.009120767593383788, 0.009111552238464356, 0.009203712463378906, 0.009133055686950683, 0.009118720054626465, 0.009143296241760255, 0.009111552238464356, 0.009130016326904297, 0.009113568305969239, 0.009186304092407227, 0.009152511596679687, 0.009040896415710448, 0.009166848182678223, 0.009115648269653321, 0.00911359977722168, 0.0091146240234375, 0.009171968460083007, 0.009161727905273438, 0.009124863624572753, 0.009271295547485351, 0.009109503746032714, 0.009143296241760255, 0.009185279846191406, 0.009162752151489258, 0.009138175964355469, 0.00908902359008789, 0.009149439811706543, 0.009540608406066894, 0.009185279846191406, 0.009127936363220214, 0.009174015998840332, 0.009118720054626465, 0.009176063537597656, 0.009190400123596192, 0.009255935668945312, 0.009151488304138184, 0.009157631874084473, 0.0095283203125, 0.009073663711547851, 0.009084927558898925, 0.009145343780517578, 0.009133055686950683, 0.009110527992248535, 0.009167936325073242, 0.009177023887634278, 0.009117695808410644, 
0.009140224456787109, 0.009259008407592773, 0.00912281608581543, 0.009129983901977539, 0.009142271995544434, 0.009120767593383788, 0.009101311683654785, 0.009111552238464356, 0.009141247749328613, 0.009104384422302245, 0.00910028839111328, 0.00910028839111328, 0.009120767593383788, 0.009134079933166504, 0.009109503746032714, 0.009125887870788574, 0.009256959915161133, 0.009499648094177245, 0.009153535842895508, 0.009034751892089844, 0.00910643196105957, 0.009132032394409179, 0.009119744300842286, 0.009094143867492676, 0.009118720054626465, 0.009125887870788574, 0.009146368026733399, 0.00912281608581543, 0.009083904266357423, 0.009155584335327148, 0.009099264144897461, 0.009137151718139648, 0.009155584335327148, 0.009224191665649414, 0.009120767593383788, 0.009101311683654785, 0.009133055686950683, 0.00909004783630371, 0.009112575531005859, 0.009129983901977539, 0.009129023551940918, 0.009097151756286621, 0.00912179183959961, 0.009136128425598144, 0.009127936363220214, 0.009137151718139648, 0.009056256294250489, 0.009362431526184082, 0.010359807968139649, 0.00963276767730713, 0.010062848091125488, 0.01019596767425537, 0.0095283203125, 0.009572352409362793, 0.009465855598449707, 0.009502719879150391, 0.009467904090881347, 0.00951807975769043, 0.009387007713317871, 0.00942080020904541, 0.009334783554077148, 0.009568287849426269, 0.009464799880981445, 0.009465855598449707, 0.009414655685424805, 0.00943513584136963, 0.009448448181152343, 0.009450495719909668, 0.009515007972717286, 0.009439231872558594, 0.009487360000610352, 0.009485312461853027, 0.009470975875854493, 0.009415679931640625, 0.009445376396179199, 0.009448448181152343, 0.009502719879150391, 0.009501759529113769, 0.009490367889404296, 0.00941260814666748, 0.00910643196105957, 0.00911359977722168, 0.009396224021911622, 0.009467904090881347, 0.009532416343688965, 0.009478143692016602, 0.009727999687194825, 0.009547776222229003, 0.009487360000610352, 0.009480192184448242, 0.009467904090881347, 0.009377792358398437, 0.009517056465148926, 0.009532416343688965, 0.009422847747802734, 0.00951807975769043, 0.00942080020904541, 0.009451519966125489, 0.009367551803588867, 0.009430015563964844, 0.009460736274719238, 0.00950169563293457, 0.00942899227142334, 0.009438207626342773, 0.00941977596282959, 0.009475071907043458, 0.009415679931640625, 0.009373696327209472, 0.00954265594482422, 0.009450495719909668, 0.00941062355041504, 0.00951801586151123, 0.009459712028503419, 0.009425919532775879, 0.009586688041687011, 0.009455615997314454, 0.009437184333801269, 0.009440256118774413, 0.009782272338867188, 0.009652223587036133, 0.009465855598449707, 0.009400320053100587, 0.009457663536071777, 0.009458687782287598, 0.009381888389587402, 0.009153535842895508, 0.009262080192565919, 0.0094136323928833, 0.009496576309204101, 0.009456640243530273, 0.009452544212341308, 0.009574399948120118, 0.009479167938232422, 0.0094269437789917, 0.009309184074401856, 0.00932044792175293, 0.009462783813476563, 0.009124863624572753, 0.009101311683654785, 0.00910643196105957, 0.00912179183959961, 0.00913100814819336, 0.009076736450195312, 0.00912384033203125, 0.009128959655761718, 0.009144319534301757, 0.009120767593383788, 0.009119744300842286, 0.009192447662353515, 0.00912281608581543, 0.009079808235168458, 0.009107456207275391, 0.009138175964355469, 0.00913920021057129, 0.00910540771484375, 0.009151488304138184, 0.009137151718139648, 0.009065471649169921, 0.009001983642578124, 0.009056256294250489, 0.009058303833007812, 0.00910540771484375, 0.009170944213867188, 
0.009136128425598144, 0.009144319534301757, 0.009143296241760255, 0.009193471908569336, 0.009178112030029297, 0.009132032394409179, 0.009141247749328613, 0.009126943588256836, 0.009109472274780274, 0.00913100814819336, 0.009157631874084473, 0.00914739227294922, 0.00913100814819336, 0.009149439811706543, 0.00910540771484375, 0.00918015956878662, 0.009164799690246582, 0.009144384384155274, 0.009131967544555664, 0.009117695808410644, 0.009153535842895508, 0.009136128425598144, 0.009142271995544434, 0.009138175964355469, 0.009165823936462402, 0.009048064231872559, 0.008997920036315919, 0.009036767959594726, 0.009028608322143555, 0.00918835163116455, 0.009584639549255371, 0.009638912200927734, 0.009492480278015136, 0.00951807975769043, 0.009441280364990234, 0.00943513584136963, 0.009470975875854493, 0.00951193618774414, 0.009425951957702636, 0.009529312133789063, 0.009351167678833008, 0.009387007713317871, 0.009174015998840332, 0.00913100814819336, 0.00912384033203125, 0.009146368026733399, 0.009356287956237793, 0.009239551544189453, 0.009118720054626465, 0.00913100814819336, 0.009144319534301757, 0.009128959655761718, 0.009159680366516113, 0.009120767593383788, 0.009073663711547851, 0.009117695808410644, 0.009118720054626465, 0.009157631874084473, 0.00921395206451416, 0.009164799690246582, 0.009117695808410644, 0.009112575531005859, 0.009111552238464356, 0.009122847557067871, 0.009105376243591309, 0.009081855773925781, 0.00912281608581543, 0.009129983901977539, 0.009037823677062988, 0.009117695808410644, 0.00951910400390625, 0.009124863624572753, 0.009149439811706543, 0.009124863624572753, 0.009127936363220214, 0.00918835163116455, 0.009104384422302245, 0.009167872428894042, 0.00933683204650879, 0.009889792442321778, 0.010317824363708495, 0.009570303916931153, 0.009486335754394531, 0.009441280364990234, 0.009461759567260742, 0.009441280364990234, 0.009168895721435547, 0.00913100814819336, 0.009124863624572753, 0.009104384422302245, 0.009203712463378906, 0.009160703659057617, 0.00927948760986328, 0.009184255599975585, 0.009249792098999024, 0.009033727645874023, 0.009104384422302245, 0.00910643196105957, 0.00914739227294922, 0.009127936363220214, 0.009086976051330567, 0.00911359977722168, 0.009129983901977539, 0.009127936363220214, 0.009069567680358886, 0.009111552238464356, 0.009148415565490722, 0.009129983901977539, 0.009224191665649414, 0.009178112030029297, 0.009175040245056153, 0.009164799690246582, 0.009111552238464356, 0.009160703659057617, 0.009034751892089844, 0.009142271995544434, 0.009067520141601563, 0.008981504440307618, 0.008956928253173829, 0.00898252773284912, 0.009011199951171875, 0.008921088218688965, 0.009088000297546387, 0.009111552238464356, 0.009133055686950683, 0.009109503746032714, 0.009103360176086426, 0.009091072082519532, 0.009256959915161133, 0.009053183555603026, 0.009077759742736816, 0.009092096328735352, 0.009102335929870605, 0.009152511596679687, 0.00910848045349121, 0.00912179183959961, 0.009240575790405273, 0.009119744300842286, 0.009133055686950683, 0.00913100814819336, 0.009135104179382325, 0.009153535842895508, 0.00920576000213623, 0.009242624282836913, 0.009183232307434081, 0.009136128425598144, 0.009163776397705077, 0.00914739227294922, 0.009043040275573731, 0.009098143577575683, 0.009094143867492676, 0.009119744300842286, 0.009151488304138184, 0.009152511596679687, 0.009220095634460449, 0.00918835163116455, 0.009215999603271484, 0.009190400123596192, 0.009126912117004395, 0.009124863624572753, 0.009110527992248535, 0.00908083152770996, 
0.009118720054626465, 0.00909823989868164, 0.00918835163116455, 0.009119744300842286, 0.009079808235168458, 0.009138175964355469, 0.009672703742980958, 0.00970137596130371, 0.01033625602722168, 0.009943039894104003, 0.009579520225524902, 0.00951193618774414, 0.009433183670043945, 0.009400223731994629, 0.009497599601745605, 0.009470015525817872, 0.009438143730163575, 0.00919961643218994, 0.009142271995544434, 0.00923033618927002, 0.009103360176086426, 0.009117695808410644, 0.009103360176086426, 0.00910540771484375, 0.009300992012023926, 0.009469951629638672, 0.009431039810180664, 0.009438207626342773, 0.009442303657531738, 0.009431039810180664, 0.009469951629638672, 0.009417728424072265, 0.00940236759185791, 0.009417728424072265, 0.009508864402770996, 0.0094136323928833, 0.009476096153259277, 0.00951296043395996, 0.009453568458557129, 0.009392127990722657, 0.009445376396179199, 0.009484288215637206, 0.009448448181152343, 0.00954265594482422, 0.009537535667419434, 0.00941875171661377, 0.00942899227142334, 0.009468928337097168, 0.00943513584136963, 0.009469951629638672, 0.009478176116943359, 0.009495552062988282, 0.009594847679138183, 0.009366527557373047, 0.009334783554077148, 0.009495552062988282, 0.00949350357055664, 0.009407487869262696, 0.009469951629638672, 0.009443327903747559, 0.009450495719909668, 0.009446399688720703, 0.0094269437789917, 0.009457663536071777, 0.009377792358398437, 0.009371647834777832, 0.009447423934936524, 0.009443391799926757, 0.00943404769897461, 0.00943616008758545, 0.009416704177856445, 0.009513024330139161, 0.00954361629486084, 0.009422847747802734, 0.00940236759185791, 0.00941875171661377, 0.009423871994018555, 0.009447423934936524, 0.009498623847961426, 0.009444352149963378, 0.009469951629638672, 0.009384960174560546, 0.009450495719909668, 0.009424896240234374, 0.009471039772033691, 0.009451519966125489, 0.009554880142211914, 0.009475071907043458, 0.009580544471740723, 0.009442303657531738, 0.00940339183807373, 0.009375743865966797, 0.00942080020904541, 0.009396224021911622, 0.009158656120300293, 0.009132032394409179, 0.009107456207275391, 0.009126912117004395, 0.009266176223754884, 0.009167872428894042, 0.009088000297546387, 0.009155584335327148, 0.009107456207275391, 0.009111552238464356, 0.009118720054626465, 0.009124863624572753, 0.009091072082519532, 0.00914739227294922, 0.00922111988067627, 0.009116671562194823, 0.009083904266357423, 0.008964096069335938, 0.00902451229095459, 0.009110527992248535, 0.009119744300842286, 0.009161727905273438, 0.009119744300842286, 0.009107456207275391, 0.009066495895385742, 0.009157631874084473, 0.0091146240234375, 0.009329664230346679, 0.009149439811706543, 0.009140224456787109, 0.009127936363220214, 0.009219072341918945, 0.009136128425598144, 0.00918015956878662, 0.009312255859375, 0.009207807540893554, 0.009163776397705077, 0.009088000297546387, 0.009157631874084473, 0.009161727905273438, 0.009125887870788574, 0.009118720054626465, 0.009104448318481445, 0.009164735794067383, 0.009134079933166504, 0.009224191665649414, 0.009151488304138184, 0.009145343780517578, 0.009155584335327148, 0.00922316837310791, 0.00911359977722168, 0.009042943954467773, 0.009109503746032714, 0.009153535842895508, 0.009153535842895508, 0.0091146240234375, 0.009136128425598144, 0.009146368026733399, 0.009133055686950683, 0.009165823936462402, 0.009140224456787109, 0.00912179183959961, 0.009194496154785157, 0.009127936363220214, 0.009289728164672852, 0.009208831787109375, 0.00941055965423584, 0.009541631698608399, 0.009254912376403808, 
0.009425919532775879, 0.009437184333801269, 0.00933683204650879, 0.00912384033203125, 0.009149439811706543, 0.009137151718139648, 0.009084927558898925, 0.00910540771484375, 0.009068544387817384, 0.009033727645874023, 0.009084927558898925, 0.00912384033203125, 0.009201663970947266, 0.010323967933654785, 0.009943039894104003, 0.010065919876098632, 0.009530367851257325, 0.009479167938232422, 0.009335807800292969, 0.009152511596679687, 0.009135104179382325, 0.00911359977722168, 0.009278464317321777, 0.009137151718139648, 0.009126912117004395, 0.00931123161315918, 0.009663488388061523, 0.009441280364990234, 0.009444352149963378, 0.009469951629638672, 0.009517056465148926, 0.009186304092407227, 0.009112575531005859, 0.00910848045349121, 0.009041919708251953, 0.009056256294250489, 0.009092096328735352, 0.009143296241760255, 0.009136128425598144, 0.009116671562194823, 0.00910540771484375, 0.009101311683654785, 0.009036800384521485, 0.008977408409118653, 0.009053183555603026, 0.00912384033203125, 0.00909721565246582, 0.009103360176086426, 0.009085951805114746, 0.009127936363220214, 0.009161727905273438, 0.00910643196105957, 0.009191424369812011, 0.009107456207275391, 0.009677824020385742, 0.009538559913635255, 0.00942080020904541, 0.009427007675170898, 0.009404352188110351, 0.00943616008758545, 0.0094136323928833, 0.009330816268920898, 0.00937667179107666, 0.009338848114013672, 0.009292799949645996, 0.009119808197021484, 0.009093055725097656, 0.009144319534301757, 0.009190400123596192, 0.009217023849487305, 0.009382911682128906, 0.009414655685424805, 0.009390080451965332, 0.00930611228942871, 0.009143296241760255, 0.009138175964355469, 0.009141247749328613, 0.009172991752624511, 0.00913100814819336, 0.009294848442077636, 0.009136128425598144, 0.009109503746032714, 0.009095168113708496, 0.009217023849487305, 0.009190400123596192, 0.009158656120300293, 0.00913920021057129, 0.009142271995544434, 0.009177087783813476, 0.008863743782043456, 0.009092096328735352, 0.009255935668945312, 0.010354687690734863, 0.010799103736877442, 0.009640959739685059, 0.00940544033050537, 0.009496576309204101, 0.009424896240234374, 0.009583680152893067, 0.009428928375244141, 0.009439231872558594, 0.00943513584136963, 0.00940236759185791, 0.009730048179626465, 0.00953651237487793, 0.009404416084289552, 0.009417728424072265, 0.009368576049804688, 0.009415712356567383, 0.009414624214172364, 0.00941260814666748, 0.009415679931640625, 0.009479167938232422, 0.009422847747802734, 0.009293824195861817, 0.009404416084289552, 0.009408512115478516, 0.009376768112182618, 0.009440256118774413, 0.009480192184448242, 0.00942796802520752, 0.009296895980834961, 0.009342975616455078, 0.00941977596282959, 0.00942899227142334, 0.009438207626342773, 0.009461759567260742, 0.009804800033569335, 0.009606143951416016, 0.009378815650939941, 0.009299967765808105, 0.009553919792175293, 0.009573375701904297, 0.009409536361694336, 0.01115443229675293, 0.009678848266601562, 0.00962662410736084, 0.00942796802520752, 0.009463808059692384, 0.009434111595153808, 0.009366623878479004, 0.00940944004058838, 0.009389056205749511, 0.009402400016784667, 0.00945148754119873, 0.009565183639526367, 0.009370623588562011, 0.009445376396179199, 0.009102335929870605, 0.009099264144897461, 0.00914739227294922, 0.009085951805114746, 0.00920576000213623, 0.00903270435333252, 0.009095168113708496, 0.009085951805114746, 0.009078783988952637, 0.009195520401000976, 0.00971776008605957, 0.009440256118774413, 0.009460736274719238, 0.009432064056396485, 0.009393152236938476, 
0.009465855598449707, 0.009430015563964844, 0.00942796802520752, 0.009391103744506836, 0.009444352149963378, 0.00941055965423584, 0.009463839530944824, 0.00918012809753418, 0.009073663711547851, 0.00919654369354248, 0.00910643196105957, 0.009048064231872559, 0.00910540771484375, 0.009141247749328613, 0.009197567939758301, 0.009081855773925781, 0.009142271995544434, 0.009092096328735352, 0.009125887870788574, 0.009078783988952637, 0.009093152046203614, 0.009124832153320312, 0.009096192359924317, 0.00940236759185791, 0.00952627182006836, 0.009399295806884766, 0.009457663536071777, 0.00942796802520752, 0.009417728424072265, 0.009450495719909668, 0.009462783813476563, 0.009423871994018555, 0.00949350357055664, 0.009407487869262696, 0.009474047660827637, 0.009461759567260742, 0.009409536361694336, 0.009497599601745605, 0.009404416084289552, 0.009461759567260742, 0.009457663536071777, 0.009461759567260742, 0.00941260814666748, 0.00953446388244629, 0.00941158390045166, 0.009790464401245117, 0.009472000122070312, 0.009160703659057617, 0.009144319534301757, 0.009141247749328613, 0.009140224456787109, 0.009078783988952637, 0.009160703659057617]",tokens/s,107.72923549037245,,,2,64,1,,, -4bit-awq-exllama-v2-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,meta-llama/Meta-Llama-3-70B,meta-llama/Meta-Llama-3-70B,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - self.load_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3904, in from_pretrained - dispatch_model(model, **device_map_kwargs) - File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 489, in 
dispatch_model - model.to(device) - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 2796, in to - return super().to(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1173, in to - return self._apply(convert) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 779, in _apply - module._apply(fn) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 779, in _apply - module._apply(fn) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 779, in _apply - module._apply(fn) - [Previous line repeated 2 more times] - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 853, in _apply - self._buffers[key] = fn(buf) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1159, in convert - return t.to( -torch.cuda.OutOfMemoryError: CUDA out of memory. Tried to allocate 20.00 MiB. GPU - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,2,64,1,,, -4bit-awq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,Qwen/Qwen1.5-14B,Qwen/Qwen1.5-14B,cuda,0,42,,,True,,,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,qwen2,MB,8219.369472,12512.13312,0.0,11882.463232,11315.947008,s,1,13.967072265625,13.967072265625,0.0,13.967072265625,13.967072265625,13.967072265625,13.967072265625,[13.967072265625],,kWh,8.205087463752534e-05,4.4955256441090096e-05,0.00015978457227205922,0.00028679070335067467,,MB,3963.056128,12533.10464,0.0,11884.560384,11070.3104,s,10,2.1150044860839845,0.21150044860839845,0.00036286206384534913,0.21136144256591796,0.21186838073730468,0.2121684585571289,0.21240852081298828,"[0.21246853637695312, 0.21122300720214843, 0.21131575012207032, 0.21134906005859375, 0.21130665588378905, 0.21137382507324218, 0.21119136047363282, 0.2115386199951172, 0.21180169677734376, 0.21143597412109374]",tokens/s,1210.3993238992805,kWh,2.499773182233393e-06,1.369760309320949e-06,1.0662468020707083e-05,1.4532001512261427e-05,tokens/kWh,17616293.239716437,MB,3971.497984,12537.298944,0.0,11886.657536,11070.31296,s,10,25.306419189453127,2.530641918945313,0.0068972049982038465,2.5282071533203125,2.5355282470703124,2.5422317749023438,2.547594597167969,"[2.548935302734375, 2.527375732421875, 2.522360595703125, 2.53403857421875, 2.52880859375, 2.52690087890625, 2.5333759765625, 2.52662060546875, 2.530397216796875, 2.527605712890625]",tokens/s,24.89486937221695,kWh,2.98356480555426e-05,1.6351307654700164e-05,8.495821958689337e-05,0.00013114517529713615,tokens/kWh,480383.66533317487,,s,630,25.304406002044693,0.04016572381276933,0.00038280096529800915,0.040044031143188476,0.040694884872436526,0.04090844230651856,0.041476495208740236,"[0.04085657501220703, 0.040981502532958985, 0.041134078979492186, 0.04072550582885742, 0.04085760116577149, 0.04063129425048828, 0.040997886657714845, 0.04077260971069336, 0.04084531021118164, 0.04064665603637695, 
0.04064460754394531, 0.040632320404052735, 0.04068659210205078, 0.040581119537353515, 0.0407644157409668, 0.0405667839050293, 0.04059033584594727, 0.0405667839050293, 0.040615936279296876, 0.04087807846069336, 0.04089548873901367, 0.041527294158935545, 0.040622081756591794, 0.04048486328125, 0.04063641738891602, 0.04071526336669922, 0.04086067199707031, 0.041605121612548826, 0.041294849395751954, 0.04072243118286133, 0.04070604705810547, 0.040782848358154294, 0.04069580841064453, 0.04067635345458984, 0.039787521362304686, 0.0406824951171875, 0.04016844940185547, 0.04025139236450195, 0.04020326232910156, 0.03993804931640625, 0.039943168640136716, 0.039782398223876955, 0.039897087097167966, 0.04004249572753906, 0.04013875198364258, 0.04084531021118164, 0.040158206939697266, 0.04018175888061523, 0.03989299011230469, 0.040008705139160154, 0.039959552764892575, 0.03992575836181641, 0.0401162223815918, 0.040225791931152347, 0.039876609802246096, 0.03988787078857422, 0.03996876907348633, 0.03966668701171875, 0.03990528106689453, 0.03987148666381836, 0.040013824462890625, 0.04008038330078125, 0.04011110305786133, 0.04072550582885742, 0.040022014617919925, 0.04057190322875977, 0.04013158416748047, 0.03991142272949219, 0.03985612869262695, 0.03995238494873047, 0.03991142272949219, 0.04048179244995117, 0.040185855865478515, 0.039798782348632815, 0.039570430755615234, 0.040048641204833986, 0.03997491073608399, 0.03988991928100586, 0.04013158416748047, 0.040008705139160154, 0.041082878112792966, 0.04050739288330078, 0.040185855865478515, 0.03991142272949219, 0.03996057510375976, 0.03983359909057617, 0.0399441909790039, 0.04006604766845703, 0.0414648323059082, 0.04181094360351562, 0.0408197135925293, 0.03993600082397461, 0.039806976318359374, 0.03987046432495117, 0.039995391845703124, 0.039975936889648435, 0.040114177703857425, 0.04046131134033203, 0.04000460815429688, 0.040371200561523435, 0.04016844940185547, 0.0399554557800293, 0.039949310302734374, 0.04000665664672851, 0.03978649520874023, 0.03992063903808594, 0.039638015747070314, 0.0399554557800293, 0.040010753631591796, 0.03990220642089844, 0.03986943817138672, 0.03990528106689453, 0.03983462524414062, 0.04041318511962891, 0.040150016784667966, 0.04034764862060547, 0.039810047149658204, 0.039977985382080077, 0.039792640686035156, 0.04012748718261719, 0.03991142272949219, 0.040166400909423826, 0.04008345413208008, 0.04029747009277344, 0.03993907165527344, 0.039962623596191404, 0.039943168640136716, 0.04029132843017578, 0.03989606475830078, 0.03994112014770508, 0.0398919677734375, 0.03960319900512695, 0.039806976318359374, 0.04024934387207031, 0.04009369659423828, 0.04067737579345703, 0.04009983825683594, 0.039979007720947264, 0.03978956985473633, 0.03986431884765625, 0.03991142272949219, 0.03973017501831055, 0.039984127044677735, 0.04005887985229492, 0.03983462524414062, 0.04003123092651367, 0.0400076789855957, 0.03998207855224609, 0.03981414413452149, 0.040022014617919925, 0.03990016174316406, 0.03991142272949219, 0.039948287963867186, 0.03993907165527344, 0.039686145782470705, 0.03988787078857422, 0.03998515319824219, 0.03982131195068359, 0.040158206939697266, 0.04005683135986328, 0.04091904067993164, 0.03991961669921875, 0.03996057510375976, 0.040341503143310545, 0.039965694427490234, 0.04027494430541992, 0.04146995162963867, 0.04022476959228516, 0.04004556655883789, 0.04043775939941406, 0.04076339340209961, 0.039787521362304686, 0.03997695922851562, 0.040065025329589846, 0.039984127044677735, 0.039744510650634765, 0.03967795181274414, 
0.04009881591796875, 0.03962060928344727, 0.03952025604248047, 0.03988582229614258, 0.039923713684082034, 0.03989503860473633, 0.04015718460083008, 0.040032257080078126, 0.040441856384277344, 0.04006092834472656, 0.04039372634887695, 0.039771137237548826, 0.040022014617919925, 0.04013772964477539, 0.03991244888305664, 0.039934974670410156, 0.04027699279785156, 0.041918464660644535, 0.04078694534301758, 0.04013158416748047, 0.04012441635131836, 0.042518527984619144, 0.04072652816772461, 0.040397823333740236, 0.039847934722900394, 0.040446975708007815, 0.04021964645385742, 0.04061491012573242, 0.040120319366455076, 0.03989606475830078, 0.039793663024902344, 0.04037529754638672, 0.040016895294189454, 0.04031692886352539, 0.040981502532958985, 0.040210430145263674, 0.040139774322509765, 0.04035276794433594, 0.03952947235107422, 0.039897087097167966, 0.03964518356323242, 0.03960115051269531, 0.040817665100097655, 0.04025958251953125, 0.04028108978271484, 0.040342529296875, 0.03995443344116211, 0.04043775939941406, 0.04003635025024414, 0.040118270874023435, 0.040542209625244144, 0.0399738883972168, 0.040978431701660156, 0.04015513610839844, 0.04012851333618164, 0.03995238494873047, 0.039975936889648435, 0.03986943817138672, 0.040342529296875, 0.03994214248657227, 0.039890945434570314, 0.03998720169067383, 0.03995852661132813, 0.03990323257446289, 0.041442302703857424, 0.040389633178710936, 0.03992166519165039, 0.04009369659423828, 0.03981824111938476, 0.0400261116027832, 0.03978035354614258, 0.03991244888305664, 0.040049663543701174, 0.03988275146484375, 0.03978342437744141, 0.04005887985229492, 0.040048641204833986, 0.03994112014770508, 0.04147916793823242, 0.040525825500488284, 0.040363006591796875, 0.03994009780883789, 0.040166400909423826, 0.04002816009521484, 0.04002406311035156, 0.03969843292236328, 0.039689216613769535, 0.04006911849975586, 0.03987251281738281, 0.04032716751098633, 0.03990323257446289, 0.040103935241699216, 0.0412149772644043, 0.03998003387451172, 0.040455169677734375, 0.03991142272949219, 0.0399554557800293, 0.04062003326416016, 0.04000665664672851, 0.04012748718261719, 0.04020121765136719, 0.039951358795166016, 0.03978854370117187, 0.03994112014770508, 0.040118270874023435, 0.039992321014404295, 0.03993907165527344, 0.04014591979980469, 0.039979007720947264, 0.0399288330078125, 0.04003635025024414, 0.040089599609375, 0.040068096160888675, 0.03998207855224609, 0.040134654998779294, 0.040062976837158204, 0.03977011108398437, 0.039943168640136716, 0.03993190383911133, 0.03987558364868164, 0.04038451385498047, 0.04005478286743164, 0.040542209625244144, 0.04075929641723633, 0.04016128158569336, 0.040253440856933595, 0.04014591979980469, 0.04049100875854492, 0.04044083023071289, 0.03972403335571289, 0.0399370231628418, 0.03997491073608399, 0.04004556655883789, 0.03988991928100586, 0.039858177185058595, 0.04009062576293945, 0.04052787017822266, 0.040924160003662106, 0.03998310470581055, 0.040687614440917966, 0.04012441635131836, 0.04056063842773437, 0.04014080047607422, 0.040187904357910156, 0.04004761505126953, 0.03994112014770508, 0.040130561828613284, 0.040027137756347655, 0.04054323196411133, 0.04000358581542969, 0.03993395233154297, 0.03995340728759766, 0.03984998321533203, 0.03998310470581055, 0.040033279418945314, 0.04005580902099609, 0.03999846267700195, 0.040084480285644535, 0.04072652816772461, 0.04005068969726563, 0.03986431884765625, 0.039995391845703124, 0.03989913558959961, 0.03994521713256836, 0.04042956924438477, 0.03987558364868164, 0.040379390716552735, 
0.04011520004272461, 0.039923713684082034, 0.040379390716552735, 0.04072857666015625, 0.040342529296875, 0.04015411376953125, 0.04014694213867188, 0.04028313446044922, 0.04005887985229492, 0.03997081756591797, 0.0399370231628418, 0.04005887985229492, 0.04014694213867188, 0.03991449737548828, 0.04007731246948242, 0.039949310302734374, 0.04012236785888672, 0.04043366241455078, 0.04055859375, 0.04007321548461914, 0.03996364974975586, 0.03986636734008789, 0.04045004653930664, 0.04010598373413086, 0.04008857727050781, 0.03994112014770508, 0.03988172912597656, 0.039766014099121096, 0.03993088150024414, 0.039967742919921875, 0.039965694427490234, 0.04012543869018555, 0.039951358795166016, 0.0398837776184082, 0.04010905456542969, 0.03991961669921875, 0.04007321548461914, 0.0399288330078125, 0.040062976837158204, 0.0399288330078125, 0.03978342437744141, 0.03974246215820312, 0.04077875137329102, 0.04007628631591797, 0.04051660919189453, 0.039962623596191404, 0.03994009780883789, 0.04012134552001953, 0.039897087097167966, 0.039965694427490234, 0.03996160125732422, 0.04026572799682617, 0.040025089263916014, 0.040052734375, 0.039876609802246096, 0.04020019149780273, 0.0400373764038086, 0.040052734375, 0.04009676742553711, 0.040736766815185545, 0.040188926696777344, 0.04015206527709961, 0.03980799865722656, 0.0400261116027832, 0.0397916145324707, 0.04006911849975586, 0.04002099227905274, 0.040196094512939456, 0.04005580902099609, 0.04081459045410156, 0.0400076789855957, 0.041046016693115236, 0.04009369659423828, 0.03983052825927735, 0.04132352066040039, 0.04060160064697266, 0.04014284896850586, 0.039907329559326174, 0.04002816009521484, 0.03994521713256836, 0.04012543869018555, 0.04003839874267578, 0.04095180892944336, 0.04179251098632813, 0.04005376052856445, 0.03998515319824219, 0.039779327392578126, 0.03986227035522461, 0.04121395111083984, 0.04132454299926758, 0.04056268692016601, 0.04014182281494141, 0.04066304016113281, 0.04031488037109375, 0.040387584686279294, 0.03993395233154297, 0.03988275146484375, 0.040325119018554685, 0.04013363265991211, 0.039853057861328124, 0.03990835189819336, 0.04007833480834961, 0.03998207855224609, 0.04035686492919922, 0.04009062576293945, 0.039907329559326174, 0.04030976104736328, 0.04008243179321289, 0.039890945434570314, 0.04001484680175781, 0.04005478286743164, 0.04024524688720703, 0.04009676742553711, 0.040041473388671874, 0.04134400177001953, 0.040048641204833986, 0.039926784515380856, 0.03997491073608399, 0.04038655853271484, 0.04071526336669922, 0.04009164810180664, 0.0400711669921875, 0.03996876907348633, 0.039689216613769535, 0.03985612869262695, 0.03992166519165039, 0.04005376052856445, 0.039793663024902344, 0.03991756820678711, 0.03986227035522461, 0.03979776000976563, 0.03987558364868164, 0.03993804931640625, 0.039994369506835936, 0.040025089263916014, 0.03993907165527344, 0.04031590270996094, 0.04003430557250977, 0.04048793411254883, 0.039994369506835936, 0.03993190383911133, 0.039997440338134765, 0.04022272109985352, 0.0398919677734375, 0.04094668960571289, 0.041306110382080076, 0.040286209106445314, 0.03991449737548828, 0.03995750427246094, 0.03998822402954102, 0.04017049789428711, 0.04020633697509766, 0.03988787078857422, 0.04001279830932617, 0.04002918243408203, 0.0402872314453125, 0.040130561828613284, 0.04002304077148437, 0.03990937423706055, 0.039927806854248044, 0.040151039123535154, 0.04001587295532227, 0.03987353515625, 0.04015923309326172, 0.04005785751342773, 0.04056576156616211, 0.039894016265869144, 0.03999846267700195, 0.040268798828125, 
0.040030208587646485, 0.04000665664672851, 0.03997491073608399, 0.040210430145263674, 0.04016332626342774, 0.03991551971435547, 0.04014899063110351, 0.04052684783935547, 0.03981619262695312, 0.039995391845703124, 0.03992473602294922, 0.039977985382080077, 0.04069478225708008, 0.040081409454345705, 0.04002406311035156, 0.03996160125732422, 0.04007321548461914, 0.0401080322265625, 0.03981619262695312, 0.04046540832519531, 0.040359935760498046, 0.04083302307128906, 0.04005683135986328, 0.04011110305786133, 0.04002816009521484, 0.039947265625, 0.03997081756591797, 0.04016844940185547, 0.04088729476928711, 0.04143206405639648, 0.04048384094238281, 0.040842239379882815, 0.04020121765136719, 0.040035327911376956, 0.040022014617919925, 0.03995651245117188, 0.03995030212402344, 0.04053504180908203, 0.04037222290039062, 0.040390655517578124, 0.03995750427246094, 0.03994521713256836, 0.03990118408203125, 0.039831550598144534, 0.04009983825683594, 0.04053401565551758, 0.04002099227905274, 0.03993088150024414, 0.040051712036132815, 0.040008705139160154, 0.03989606475830078, 0.04017356872558594, 0.039948287963867186, 0.04040703964233398, 0.04005683135986328, 0.0399738883972168, 0.04035891342163086, 0.04024524688720703, 0.04038348770141602, 0.040062976837158204, 0.03992268753051758, 0.040052734375, 0.04068556976318359, 0.03995750427246094, 0.03992166519165039, 0.04033638381958008, 0.04016128158569336, 0.03996672058105469, 0.040390655517578124, 0.03986227035522461, 0.04127948760986328, 0.041166847229003906, 0.04011929702758789, 0.03985715103149414, 0.040032257080078126, 0.040235008239746094, 0.0405401611328125, 0.04023807907104492, 0.039964672088623046, 0.04011724853515625, 0.04005683135986328, 0.040551422119140625, 0.039932926177978514, 0.04055859375, 0.0401080322265625, 0.03998720169067383, 0.03996160125732422, 0.040002559661865236, 0.03997491073608399, 0.041393150329589845, 0.040097793579101565, 0.04007731246948242, 0.04013875198364258, 0.04001792144775391, 0.03986227035522461, 0.03998207855224609, 0.03987251281738281, 0.03990835189819336, 0.04001792144775391, 0.040027137756347655, 0.039790592193603515, 0.03992268753051758, 0.039861248016357424, 0.0396943359375, 0.03971379089355469, 0.03998720169067383, 0.04021145629882812, 0.03992473602294922, 0.04021964645385742, 0.040025089263916014, 0.039997440338134765, 0.04007833480834961, 0.03992473602294922, 0.03986431884765625, 0.03986636734008789, 0.03993600082397461, 0.03996979141235352]",tokens/s,24.896849977395,,,2,64,1,,, -4bit-awq-exllama-v2-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,mistralai/Mistral-7B-v0.1,mistralai/Mistral-7B-v0.1,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File 
""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - self.load_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3907, in from_pretrained - hf_quantizer.postprocess_model(model) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/base.py"", line 195, in postprocess_model - return self._process_model_after_weight_loading(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/quantizer_awq.py"", line 107, in _process_model_after_weight_loading - model = post_init_awq_exllama_modules(model, self.quantization_config.exllama_config) - File ""/usr/local/lib/python3.10/dist-packages/transformers/integrations/awq.py"", line 466, in post_init_awq_exllama_modules - model = exllamav2_post_init( - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllamav2.py"", line 198, in exllamav2_post_init - submodule.post_init(scratch_space=model.scratch_spaces[device]) - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllamav2.py"", line 81, in post_init - self.q_handle = exlv2_ext.make_q_matrix( -NameError: name 'exlv2_ext' is not defined - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,2,64,1,,, -4bit-awq-exllama-v2-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,google/gemma-7b,google/gemma-7b,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", 
line 304, in hf_raise_for_status - response.raise_for_status() - File ""/usr/local/lib/python3.10/dist-packages/requests/models.py"", line 1024, in raise_for_status - raise HTTPError(http_error_msg, response=self) -requests.exceptions.HTTPError: 403 Client Error: Forbidden for url: https://huggingface.co/google/gemma-7b/resolve/main/config.json - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1722, in _get_metadata_or_catch_error - metadata = get_hf_file_metadata(url=url, proxies=proxies, timeout=etag_timeout, headers=headers) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1645, in get_hf_file_metadata - r = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 372, in _request_wrapper - response = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 396, in _request_wrapper - hf_raise_for_status(response) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 367, in hf_raise_for_status - raise HfHubHTTPError(message, response=response) from e -huggingface_hub.utils._errors.HfHubHTTPError: (Request ID: Root=1-669481a0-1d7bbc244040e98c2208e2f4;457b3e47-21c5-496b-8048-d6b55a7ec029) - -403 Forbidden: Please enable access to public gated repositories in your fine-grained token settings to view this repository.. -Cannot access content at: https://huggingface.co/google/gemma-7b/resolve/main/config.json. -If you are trying to create or update content,make sure you have a token with the `write` role. - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1221, in hf_hub_download - return _hf_hub_download_to_cache_dir( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1325, in _hf_hub_download_to_cache_dir - _raise_on_head_call_error(head_call_error, force_download, local_files_only) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1826, in _raise_on_head_call_error - raise LocalEntryNotFoundError( -huggingface_hub.utils._errors.LocalEntryNotFoundError: An error happened while trying to locate the file on the Hub and we cannot find the requested files in the local cache. Please check your connection and try again or make sure your Internet connection is on. 
- -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 445, in cached_file - raise EnvironmentError( -OSError: We couldn't connect to 'https://huggingface.co' to load this file, couldn't find it in the cached files and it looks like google/gemma-7b is not the path to a directory containing a file named config.json. -Checkout your internet connection or see how to run the library in offline mode at 'https://huggingface.co/docs/transformers/installation#offline-mode'. 
- -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,2,64,1,,, -4bit-awq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,Qwen/Qwen1.5-1.8B,Qwen/Qwen1.5-1.8B,cuda,0,42,,,True,,,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,qwen2,MB,2000.646144,3121.086464,0.0,2491.416576,2425.650176,s,1,9.38471484375,9.38471484375,0.0,9.38471484375,9.38471484375,9.38471484375,9.38471484375,[9.38471484375],,kWh,3.012924118261506e-05,1.6497200894123997e-05,4.1639477755928134e-05,8.82659198326672e-05,,MB,1898.72128,3196.583936,0.0,2705.32608,2606.129664,s,10,0.382892578125,0.0382892578125,0.0002077691502666906,0.038202367782592776,0.03865893898010254,0.03868914966583252,0.03871331821441651,"[0.0387193603515625, 0.03812963104248047, 0.038207134246826174, 0.03819760131835938, 0.03834262466430664, 0.03865222549438477, 0.03811196899414063, 0.038158687591552734, 0.038155487060546875, 0.038217857360839845]",tokens/s,6685.9483475395455,kWh,4.521805524199566e-07,2.47773014316977e-07,1.4785455000535748e-06,2.1784990667905085e-06,tokens/kWh,117512099.91434795,MB,1909.22752,3355.967488,0.0,2705.32608,2606.132224,s,10,14.671370361328126,1.4671370361328127,0.003919923556431551,1.4677645263671875,1.4708952880859374,1.4723928344726562,1.4735908715820314,"[1.4613653564453124, 1.461380859375, 1.46822509765625, 1.4704552001953124, 1.467303955078125, 1.473890380859375, 1.46316748046875, 1.4705625, 1.468407958984375, 1.466611572265625]",tokens/s,42.9407740711529,kWh,1.7713630636329956e-05,9.707017483274267e-06,3.184464782514293e-05,5.926529594474717e-05,tokens/kWh,1063016.7114787493,,s,630,14.669600765228271,0.023285080579727415,0.0003287335046253534,0.023195648193359376,0.023669862174987795,0.02380718116760254,0.02455831590652466,"[0.02309939193725586, 0.022938623428344726, 0.02285670471191406, 0.023150592803955077, 0.023200767517089844, 0.023161855697631836, 0.023196672439575194, 0.023138303756713868, 0.02310758399963379, 0.023167999267578124, 0.023061504364013673, 0.02271129608154297, 0.023160831451416015, 0.023625728607177734, 0.023220224380493162, 0.023137279510498047, 0.02304921531677246, 0.023082048416137695, 0.023159744262695313, 0.023040000915527343, 0.023154687881469727, 0.023119871139526366, 0.02305023956298828, 0.02349056053161621, 0.02308198356628418, 0.02311065673828125, 0.023544832229614256, 0.023619583129882812, 0.02325606346130371, 0.02282803153991699, 0.023183359146118163, 0.023159807205200195, 0.02314854431152344, 0.024226816177368164, 0.026037248611450195, 0.024159231185913087, 0.023571456909179687, 0.023180288314819338, 0.023179264068603517, 0.023206911087036132, 0.023618560791015625, 0.023196672439575194, 0.02308095932006836, 0.022635520935058592, 0.02306559944152832, 0.02308608055114746, 0.023044095993041993, 0.022889471054077147, 0.02314854431152344, 0.02289356803894043, 0.023030784606933592, 0.023000064849853515, 0.0228853759765625, 0.022816768646240236, 0.02286796760559082, 0.022845439910888672, 0.023182336807250976, 0.023129087448120117, 
0.023069696426391603, 0.022996992111206056, 0.022970367431640625, 0.023192575454711914, 0.022965248107910157, 0.0230328311920166, 0.023142400741577147, 0.023136255264282226, 0.023670783996582033, 0.023212032318115236, 0.023129087448120117, 0.022915071487426757, 0.022976512908935546, 0.02311065673828125, 0.02307276725769043, 0.023031808853149413, 0.023145471572875977, 0.023152639389038086, 0.023431167602539063, 0.024577024459838868, 0.02346700859069824, 0.023228416442871092, 0.023120895385742187, 0.023014400482177736, 0.02311680030822754, 0.023112703323364257, 0.023151615142822265, 0.022986751556396484, 0.023176191329956054, 0.023198720932006835, 0.02293452835083008, 0.0230645751953125, 0.023153663635253906, 0.022932479858398438, 0.022983680725097655, 0.023176191329956054, 0.023141376495361327, 0.023130111694335938, 0.023051263809204102, 0.023249919891357423, 0.023211008071899415, 0.023219200134277345, 0.02292736053466797, 0.02289151954650879, 0.02282700729370117, 0.02285158348083496, 0.02306662368774414, 0.023186431884765626, 0.023229440689086913, 0.023045120239257814, 0.023550975799560548, 0.023203840255737306, 0.023151615142822265, 0.023243776321411135, 0.023193599700927735, 0.02329190444946289, 0.023397375106811523, 0.023017471313476562, 0.022944768905639647, 0.02282803153991699, 0.02374963188171387, 0.023572479248046875, 0.02332262420654297, 0.023798784255981444, 0.023627775192260742, 0.023178239822387696, 0.023290880203247072, 0.02326118469238281, 0.02325823974609375, 0.023049087524414064, 0.023035903930664063, 0.022932479858398438, 0.02308710479736328, 0.02406707191467285, 0.025333759307861328, 0.024186880111694335, 0.024062976837158204, 0.023787519454956055, 0.023242752075195314, 0.02346700859069824, 0.023160831451416015, 0.023549951553344727, 0.023218175888061524, 0.023184383392333984, 0.023179264068603517, 0.023145471572875977, 0.023282688140869142, 0.023374847412109375, 0.023204864501953124, 0.023230464935302734, 0.023214080810546874, 0.023545856475830077, 0.02309939193725586, 0.023357440948486328, 0.023302143096923827, 0.02372198486328125, 0.02326835250854492, 0.023111679077148437, 0.02304819107055664, 0.023340032577514647, 0.023342079162597656, 0.02352025604248047, 0.023164928436279295, 0.02307788848876953, 0.02307891273498535, 0.023213056564331053, 0.02387353515625, 0.023401472091674806, 0.023195648193359376, 0.023235584259033205, 0.02319977569580078, 0.02316796875, 0.023136255264282226, 0.023149568557739256, 0.023126016616821288, 0.02286591911315918, 0.023166976928710937, 0.02370560073852539, 0.022953983306884765, 0.022914047241210937, 0.023185407638549805, 0.02290176010131836, 0.022915071487426757, 0.02305023956298828, 0.023334911346435547, 0.023267328262329103, 0.023194623947143556, 0.023164928436279295, 0.023178239822387696, 0.023219200134277345, 0.02309939193725586, 0.023221248626708983, 0.023573503494262696, 0.023212032318115236, 0.023282688140869142, 0.023133184432983397, 0.023171072006225587, 0.023218175888061524, 0.023201791763305665, 0.02323967933654785, 0.023258111953735353, 0.023186431884765626, 0.02346291160583496, 0.02388483238220215, 0.023220191955566405, 0.023187456130981447, 0.023517183303833008, 0.023232511520385742, 0.0234967041015625, 0.023779327392578126, 0.02332979202270508, 0.023214080810546874, 0.023184383392333984, 0.023044095993041993, 0.023161855697631836, 0.025230335235595702, 0.024959999084472655, 0.024155136108398437, 0.02350592041015625, 0.023202816009521485, 0.023029760360717775, 0.02290278434753418, 0.023180288314819338, 0.023167999267578124, 
0.023000064849853515, 0.023183359146118163, 0.02355200004577637, 0.023152639389038086, 0.023187456130981447, 0.02323967933654785, 0.022986751556396484, 0.023222272872924804, 0.02371686363220215, 0.02329190444946289, 0.023195648193359376, 0.02313523292541504, 0.023554048538208007, 0.023217151641845703, 0.023198720932006835, 0.023459840774536132, 0.02287718391418457, 0.023145471572875977, 0.0230328311920166, 0.023344127655029297, 0.023173120498657225, 0.02371174430847168, 0.02328780746459961, 0.022916095733642578, 0.02308198356628418, 0.02312499237060547, 0.023159807205200195, 0.023166976928710937, 0.023180288314819338, 0.024017919540405275, 0.023195648193359376, 0.023160831451416015, 0.023176191329956054, 0.02330419158935547, 0.02325196838378906, 0.023121919631958008, 0.02424934387207031, 0.023165952682495116, 0.02312294387817383, 0.022853631973266602, 0.0230328311920166, 0.022980607986450196, 0.023176191329956054, 0.02327347183227539, 0.023196672439575194, 0.023145471572875977, 0.023143423080444335, 0.023274496078491212, 0.022906879425048828, 0.023159807205200195, 0.023180288314819338, 0.023160831451416015, 0.022779903411865234, 0.023532543182373047, 0.023222272872924804, 0.023187456130981447, 0.023197696685791015, 0.02329190444946289, 0.023216127395629883, 0.02368511962890625, 0.023613439559936524, 0.023442432403564452, 0.023197696685791015, 0.02409574317932129, 0.02326016044616699, 0.0232857608795166, 0.0232325439453125, 0.023224288940429688, 0.02328678321838379, 0.023274496078491212, 0.023326719284057617, 0.023573503494262696, 0.023973888397216796, 0.023159807205200195, 0.023179264068603517, 0.023145471572875977, 0.02310553550720215, 0.023201791763305665, 0.02329497528076172, 0.023177215576171875, 0.02306252861022949, 0.023242752075195314, 0.023416831970214845, 0.02294988822937012, 0.02290380859375, 0.02329804801940918, 0.023209983825683594, 0.023282688140869142, 0.023565311431884766, 0.023459840774536132, 0.0237260799407959, 0.023803903579711915, 0.023804927825927736, 0.023770111083984375, 0.02409267234802246, 0.023549951553344727, 0.02372915267944336, 0.023769088745117187, 0.023777280807495117, 0.023604223251342774, 0.02368000030517578, 0.023571456909179687, 0.023661567687988282, 0.02353152084350586, 0.02330419158935547, 0.02370150375366211, 0.023841791152954102, 0.023585792541503905, 0.02330521583557129, 0.023541759490966797, 0.023646207809448243, 0.023608320236206053, 0.023646207809448243, 0.023504896163940428, 0.0241213436126709, 0.023590911865234376, 0.023629823684692384, 0.0236810245513916, 0.024213504791259766, 0.023771135330200196, 0.023398399353027344, 0.02350694465637207, 0.023365631103515624, 0.02314035224914551, 0.023079935073852538, 0.02305536079406738, 0.022982656478881838, 0.023175167083740233, 0.023144447326660156, 0.023184383392333984, 0.023165952682495116, 0.023133184432983397, 0.023153663635253906, 0.023208959579467774, 0.02302566337585449, 0.023159807205200195, 0.02335436820983887, 0.023137279510498047, 0.023206911087036132, 0.02348134422302246, 0.023096319198608398, 0.023111679077148437, 0.023183359146118163, 0.023150592803955077, 0.023206911087036132, 0.023153663635253906, 0.023352319717407227, 0.02345881652832031, 0.023191551208496093, 0.022972415924072266, 0.023069696426391603, 0.02285260772705078, 0.02313216018676758, 0.023004159927368165, 0.023172096252441408, 0.023113727569580078, 0.023432191848754884, 0.023859199523925782, 0.023426048278808592, 0.02328473663330078, 0.023226367950439454, 0.023169023513793945, 0.023179264068603517, 0.023152639389038086, 
0.023616512298583983, 0.023235584259033205, 0.02315673637390137, 0.023053312301635744, 0.023130111694335938, 0.023367679595947266, 0.023142400741577147, 0.023147520065307618, 0.023031808853149413, 0.023069696426391603, 0.023207935333251953, 0.023262208938598632, 0.023174144744873046, 0.023167999267578124, 0.02309017562866211, 0.023584768295288085, 0.02323865509033203, 0.023034879684448242, 0.023002111434936523, 0.022991872787475585, 0.02311577606201172, 0.023355392456054686, 0.02369843292236328, 0.023213056564331053, 0.023809024810791016, 0.02324787139892578, 0.023171072006225587, 0.023166976928710937, 0.02309734344482422, 0.023143423080444335, 0.023179264068603517, 0.02310041618347168, 0.02312294387817383, 0.023187456130981447, 0.023191551208496093, 0.023056447982788084, 0.023084991455078124, 0.023633920669555664, 0.023427072525024413, 0.023169023513793945, 0.023126016616821288, 0.02291814422607422, 0.023017471313476562, 0.023195648193359376, 0.023159807205200195, 0.02312704086303711, 0.023008256912231444, 0.023323648452758788, 0.02330624008178711, 0.023227392196655275, 0.02326937675476074, 0.023167999267578124, 0.023144447326660156, 0.023172096252441408, 0.02322329521179199, 0.023155712127685548, 0.02352332878112793, 0.023222272872924804, 0.023234560012817384, 0.023169023513793945, 0.023227392196655275, 0.02311577606201172, 0.02292531204223633, 0.023183359146118163, 0.023165952682495116, 0.024239103317260743, 0.02489139175415039, 0.024020992279052734, 0.023763967514038087, 0.023529472351074218, 0.023160831451416015, 0.023096319198608398, 0.02288844871520996, 0.02313523292541504, 0.023187456130981447, 0.023158784866333007, 0.023218175888061524, 0.023343103408813477, 0.023254016876220703, 0.023347200393676756, 0.023824384689331055, 0.023435264587402343, 0.023184383392333984, 0.023202816009521485, 0.023164928436279295, 0.023248895645141602, 0.02367692756652832, 0.02352332878112793, 0.02307481575012207, 0.023241727828979493, 0.02313216018676758, 0.02310655975341797, 0.02309222412109375, 0.023190528869628906, 0.023243776321411135, 0.02331648063659668, 0.023204864501953124, 0.02328985595703125, 0.023463935852050782, 0.023088127136230468, 0.02346700859069824, 0.02325196838378906, 0.023230464935302734, 0.023150592803955077, 0.023187456130981447, 0.023353343963623048, 0.023388160705566406, 0.02343731117248535, 0.02355200004577637, 0.023172096252441408, 0.023625728607177734, 0.02451251220703125, 0.023218175888061524, 0.023568384170532225, 0.02311065673828125, 0.023159807205200195, 0.022947839736938477, 0.023193599700927735, 0.02328473663330078, 0.023145471572875977, 0.02289459228515625, 0.023339008331298827, 0.02371686363220215, 0.02370867156982422, 0.023764991760253908, 0.02312396812438965, 0.02284339141845703, 0.023212032318115236, 0.02306662368774414, 0.022957056045532227, 0.02348646354675293, 0.023202816009521485, 0.023178239822387696, 0.023203840255737306, 0.02310860824584961, 0.023192575454711914, 0.023182336807250976, 0.023214080810546874, 0.023130111694335938, 0.02330931282043457, 0.022979583740234375, 0.023212032318115236, 0.023734272003173826, 0.023230464935302734, 0.023082048416137695, 0.02318227195739746, 0.02314035224914551, 0.023178239822387696, 0.023206911087036132, 0.02330316734313965, 0.023476224899291992, 0.023155712127685548, 0.023257087707519532, 0.023035903930664063, 0.02312499237060547, 0.023172096252441408, 0.023379968643188476, 0.023576576232910155, 0.023257087707519532, 0.023241727828979493, 0.02312499237060547, 0.023185407638549805, 0.023104511260986327, 
0.023224319458007812, 0.023669759750366212, 0.02472243118286133, 0.024225791931152343, 0.023785472869873047, 0.023537664413452147, 0.023186431884765626, 0.023282688140869142, 0.023181312561035155, 0.02313216018676758, 0.024041471481323243, 0.023245824813842773, 0.023237632751464843, 0.023170047760009766, 0.023117824554443358, 0.023201791763305665, 0.02328985595703125, 0.023373823165893554, 0.023556095123291015, 0.023190528869628906, 0.023227392196655275, 0.02327039909362793, 0.023201791763305665, 0.023060480117797853, 0.023104511260986327, 0.023241727828979493, 0.023225343704223633, 0.023403520584106444, 0.02349567985534668, 0.02310553550720215, 0.023290943145751954, 0.02343212890625, 0.023237632751464843, 0.0231014404296875, 0.023226367950439454, 0.02329190444946289, 0.022981632232666017, 0.02329702377319336, 0.023177215576171875, 0.023154687881469727, 0.023085056304931642, 0.02324684715270996, 0.023145471572875977, 0.02333695983886719, 0.023282688140869142, 0.0232857608795166, 0.023776256561279296, 0.023773183822631837, 0.023113727569580078, 0.023302143096923827, 0.023186431884765626, 0.023598079681396485, 0.023388160705566406, 0.023318527221679687, 0.023196672439575194, 0.023361536026000978, 0.02315673637390137, 0.02327244758605957, 0.02327654457092285, 0.023230464935302734, 0.023197696685791015, 0.023557119369506836, 0.023126016616821288, 0.023189504623413085, 0.023177215576171875, 0.023143423080444335, 0.02310348892211914, 0.023190528869628906, 0.023195648193359376, 0.023214080810546874, 0.023164928436279295, 0.023480319976806642, 0.023221248626708983, 0.02415001678466797, 0.023375871658325196, 0.02330419158935547, 0.02313523292541504, 0.02307481575012207, 0.023235584259033205, 0.023347200393676756, 0.023243776321411135]",tokens/s,42.94595402305052,,,2,64,1,,, -4bit-awq-exllama-v2-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,meta-llama/Meta-Llama-3-8B,meta-llama/Meta-Llama-3-8B,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - 
self.load_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3907, in from_pretrained - hf_quantizer.postprocess_model(model) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/base.py"", line 195, in postprocess_model - return self._process_model_after_weight_loading(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/quantizer_awq.py"", line 107, in _process_model_after_weight_loading - model = post_init_awq_exllama_modules(model, self.quantization_config.exllama_config) - File ""/usr/local/lib/python3.10/dist-packages/transformers/integrations/awq.py"", line 466, in post_init_awq_exllama_modules - model = exllamav2_post_init( - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllamav2.py"", line 198, in exllamav2_post_init - submodule.post_init(scratch_space=model.scratch_spaces[device]) - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllamav2.py"", line 81, in post_init - self.q_handle = exlv2_ext.make_q_matrix( -NameError: name 'exlv2_ext' is not defined - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,2,64,1,,, -4bit-awq-exllama-v2-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,microsoft/phi-1_5,microsoft/phi-1_5,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - self.load_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3907, in from_pretrained - hf_quantizer.postprocess_model(model) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/base.py"", line 195, in postprocess_model - return self._process_model_after_weight_loading(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/quantizer_awq.py"", line 107, in _process_model_after_weight_loading - model = post_init_awq_exllama_modules(model, self.quantization_config.exllama_config) - File ""/usr/local/lib/python3.10/dist-packages/transformers/integrations/awq.py"", line 466, in post_init_awq_exllama_modules - model = exllamav2_post_init( - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllamav2.py"", line 198, in exllamav2_post_init - submodule.post_init(scratch_space=model.scratch_spaces[device]) - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllamav2.py"", line 81, in post_init - self.q_handle = exlv2_ext.make_q_matrix( -NameError: name 'exlv2_ext' is not defined - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,2,64,1,,, -4bit-awq-exllama-v2-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,togethercomputer/RedPajama-INCITE-Base-3B-v1,togethercomputer/RedPajama-INCITE-Base-3B-v1,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - self.load_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File 
""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3907, in from_pretrained - hf_quantizer.postprocess_model(model) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/base.py"", line 195, in postprocess_model - return self._process_model_after_weight_loading(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/quantizer_awq.py"", line 107, in _process_model_after_weight_loading - model = post_init_awq_exllama_modules(model, self.quantization_config.exllama_config) - File ""/usr/local/lib/python3.10/dist-packages/transformers/integrations/awq.py"", line 466, in post_init_awq_exllama_modules - model = exllamav2_post_init( - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllamav2.py"", line 198, in exllamav2_post_init - submodule.post_init(scratch_space=model.scratch_spaces[device]) - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllamav2.py"", line 81, in post_init - self.q_handle = exlv2_ext.make_q_matrix( -NameError: name 'exlv2_ext' is not defined - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,2,64,1,,, -4bit-awq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,Qwen/Qwen2-beta-72B,Qwen/Qwen2-beta-72B,cuda,0,42,,,True,,,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run - report = scenario.run(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run - self.run_model_loading_tracking(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking - backend.load() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load - self.load_transformers_model() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model - self.load_transformers_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights - self.load_transformers_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained - self.pretrained_model = self.automodel_loader.from_pretrained( - File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4034, in from_pretrained - dispatch_model(model, **device_map_kwargs) - File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 494, in dispatch_model - model.to(device) - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 2905, in to - return super().to(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1174, in to - return self._apply(convert) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply - module._apply(fn) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply - module._apply(fn) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply - module._apply(fn) - [Previous line repeated 2 more times] - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 854, in _apply - self._buffers[key] = fn(buf) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1160, in convert - return t.to( -torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 96.00 MiB. GPU 0 has a total capacity of 22.18 GiB of which 68.50 MiB is free. Process 129307 has 22.11 GiB memory in use. Of the allocated memory 21.86 GiB is allocated by PyTorch, and 10.74 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) - -",qwen2,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,2,64,1,,, -4bit-awq-exllama-v2-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,meta-llama/Llama-2-70b-hf,meta-llama/Llama-2-70b-hf,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 
82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - self.load_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3904, in from_pretrained - dispatch_model(model, **device_map_kwargs) - File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 489, in dispatch_model - model.to(device) - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 2796, in to - return super().to(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1173, in to - return self._apply(convert) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 779, in _apply - module._apply(fn) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 779, in _apply - module._apply(fn) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 779, in _apply - module._apply(fn) - [Previous line repeated 2 more times] - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 853, in _apply - self._buffers[key] = fn(buf) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1159, in convert - return t.to( -torch.cuda.OutOfMemoryError: CUDA out of memory. Tried to allocate 112.00 MiB. 
GPU - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,2,64,1,,, -4bit-awq-exllama-v2-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,meta-llama/Llama-2-7b-hf,meta-llama/Llama-2-7b-hf,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - self.load_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3907, in from_pretrained - hf_quantizer.postprocess_model(model) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/base.py"", line 195, in postprocess_model - return self._process_model_after_weight_loading(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/quantizer_awq.py"", line 107, in _process_model_after_weight_loading - model = post_init_awq_exllama_modules(model, self.quantization_config.exllama_config) - File ""/usr/local/lib/python3.10/dist-packages/transformers/integrations/awq.py"", line 466, in post_init_awq_exllama_modules - model = exllamav2_post_init( - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllamav2.py"", line 198, in exllamav2_post_init - submodule.post_init(scratch_space=model.scratch_spaces[device]) - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllamav2.py"", line 81, in post_init - self.q_handle = exlv2_ext.make_q_matrix( -NameError: name 'exlv2_ext' is not defined - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,2,64,1,,, 
-4bit-awq-exllama-v2-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,r,r,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 304, in hf_raise_for_status - response.raise_for_status() - File ""/usr/local/lib/python3.10/dist-packages/requests/models.py"", line 1024, in raise_for_status - raise HTTPError(http_error_msg, response=self) -requests.exceptions.HTTPError: 404 Client Error: Not Found for url: https://huggingface.co/r/resolve/main/config.json - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1221, in hf_hub_download - return _hf_hub_download_to_cache_dir( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1325, in _hf_hub_download_to_cache_dir - _raise_on_head_call_error(head_call_error, force_download, local_files_only) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1823, in _raise_on_head_call_error - raise head_call_error - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1722, in _get_metadata_or_catch_error - metadata = get_hf_file_metadata(url=url, proxies=proxies, timeout=etag_timeout, headers=headers) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1645, in get_hf_file_metadata - r = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 372, in _request_wrapper - response = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 396, in _request_wrapper - hf_raise_for_status(response) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status - raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 
Client Error. (Request ID: Root=1-66949140-6ae044bc7df6be9e12190c3b;383dee7a-6c62-4ae2-99a1-7878e5cfbe54) - -Repository Not Found for url: https://huggingface.co/r/resolve/main/config.json. -Please make sure you specified the correct `repo_id` and `repo_type`. -If you are trying to access a private or gated repo, make sure you are authenticated. - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 425, in cached_file - raise EnvironmentError( -OSError: r is not a local folder and is not a valid model identifier listed on 'https://huggingface.co/models' -If this is a private repository, make sure to pass a token having permission to this repo either by logging in with `huggingface-cli login` or by passing `token=` - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,2,64,1,,, -4bit-awq-exllama-v2-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,google/recurrentgemma-7b,google/recurrentgemma-7b,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File 
""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 304, in hf_raise_for_status - response.raise_for_status() - File ""/usr/local/lib/python3.10/dist-packages/requests/models.py"", line 1024, in raise_for_status - raise HTTPError(http_error_msg, response=self) -requests.exceptions.HTTPError: 404 Client Error: Not Found for url: https://huggingface.co/google/recurrentgemma-7b/resolve/main/config.json - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1221, in hf_hub_download - return _hf_hub_download_to_cache_dir( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1325, in _hf_hub_download_to_cache_dir - _raise_on_head_call_error(head_call_error, force_download, local_files_only) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1823, in _raise_on_head_call_error - raise head_call_error - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1722, in _get_metadata_or_catch_error - metadata = get_hf_file_metadata(url=url, proxies=proxies, timeout=etag_timeout, headers=headers) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1645, in get_hf_file_metadata - r = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 372, in _request_wrapper - response = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 396, in _request_wrapper - hf_raise_for_status(response) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status - raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-66948283-12c28ab815608cd24acc7138;4d13d5a7-f26c-441e-9747-c37b3aadc7d1) - -Repository Not Found for url: https://huggingface.co/google/recurrentgemma-7b/resolve/main/config.json. -Please make sure you specified the correct `repo_id` and `repo_type`. -If you are trying to access a private or gated repo, make sure you are authenticated. 
- -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 425, in cached_file - raise EnvironmentError( -OSError: google/recurrentgemma-7b is not a local folder and is not a valid model identifier listed on 'https://huggingface.co/models' -If this is a private repository, make sure to pass a token having permission to this repo either by logging in with `huggingface-cli login` or by passing `token=` - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,2,64,1,,, -4bit-awq-exllama-v2-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,huggyllama/llama-13b,huggyllama/llama-13b,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - 
self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - self.load_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3907, in from_pretrained - hf_quantizer.postprocess_model(model) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/base.py"", line 195, in postprocess_model - return self._process_model_after_weight_loading(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/quantizer_awq.py"", line 107, in _process_model_after_weight_loading - model = post_init_awq_exllama_modules(model, self.quantization_config.exllama_config) - File ""/usr/local/lib/python3.10/dist-packages/transformers/integrations/awq.py"", line 466, in post_init_awq_exllama_modules - model = exllamav2_post_init( - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllamav2.py"", line 198, in exllamav2_post_init - submodule.post_init(scratch_space=model.scratch_spaces[device]) - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllamav2.py"", line 81, in post_init - self.q_handle = exlv2_ext.make_q_matrix( -NameError: name 'exlv2_ext' is not defined - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,2,64,1,,, -4bit-awq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,Deci/DeciCoder-1b,Deci/DeciCoder-1b,cuda,0,42,,,True,,,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 613, in resolve_trust_remote_code - answer = input( -EOFError: EOF when reading a line - -During handling of the above exception, another exception occurred: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run - report = scenario.run(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run - 
self.run_model_loading_tracking(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking - backend.load() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load - self.load_transformers_model() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 149, in load_transformers_model - self.create_no_weights_model() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 269, in create_no_weights_model - meta_model = self.automodel_loader.from_config(self.pretrained_config) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 419, in from_config - trust_remote_code = resolve_trust_remote_code( - File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 626, in resolve_trust_remote_code - raise ValueError( -ValueError: The repository for Deci/DeciCoder-1b contains custom code which must be executed to correctly load the model. You can inspect the repository content at https://hf.co/Deci/DeciCoder-1b. -Please pass the argument `trust_remote_code=True` to allow custom code to be run. - -",llama,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,2,64,1,,, -4bit-awq-exllama-v2-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,google/recurrentgemma-2b,google/recurrentgemma-2b,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 304, in hf_raise_for_status - response.raise_for_status() - File ""/usr/local/lib/python3.10/dist-packages/requests/models.py"", line 1024, in raise_for_status - raise HTTPError(http_error_msg, response=self) -requests.exceptions.HTTPError: 403 Client Error: Forbidden for url: https://huggingface.co/google/recurrentgemma-2b/resolve/main/config.json - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1722, in _get_metadata_or_catch_error - metadata = get_hf_file_metadata(url=url, proxies=proxies, timeout=etag_timeout, headers=headers) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", 
line 1645, in get_hf_file_metadata - r = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 372, in _request_wrapper - response = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 396, in _request_wrapper - hf_raise_for_status(response) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 367, in hf_raise_for_status - raise HfHubHTTPError(message, response=response) from e -huggingface_hub.utils._errors.HfHubHTTPError: (Request ID: Root=1-6694820f-6dfab0e854bcb2f92d82d8f0;ecfad464-772d-42ec-a5af-d26dbe85c88d) - -403 Forbidden: Please enable access to public gated repositories in your fine-grained token settings to view this repository.. -Cannot access content at: https://huggingface.co/google/recurrentgemma-2b/resolve/main/config.json. -If you are trying to create or update content,make sure you have a token with the `write` role. - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1221, in hf_hub_download - return _hf_hub_download_to_cache_dir( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1325, in _hf_hub_download_to_cache_dir - _raise_on_head_call_error(head_call_error, force_download, local_files_only) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1826, in _raise_on_head_call_error - raise LocalEntryNotFoundError( -huggingface_hub.utils._errors.LocalEntryNotFoundError: An error happened while trying to locate the file on the Hub and we cannot find the requested files in the local cache. Please check your connection and try again or make sure your Internet connection is on. 
- -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 445, in cached_file - raise EnvironmentError( -OSError: We couldn't connect to 'https://huggingface.co' to load this file, couldn't find it in the cached files and it looks like google/recurrentgemma-2b is not the path to a directory containing a file named config.json. -Checkout your internet connection or see how to run the library in offline mode at 'https://huggingface.co/docs/transformers/installation#offline-mode'. 
- -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,2,64,1,,, -4bit-awq-exllama-v2-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,v,v,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 304, in hf_raise_for_status - response.raise_for_status() - File ""/usr/local/lib/python3.10/dist-packages/requests/models.py"", line 1024, in raise_for_status - raise HTTPError(http_error_msg, response=self) -requests.exceptions.HTTPError: 404 Client Error: Not Found for url: https://huggingface.co/v/resolve/main/config.json - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1221, in hf_hub_download - return _hf_hub_download_to_cache_dir( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1325, in _hf_hub_download_to_cache_dir - _raise_on_head_call_error(head_call_error, force_download, local_files_only) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1823, in _raise_on_head_call_error - raise head_call_error - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1722, in _get_metadata_or_catch_error - metadata = get_hf_file_metadata(url=url, proxies=proxies, timeout=etag_timeout, headers=headers) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1645, in get_hf_file_metadata - r = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 372, in _request_wrapper - response = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 396, in _request_wrapper - hf_raise_for_status(response) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status - raise 
RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-669494bc-4de33cdc25217cc662222955;4e389c31-d2f2-4d24-ad1d-227234d0b6cf) - -Repository Not Found for url: https://huggingface.co/v/resolve/main/config.json. -Please make sure you specified the correct `repo_id` and `repo_type`. -If you are trying to access a private or gated repo, make sure you are authenticated. - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 425, in cached_file - raise EnvironmentError( -OSError: v is not a local folder and is not a valid model identifier listed on 'https://huggingface.co/models' -If this is a private repository, make sure to pass a token having permission to this repo either by logging in with `huggingface-cli login` or by passing `token=` - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,2,64,1,,, -4bit-awq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,EleutherAI/pythia-2.7b,EleutherAI/pythia-2.7b,cuda,0,42,,,True,,,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA 
A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,gpt_neox,MB,2219.040768,3198.681088,0.0,2569.0112,2295.745536,s,1,8.6029345703125,8.6029345703125,0.0,8.6029345703125,8.6029345703125,8.6029345703125,8.6029345703125,[8.6029345703125],,kWh,2.080631114445042e-05,1.1387459711546794e-05,3.750058555607172e-05,6.969435641206894e-05,,MB,2332.041216,3219.652608,0.0,2571.108352,2282.381824,s,10,0.4588505287170411,0.0458850528717041,8.721133659229497e-05,0.04586190223693848,0.04601468238830566,0.04605194835662842,0.04608176113128662,"[0.04600640106201172, 0.04588835144042969, 0.045829952239990236, 0.04586227035522461, 0.045867809295654295, 0.046089214324951173, 0.04582217788696289, 0.04579667282104492, 0.045826145172119144, 0.04586153411865234]",tokens/s,5579.158875893272,kWh,5.417417529147417e-07,2.9684805721794333e-07,2.129753232853308e-06,2.968343042985993e-06,tokens/kWh,86243401.21500169,MB,2341.060608,3221.74976,0.0,2571.108352,2391.706624,s,10,14.681139526367188,1.4681139526367188,0.018062776476668766,1.4630735473632812,1.488790380859375,1.4922234985351563,1.4949699926757813,"[1.483806884765625, 1.4880274658203125, 1.4652001953125, 1.4574271240234375, 1.446581787109375, 1.4858150634765626, 1.4579801025390624, 1.4396973876953125, 1.4956566162109375, 1.4609468994140624]",tokens/s,42.91220030083672,kWh,1.760535039937573e-05,9.647741172554951e-06,3.5269165575148194e-05,6.252225714707891e-05,tokens/kWh,1007641.1645183768,,s,630,14.67852390480041,0.023299244293333993,0.0006739075096969441,0.023023616790771483,0.023879885292053222,0.024118835067749023,0.02534163427352906,"[0.023805952072143553, 0.023815168380737304, 0.023805952072143553, 0.02368511962890625, 0.023632896423339843, 0.023781375885009767, 0.023782400131225585, 0.023856128692626953, 0.02388787269592285, 0.02368716812133789, 0.02368511962890625, 0.023669759750366212, 0.02365235137939453, 0.023670783996582033, 0.023602176666259765, 0.02368307113647461, 0.02370867156982422, 0.023851007461547852, 0.024615936279296875, 0.02411929512023926, 0.023754751205444336, 0.02390323257446289, 0.023748607635498048, 0.023740415573120118, 0.02351411247253418, 0.02391347122192383, 0.023742464065551756, 0.023706623077392578, 0.023748607635498048, 0.02375372886657715, 0.02367897605895996, 0.023756799697875978, 0.023588863372802735, 0.022817792892456053, 0.022692863464355468, 0.02267852783203125, 0.022776832580566408, 0.02272051239013672, 0.02270412826538086, 0.02247987174987793, 0.022533119201660155, 0.022524927139282228, 0.022754304885864256, 0.022717439651489257, 0.02309836769104004, 0.02368921661376953, 0.023525375366210938, 0.023627775192260742, 0.023586816787719726, 0.023508991241455078, 0.023628799438476563, 0.023649280548095702, 0.02364313507080078, 0.02365951919555664, 0.02368000030517578, 0.02372096061706543, 0.02367283248901367, 0.023629823684692384, 0.02369945526123047, 0.02373324775695801, 0.023631872177124022, 0.023658496856689453, 0.02433126449584961, 0.024164352416992187, 0.023826431274414063, 0.023567359924316408, 0.023825408935546875, 0.02364825630187988, 0.023630847930908205, 0.02367897605895996, 0.02365951919555664, 0.023608320236206053, 0.026161151885986327, 0.03168767929077149, 0.02413670349121094, 0.02348646354675293, 0.023673856735229492, 0.023661567687988282, 0.023641088485717773, 0.023580671310424805, 0.02366464042663574, 0.023604223251342774, 0.02364313507080078, 0.023624704360961913, 0.023686143875122072, 0.02285772705078125, 0.022697984695434572, 0.022769664764404295, 0.022815744400024415, 0.02290176010131836, 
0.022871040344238282, 0.022770687103271483, 0.022796287536621093, 0.022775808334350587, 0.022840320587158205, 0.022841344833374022, 0.022806528091430665, 0.022737920761108397, 0.022872064590454103, 0.02284441566467285, 0.022809600830078124, 0.022797311782836914, 0.02286796760559082, 0.023060480117797853, 0.023879680633544922, 0.023236608505249022, 0.02286796760559082, 0.0228853759765625, 0.02304102325439453, 0.026801151275634767, 0.02428927993774414, 0.023665664672851562, 0.023731199264526368, 0.023786495208740235, 0.02392166328430176, 0.024010751724243166, 0.02389811134338379, 0.023809024810791016, 0.02365132713317871, 0.02384588813781738, 0.023780351638793946, 0.023814144134521483, 0.023803903579711915, 0.023739391326904297, 0.022830080032348633, 0.022921215057373046, 0.02280243110656738, 0.022664192199707032, 0.022640640258789063, 0.02285670471191406, 0.022866943359375, 0.023402496337890624, 0.02285055923461914, 0.02288332748413086, 0.02286591911315918, 0.02351206398010254, 0.0236759033203125, 0.023998464584350586, 0.02367180824279785, 0.02374963188171387, 0.023658496856689453, 0.023614463806152345, 0.0236943359375, 0.023769088745117187, 0.023633920669555664, 0.02409779167175293, 0.02374963188171387, 0.023199743270874023, 0.022782976150512696, 0.022932479858398438, 0.02283417510986328, 0.022838272094726563, 0.022807552337646485, 0.022801408767700194, 0.02291097640991211, 0.02369536018371582, 0.02366873550415039, 0.02372812843322754, 0.02366361618041992, 0.023632896423339843, 0.02368511962890625, 0.023779327392578126, 0.023582719802856447, 0.023763967514038087, 0.023524351119995117, 0.022805503845214844, 0.022797311782836914, 0.02305536079406738, 0.022945791244506835, 0.0230328311920166, 0.024253440856933595, 0.023856128692626953, 0.023657472610473632, 0.02364313507080078, 0.023615488052368162, 0.02367692756652832, 0.023360511779785157, 0.022849536895751952, 0.022840320587158205, 0.02285260772705078, 0.022811647415161132, 0.02270207977294922, 0.022796287536621093, 0.022758399963378906, 0.022777856826782225, 0.02288435173034668, 0.022751232147216797, 0.022755327224731444, 0.022996992111206056, 0.02365235137939453, 0.023023616790771483, 0.02266111946105957, 0.022734848022460938, 0.022683647155761717, 0.022785024642944338, 0.0226693115234375, 0.022759424209594727, 0.022730752944946288, 0.022761472702026365, 0.02264575958251953, 0.02393497657775879, 0.024216575622558592, 0.0241213436126709, 0.023686143875122072, 0.02389606475830078, 0.023644159317016602, 0.023623680114746092, 0.023508991241455078, 0.023609344482421874, 0.023556095123291015, 0.023635967254638672, 0.023480319976806642, 0.02369740867614746, 0.023555072784423828, 0.02368819236755371, 0.022960128784179686, 0.02285875129699707, 0.022788095474243163, 0.02280243110656738, 0.022831104278564454, 0.022849536895751952, 0.022801408767700194, 0.0227061767578125, 0.02271334457397461, 0.022766592025756836, 0.022800384521484376, 0.02312499237060547, 0.02368409538269043, 0.023602176666259765, 0.023933952331542968, 0.02325299263000488, 0.02284441566467285, 0.02305433654785156, 0.023513088226318358, 0.023159807205200195, 0.022845439910888672, 0.02283622360229492, 0.022756351470947265, 0.0228351993560791, 0.022803455352783202, 0.02289664077758789, 0.02284851264953613, 0.02283622360229492, 0.02286079978942871, 0.0227061767578125, 0.02285055923461914, 0.02285977554321289, 0.022824960708618162, 0.02372915267944336, 0.023037952423095705, 0.02286591911315918, 0.022800384521484376, 0.022812671661376953, 0.022665216445922853, 0.02275328063964844, 
0.022798336029052735, 0.02269900894165039, 0.022724607467651366, 0.022701055526733398, 0.022766592025756836, 0.022746112823486327, 0.02272972869873047, 0.0227061767578125, 0.02265907287597656, 0.022587392807006838, 0.022863872528076173, 0.022724607467651366, 0.02271232032775879, 0.022734848022460938, 0.022751232147216797, 0.02267852783203125, 0.02347929573059082, 0.023779327392578126, 0.023617536544799804, 0.023657472610473632, 0.023774208068847655, 0.024482816696166993, 0.023970815658569337, 0.02367180824279785, 0.02370047950744629, 0.02368000030517578, 0.02367283248901367, 0.023171072006225587, 0.022798336029052735, 0.022803455352783202, 0.022790143966674805, 0.022723583221435546, 0.022772735595703125, 0.022778879165649413, 0.022771711349487304, 0.022866943359375, 0.02285977554321289, 0.02284441566467285, 0.023043071746826172, 0.022833152770996092, 0.022973440170288087, 0.022772735595703125, 0.022803455352783202, 0.022773759841918945, 0.022781951904296875, 0.022740991592407226, 0.022771711349487304, 0.022808576583862306, 0.022776832580566408, 0.022821887969970703, 0.02285260772705078, 0.022759424209594727, 0.022761472702026365, 0.022692863464355468, 0.022781951904296875, 0.022794240951538085, 0.02281881523132324, 0.022740991592407226, 0.022747135162353514, 0.022971391677856445, 0.02404351997375488, 0.023810047149658203, 0.024066047668457033, 0.024447999954223632, 0.02434764862060547, 0.02366873550415039, 0.02364313507080078, 0.023601152420043944, 0.023624704360961913, 0.023757823944091795, 0.02411827278137207, 0.023948287963867186, 0.02371379280090332, 0.0236943359375, 0.02370457649230957, 0.0238919677734375, 0.02369331169128418, 0.02369945526123047, 0.0236759033203125, 0.023045120239257814, 0.025013248443603517, 0.02388172721862793, 0.02371788787841797, 0.023620607376098633, 0.02368716812133789, 0.02369843292236328, 0.02369638442993164, 0.0237127685546875, 0.02376192092895508, 0.02371788787841797, 0.02370969581604004, 0.022958080291748048, 0.02290892791748047, 0.02287308883666992, 0.0228351993560791, 0.023181312561035155, 0.02391961669921875, 0.02370457649230957, 0.02369126319885254, 0.024025087356567384, 0.023975936889648438, 0.023767040252685546, 0.023825408935546875, 0.02368511962890625, 0.023871488571166992, 0.02387763214111328, 0.02411315155029297, 0.024176639556884767, 0.024253440856933595, 0.02428313636779785, 0.023310335159301757, 0.02291302490234375, 0.02287001609802246, 0.022882303237915038, 0.022796287536621093, 0.022772735595703125, 0.022849536895751952, 0.022815744400024415, 0.022807552337646485, 0.02286591911315918, 0.022759424209594727, 0.02281062316894531, 0.022800384521484376, 0.022747135162353514, 0.022778879165649413, 0.022764543533325195, 0.022776832580566408, 0.02270515251159668, 0.022755327224731444, 0.022770687103271483, 0.02273587226867676, 0.022760448455810548, 0.02272768020629883, 0.02272972869873047, 0.022747135162353514, 0.02268569564819336, 0.022781951904296875, 0.022742015838623047, 0.022755327224731444, 0.022718463897705078, 0.022767616271972657, 0.022830080032348633, 0.022985727310180663, 0.022754304885864256, 0.022784000396728517, 0.022782976150512696, 0.02288025665283203, 0.022785024642944338, 0.022803455352783202, 0.022787071228027343, 0.02332159996032715, 0.02332262420654297, 0.02281881523132324, 0.0228351993560791, 0.022845439910888672, 0.022786048889160155, 0.023001087188720702, 0.025564159393310547, 0.024377344131469726, 0.023818239212036133, 0.023793664932250977, 0.02370867156982422, 0.023798784255981444, 0.02369740867614746, 0.023758848190307616, 
0.023793664932250977, 0.023805952072143553, 0.023604223251342774, 0.023787519454956055, 0.0237127685546875, 0.02372505569458008, 0.02369126319885254, 0.02372812843322754, 0.02369638442993164, 0.023773183822631837, 0.02345369529724121, 0.02284339141845703, 0.02287308883666992, 0.022760448455810548, 0.022815744400024415, 0.022846464157104493, 0.022846464157104493, 0.022879232406616212, 0.022814720153808594, 0.02307583999633789, 0.022940671920776368, 0.02293350410461426, 0.023023616790771483, 0.022799360275268556, 0.022793216705322264, 0.022748159408569335, 0.022840320587158205, 0.023096319198608398, 0.02349158477783203, 0.022840320587158205, 0.022804479598999023, 0.022691839218139647, 0.022790143966674805, 0.022616064071655274, 0.022779903411865234, 0.022749183654785156, 0.022767616271972657, 0.022777856826782225, 0.022773759841918945, 0.02270412826538086, 0.022814720153808594, 0.02268671989440918, 0.022774784088134766, 0.0227194881439209, 0.02270310401916504, 0.022757375717163086, 0.022773759841918945, 0.022778879165649413, 0.022813695907592774, 0.02274406433105469, 0.02275328063964844, 0.02270515251159668, 0.02282598304748535, 0.02280243110656738, 0.02289356803894043, 0.022709247589111328, 0.02285055923461914, 0.022746112823486327, 0.0228351993560791, 0.022794240951538085, 0.022796287536621093, 0.022972415924072266, 0.022983680725097655, 0.022746112823486327, 0.022773759841918945, 0.02284441566467285, 0.022999040603637694, 0.022737920761108397, 0.022788095474243163, 0.022964223861694336, 0.022887424468994142, 0.022960128784179686, 0.022805503845214844, 0.02365644836425781, 0.02307891273498535, 0.022817792892456053, 0.022774784088134766, 0.022754304885864256, 0.022781951904296875, 0.022791168212890626, 0.022832128524780275, 0.022743040084838868, 0.023220224380493162, 0.02326016044616699, 0.022837247848510742, 0.022772735595703125, 0.022831104278564454, 0.022831104278564454, 0.02271027183532715, 0.022780927658081054, 0.022782976150512696, 0.022813695907592774, 0.022781951904296875, 0.02388582420349121, 0.023791616439819335, 0.02376192092895508, 0.023649280548095702, 0.023772159576416017, 0.023860223770141603, 0.023825408935546875, 0.023833599090576172, 0.023824384689331055, 0.024160255432128908, 0.023989248275756835, 0.023798784255981444, 0.02374963188171387, 0.023994367599487306, 0.025240575790405274, 0.025572351455688477, 0.024415231704711913, 0.023938047409057618, 0.023792640686035156, 0.0236810245513916, 0.023801855087280274, 0.02392678451538086, 0.024637439727783202, 0.024173568725585938, 0.023811071395874024, 0.023799808502197265, 0.023814144134521483, 0.023778303146362305, 0.023837696075439452, 0.023777280807495117, 0.023759872436523437, 0.02372505569458008, 0.023839744567871093, 0.02370969581604004, 0.02374963188171387, 0.023573503494262696, 0.02376192092895508, 0.023714815139770508, 0.023723007202148438, 0.023838720321655273, 0.023830528259277343, 0.023714815139770508, 0.023771135330200196, 0.02384998321533203, 0.023755775451660157, 0.023665664672851562, 0.023812095642089845, 0.023777280807495117, 0.023791616439819335, 0.023774208068847655, 0.023801855087280274, 0.023835647583007814, 0.02388275146484375, 0.02371379280090332, 0.022767616271972657, 0.022809600830078124, 0.022757375717163086, 0.022771711349487304, 0.022718463897705078, 0.022714368820190428, 0.022770687103271483, 0.022822912216186524, 0.022816768646240236, 0.02308710479736328, 0.023649280548095702, 0.022716415405273437, 0.0227061767578125, 0.022819839477539062, 0.02283622360229492, 0.022777856826782225, 
0.022822912216186524, 0.02282598304748535, 0.022723583221435546, 0.022757375717163086, 0.022945791244506835, 0.022832128524780275, 0.022587392807006838, 0.022763519287109374, 0.02273689651489258, 0.022766592025756836, 0.022768640518188478, 0.022779903411865234, 0.022708223342895507, 0.022816768646240236, 0.02285772705078125, 0.022944768905639647, 0.022817792892456053, 0.02275225639343262, 0.022794240951538085, 0.022788095474243163, 0.022795263290405272, 0.022815744400024415, 0.02347520065307617, 0.024029184341430664, 0.023563264846801758, 0.02283417510986328, 0.02345267105102539, 0.024162303924560546, 0.023742464065551756, 0.023751680374145507, 0.023789567947387694, 0.023051263809204102, 0.022847488403320314, 0.022863872528076173, 0.023443456649780273, 0.022953983306884765, 0.024995840072631836, 0.025382911682128906, 0.02711859130859375, 0.024390655517578123, 0.02388787269592285, 0.022879232406616212, 0.022923263549804687, 0.022932479858398438, 0.023626752853393555, 0.02389504051208496, 0.023744512557983398]",tokens/s,42.91984698774561,,,2,64,1,,, -4bit-awq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,Qwen/Qwen-7B,Qwen/Qwen-7B,cuda,0,42,,,True,,,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 613, in resolve_trust_remote_code - answer = input( -EOFError: EOF when reading a line - -During handling of the above exception, another exception occurred: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run - report = scenario.run(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run - self.run_model_loading_tracking(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking - backend.load() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load - self.load_transformers_model() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 149, in load_transformers_model - self.create_no_weights_model() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 269, in create_no_weights_model - meta_model = self.automodel_loader.from_config(self.pretrained_config) - File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 419, in from_config - trust_remote_code = resolve_trust_remote_code( - File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 626, in resolve_trust_remote_code - raise ValueError( -ValueError: The repository for Qwen/Qwen-7B contains custom code which must be executed to correctly load the model. You can inspect the repository content at https://hf.co/Qwen/Qwen-7B. -Please pass the argument `trust_remote_code=True` to allow custom code to be run. - -",qwen,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,2,64,1,,, -4bit-awq-exllama-v2-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,microsoft/rho-math-1b-v0.1,microsoft/rho-math-1b-v0.1,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - self.load_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3907, in from_pretrained - hf_quantizer.postprocess_model(model) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/base.py"", line 195, in postprocess_model - return self._process_model_after_weight_loading(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/quantizer_awq.py"", line 107, in _process_model_after_weight_loading - model = post_init_awq_exllama_modules(model, self.quantization_config.exllama_config) - File ""/usr/local/lib/python3.10/dist-packages/transformers/integrations/awq.py"", line 466, in post_init_awq_exllama_modules - model = exllamav2_post_init( - File 
""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllamav2.py"", line 198, in exllamav2_post_init - submodule.post_init(scratch_space=model.scratch_spaces[device]) - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllamav2.py"", line 81, in post_init - self.q_handle = exlv2_ext.make_q_matrix( -NameError: name 'exlv2_ext' is not defined - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,2,64,1,,, -4bit-awq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,01-ai/Yi-6B,01-ai/Yi-6B,cuda,0,42,,,True,,,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,llama,MB,3572.584448,5499.256832,0.0,4869.586944,4520.068608,s,1,10.538462890625,10.538462890625,0.0,10.538462890625,10.538462890625,10.538462890625,10.538462890625,[10.538462890625],,kWh,4.367738526805659e-05,2.392298257433468e-05,7.490450436800286e-05,0.00014250487221039414,,MB,1730.338816,5537.005568,0.0,4888.461312,4194.018304,s,10,0.9724095993041993,0.09724095993041992,0.00011991731324794474,0.09718494415283202,0.09736529846191407,0.09743940124511719,0.0974986834716797,"[0.09732777404785156, 0.09728275299072266, 0.09718409729003906, 0.09709772491455078, 0.097185791015625, 0.09715936279296875, 0.09751350402832032, 0.09734883117675781, 0.09716774749755859, 0.09714201354980469]",tokens/s,2632.6354674324375,kWh,1.150498771689604e-06,6.30418371271126e-07,4.425464705417472e-06,6.206381848378201e-06,tokens/kWh,41247864.90004571,MB,1758.94528,5547.491328,0.0,4896.84992,4194.020864,s,10,16.28201208496094,1.628201208496094,0.005460036741865503,1.6282060546875,1.6354828125,1.6363167846679687,1.636983962402344,"[1.6320560302734375, 1.623575439453125, 1.6284647216796875, 1.6192203369140625, 1.6236065673828124, 1.6352974853515625, 1.6310916748046875, 1.6279473876953126, 1.6371507568359376, 1.6236016845703125]",tokens/s,38.693006534610454,kWh,1.9978875357754775e-05,1.0948724561781539e-05,4.3722962702182755e-05,7.465056262171907e-05,tokens/kWh,843932.0185601733,,s,630,16.27999027061462,0.025841254397800995,0.00040708759893177477,0.02568806457519531,0.026535014533996582,0.026694451332092285,0.02728476709365845,"[0.025816064834594726, 0.02571673583984375, 0.025673728942871094, 0.02568704032897949, 0.025620479583740235, 0.02681548881530762, 0.02670182418823242, 0.026678272247314453, 0.028309503555297853, 0.027304960250854493, 0.02673459243774414, 0.0257126407623291, 0.02572697639465332, 0.025664512634277343, 0.025745407104492187, 0.025761791229248047, 0.025640960693359374, 0.02575257682800293, 0.026024959564208985, 0.026382335662841795, 0.025807872772216797, 0.02569625663757324, 0.02569215965270996, 0.025762815475463868, 0.025824256896972656, 0.02569932746887207, 0.025568256378173827, 0.02573311996459961, 0.025761791229248047, 0.025800703048706054, 0.025894912719726562, 0.02575974464416504, 0.0257392635345459, 0.02568191909790039, 0.025964544296264647, 0.025830400466918944, 0.025581567764282227, 0.025823232650756835, 0.02571161651611328, 0.025669631958007814, 0.0257259521484375, 
0.025632768630981444, 0.02573209571838379, 0.02571571159362793, 0.025873407363891602, 0.02572390365600586, 0.025580543518066406, 0.02636288070678711, 0.026685440063476562, 0.02655948829650879, 0.025746431350708008, 0.025833471298217774, 0.025789440155029295, 0.02549350357055664, 0.025288703918457032, 0.02529484748840332, 0.025606143951416017, 0.025645055770874024, 0.0255416316986084, 0.026462207794189452, 0.025874431610107423, 0.025614336013793947, 0.025603071212768554, 0.02591846466064453, 0.025612287521362305, 0.025568256378173827, 0.025611263275146484, 0.02567475128173828, 0.02697932815551758, 0.026284032821655274, 0.02569011116027832, 0.025472000122070314, 0.025649152755737304, 0.025653247833251954, 0.02570035171508789, 0.025660415649414063, 0.025619455337524414, 0.02573721694946289, 0.025678911209106446, 0.02563680076599121, 0.02570240020751953, 0.026108928680419922, 0.027995199203491212, 0.026545087814331056, 0.02552422332763672, 0.02530816078186035, 0.025402368545532225, 0.02536960029602051, 0.0254597110748291, 0.025652223587036133, 0.025390079498291016, 0.025224191665649414, 0.025795583724975587, 0.025815040588378906, 0.025776128768920898, 0.026617855072021485, 0.02649497604370117, 0.025779199600219727, 0.025783296585083007, 0.025657344818115234, 0.02567475128173828, 0.02567475128173828, 0.025644031524658203, 0.025594879150390625, 0.025560064315795897, 0.025653247833251954, 0.0273305606842041, 0.026162176132202147, 0.025617408752441406, 0.025598976135253908, 0.02552729606628418, 0.025621503829956056, 0.02568704032897949, 0.025660415649414063, 0.025572351455688477, 0.025596927642822266, 0.025478143692016602, 0.025322496414184572, 0.025624576568603515, 0.025592832565307616, 0.02554982376098633, 0.02568806457519531, 0.025565216064453125, 0.025637855529785158, 0.025593856811523437, 0.025596927642822266, 0.025470975875854493, 0.025556991577148438, 0.025619455337524414, 0.025578496932983398, 0.025604095458984375, 0.025861120223999022, 0.026020864486694335, 0.026256383895874022, 0.026811391830444335, 0.02654412841796875, 0.026457088470458984, 0.02655232048034668, 0.026631168365478516, 0.02649395179748535, 0.026411008834838868, 0.02616831970214844, 0.02653593635559082, 0.025680896759033203, 0.02571468734741211, 0.025612287521362305, 0.025637887954711915, 0.025677824020385744, 0.025603071212768554, 0.025634815216064453, 0.025629695892333985, 0.02562563133239746, 0.025690080642700196, 0.025674816131591796, 0.025698240280151368, 0.025620479583740235, 0.025611328125, 0.025637823104858397, 0.025631744384765624, 0.025705472946166992, 0.02572185516357422, 0.025836544036865236, 0.025652288436889648, 0.02565216064453125, 0.025683967590332032, 0.025591808319091795, 0.025683967590332032, 0.026210304260253905, 0.02591334342956543, 0.02573107147216797, 0.02578124809265137, 0.026019840240478515, 0.026078208923339844, 0.02573417663574219, 0.02578019142150879, 0.025694208145141603, 0.025778175354003906, 0.025746431350708008, 0.025692256927490234, 0.025629600524902343, 0.026284032821655274, 0.02569932746887207, 0.025911296844482422, 0.02575155258178711, 0.025579519271850586, 0.02567987251281738, 0.02572800064086914, 0.025667583465576172, 0.025698335647583007, 0.02617344093322754, 0.026211328506469726, 0.02577305603027344, 0.02572697639465332, 0.025620479583740235, 0.025694208145141603, 0.025644031524658203, 0.025631744384765624, 0.025625600814819335, 0.025742336273193358, 0.025637887954711915, 0.02553446388244629, 0.025669631958007814, 0.025621503829956056, 0.02568806457519531, 0.025701375961303712, 
0.02570035171508789, 0.025792512893676758, 0.025671680450439452, 0.026457088470458984, 0.025668607711791993, 0.02531328010559082, 0.025569280624389647, 0.025657344818115234, 0.025631744384765624, 0.025624576568603515, 0.026630144119262695, 0.026104831695556642, 0.025463808059692384, 0.025632768630981444, 0.025596927642822266, 0.025610240936279297, 0.025425920486450194, 0.0254597110748291, 0.02574847984313965, 0.02570342445373535, 0.025683967590332032, 0.025649152755737304, 0.02567884826660156, 0.025611263275146484, 0.025548799514770508, 0.0255416316986084, 0.025632768630981444, 0.02571878433227539, 0.02546892738342285, 0.025610240936279297, 0.025619455337524414, 0.025664512634277343, 0.025611263275146484, 0.025660415649414063, 0.025531391143798828, 0.025606143951416017, 0.025568256378173827, 0.025622528076171876, 0.02573311996459961, 0.025671680450439452, 0.025601024627685546, 0.025638912200927736, 0.025819135665893556, 0.026202112197875976, 0.025800703048706054, 0.02569932746887207, 0.025671680450439452, 0.025621503829956056, 0.025648128509521483, 0.02571468734741211, 0.026038272857666016, 0.025608192443847655, 0.025593856811523437, 0.025648128509521483, 0.025637887954711915, 0.025701375961303712, 0.025578496932983398, 0.025640960693359374, 0.02571673583984375, 0.02571980857849121, 0.025404415130615234, 0.025312255859375, 0.02556620788574219, 0.025367551803588868, 0.025466880798339843, 0.025333759307861328, 0.025412607192993163, 0.02531020736694336, 0.025672704696655273, 0.025653247833251954, 0.025640960693359374, 0.02571161651611328, 0.025787391662597657, 0.02572083282470703, 0.025719839096069334, 0.025655263900756835, 0.02571878433227539, 0.02571980857849121, 0.025669631958007814, 0.026066944122314452, 0.026054655075073242, 0.02578124809265137, 0.02571878433227539, 0.025486335754394532, 0.026302463531494142, 0.027213823318481444, 0.027047935485839843, 0.026771455764770507, 0.026596351623535155, 0.026719232559204102, 0.025789440155029295, 0.02571878433227539, 0.025766912460327147, 0.025785343170166015, 0.02568806457519531, 0.02565836715698242, 0.025694208145141603, 0.026004480361938476, 0.025807872772216797, 0.025663488388061522, 0.025563135147094726, 0.025664512634277343, 0.025615360260009764, 0.025606143951416017, 0.02568191909790039, 0.025626623153686523, 0.02568191909790039, 0.025657344818115234, 0.025593856811523437, 0.025664512634277343, 0.025778175354003906, 0.02565017509460449, 0.026817535400390623, 0.026597375869750976, 0.026582015991210937, 0.02711961555480957, 0.02710527992248535, 0.026647552490234375, 0.026534912109375, 0.026639360427856446, 0.026600448608398438, 0.026598400115966796, 0.02654719924926758, 0.02651545524597168, 0.02670694351196289, 0.02673971176147461, 0.02616012763977051, 0.025798656463623046, 0.02568294334411621, 0.025670719146728516, 0.025663423538208007, 0.02567065620422363, 0.025563135147094726, 0.025683967590332032, 0.02566147232055664, 0.025667552947998048, 0.025677824020385744, 0.025659391403198242, 0.025571327209472656, 0.025449472427368162, 0.025632768630981444, 0.025391103744506836, 0.025645055770874024, 0.02575974464416504, 0.02567475128173828, 0.02574847984313965, 0.025547775268554687, 0.02571878433227539, 0.02570035171508789, 0.025717760086059572, 0.02565017509460449, 0.025676799774169923, 0.025655296325683592, 0.025620479583740235, 0.025465856552124022, 0.02614374351501465, 0.025894912719726562, 0.025673728942871094, 0.026283008575439453, 0.025804800033569338, 0.02573209571838379, 0.02571878433227539, 0.025641984939575195, 0.02570444869995117, 
0.025762815475463868, 0.02571059226989746, 0.025746431350708008, 0.025769983291625977, 0.02572287940979004, 0.025607168197631838, 0.025750528335571288, 0.025812992095947264, 0.027235328674316408, 0.025738239288330078, 0.02549760055541992, 0.025683967590332032, 0.025641984939575195, 0.02565836715698242, 0.025531391143798828, 0.025582592010498048, 0.025631744384765624, 0.025843711853027345, 0.026007551193237305, 0.025790464401245116, 0.025789440155029295, 0.02589695930480957, 0.02588057518005371, 0.0255416316986084, 0.025701375961303712, 0.02572287940979004, 0.025765888214111327, 0.02612326431274414, 0.026045440673828125, 0.02593075180053711, 0.025771007537841797, 0.02550169563293457, 0.025628671646118165, 0.025669631958007814, 0.025598976135253908, 0.02547609519958496, 0.0257392635345459, 0.025649152755737304, 0.02571468734741211, 0.025646080017089845, 0.025677824020385744, 0.025789440155029295, 0.02572390365600586, 0.026388479232788087, 0.025907199859619142, 0.025595903396606445, 0.025586687088012695, 0.025638912200927736, 0.025613311767578126, 0.02558470344543457, 0.02549344062805176, 0.025576448440551756, 0.025640960693359374, 0.025633792877197265, 0.025598976135253908, 0.025765888214111327, 0.02568806457519531, 0.025592832565307616, 0.025617408752441406, 0.025661439895629884, 0.025636863708496094, 0.027108352661132814, 0.027357183456420898, 0.026952703475952147, 0.026612735748291014, 0.026673152923583986, 0.026646528244018555, 0.027455488204956056, 0.026960895538330077, 0.026644479751586913, 0.026009599685668947, 0.025662464141845705, 0.026908672332763672, 0.025648128509521483, 0.026030080795288086, 0.026648639678955078, 0.025680896759033203, 0.02600543975830078, 0.026570751190185548, 0.026459135055541993, 0.026398719787597655, 0.025607168197631838, 0.02631782341003418, 0.026434560775756837, 0.026556415557861326, 0.026182655334472657, 0.025626623153686523, 0.025613344192504883, 0.025643999099731446, 0.025632768630981444, 0.025624576568603515, 0.025766912460327147, 0.025615360260009764, 0.025652223587036133, 0.025589759826660157, 0.025611263275146484, 0.025647104263305662, 0.025656320571899413, 0.025651199340820312, 0.025627647399902344, 0.025616384506225585, 0.025579519271850586, 0.025753599166870117, 0.026239999771118162, 0.02588467216491699, 0.025786367416381836, 0.025820159912109376, 0.025846784591674804, 0.02575155258178711, 0.02572902488708496, 0.02573107147216797, 0.025623552322387694, 0.025676799774169923, 0.02555392074584961, 0.025757696151733397, 0.025709568023681642, 0.025617408752441406, 0.025646080017089845, 0.02566655921936035, 0.025723968505859375, 0.02569107246398926, 0.02572902488708496, 0.025643007278442383, 0.025624576568603515, 0.025622528076171876, 0.02552217674255371, 0.025653247833251954, 0.02571878433227539, 0.025641984939575195, 0.02567065620422363, 0.025839616775512695, 0.025625600814819335, 0.02571468734741211, 0.026222591400146485, 0.02670182418823242, 0.02575564765930176, 0.02569113540649414, 0.026447872161865234, 0.026202112197875976, 0.026057727813720705, 0.026603519439697267, 0.026618879318237306, 0.02631167984008789, 0.025662464141845705, 0.025555967330932617, 0.025622528076171876, 0.0263055362701416, 0.02655948829650879, 0.02630143928527832, 0.02577305603027344, 0.02575155258178711, 0.025665536880493164, 0.02629324722290039, 0.026665983200073243, 0.02632908821105957, 0.025593856811523437, 0.025618431091308593, 0.02567475128173828, 0.026064895629882814, 0.025619455337524414, 0.025597951889038087, 0.02565017509460449, 0.025618431091308593, 
0.025552896499633788, 0.02631372833251953, 0.02648575973510742, 0.025664512634277343, 0.025628671646118165, 0.025697280883789062, 0.025789440155029295, 0.02628096008300781, 0.026556415557861326, 0.02635264015197754, 0.025610240936279297, 0.025633792877197265, 0.02636288070678711, 0.02717900848388672, 0.02671615982055664, 0.02612428855895996, 0.02693734359741211, 0.025965568542480468, 0.02588979148864746, 0.025578496932983398, 0.025564159393310547, 0.025618431091308593, 0.02568806457519531, 0.025572351455688477, 0.025801727294921875, 0.026113023757934572, 0.025868288040161135, 0.025657344818115234, 0.02555801582336426, 0.026977279663085937, 0.02614681625366211, 0.02569932746887207, 0.025590848922729493, 0.025596864700317384, 0.02556211280822754, 0.025701311111450194, 0.025457664489746092, 0.02548940849304199, 0.02546892738342285, 0.02551705551147461, 0.025675775527954102, 0.02575667190551758, 0.025548799514770508, 0.025598976135253908, 0.025626623153686523, 0.02555904006958008, 0.025613311767578126, 0.025833471298217774, 0.026496000289916992, 0.025862144470214843, 0.025569280624389647, 0.02571468734741211, 0.026010623931884767, 0.025935871124267578, 0.025672704696655273, 0.025778175354003906, 0.02568608093261719, 0.025647039413452147, 0.025631744384765624, 0.025769983291625977, 0.025705472946166992, 0.02568806457519531, 0.025651199340820312, 0.025745407104492187, 0.02591744041442871, 0.02576486396789551, 0.025672704696655273, 0.025750528335571288, 0.02568191909790039, 0.025742336273193358, 0.02573721694946289, 0.025645055770874024, 0.02570649528503418, 0.025746431350708008, 0.02572287940979004, 0.025857023239135742, 0.026674175262451173, 0.025591808319091795, 0.025671680450439452, 0.02567884826660156, 0.0255467529296875, 0.02555904006958008, 0.02561846351623535, 0.02581705665588379, 0.02571059226989746, 0.025597951889038087, 0.027371519088745116, 0.026281984329223632, 0.025673728942871094, 0.025565183639526368, 0.026813440322875977, 0.02614374351501465, 0.025670688629150392, 0.02558255958557129, 0.025555967330932617, 0.02565017509460449, 0.0255416316986084, 0.025712671279907225]",tokens/s,38.69781182468823,,,2,64,1,,, -4bit-awq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,EleutherAI/gpt-neo-2.7B,EleutherAI/gpt-neo-2.7B,cuda,0,42,,,True,,,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,gpt_neo,MB,2206.191616,2783.444992,0.0,2153.775104,2041.744384,s,1,9.80061328125,9.80061328125,0.0,9.80061328125,9.80061328125,9.80061328125,9.80061328125,[9.80061328125],,kWh,3.561910573610527e-05,1.9506189178564702e-05,5.751171267601163e-05,0.0001126370075906816,,MB,2319.880192,2802.31936,0.0,2153.775104,1917.560832,s,10,0.4867462730407715,0.04867462730407715,0.0002118534081760472,0.04871560096740723,0.04890115432739258,0.04890908851623535,0.04891543586730957,"[0.04811244964599609, 0.048579841613769534, 0.04876252746582031, 0.048715808868408206, 0.04871539306640625, 0.048655937194824216, 0.048728126525878906, 0.04865977478027344, 0.04889939117431641, 
0.048917022705078125]",tokens/s,5259.413665372977,kWh,5.681715409254931e-07,3.11329693428497e-07,2.0912676986538477e-06,2.970768933007838e-06,tokens/kWh,86172976.0115694,MB,2329.010176,2802.31936,0.0,2153.775104,1999.054336,s,10,11.294524291992186,1.1294524291992187,0.024332405072643195,1.1403378906250001,1.15049970703125,1.1523781494140626,1.1538809033203126,"[1.14615771484375, 1.146260986328125, 1.1285723876953124, 1.1009830322265626, 1.0779427490234375, 1.154256591796875, 1.1075889892578126, 1.150082275390625, 1.13451806640625, 1.1481614990234374]",tokens/s,55.77924166728029,kWh,1.2821140766505602e-05,7.025508480829834e-06,2.626472720834396e-05,4.611137645567939e-05,tokens/kWh,1366257.1981678612,,s,630,11.290917881011968,0.017922091874622164,0.0005937263491349749,0.018172927856445312,0.018395443153381347,0.0185831937789917,0.019266866741180427,"[0.01723391914367676, 0.01723494338989258, 0.017258495330810548, 0.017378303527832033, 0.017110015869140623, 0.017113088607788086, 0.017458175659179686, 0.018550783157348632, 0.018353151321411132, 0.018322431564331054, 0.018711551666259766, 0.01884262466430664, 0.018316287994384766, 0.018255872726440428, 0.018712575912475587, 0.018571264266967775, 0.01817804718017578, 0.018469888687133788, 0.018568191528320312, 0.018315263748168945, 0.01822719955444336, 0.018197504043579102, 0.01842278480529785, 0.018327552795410155, 0.01823846435546875, 0.01827123260498047, 0.01822105598449707, 0.018298879623413086, 0.018153472900390624, 0.018358272552490236, 0.018231296539306642, 0.018272319793701173, 0.018192319869995116, 0.018189311981201172, 0.018242559432983398, 0.01820057678222656, 0.018155519485473632, 0.01824870491027832, 0.018321407318115233, 0.01822208023071289, 0.0182794246673584, 0.01824665641784668, 0.018109439849853515, 0.018355199813842774, 0.01819545555114746, 0.01827840042114258, 0.018191360473632814, 0.018127872467041017, 0.018291711807250977, 0.018327552795410155, 0.018348031997680665, 0.01822719955444336, 0.01822719955444336, 0.018239488601684572, 0.018299903869628906, 0.018304000854492186, 0.01824358367919922, 0.01830297660827637, 0.01823539161682129, 0.01840640068054199, 0.018199552536010744, 0.01820979118347168, 0.01826201629638672, 0.01719500732421875, 0.017641471862792968, 0.017773567199707033, 0.01716531181335449, 0.016827392578125, 0.017116159439086915, 0.017332223892211913, 0.018494464874267577, 0.018190336227416993, 0.01824665641784668, 0.018111488342285157, 0.018499584197998048, 0.01835212707519531, 0.01822105598449707, 0.018199552536010744, 0.018183168411254884, 0.01818009567260742, 0.01787392044067383, 0.017770496368408203, 0.018159616470336915, 0.018152448654174806, 0.018172927856445312, 0.0178288631439209, 0.017754112243652344, 0.018280448913574218, 0.01839820861816406, 0.018861055374145508, 0.019556352615356445, 0.018702335357666015, 0.018734079360961914, 0.01836851119995117, 0.018129919052124025, 0.018448383331298827, 0.01862246322631836, 0.01824460792541504, 0.01842278480529785, 0.018185216903686522, 0.018086912155151368, 0.018131967544555663, 0.018306047439575195, 0.01827020835876465, 0.018163711547851562, 0.018120704650878908, 0.018337791442871093, 0.018164735794067383, 0.01827123260498047, 0.018318336486816408, 0.018139135360717772, 0.018120704650878908, 0.018152448654174806, 0.018265087127685545, 0.01821696090698242, 0.01820159912109375, 0.018289663314819335, 0.01823744010925293, 0.01821286392211914, 0.01830297660827637, 0.018314239501953124, 0.019121152877807617, 0.01877299118041992, 0.018233343124389647, 
0.01844121551513672, 0.018340864181518556, 0.017123327255249024, 0.017262592315673828, 0.017187839508056642, 0.01721855926513672, 0.017185792922973633, 0.017132543563842775, 0.017105920791625977, 0.017116159439086915, 0.017124351501464845, 0.017286144256591796, 0.017097728729248047, 0.017105920791625977, 0.017104896545410156, 0.0170383358001709, 0.01717862319946289, 0.017122304916381836, 0.017583103179931642, 0.018328575134277342, 0.018181119918823242, 0.01820159912109375, 0.018283519744873047, 0.01823744010925293, 0.01825484848022461, 0.0181790714263916, 0.01822003173828125, 0.01822003173828125, 0.018215936660766603, 0.018113536834716795, 0.01819340705871582, 0.018315263748168945, 0.01819340705871582, 0.018280448913574218, 0.01826304054260254, 0.018226175308227538, 0.01807974433898926, 0.018265087127685545, 0.01796505546569824, 0.018256895065307616, 0.0181790714263916, 0.017747968673706056, 0.018168832778930662, 0.017712127685546874, 0.017879039764404296, 0.017737728118896484, 0.017819648742675782, 0.017761280059814453, 0.017777664184570312, 0.018288639068603514, 0.017968128204345703, 0.018465791702270508, 0.019172351837158205, 0.018777088165283205, 0.018308095932006836, 0.018318336486816408, 0.017854463577270507, 0.018157567977905274, 0.01824870491027832, 0.01824563217163086, 0.01820159912109375, 0.018316287994384766, 0.018198528289794923, 0.018255872726440428, 0.01826304054260254, 0.017138687133789063, 0.01719603157043457, 0.017085439682006837, 0.016973823547363282, 0.017060863494873048, 0.01717350387573242, 0.01705062484741211, 0.017105920791625977, 0.017102848052978514, 0.017099775314331055, 0.0170700798034668, 0.017059839248657227, 0.01701068878173828, 0.017102848052978514, 0.01702707290649414, 0.017067007064819336, 0.017054719924926756, 0.017105920791625977, 0.016973823547363282, 0.017063936233520507, 0.017077247619628907, 0.017063936233520507, 0.017039360046386717, 0.017082368850708008, 0.01702195167541504, 0.017121280670166016, 0.01703321647644043, 0.017139711380004884, 0.01706188774108887, 0.017094655990600584, 0.01700556755065918, 0.01704960060119629, 0.01704243278503418, 0.01697587203979492, 0.017052671432495118, 0.017107967376708985, 0.017101823806762697, 0.0170700798034668, 0.017088512420654296, 0.017101856231689454, 0.017086431503295897, 0.017160192489624023, 0.01843507194519043, 0.018524160385131837, 0.01886412811279297, 0.018298879623413086, 0.018130943298339842, 0.018129919052124025, 0.01817087936401367, 0.01830297660827637, 0.018325504302978517, 0.01820262336730957, 0.018142208099365235, 0.018185216903686522, 0.018166784286499024, 0.01821183967590332, 0.01823539161682129, 0.01822105598449707, 0.018241535186767577, 0.01824051284790039, 0.018259967803955078, 0.018101247787475586, 0.01818828773498535, 0.017055744171142577, 0.01706598472595215, 0.017076223373413087, 0.017035263061523438, 0.017086463928222655, 0.017052671432495118, 0.01704140853881836, 0.017064960479736328, 0.017099775314331055, 0.017067007064819336, 0.017055744171142577, 0.017120256423950195, 0.017092607498168946, 0.017156095504760743, 0.017098751068115235, 0.017133567810058595, 0.017111040115356444, 0.01704652786254883, 0.017088512420654296, 0.017113088607788086, 0.017073152542114257, 0.017074176788330078, 0.017107967376708985, 0.017123327255249024, 0.01702195167541504, 0.01706598472595215, 0.017125375747680666, 0.017083391189575196, 0.01705779266357422, 0.017110015869140623, 0.017163263320922852, 0.017056768417358398, 0.017137664794921875, 0.017119232177734374, 0.017075199127197266, 0.017052671432495118, 
0.016916479110717773, 0.017903615951538086, 0.017506303787231444, 0.016990207672119142, 0.017101823806762697, 0.01704140853881836, 0.017067007064819336, 0.017088512420654296, 0.017091583251953125, 0.017039360046386717, 0.017177600860595704, 0.017082368850708008, 0.017150976181030272, 0.01705062484741211, 0.017091583251953125, 0.017116159439086915, 0.017087488174438475, 0.017129472732543945, 0.01700249671936035, 0.017187839508056642, 0.017110015869140623, 0.017146879196166993, 0.017090560913085938, 0.017137664794921875, 0.01704960060119629, 0.017105920791625977, 0.017103872299194335, 0.020354047775268554, 0.018751487731933594, 0.018108415603637695, 0.018191360473632814, 0.018158592224121094, 0.018173952102661133, 0.0182108154296875, 0.018153472900390624, 0.01819443130493164, 0.018134016036987305, 0.018241535186767577, 0.018317312240600587, 0.018338815689086914, 0.018140159606933593, 0.018229248046875, 0.018122751235961913, 0.018091007232666014, 0.01816268730163574, 0.018157567977905274, 0.018300928115844727, 0.01828556823730469, 0.01820467185974121, 0.01819647979736328, 0.018544639587402344, 0.017797119140625, 0.018359296798706053, 0.018147327423095702, 0.01783500862121582, 0.018142208099365235, 0.018502656936645507, 0.018709503173828124, 0.01823744010925293, 0.018714624404907225, 0.018273279190063475, 0.01859686470031738, 0.018184192657470705, 0.018155519485473632, 0.018206720352172853, 0.018182144165039063, 0.018309120178222657, 0.01830297660827637, 0.01816166305541992, 0.01885081672668457, 0.018699264526367186, 0.019400703430175782, 0.018584575653076172, 0.018283519744873047, 0.018185216903686522, 0.018120704650878908, 0.01817087936401367, 0.01820364761352539, 0.018199552536010744, 0.01823027229309082, 0.018241567611694334, 0.018219999313354492, 0.018287616729736327, 0.01821388816833496, 0.018217983245849608, 0.01820057678222656, 0.018345983505249023, 0.018310144424438478, 0.01823641586303711, 0.01816268730163574, 0.01721139144897461, 0.017177600860595704, 0.017125375747680666, 0.017135616302490234, 0.01699942398071289, 0.017082368850708008, 0.01699839973449707, 0.017183744430541992, 0.017091583251953125, 0.017077247619628907, 0.017006591796875, 0.017031167984008787, 0.017362943649291994, 0.01719705581665039, 0.017098751068115235, 0.01719193649291992, 0.01702707290649414, 0.017068031311035157, 0.017044479370117188, 0.01704140853881836, 0.017097728729248047, 0.01707827186584473, 0.0178657283782959, 0.017329151153564454, 0.017122304916381836, 0.017082399368286132, 0.01709769630432129, 0.017112064361572265, 0.01702707290649414, 0.017071104049682616, 0.017120256423950195, 0.01705062484741211, 0.017086463928222655, 0.017068031311035157, 0.017099775314331055, 0.017670143127441407, 0.018095104217529297, 0.018111488342285157, 0.018231296539306642, 0.018093055725097656, 0.01817190361022949, 0.018117631912231445, 0.018174976348876954, 0.018085887908935547, 0.018137088775634767, 0.01808076858520508, 0.018069503784179687, 0.01803980827331543, 0.018150400161743165, 0.018028543472290038, 0.018184192657470705, 0.01819443130493164, 0.018145280838012694, 0.018250751495361327, 0.018131967544555663, 0.018176000595092775, 0.018176000595092775, 0.01822105598449707, 0.01820057678222656, 0.018242559432983398, 0.018256895065307616, 0.018197504043579102, 0.01818828773498535, 0.017137664794921875, 0.017074176788330078, 0.017128448486328125, 0.017142784118652343, 0.017675264358520508, 0.017811456680297853, 0.018534400939941405, 0.01824563217163086, 0.018132991790771484, 0.018143232345581056, 0.018489343643188477, 
0.018141183853149414, 0.01823744010925293, 0.018395135879516602, 0.01903104019165039, 0.018508800506591795, 0.019349504470825195, 0.019305471420288087, 0.018449407577514648, 0.01820876884460449, 0.018159616470336915, 0.018581504821777343, 0.018284543991088868, 0.018272256851196288, 0.018106367111206053, 0.018068479537963866, 0.018166784286499024, 0.01822822380065918, 0.018197504043579102, 0.018189311981201172, 0.01817087936401367, 0.01815449523925781, 0.01824460792541504, 0.01819545555114746, 0.018198528289794923, 0.01820569610595703, 0.018124799728393554, 0.01822719955444336, 0.018177024841308592, 0.018155519485473632, 0.018181119918823242, 0.018273279190063475, 0.018172927856445312, 0.01823539161682129, 0.018127872467041017, 0.019108863830566408, 0.018298879623413086, 0.01824563217163086, 0.018166784286499024, 0.018192384719848635, 0.018184192657470705, 0.018197504043579102, 0.018131967544555663, 0.018139135360717772, 0.018147327423095702, 0.01826304054260254, 0.018127872467041017, 0.01816985511779785, 0.018317312240600587, 0.018293760299682618, 0.018141183853149414, 0.018545663833618165, 0.020552703857421875, 0.01717862319946289, 0.017062911987304686, 0.017152000427246093, 0.01722675132751465, 0.01741414451599121, 0.017951744079589844, 0.017168384552001953, 0.017133567810058595, 0.017141759872436522, 0.017175552368164062, 0.017087488174438475, 0.017092607498168946, 0.017068031311035157, 0.01705369567871094, 0.017099775314331055, 0.018010112762451173, 0.018143232345581056, 0.018229248046875, 0.018144256591796876, 0.018326528549194337, 0.018124799728393554, 0.018250751495361327, 0.018215936660766603, 0.018233343124389647, 0.018206720352172853, 0.0182108154296875, 0.01829478454589844, 0.018164735794067383, 0.018145280838012694, 0.018052095413208007, 0.01818726348876953, 0.018122751235961913, 0.018190336227416993, 0.018167808532714845, 0.01818623924255371, 0.018183168411254884, 0.018107391357421874, 0.018157567977905274, 0.018173952102661133, 0.0182609920501709, 0.01818726348876953, 0.018297855377197265, 0.01822003173828125, 0.018112512588500978, 0.018234367370605468, 0.018299903869628906, 0.01824665641784668, 0.01825279998779297, 0.01825382423400879, 0.018330623626708984, 0.018423807144165038, 0.01841663932800293, 0.01818623924255371, 0.018356224060058594, 0.018199552536010744, 0.018670591354370117, 0.018215936660766603, 0.01810534477233887, 0.01827123260498047, 0.018546688079833985, 0.018207744598388673, 0.01843097686767578, 0.01904947280883789, 0.01719808006286621, 0.017102848052978514, 0.017102848052978514, 0.017157119750976564, 0.017118207931518553, 0.01798041534423828, 0.01822105598449707, 0.018129919052124025, 0.01827840042114258, 0.018264064788818358, 0.018191360473632814, 0.01827123260498047, 0.01815449523925781, 0.018283519744873047, 0.01826918411254883, 0.018308095932006836, 0.01823744010925293, 0.01820364761352539, 0.018257919311523436, 0.018152448654174806, 0.018366464614868162, 0.018197504043579102, 0.018274303436279296, 0.018147327423095702, 0.018223104476928712, 0.018229248046875, 0.018367488861083983, 0.018266111373901366, 0.018324480056762696, 0.018495487213134765, 0.01819340705871582, 0.018307071685791015, 0.01824051284790039, 0.018125823974609375, 0.01823027229309082, 0.01821286392211914, 0.0182609920501709, 0.018316287994384766, 0.018490367889404297, 0.018258943557739257, 0.01818726348876953, 0.01821696090698242, 0.018152448654174806, 0.01822719955444336, 0.01818726348876953, 0.018233343124389647, 0.018092031478881835, 0.01821900749206543, 0.018119680404663087, 
0.018198528289794923, 0.018223104476928712, 0.01823539161682129, 0.01805414390563965, 0.018919424057006837, 0.018579456329345705, 0.018207744598388673, 0.018144256591796876, 0.018190336227416993, 0.018130943298339842, 0.018505727767944336, 0.02124799919128418, 0.01883852767944336, 0.01826918411254883]",tokens/s,55.79705801062256,,,2,64,1,,, -4bit-awq-exllama-v2-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,facebook/opt-66b,facebook/opt-66b,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - self.load_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3904, in from_pretrained - dispatch_model(model, **device_map_kwargs) - File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 489, in dispatch_model - model.to(device) - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 2796, in to - return super().to(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1173, in to - return self._apply(convert) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 779, in _apply - module._apply(fn) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 779, in _apply - module._apply(fn) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 779, in _apply - module._apply(fn) - [Previous line repeated 2 more times] - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 853, in _apply - self._buffers[key] = fn(buf) - File 
""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1159, in convert - return t.to( -torch.cuda.OutOfMemoryError: CUDA out of memory. Tried to allocate 162.00 MiB. GPU - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,2,64,1,,, -4bit-awq-exllama-v2-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,0,0,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 304, in hf_raise_for_status - response.raise_for_status() - File ""/usr/local/lib/python3.10/dist-packages/requests/models.py"", line 1024, in raise_for_status - raise HTTPError(http_error_msg, response=self) -requests.exceptions.HTTPError: 404 Client Error: Not Found for url: https://huggingface.co/0/resolve/main/config.json - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1221, in hf_hub_download - return _hf_hub_download_to_cache_dir( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1325, in _hf_hub_download_to_cache_dir - _raise_on_head_call_error(head_call_error, force_download, local_files_only) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1823, in _raise_on_head_call_error - raise head_call_error - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1722, in _get_metadata_or_catch_error - metadata = get_hf_file_metadata(url=url, proxies=proxies, timeout=etag_timeout, headers=headers) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1645, in get_hf_file_metadata - r = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 372, in _request_wrapper - response = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 
396, in _request_wrapper - hf_raise_for_status(response) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status - raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-6694951a-2e63f503383e131173776bd3;a8610390-05ae-4e3e-bc7a-e7b44b00b464) - -Repository Not Found for url: https://huggingface.co/0/resolve/main/config.json. -Please make sure you specified the correct `repo_id` and `repo_type`. -If you are trying to access a private or gated repo, make sure you are authenticated. - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 425, in cached_file - raise EnvironmentError( -OSError: 0 is not a local folder and is not a valid model identifier listed on 'https://huggingface.co/models' -If this is a private repository, make sure to pass a token having permission to this repo either by logging in with `huggingface-cli login` or by passing `token=` - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,2,64,1,,, -4bit-awq-exllama-v2-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,facebook/xglm-4.5B,facebook/xglm-4.5B,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = 
launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - self.load_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3704, in from_pretrained - config = cls._autoset_attn_implementation( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1481, in _autoset_attn_implementation - cls._check_and_enable_flash_attn_2( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1572, in _check_and_enable_flash_attn_2 - raise ValueError( -ValueError: XGLMForCausalLM does not support Flash Attention 2.0 yet. Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmpteh0fdvz/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,2,64,1,,, -4bit-awq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,TencentARC/Mistral_Pro_8B_v0.1,TencentARC/Mistral_Pro_8B_v0.1,cuda,0,42,,,True,,,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,,mistral,MB,5176.17664,6590.824448,0.0,5953.814528,5766.738432,s,1,12.2566171875,12.2566171875,0.0,12.2566171875,12.2566171875,12.2566171875,12.2566171875,[12.2566171875],,kWh,6.373715067986029e-05,3.489235995825329e-05,0.00011842870585399012,0.0002170582164921037,,MB,1812.258816,6651.641856,0.0,5995.757568,5281.196032,s,10,1.513292037963867,0.1513292037963867,0.0001546889693754523,0.15130438232421875,0.1515049728393555,0.15158523483276368,0.15164944442749023,"[0.15148713684082032, 0.15114051818847657, 0.15120623779296874, 0.1513040008544922, 0.1513047637939453, 0.15166549682617186, 0.15117765808105468, 0.15120661926269532, 0.15142562866210937, 
0.1513739776611328]",tokens/s,1691.6761178790562,kWh,1.7886352815507356e-06,9.800262742933995e-07,6.561389992060075e-06,9.33005154790421e-06,tokens/kWh,27438219.251586527,MB,1829.462016,6672.613376,0.0,6014.631936,5281.198592,s,10,27.45596337890625,2.745596337890625,0.00956167192563741,2.7441132812499998,2.758476904296875,2.75948369140625,2.76028912109375,"[2.740733154296875, 2.733046630859375, 2.735604248046875, 2.760490478515625, 2.740971923828125, 2.7344521484375, 2.758253173828125, 2.75093408203125, 2.754222900390625, 2.747254638671875]",tokens/s,22.945834801193453,kWh,3.262621482949068e-05,1.7880217301795866e-05,7.203447844013905e-05,0.00012254091057142558,tokens/kWh,514114.0187894973,,s,630,27.453776824951177,0.043577423531668526,0.000574607785723208,0.04336384010314941,0.04437299346923828,0.04478300247192383,0.04562204727172852,"[0.04450406265258789, 0.043123710632324216, 0.04338175964355469, 0.04326201629638672, 0.04330080032348633, 0.04310220718383789, 0.04333363342285156, 0.043291648864746096, 0.04339199829101562, 0.04334796905517578, 0.04332032012939453, 0.043243518829345705, 0.04322611236572266, 0.04321996688842773, 0.04326297760009765, 0.0433172492980957, 0.04349446487426758, 0.04321683120727539, 0.043401214599609376, 0.04489625549316406, 0.0440074234008789, 0.04353740692138672, 0.04393267059326172, 0.04368588638305664, 0.04318822479248047, 0.04361625671386719, 0.04331110382080078, 0.04308889770507812, 0.04309299087524414, 0.043240447998046876, 0.04336435317993164, 0.043199489593505856, 0.04328140640258789, 0.04326911926269531, 0.04317184066772461, 0.04311142349243164, 0.04361523056030273, 0.04461875152587891, 0.043668479919433595, 0.04317388916015625, 0.04343603134155274, 0.04318003082275391, 0.04319846343994141, 0.04308992004394531, 0.043834369659423826, 0.043202560424804685, 0.04388147354125976, 0.044832767486572264, 0.04351385498046875, 0.04372684860229492, 0.04319334411621094, 0.043319297790527345, 0.043638782501220705, 0.04446003341674805, 0.0442081298828125, 0.043514881134033206, 0.04333055877685547, 0.04335615921020508, 0.04315238571166992, 0.043344894409179685, 0.04432896041870117, 0.04316774368286133, 0.04380364990234375, 0.04412211227416992, 0.043210750579833986, 0.04316876983642578, 0.043886592864990234, 0.04397260665893555, 0.04423168182373047, 0.04319641494750977, 0.04323020935058594, 0.04332032012939453, 0.04329062271118164, 0.043363327026367186, 0.043210750579833986, 0.043504638671875, 0.04340326309204102, 0.04336640167236328, 0.04331827163696289, 0.043254783630371094, 0.04326604843139648, 0.04317593765258789, 0.04302950286865234, 0.043245567321777346, 0.04338790512084961, 0.04348928070068359, 0.043401214599609376, 0.043194366455078126, 0.043270145416259766, 0.04338995361328125, 0.043878398895263675, 0.04394803237915039, 0.043069438934326174, 0.04325888061523438, 0.04324249649047852, 0.043222015380859374, 0.04325888061523438, 0.04334694290161133, 0.043469825744628904, 0.043423744201660154, 0.04329574584960937, 0.043433982849121096, 0.04336435317993164, 0.04336640167236328, 0.04316774368286133, 0.0433172492980957, 0.04335411071777344, 0.0433438720703125, 0.04331417465209961, 0.04367462539672851, 0.04331827163696289, 0.04312575912475586, 0.04316364669799805, 0.043238399505615234, 0.04409036636352539, 0.04374528121948242, 0.043194366455078126, 0.04325785446166992, 0.04314316940307617, 0.043245567321777346, 0.04337868881225586, 0.04345446395874023, 0.04312063980102539, 0.043243518829345705, 0.04318105697631836, 0.043259902954101564, 0.043325439453125, 
0.042979328155517575, 0.04321484756469727, 0.04323942565917969, 0.043153408050537106, 0.04281651306152344, 0.04315238571166992, 0.04271206283569336, 0.04398387145996094, 0.04396543884277344, 0.0433070068359375, 0.0433172492980957, 0.04309401702880859, 0.04320460891723633, 0.04333977508544922, 0.044278785705566405, 0.043218944549560545, 0.04328857421875, 0.04316876983642578, 0.04321484756469727, 0.04321484756469727, 0.04408732986450195, 0.043422721862792966, 0.04315235137939453, 0.04333055877685547, 0.043205631256103515, 0.043453441619873044, 0.04319846343994141, 0.04337664031982422, 0.04300288009643555, 0.04347187042236328, 0.04343091201782227, 0.04374425506591797, 0.04361830520629883, 0.04307763290405273, 0.04301311874389648, 0.04346060943603516, 0.043154430389404294, 0.04372991943359375, 0.0430489616394043, 0.0455362548828125, 0.04511129760742188, 0.04436479949951172, 0.043227134704589845, 0.043878398895263675, 0.04344319915771484, 0.04333776092529297, 0.04309193420410156, 0.043207679748535156, 0.043493377685546876, 0.04340428924560547, 0.043270145416259766, 0.04347596740722656, 0.043153408050537106, 0.04340326309204102, 0.0431646728515625, 0.044042240142822264, 0.04329676818847656, 0.043154430389404294, 0.0433070068359375, 0.04337254333496094, 0.043379711151123046, 0.04310323333740235, 0.0435230712890625, 0.04370739364624023, 0.043291648864746096, 0.04329062271118164, 0.04336640167236328, 0.04332953643798828, 0.04351692962646484, 0.04324863815307617, 0.04333055877685547, 0.04338175964355469, 0.04345753479003906, 0.04319641494750977, 0.04349235153198242, 0.04375142288208008, 0.04390911865234375, 0.04337356948852539, 0.043245567321777346, 0.043322368621826174, 0.04336640167236328, 0.043156478881835936, 0.04366643142700195, 0.04449894332885742, 0.04619571304321289, 0.04544716644287109, 0.04496691131591797, 0.04544921493530273, 0.044526592254638675, 0.04320870590209961, 0.045059070587158204, 0.043407360076904294, 0.044142593383789064, 0.04457984161376953, 0.043328510284423825, 0.043440128326416014, 0.043417598724365236, 0.04357529449462891, 0.04352819061279297, 0.04402995300292969, 0.04379852676391602, 0.04338585662841797, 0.04343910217285156, 0.043066368103027344, 0.04316159820556641, 0.04324249649047852, 0.0436049919128418, 0.043633663177490234, 0.043815937042236325, 0.044799999237060545, 0.04477849578857422, 0.04633292770385742, 0.04566835021972656, 0.04506009674072266, 0.04372991943359375, 0.04311142349243164, 0.04332339096069336, 0.043207679748535156, 0.04337152099609375, 0.04334284973144531, 0.04326502227783203, 0.0431912956237793, 0.043600894927978515, 0.04406784057617188, 0.04353740692138672, 0.04354150390625, 0.04324863815307617, 0.04327423858642578, 0.04338790512084961, 0.04327526473999024, 0.043146305084228516, 0.04450400161743164, 0.04479590225219727, 0.04665651321411133, 0.04407295989990234, 0.043551742553710936, 0.043177982330322266, 0.04352614212036133, 0.043182144165039064, 0.04320044708251953, 0.043184127807617184, 0.0432281608581543, 0.043649024963378906, 0.044075008392333984, 0.04345753479003906, 0.043312126159667966, 0.04315955352783203, 0.043207679748535156, 0.043363327026367186, 0.04328550338745117, 0.04336640167236328, 0.043028480529785154, 0.042982398986816404, 0.04319027328491211, 0.04271001434326172, 0.04337561416625976, 0.04321484756469727, 0.04321177673339844, 0.04309299087524414, 0.04340326309204102, 0.04344934463500977, 0.04338995361328125, 0.04328140640258789, 0.043663360595703124, 0.04336742401123047, 0.04335513687133789, 0.04336640167236328, 
0.04417638397216797, 0.043415550231933595, 0.04329983901977539, 0.04334796905517578, 0.043412479400634765, 0.04335615921020508, 0.04331110382080078, 0.04325068664550781, 0.043240447998046876, 0.04327423858642578, 0.04339814376831055, 0.04319539260864258, 0.0451512336730957, 0.04367871856689453, 0.04461056137084961, 0.044349441528320314, 0.04355686569213867, 0.043189247131347655, 0.04324966430664062, 0.043207679748535156, 0.04317388916015625, 0.043837440490722655, 0.04323328018188476, 0.044283905029296876, 0.04405759811401367, 0.04333772659301758, 0.044034046173095705, 0.04412723159790039, 0.043617279052734374, 0.043469825744628904, 0.043324417114257815, 0.043393024444580076, 0.04321177673339844, 0.04349542236328125, 0.043170814514160154, 0.04285440063476562, 0.04336435317993164, 0.04323328018188476, 0.043222015380859374, 0.04319744110107422, 0.04302950286865234, 0.043291648864746096, 0.04374118423461914, 0.044213249206542966, 0.044082176208496096, 0.04331110382080078, 0.04308992004394531, 0.04407807922363281, 0.04330905532836914, 0.044660736083984375, 0.04371148681640625, 0.04319744110107422, 0.043046913146972655, 0.043210750579833986, 0.04361523056030273, 0.04347187042236328, 0.04309401702880859, 0.04312575912475586, 0.043044864654541014, 0.04332748794555664, 0.04312473678588867, 0.0431912956237793, 0.0429936637878418, 0.04306739044189453, 0.04300697708129883, 0.043535358428955076, 0.04342476654052734, 0.04380979156494141, 0.04372377777099609, 0.0432803840637207, 0.04379955291748047, 0.04334899139404297, 0.04312268829345703, 0.043170814514160154, 0.043121662139892575, 0.043302913665771485, 0.04284928131103516, 0.04326604843139648, 0.04318105697631836, 0.04313292694091797, 0.043128833770751954, 0.04317388916015625, 0.043166721343994144, 0.043205631256103515, 0.04341964721679688, 0.043509761810302736, 0.04334592056274414, 0.0440074234008789, 0.04449484634399414, 0.046203903198242184, 0.04408220672607422, 0.04344112014770508, 0.04340636825561523, 0.04423779296875, 0.04465049743652344, 0.04369510269165039, 0.043805694580078124, 0.04401049423217773, 0.044700672149658206, 0.04437299346923828, 0.04333670425415039, 0.044319744110107424, 0.04341657638549805, 0.04311654281616211, 0.043344894409179685, 0.04343091201782227, 0.04316057586669922, 0.04323942565917969, 0.04326911926269531, 0.04329779052734375, 0.043291648864746096, 0.04403919982910156, 0.04394390487670898, 0.04346777725219726, 0.04337664031982422, 0.043025409698486325, 0.04307455825805664, 0.04323430252075195, 0.04335615921020508, 0.04348211288452149, 0.0435230712890625, 0.043169792175292966, 0.04334694290161133, 0.04324761581420898, 0.043493377685546876, 0.04324867248535156, 0.043199455261230466, 0.04333977508544922, 0.04336848068237305, 0.04423984146118164, 0.04449792098999023, 0.04401049423217773, 0.044837886810302735, 0.04437299346923828, 0.04337868881225586, 0.045385726928710936, 0.0462479362487793, 0.04379852676391602, 0.044852256774902344, 0.04312368011474609, 0.04444364929199219, 0.04369203186035156, 0.04432076644897461, 0.04372787094116211, 0.043202560424804685, 0.04318003082275391, 0.04318515014648437, 0.04328755187988281, 0.04321791839599609, 0.04318310546875, 0.04327526473999024, 0.04331520080566406, 0.043251712799072264, 0.04326707077026367, 0.04318105697631836, 0.04489215850830078, 0.04379955291748047, 0.043245567321777346, 0.04306022262573242, 0.04467302322387695, 0.04541545486450195, 0.04443952178955078, 0.04484505462646484, 0.043824127197265625, 0.04308070373535156, 0.04295577621459961, 0.04311654281616211, 
0.04333260726928711, 0.04340224075317383, 0.04321791839599609, 0.04314726257324219, 0.04472012710571289, 0.04365107345581055, 0.04333977508544922, 0.04347596740722656, 0.04300288009643555, 0.04317695999145508, 0.04333567810058594, 0.04355481719970703, 0.04425932693481445, 0.04353740692138672, 0.04333465576171875, 0.04321590423583985, 0.043220958709716796, 0.043289600372314455, 0.04474982452392578, 0.04353228759765625, 0.043396095275878906, 0.043216896057128903, 0.04423884963989258, 0.04347187042236328, 0.04331417465209961, 0.043409408569335936, 0.043393024444580076, 0.04489932632446289, 0.04364492797851562, 0.04388044738769531, 0.04369305419921875, 0.043170814514160154, 0.04306227111816406, 0.04335411071777344, 0.04379033660888672, 0.04366950225830078, 0.04471295928955078, 0.043640830993652346, 0.04426649475097656, 0.04364799880981445, 0.04431155014038086, 0.045262847900390625, 0.043422721862792966, 0.04352511978149414, 0.042881023406982424, 0.043215873718261716, 0.04368896102905274, 0.04331622314453125, 0.04368076705932617, 0.04461363220214844, 0.04478668975830078, 0.04463411331176758, 0.043254783630371094, 0.04348620986938476, 0.04331008148193359, 0.044314624786376954, 0.044111873626708986, 0.04352511978149414, 0.04528745651245117, 0.04363158416748047, 0.04416307067871094, 0.04356095886230469, 0.04311347198486328, 0.04343910217285156, 0.043494400024414064, 0.04321484756469727, 0.04326707077026367, 0.04313497543334961, 0.04452761459350586, 0.04384153747558594, 0.0433438720703125, 0.04374323272705078, 0.04435763168334961, 0.043474945068359375, 0.04306124877929687, 0.04352819061279297, 0.04364492797851562, 0.04344838333129883, 0.04329260635375977, 0.04403814315795898, 0.04380672073364258, 0.04348928070068359, 0.04362956619262695, 0.0451409912109375, 0.04432998275756836, 0.043943935394287106, 0.044734462738037106, 0.044265472412109375, 0.04329983901977539, 0.04474163055419922, 0.04327423858642578, 0.04340022277832031, 0.043133918762207034, 0.0434411506652832, 0.04315750503540039, 0.04323123168945313, 0.044300289154052735, 0.04402687835693359, 0.04310323333740235, 0.043878398895263675, 0.04342784118652344, 0.04326297760009765, 0.043210784912109376, 0.04400431823730469, 0.0435968017578125, 0.04321484756469727, 0.04347187042236328, 0.04320460891723633, 0.0433172492980957, 0.04303257751464844, 0.043268096923828124, 0.043210750579833986, 0.04320153427124023, 0.04324761581420898, 0.04321279907226563, 0.0433070068359375, 0.04333567810058594, 0.04319641494750977, 0.043524097442626954, 0.04356403350830078, 0.04365926361083984, 0.04353228759765625, 0.04347289657592773, 0.043643905639648435, 0.04413337707519531, 0.04453683090209961, 0.04349747085571289, 0.04565708923339844, 0.04539084625244141, 0.044777473449707034, 0.04466483306884766, 0.0442081298828125, 0.04343296051025391, 0.04341964721679688, 0.04356710433959961, 0.044467201232910154, 0.04393471908569336, 0.04315142440795899, 0.043334590911865235, 0.043256832122802735, 0.04338483047485352, 0.045216766357421875, 0.04432588958740234, 0.04329471969604492, 0.04340531158447265, 0.04323123168945313, 0.04333363342285156, 0.04392038345336914, 0.04467814254760742, 0.04348211288452149, 0.04355481719970703, 0.043305984497070314, 0.043259902954101564, 0.04329574584960937, 0.04334899139404297, 0.043245567321777346, 0.043344894409179685, 0.04318207931518555, 0.04329676818847656, 0.04329471969604492, 0.043329601287841794, 0.04327519989013672, 0.043302913665771485, 0.04320153427124023, 0.04353433609008789, 0.043464702606201173, 0.04334694290161133, 
0.04305817413330078, 0.04328755187988281]",tokens/s,22.947662320450895,,,2,64,1,,, -4bit-awq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,Salesforce/codegen-6B-nl,Salesforce/codegen-6B-nl,cuda,0,42,,,True,,,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run - report = scenario.run(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run - self.run_model_loading_tracking(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking - backend.load() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load - self.load_transformers_model() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model - self.load_transformers_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights - self.load_transformers_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained - self.pretrained_model = self.automodel_loader.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3826, in from_pretrained - config = cls._autoset_attn_implementation( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1556, in _autoset_attn_implementation - cls._check_and_enable_flash_attn_2( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1647, in _check_and_enable_flash_attn_2 - raise ValueError( -ValueError: CodeGenForCausalLM does not support Flash Attention 2.0 yet. 
Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmphz4rm27d/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new - -",codegen,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,2,64,1,,, -4bit-awq-exllama-v2-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,facebook/opt-125m,facebook/opt-125m,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - self.load_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3907, in from_pretrained - hf_quantizer.postprocess_model(model) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/base.py"", line 195, in postprocess_model - return self._process_model_after_weight_loading(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/quantizer_awq.py"", line 107, in _process_model_after_weight_loading - model = post_init_awq_exllama_modules(model, self.quantization_config.exllama_config) - File ""/usr/local/lib/python3.10/dist-packages/transformers/integrations/awq.py"", line 466, in post_init_awq_exllama_modules - model = exllamav2_post_init( - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllamav2.py"", line 198, in exllamav2_post_init - submodule.post_init(scratch_space=model.scratch_spaces[device]) - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllamav2.py"", line 81, in post_init - self.q_handle = exlv2_ext.make_q_matrix( -NameError: name 'exlv2_ext' is not defined - 
-",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,2,64,1,,, -4bit-awq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,Qwen/Qwen1.5-110B,Qwen/Qwen1.5-110B,cuda,0,42,,,True,,,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run - report = scenario.run(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run - self.run_model_loading_tracking(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking - backend.load() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load - self.load_transformers_model() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model - self.load_transformers_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights - self.load_transformers_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained - self.pretrained_model = self.automodel_loader.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4034, in from_pretrained - dispatch_model(model, **device_map_kwargs) - File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 494, in dispatch_model - model.to(device) - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 2905, in to - return super().to(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1174, in to - return self._apply(convert) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply - module._apply(fn) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply - module._apply(fn) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply - module._apply(fn) - [Previous line repeated 2 more times] - File 
""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 854, in _apply - self._buffers[key] = fn(buf) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1160, in convert - return t.to( -torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 32.00 MiB. GPU 0 has a total capacity of 22.18 GiB of which 8.50 MiB is free. Process 92006 has 22.17 GiB memory in use. Of the allocated memory 21.91 GiB is allocated by PyTorch, and 17.87 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) - -",qwen2,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,2,64,1,,, -4bit-awq-exllama-v2-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,B,B,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 304, in hf_raise_for_status - response.raise_for_status() - File ""/usr/local/lib/python3.10/dist-packages/requests/models.py"", line 1024, in raise_for_status - raise HTTPError(http_error_msg, response=self) -requests.exceptions.HTTPError: 404 Client Error: Not Found for url: https://huggingface.co/B/resolve/main/config.json - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1221, in hf_hub_download - return _hf_hub_download_to_cache_dir( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1325, in _hf_hub_download_to_cache_dir - _raise_on_head_call_error(head_call_error, force_download, local_files_only) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1823, in _raise_on_head_call_error - raise head_call_error - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1722, in _get_metadata_or_catch_error - metadata = 
get_hf_file_metadata(url=url, proxies=proxies, timeout=etag_timeout, headers=headers) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1645, in get_hf_file_metadata - r = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 372, in _request_wrapper - response = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 396, in _request_wrapper - hf_raise_for_status(response) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status - raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-66949404-66840ecf6171208760579ba3;b3c6d036-1a12-4792-95fa-669049080a88) - -Repository Not Found for url: https://huggingface.co/B/resolve/main/config.json. -Please make sure you specified the correct `repo_id` and `repo_type`. -If you are trying to access a private or gated repo, make sure you are authenticated. - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 425, in cached_file - raise EnvironmentError( -OSError: B is not a local folder and is not a valid model identifier listed on 'https://huggingface.co/models' -If this is a private repository, make sure to pass a token having permission to this repo either by logging in with `huggingface-cli login` or by passing `token=` - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,2,64,1,,, 
-4bit-awq-exllama-v2-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,t,t,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 304, in hf_raise_for_status - response.raise_for_status() - File ""/usr/local/lib/python3.10/dist-packages/requests/models.py"", line 1024, in raise_for_status - raise HTTPError(http_error_msg, response=self) -requests.exceptions.HTTPError: 404 Client Error: Not Found for url: https://huggingface.co/t/resolve/main/config.json - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1221, in hf_hub_download - return _hf_hub_download_to_cache_dir( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1325, in _hf_hub_download_to_cache_dir - _raise_on_head_call_error(head_call_error, force_download, local_files_only) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1823, in _raise_on_head_call_error - raise head_call_error - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1722, in _get_metadata_or_catch_error - metadata = get_hf_file_metadata(url=url, proxies=proxies, timeout=etag_timeout, headers=headers) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1645, in get_hf_file_metadata - r = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 372, in _request_wrapper - response = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 396, in _request_wrapper - hf_raise_for_status(response) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status - raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 
Client Error. (Request ID: Root=1-669490ef-28040acc6c6c8292624420af;71727f08-ce16-4f96-bb4a-7b21966561ed) - -Repository Not Found for url: https://huggingface.co/t/resolve/main/config.json. -Please make sure you specified the correct `repo_id` and `repo_type`. -If you are trying to access a private or gated repo, make sure you are authenticated. - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 425, in cached_file - raise EnvironmentError( -OSError: t is not a local folder and is not a valid model identifier listed on 'https://huggingface.co/models' -If this is a private repository, make sure to pass a token having permission to this repo either by logging in with `huggingface-cli login` or by passing `token=` - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,2,64,1,,, -4bit-awq-exllama-v2-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,stabilityai/stablelm-base-alpha-3b,stabilityai/stablelm-base-alpha-3b,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File 
""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - self.load_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3907, in from_pretrained - hf_quantizer.postprocess_model(model) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/base.py"", line 195, in postprocess_model - return self._process_model_after_weight_loading(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/quantizer_awq.py"", line 107, in _process_model_after_weight_loading - model = post_init_awq_exllama_modules(model, self.quantization_config.exllama_config) - File ""/usr/local/lib/python3.10/dist-packages/transformers/integrations/awq.py"", line 466, in post_init_awq_exllama_modules - model = exllamav2_post_init( - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllamav2.py"", line 198, in exllamav2_post_init - submodule.post_init(scratch_space=model.scratch_spaces[device]) - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllamav2.py"", line 81, in post_init - self.q_handle = exlv2_ext.make_q_matrix( -NameError: name 'exlv2_ext' is not defined - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,2,64,1,,, -4bit-awq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,Salesforce/codegen-16B-nl,Salesforce/codegen-16B-nl,cuda,0,42,,,True,,,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run - report = scenario.run(backend) - File 
""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run - self.run_model_loading_tracking(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking - backend.load() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load - self.load_transformers_model() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model - self.load_transformers_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights - self.load_transformers_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained - self.pretrained_model = self.automodel_loader.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3826, in from_pretrained - config = cls._autoset_attn_implementation( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1556, in _autoset_attn_implementation - cls._check_and_enable_flash_attn_2( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1647, in _check_and_enable_flash_attn_2 - raise ValueError( -ValueError: CodeGenForCausalLM does not support Flash Attention 2.0 yet. Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmpua4whjaa/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new - -",codegen,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,2,64,1,,, -4bit-awq-exllama-v2-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,facebook/opt-6.7b,facebook/opt-6.7b,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File 
""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - self.load_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3907, in from_pretrained - hf_quantizer.postprocess_model(model) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/base.py"", line 195, in postprocess_model - return self._process_model_after_weight_loading(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/quantizer_awq.py"", line 107, in _process_model_after_weight_loading - model = post_init_awq_exllama_modules(model, self.quantization_config.exllama_config) - File ""/usr/local/lib/python3.10/dist-packages/transformers/integrations/awq.py"", line 466, in post_init_awq_exllama_modules - model = exllamav2_post_init( - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllamav2.py"", line 198, in exllamav2_post_init - submodule.post_init(scratch_space=model.scratch_spaces[device]) - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllamav2.py"", line 81, in post_init - self.q_handle = exlv2_ext.make_q_matrix( -NameError: name 'exlv2_ext' is not defined - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,2,64,1,,, -4bit-awq-gemm-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,.,.,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, 
**kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 373, in cached_file - raise EnvironmentError( -OSError: . does not appear to have a file named config.json. Checkout 'https://huggingface.co/./tree/None' for available files. - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemm-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,l,l,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 304, in hf_raise_for_status - response.raise_for_status() - File ""/usr/local/lib/python3.10/dist-packages/requests/models.py"", line 1024, in raise_for_status - raise HTTPError(http_error_msg, response=self) -requests.exceptions.HTTPError: 404 Client Error: Not Found for url: https://huggingface.co/l/resolve/main/config.json - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1221, in hf_hub_download - return _hf_hub_download_to_cache_dir( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1325, in _hf_hub_download_to_cache_dir - _raise_on_head_call_error(head_call_error, force_download, local_files_only) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1823, in _raise_on_head_call_error - raise head_call_error - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1722, in 
_get_metadata_or_catch_error - metadata = get_hf_file_metadata(url=url, proxies=proxies, timeout=etag_timeout, headers=headers) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1645, in get_hf_file_metadata - r = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 372, in _request_wrapper - response = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 396, in _request_wrapper - hf_raise_for_status(response) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status - raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-669491b7-2bf1ab347b750d2438fd14f1;18e55136-c679-4eb8-bde6-b738d756fd72) - -Repository Not Found for url: https://huggingface.co/l/resolve/main/config.json. -Please make sure you specified the correct `repo_id` and `repo_type`. -If you are trying to access a private or gated repo, make sure you are authenticated. - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 425, in cached_file - raise EnvironmentError( -OSError: l is not a local folder and is not a valid model identifier listed on 'https://huggingface.co/models' -If this is a private repository, make sure to pass a token having permission to this repo either by logging in with `huggingface-cli login` or by passing `token=` - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 
-4bit-awq-gemm-sdpa,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,Qwen/Qwen1.5-32B,Qwen/Qwen1.5-32B,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.0,,0.30.1,,,,1.19.2,,,,0.11.1,,,,,,,,,,,,,,,,,,,,,,,,,,,MB,4461.907968,24111.480832,0.0,23465.033728,21690.932224,s,10,26.134937744140625,2.6134937744140623,0.002110944060506743,2.6131138916015626,2.6165289794921875,2.616893395996094,2.617184929199219,"[2.61349365234375, 2.615531005859375, 2.611662353515625, 2.612973388671875, 2.612295654296875, 2.611193115234375, 2.61325439453125, 2.616447998046875, 2.6172578125, 2.610828369140625]",tokens/s,97.95317000798843,kWh,3.0828719950384564e-05,1.6895240036155882e-05,0.0001496905919745939,0.00019741455196113433,tokens/kWh,1296763.5741989252,MB,4461.907968,24111.480832,0.0,23465.033728,21890.217984,s,10,1550.2514375,155.02514375,0.01758597574684522,155.027953125,155.04441406249998,155.04866796875,155.05207109375002,"[155.040484375, 155.052921875, 155.004375, 155.03190625, 155.025, 155.03090625, 155.00465625, 154.996828125, 155.020890625, 155.04346875]",tokens/s,0.4063856899342497,kWh,0.0018302277258535225,0.001003126846851137,0.008909165071770808,0.01174251964447547,tokens/kWh,5365.117701092351,,s,629,1571.3079760742191,2.498104890420062,0.3103664945461323,2.460632080078125,2.46187294921875,2.4623572265625002,5.0725159374999995,"[2.460918701171875, 2.46082666015625, 2.461980712890625, 2.4610048828125, 2.460906494140625, 2.46040576171875, 2.46205126953125, 2.4613447265625, 2.461484130859375, 2.4613662109375, 2.461151123046875, 2.461115478515625, 2.460440673828125, 2.460030029296875, 2.461190185546875, 2.460876708984375, 2.460451904296875, 2.460170166015625, 2.463140869140625, 2.46112060546875, 2.46163671875, 2.46084814453125, 2.463774658203125, 2.461010986328125, 2.4610498046875, 2.46070263671875, 2.461833251953125, 2.460528564453125, 2.4617880859375, 2.46072021484375, 2.461929443359375, 2.4613662109375, 2.46120849609375, 2.46112060546875, 2.4614892578125, 2.459990966796875, 2.461517822265625, 2.4609638671875, 2.46169189453125, 2.460205078125, 2.460928955078125, 2.461189208984375, 2.459634765625, 2.45965625, 2.45981396484375, 2.459525146484375, 2.459683837890625, 2.46053369140625, 2.46042529296875, 2.459734130859375, 2.461192138671875, 2.46074072265625, 2.46057470703125, 2.46108154296875, 2.46078564453125, 2.46036474609375, 2.461035400390625, 2.4617646484375, 2.460655517578125, 2.461253662109375, 2.461116455078125, 2.46118505859375, 5.0779052734375, 2.461738037109375, 2.461781982421875, 2.461154296875, 2.463372314453125, 2.462037109375, 2.462613525390625, 2.462189453125, 2.46213427734375, 2.46221923828125, 2.462265380859375, 2.462834716796875, 2.461442138671875, 2.460442626953125, 2.460788818359375, 2.460884033203125, 2.46097509765625, 2.461346923828125, 2.46034130859375, 2.460333984375, 2.46076318359375, 2.461075439453125, 2.460369873046875, 2.459978759765625, 2.4613203125, 2.461739013671875, 2.460712890625, 2.461740966796875, 2.46230322265625, 2.462064697265625, 2.4611767578125, 2.4604365234375, 2.46226318359375, 2.46073046875, 
2.461908935546875, 2.4604140625, 2.4613857421875, 2.460633056640625, 2.46066796875, 2.459797607421875, 2.460780517578125, 2.46072119140625, 2.46026025390625, 2.4598271484375, 2.46049169921875, 2.46009765625, 2.460492919921875, 2.46042529296875, 2.461200439453125, 2.461295654296875, 2.460458984375, 2.461582275390625, 2.46042822265625, 2.460832763671875, 2.461412353515625, 2.460978271484375, 2.460579833984375, 2.4606044921875, 2.460789794921875, 2.460420166015625, 2.4605654296875, 2.461767578125, 2.46121875, 5.0727412109375, 2.4607958984375, 2.46120751953125, 2.4612392578125, 2.460706787109375, 2.46135595703125, 2.46187109375, 2.461371337890625, 2.4605634765625, 2.46158642578125, 2.46133251953125, 2.46209326171875, 2.46131103515625, 2.461284423828125, 2.46093212890625, 2.46143798828125, 2.460158935546875, 2.460937255859375, 2.459987060546875, 2.4602919921875, 2.460261474609375, 2.460375, 2.460521484375, 2.461273193359375, 2.46073046875, 2.46131396484375, 2.460286865234375, 2.46070263671875, 2.459401123046875, 2.460560302734375, 2.460210205078125, 2.4605859375, 2.459334716796875, 2.46042822265625, 2.460444580078125, 2.460652587890625, 2.4594482421875, 2.459885498046875, 2.459210693359375, 2.459854736328125, 2.45916162109375, 2.459714599609375, 2.45982421875, 2.45997265625, 2.460665771484375, 2.461526123046875, 2.459255859375, 2.4603525390625, 2.460240966796875, 2.460114990234375, 2.459881591796875, 2.46009765625, 2.459779052734375, 2.45985791015625, 2.459809814453125, 2.459896728515625, 2.459599853515625, 2.459850830078125, 2.459740234375, 2.459671630859375, 2.459470947265625, 2.45933984375, 2.4605419921875, 5.07296044921875, 2.459979736328125, 2.460074951171875, 2.460875732421875, 2.46111328125, 2.461024169921875, 2.4617041015625, 2.45998583984375, 2.461939697265625, 2.4617861328125, 2.46111328125, 2.460988525390625, 2.460284912109375, 2.460103759765625, 2.4607939453125, 2.461890625, 2.46074072265625, 2.4606279296875, 2.46205224609375, 2.46086865234375, 2.461393798828125, 2.461231201171875, 2.462035888671875, 2.4613251953125, 2.462630859375, 2.460718017578125, 2.46099853515625, 2.460675048828125, 2.4612158203125, 2.460883056640625, 2.461149169921875, 2.461590576171875, 2.4607939453125, 2.460654541015625, 2.460872802734375, 2.461231201171875, 2.46080712890625, 2.461241455078125, 2.46087158203125, 2.459632568359375, 2.460600341796875, 2.460632080078125, 2.46099365234375, 2.460303466796875, 2.46082568359375, 2.462000244140625, 2.461948974609375, 2.4596806640625, 2.460571533203125, 2.460125244140625, 2.46019677734375, 2.45987841796875, 2.46080810546875, 2.460212158203125, 2.460813232421875, 2.45957421875, 2.46019384765625, 2.460409912109375, 2.459958251953125, 2.45973095703125, 2.460412841796875, 2.46169384765625, 2.46048046875, 5.07272607421875, 2.461116455078125, 2.460166259765625, 2.461013916015625, 2.460505126953125, 2.46078662109375, 2.461305908203125, 2.4609658203125, 2.460851318359375, 2.461484130859375, 2.4605869140625, 2.4610498046875, 2.4601630859375, 2.46051318359375, 2.4606064453125, 2.461643798828125, 2.461190185546875, 2.461765625, 2.46075390625, 2.4614091796875, 2.460251220703125, 2.4604609375, 2.460229736328125, 2.460904541015625, 2.46035546875, 2.460232666015625, 2.46175439453125, 2.462803955078125, 2.4607958984375, 2.461580322265625, 2.461107177734375, 2.461393798828125, 2.460929931640625, 2.460676025390625, 2.461404052734375, 2.460982177734375, 2.460367919921875, 2.4600791015625, 2.4596552734375, 2.460010498046875, 2.45901318359375, 2.4592802734375, 2.46055224609375, 
2.460517333984375, 2.459428955078125, 2.46019384765625, 2.460880859375, 2.4609208984375, 2.459928466796875, 2.46034423828125, 2.460252197265625, 2.460788818359375, 2.460273681640625, 2.45994287109375, 2.460907470703125, 2.4596796875, 2.460264404296875, 2.460949462890625, 2.460282958984375, 2.4613427734375, 2.462763916015625, 2.461613037109375, 2.460632080078125, 5.0719755859375, 2.462064697265625, 2.46147998046875, 2.46153515625, 2.461073486328125, 2.460905517578125, 2.461240234375, 2.460853271484375, 2.461421630859375, 2.462041015625, 2.46237890625, 2.46096484375, 2.461697998046875, 2.4613037109375, 2.461698974609375, 2.460219482421875, 2.462074951171875, 2.460283935546875, 2.459872314453125, 2.46007080078125, 2.45981689453125, 2.460726318359375, 2.459724853515625, 2.45985888671875, 2.460906494140625, 2.46042822265625, 2.46017626953125, 2.460051513671875, 2.46018359375, 2.46103759765625, 2.460602294921875, 2.46068212890625, 2.461318115234375, 2.460810302734375, 2.4603740234375, 2.46188037109375, 2.462437255859375, 2.460948486328125, 2.460538818359375, 2.460599365234375, 2.461042724609375, 2.46105712890625, 2.45985595703125, 2.46139697265625, 2.46086865234375, 2.460706787109375, 2.46046826171875, 2.4603740234375, 2.461024169921875, 2.4599951171875, 2.460008544921875, 2.460411865234375, 2.4604580078125, 2.460127197265625, 2.4596591796875, 2.460818359375, 2.461365234375, 2.460373046875, 2.46135302734375, 2.461212646484375, 2.4607734375, 2.460324951171875, 2.46021728515625, 5.07445458984375, 2.4605234375, 2.461729736328125, 2.461869140625, 2.461232177734375, 2.461845458984375, 2.463498291015625, 2.460968994140625, 2.46013232421875, 2.460211181640625, 2.45943603515625, 2.460124267578125, 2.46013037109375, 2.460103759765625, 2.461614013671875, 2.46322900390625, 2.461684814453125, 2.4606064453125, 2.4600556640625, 2.45979345703125, 2.459621337890625, 2.460230712890625, 2.459693115234375, 2.460508056640625, 2.460240966796875, 2.460556396484375, 2.459707275390625, 2.459610107421875, 2.459715576171875, 2.460527587890625, 2.459706298828125, 2.45965625, 2.45981591796875, 2.45966845703125, 2.4598037109375, 2.459366455078125, 2.4594267578125, 2.45954052734375, 2.46019580078125, 2.459505615234375, 2.4598701171875, 2.4605830078125, 2.46051953125, 2.4611103515625, 2.460297119140625, 2.45998388671875, 2.459361328125, 2.460695556640625, 2.46107958984375, 2.46091162109375, 2.45956494140625, 2.460255126953125, 2.460651611328125, 2.460200927734375, 2.45922314453125, 2.460324951171875, 2.460303466796875, 2.46060546875, 2.460147705078125, 2.459874267578125, 2.459558837890625, 2.460209228515625, 2.46225927734375, 5.07727880859375, 2.46051953125, 2.459748291015625, 2.460139404296875, 2.459675537109375, 2.460464111328125, 2.45935400390625, 2.460265380859375, 2.460979248046875, 2.46025927734375, 2.460180419921875, 2.45945654296875, 2.45956396484375, 2.4599091796875, 2.459864990234375, 2.459249755859375, 2.459658203125, 2.45960693359375, 2.4602666015625, 2.463476806640625, 2.46247314453125, 2.461254638671875, 2.4606064453125, 2.46013134765625, 2.4603095703125, 2.46048046875, 2.460453857421875, 2.460747802734375, 2.4610087890625, 2.45964501953125, 2.460734375, 2.460478515625, 2.460600341796875, 2.460285888671875, 2.460894287109375, 2.460316650390625, 2.460169189453125, 2.460146728515625, 2.459640869140625, 2.459826171875, 2.459534423828125, 2.46048876953125, 2.459629638671875, 2.459568115234375, 2.459454345703125, 2.46009765625, 2.46054296875, 2.46160693359375, 2.459303955078125, 2.459570068359375, 2.460251220703125, 
2.459474853515625, 2.45965625, 2.460379150390625, 2.45972900390625, 2.459229248046875, 2.459660400390625, 2.45932958984375, 2.464058349609375, 2.461569091796875, 2.45967041015625, 2.459884521484375, 2.460506103515625, 5.07673388671875, 2.459622314453125, 2.462644287109375, 2.4620205078125, 2.461388916015625, 2.460607421875, 2.461044677734375, 2.461013916015625, 2.461294677734375, 2.460347412109375, 2.46035546875, 2.4605205078125, 2.460373046875, 2.459558837890625, 2.46158740234375, 2.462236572265625, 2.462750732421875, 2.46039453125, 2.459875244140625, 2.460180419921875, 2.459242431640625, 2.461361083984375, 2.461865966796875, 2.4626064453125, 2.46232470703125, 2.462096435546875, 2.460041259765625, 2.4603125, 2.460031005859375, 2.460012451171875, 2.460483642578125, 2.46249365234375, 2.460971923828125, 2.460064697265625, 2.460525634765625, 2.4618515625, 2.461947998046875, 2.46048046875, 2.460482666015625, 2.459716552734375, 2.46003515625, 2.45914111328125, 2.459892822265625, 2.4607099609375, 2.461216796875, 2.45960693359375, 2.459496337890625, 2.459989013671875, 2.46011181640625, 2.461148193359375, 2.459788330078125, 2.459845703125, 2.45964501953125, 2.45973291015625, 2.459387939453125, 2.460958740234375, 2.4599326171875, 2.459988037109375, 2.459134033203125, 2.4599716796875, 2.459010009765625, 2.4597216796875, 2.466093017578125, 5.07026025390625, 2.459715576171875, 2.459428955078125, 2.460008544921875, 2.45975244140625, 2.460271728515625, 2.4604365234375, 2.46042724609375, 2.46135595703125, 2.460927001953125, 2.460580810546875, 2.46221826171875, 2.461213623046875, 2.461199462890625, 2.460501953125, 2.461664306640625, 2.46034423828125, 2.460695556640625, 2.460241943359375, 2.46086865234375, 2.4613837890625, 2.4614892578125, 2.460771240234375, 2.461020263671875, 2.462573486328125, 2.461728759765625, 2.461054931640625, 2.46047021484375, 2.46080712890625, 2.46276806640625, 2.46089306640625, 2.461909912109375, 2.46152392578125, 2.4612607421875, 2.460707763671875, 2.461158447265625, 2.46102734375, 2.46084716796875, 2.460749755859375, 2.460251220703125, 2.460735595703125, 2.46051220703125, 2.461053955078125, 2.4607548828125, 2.46175244140625, 2.460388427734375, 2.46027880859375, 2.4614501953125, 2.461758544921875, 2.461529052734375, 2.460019775390625, 2.4599306640625, 2.460873779296875, 2.460814453125, 2.460541015625, 2.460760009765625, 2.46168994140625, 2.460673095703125, 2.46126806640625, 2.460421142578125, 2.462738525390625, 2.46156689453125, 2.461497314453125]",tokens/s,0.4003034475593408,,,,,,,, -4bit-awq-gemm-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,openai-community/gpt2,openai-community/gpt2,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.42.3,,0.31.0,,,,1.20.0,,,,0.11.1,,,,,,,,,,,,,,,,,,,,,,,,,,,MB,1242.517504,1002.962944,0.0,356.51584,319.013888,s,24,0.17130540800094604,0.007137725333372751,0.00022311786687976846,0.0071282398700714105,0.007300579309463501,0.007423537635803223,0.007834636788368226,"[0.007951807975769044, 0.0069541120529174804, 0.007165823936462402, 0.007056672096252442, 0.007144512176513672, 
0.00713647985458374, 0.007169407844543457, 0.007167103767395019, 0.007114399909973144, 0.007214687824249268, 0.007116288185119629, 0.007130335807800293, 0.006872928142547608, 0.006911136150360107, 0.006938623905181885, 0.00706390380859375, 0.0072626562118530275, 0.006821887969970703, 0.007244128227233887, 0.007442368030548096, 0.006888895988464355, 0.007316832065582275, 0.007094272136688232, 0.007126143932342529]",tokens/s,35865.76788028823,kWh,8.10983955910138e-08,4.4438154296628874e-08,1.700140534975713e-07,2.95550603385214e-07,tokens/kWh,866179926.7800356,MB,1242.517504,1002.962944,0.0,356.51584,319.016448,s,24,9.959905761718751,0.414996073404948,0.006233471890431573,0.41629383850097657,0.4203448516845703,0.42279765014648435,0.42333133697509767,"[0.415234619140625, 0.41458303833007815, 0.4162372741699219, 0.4174971008300781, 0.41968191528320314, 0.41635040283203123, 0.41644073486328126, 0.4139960021972656, 0.41676824951171876, 0.41499542236328124, 0.41386602783203125, 0.4151525268554688, 0.3974216613769531, 0.42062896728515625, 0.4150201416015625, 0.4167072448730469, 0.4132667541503906, 0.40156256103515625, 0.42318035888671873, 0.4192855529785156, 0.40144622802734375, 0.4233764343261719, 0.41931097412109375, 0.4178955688476563]",tokens/s,151.80866527988903,kWh,4.816906560124589e-06,2.6384911088194618e-06,8.122948136426575e-06,1.5578345805370628e-05,tokens/kWh,4044075.0762048676,,s,1511,10.116198400020602,0.006695035340847519,0.0009268447542668934,0.006612927913665772,0.0067041277885437015,0.006775808095932007,0.014021119976043704,"[0.007634943962097168, 0.007476223945617676, 0.007463935852050781, 0.00738918399810791, 0.007346176147460938, 0.0073175039291381834, 0.007235583782196045, 0.00733900785446167, 0.007542784214019775, 0.0065781760215759275, 0.006622208118438721, 0.006672383785247803, 0.006590464115142822, 0.006896639823913574, 0.0068577280044555666, 0.006666240215301514, 0.0066109437942504885, 0.006701087951660157, 0.00667952013015747, 0.006412288188934326, 0.006393856048583985, 0.006409215927124024, 0.006362112045288086, 0.006533120155334473, 0.006343679904937744, 0.006370304107666015, 0.006401023864746094, 0.006310912132263183, 0.006371327877044678, 0.006326272010803223, 0.0063591041564941405, 0.00633132791519165, 0.006340608119964599, 0.006364160060882569, 0.006348800182342529, 0.00637440013885498, 0.006408192157745361, 0.006342656135559082, 0.006371327877044678, 0.006342720031738281, 0.006365119934082031, 0.006333439826965332, 0.00643174409866333, 0.00638156795501709, 0.006400000095367431, 0.0063610877990722655, 0.006308864116668702, 0.006468607902526856, 0.00638156795501709, 0.006362112045288086, 0.0063805441856384275, 0.006326272010803223, 0.006370304107666015, 0.0063836159706115725, 0.006333439826965332, 0.006375423908233643, 0.006348800182342529, 0.006347775936126709, 0.006386688232421875, 0.006316031932830811, 0.006342656135559082, 0.0063211841583251955, 0.013602784156799316, 0.006305791854858398, 0.006337535858154297, 0.006291456222534179, 0.006341631889343262, 0.006328320026397705, 0.00628223991394043, 0.006347775936126709, 0.006299647808074951, 0.006325247764587402, 0.0063017277717590334, 0.006366176128387451, 0.006404096126556396, 0.006366208076477051, 0.006372352123260498, 0.00637440013885498, 0.006460415840148926, 0.006395904064178467, 0.006765567779541016, 0.006776832103729248, 0.0069283838272094726, 0.006744063854217529, 0.00679423999786377, 0.0067952961921691896, 0.006851552009582519, 0.006718463897705078, 0.006633471965789795, 0.006652927875518798, 
0.006595583915710449, 0.006665215969085693, 0.006680575847625733, 0.006662144184112549, 0.006667263984680176, 0.006696959972381592, 0.006604800224304199, 0.006696959972381592, 0.006730751991271973, 0.006635519981384277, 0.006675456047058105, 0.0066447358131408694, 0.006719488143920899, 0.006614016056060791, 0.006627327919006347, 0.006595583915710449, 0.006636544227600098, 0.006609920024871826, 0.006592512130737305, 0.006639616012573242, 0.006603775978088379, 0.006661119937896728, 0.006605823993682861, 0.006584320068359375, 0.006621183872222901, 0.006619135856628418, 0.006663167953491211, 0.006627327919006347, 0.00658841609954834, 0.006609920024871826, 0.006628352165222168, 0.0067276802062988285, 0.006615104198455811, 0.006631360054016113, 0.006608895778656006, 0.014178303718566895, 0.006615039825439453, 0.00662937593460083, 0.0066078720092773435, 0.006615039825439453, 0.006625279903411865, 0.006624256134033203, 0.006624256134033203, 0.006604800224304199, 0.006620160102844238, 0.006608895778656006, 0.006621183872222901, 0.006648831844329834, 0.006624256134033203, 0.006626304149627686, 0.006686719894409179, 0.006639616012573242, 0.006585343837738037, 0.006602752208709717, 0.0066119680404663084, 0.006658048152923584, 0.006601727962493896, 0.006615039825439453, 0.006512639999389648, 0.0066641921997070315, 0.0067051520347595215, 0.0066344962120056155, 0.0066078720092773435, 0.006585343837738037, 0.006615039825439453, 0.006620160102844238, 0.006621183872222901, 0.006598656177520752, 0.006598656177520752, 0.0065812478065490725, 0.006618112087249756, 0.006597631931304931, 0.006608895778656006, 0.006660096168518067, 0.006615039825439453, 0.006555647850036621, 0.006604800224304199, 0.006606847763061524, 0.006584320068359375, 0.0065812478065490725, 0.00657919979095459, 0.006617087841033936, 0.006643712043762207, 0.006619135856628418, 0.006556672096252441, 0.006552576065063476, 0.006434815883636475, 0.006490111827850342, 0.006504447937011719, 0.006508543968200684, 0.00657203197479248, 0.006569983959197998, 0.006549503803253174, 0.006557695865631104, 0.0066119680404663084, 0.006565887928009034, 0.006652927875518798, 0.0066109437942504885, 0.013988863945007325, 0.006624256134033203, 0.006592512130737305, 0.006639616012573242, 0.0066375679969787596, 0.006642687797546387, 0.006603775978088379, 0.006667263984680176, 0.006649856090545654, 0.006614016056060791, 0.006626304149627686, 0.006628352165222168, 0.006699007987976074, 0.006639679908752441, 0.006616000175476074, 0.0066375679969787596, 0.006624256134033203, 0.006598656177520752, 0.006624256134033203, 0.006661119937896728, 0.006599679946899414, 0.006597631931304931, 0.006599679946899414, 0.006659071922302246, 0.0067041277885437015, 0.006591487884521485, 0.006616064071655273, 0.00659660816192627, 0.006567935943603515, 0.006624256134033203, 0.006604800224304199, 0.006604800224304199, 0.006601727962493896, 0.0066375679969787596, 0.006586368083953857, 0.0066109437942504885, 0.006624256134033203, 0.006635519981384277, 0.006601727962493896, 0.006585343837738037, 0.006585343837738037, 0.006604800224304199, 0.006619135856628418, 0.006654975891113281, 0.0066375679969787596, 0.006622208118438721, 0.006576128005981445, 0.0066447358131408694, 0.006661119937896728, 0.006622208118438721, 0.006684671878814697, 0.006584320068359375, 0.0066109437942504885, 0.0066078720092773435, 0.006583295822143555, 0.006645760059356689, 0.0066007041931152345, 0.006585343837738037, 0.006651904106140137, 0.006690815925598144, 0.006594560146331787, 0.006615039825439453, 
0.006627327919006347, 0.014088191986083985, 0.006567935943603515, 0.0065812478065490725, 0.006626304149627686, 0.006586368083953857, 0.006626304149627686, 0.006603775978088379, 0.006598656177520752, 0.006854656219482422, 0.006645760059356689, 0.006645760059356689, 0.006620160102844238, 0.0066109437942504885, 0.006646783828735352, 0.006623231887817383, 0.006658048152923584, 0.0066416640281677245, 0.006660096168518067, 0.006614016056060791, 0.006606847763061524, 0.006636544227600098, 0.006589439868927002, 0.0066007041931152345, 0.006583295822143555, 0.006616064071655273, 0.006621183872222901, 0.006563839912414551, 0.006594560146331787, 0.006635519981384277, 0.006654975891113281, 0.0066416640281677245, 0.006614016056060791, 0.0065781760215759275, 0.007001088142395019, 0.006724607944488525, 0.006776832103729248, 0.006923264026641846, 0.006908927917480469, 0.007229440212249756, 0.006760447978973389, 0.006677504062652588, 0.0067041277885437015, 0.006633471965789795, 0.006689792156219483, 0.006648831844329834, 0.00660588788986206, 0.006613952159881592, 0.006594560146331787, 0.0066078720092773435, 0.006654975891113281, 0.0066078720092773435, 0.006602752208709717, 0.006626304149627686, 0.00657919979095459, 0.006684671878814697, 0.006621183872222901, 0.006585343837738037, 0.006631455898284912, 0.006635488033294678, 0.006573056221008301, 0.006650879859924317, 0.006606847763061524, 0.006642687797546387, 0.01434931182861328, 0.006706175804138184, 0.006661119937896728, 0.006606847763061524, 0.006617087841033936, 0.006575104236602783, 0.006612991809844971, 0.006605823993682861, 0.006573056221008301, 0.0065177597999572755, 0.006467584133148193, 0.0065075201988220215, 0.006500351905822754, 0.0065781760215759275, 0.006614016056060791, 0.006605823993682861, 0.006686719894409179, 0.006617087841033936, 0.006611008167266846, 0.006617023944854737, 0.006618112087249756, 0.006622208118438721, 0.006604800224304199, 0.006615039825439453, 0.006621183872222901, 0.0066078720092773435, 0.006645760059356689, 0.006660096168518067, 0.006627327919006347, 0.006625279903411865, 0.006633471965789795, 0.006614016056060791, 0.006750207901000976, 0.0066406397819519045, 0.006614016056060791, 0.006614016056060791, 0.006592512130737305, 0.006621183872222901, 0.006646783828735352, 0.006500415802001953, 0.006513599872589111, 0.006481919765472412, 0.006615039825439453, 0.006624256134033203, 0.006639616012573242, 0.00658022403717041, 0.0066119680404663084, 0.006622208118438721, 0.006597631931304931, 0.006621183872222901, 0.006657023906707763, 0.006600736141204834, 0.006635488033294678, 0.006545407772064209, 0.0065443840026855465, 0.006523903846740723, 0.006500351905822754, 0.006523903846740723, 0.006499328136444092, 0.0065484800338745115, 0.006569983959197998, 0.0066641921997070315, 0.006716415882110595, 0.01415782356262207, 0.006632448196411133, 0.006606847763061524, 0.006604800224304199, 0.00658841609954834, 0.006623231887817383, 0.006602752208709717, 0.006601727962493896, 0.006605823993682861, 0.006618112087249756, 0.006612991809844971, 0.0066406397819519045, 0.006635519981384277, 0.006615039825439453, 0.006665215969085693, 0.0066304001808166506, 0.006602752208709717, 0.006648831844329834, 0.006602752208709717, 0.00657203197479248, 0.006553599834442139, 0.006601727962493896, 0.006551551818847656, 0.006707200050354004, 0.006573056221008301, 0.006590496063232422, 0.0066037440299987794, 0.006590464115142822, 0.006642687797546387, 0.006612991809844971, 0.006595583915710449, 0.006689792156219483, 0.00667852783203125, 0.006576128005981445, 
0.006612991809844971, 0.006601727962493896, 0.006564864158630371, 0.006606847763061524, 0.0066344962120056155, 0.0065710082054138185, 0.00658841609954834, 0.006599711894989014, 0.00657916784286499, 0.00658022403717041, 0.006684671878814697, 0.006550528049468994, 0.006591487884521485, 0.006593535900115967, 0.006625279903411865, 0.00662937593460083, 0.006553599834442139, 0.0066109437942504885, 0.006631423950195312, 0.006582272052764892, 0.006649856090545654, 0.00659660816192627, 0.006560768127441406, 0.006602752208709717, 0.006618112087249756, 0.006554624080657959, 0.00658739185333252, 0.006608895778656006, 0.006449215888977051, 0.014105536460876466, 0.006559743881225586, 0.006602752208709717, 0.006622208118438721, 0.006557695865631104, 0.006612991809844971, 0.006627327919006347, 0.006625279903411865, 0.006560768127441406, 0.006626304149627686, 0.006623231887817383, 0.006604800224304199, 0.006590464115142822, 0.0066119680404663084, 0.006515711784362793, 0.006474751949310303, 0.006576128005981445, 0.006467584133148193, 0.006466559886932373, 0.006496255874633789, 0.006532095909118653, 0.006472703933715821, 0.006501376152038574, 0.006467584133148193, 0.006494207859039307, 0.0065075201988220215, 0.006481919765472412, 0.006482944011688232, 0.006497280120849609, 0.006467616081237793, 0.0064726719856262206, 0.00648089599609375, 0.006492159843444824, 0.006602752208709717, 0.006665215969085693, 0.006614016056060791, 0.006632448196411133, 0.006595583915710449, 0.006575104236602783, 0.006602752208709717, 0.00659660816192627, 0.006615039825439453, 0.006586368083953857, 0.006584320068359375, 0.006590464115142822, 0.0066078720092773435, 0.006621183872222901, 0.006591487884521485, 0.00658739185333252, 0.00657919979095459, 0.006599679946899414, 0.006576128005981445, 0.00665503978729248, 0.006610879898071289, 0.006606847763061524, 0.00657203197479248, 0.006584320068359375, 0.0066078720092773435, 0.0065894718170166015, 0.006585311889648437, 0.006475776195526123, 0.006569983959197998, 0.006608895778656006, 0.014060544013977052, 0.006614016056060791, 0.006609920024871826, 0.006631423950195312, 0.006593535900115967, 0.006609920024871826, 0.006618112087249756, 0.006598656177520752, 0.006564864158630371, 0.00658739185333252, 0.006601727962493896, 0.006608895778656006, 0.006594560146331787, 0.006595583915710449, 0.006605823993682861, 0.00662937593460083, 0.006681600093841553, 0.0066713600158691405, 0.006608895778656006, 0.006583295822143555, 0.00658739185333252, 0.006583295822143555, 0.006593535900115967, 0.006553599834442139, 0.006652927875518798, 0.006683648109436035, 0.006639616012573242, 0.006656000137329102, 0.006604800224304199, 0.006593535900115967, 0.006597631931304931, 0.0065771517753601075, 0.006597631931304931, 0.00658739185333252, 0.006591487884521485, 0.006598656177520752, 0.006598656177520752, 0.006624256134033203, 0.0066007041931152345, 0.0066007041931152345, 0.006635519981384277, 0.006590464115142822, 0.006689792156219483, 0.006603775978088379, 0.0066078720092773435, 0.006593535900115967, 0.006575104236602783, 0.0066212158203125, 0.006588384151458741, 0.006633471965789795, 0.006592512130737305, 0.006511616230010986, 0.006509568214416504, 0.0064767999649047855, 0.006648831844329834, 0.006685696125030518, 0.0066304001808166506, 0.006681600093841553, 0.0066744318008422855, 0.006615039825439453, 0.006653952121734619, 0.006653952121734619, 0.006643712043762207, 0.014236672401428223, 0.0066447358131408694, 0.006639616012573242, 0.006658048152923584, 0.006650879859924317, 0.006642687797546387, 
0.006651904106140137, 0.0066744318008422855, 0.006650879859924317, 0.006643712043762207, 0.0066344962120056155, 0.006614016056060791, 0.006625279903411865, 0.006636544227600098, 0.006662144184112549, 0.006662144184112549, 0.006665215969085693, 0.006599679946899414, 0.006651904106140137, 0.006532127857208252, 0.0065382080078125, 0.0065413122177124024, 0.006599679946899414, 0.006529024124145508, 0.006524928092956543, 0.006496255874633789, 0.006631423950195312, 0.006649856090545654, 0.006647808074951172, 0.006662144184112549, 0.006620160102844238, 0.006645760059356689, 0.006635519981384277, 0.006672383785247803, 0.006624256134033203, 0.006660096168518067, 0.006633471965789795, 0.006516736030578613, 0.0064880638122558594, 0.00663759994506836, 0.0064992961883544925, 0.006536191940307618, 0.006494207859039307, 0.006503424167633057, 0.006509600162506103, 0.006504415988922119, 0.0064542717933654785, 0.006605823993682861, 0.00652185583114624, 0.0065484800338745115, 0.006526976108551025, 0.0065771517753601075, 0.006526976108551025, 0.006481919765472412, 0.006527999877929688, 0.0065177597999572755, 0.006498303890228272, 0.006512639999389648, 0.0065136637687683106, 0.0065669121742248536, 0.0065146880149841305, 0.006498303890228272, 0.006510591983795166, 0.014119935989379882, 0.0066109437942504885, 0.006601727962493896, 0.006649856090545654, 0.006703135967254639, 0.006661087989807129, 0.006651904106140137, 0.006604800224304199, 0.0066406397819519045, 0.006617087841033936, 0.006614016056060791, 0.006648831844329834, 0.006633471965789795, 0.00652185583114624, 0.00653004789352417, 0.006516736030578613, 0.006532095909118653, 0.00652288007736206, 0.006494207859039307, 0.006586368083953857, 0.006556672096252441, 0.006494207859039307, 0.006533120155334473, 0.006512639999389648, 0.00652288007736206, 0.006645760059356689, 0.006689792156219483, 0.006711296081542969, 0.00672051191329956, 0.006668288230895996, 0.0067123198509216305, 0.006601727962493896, 0.00652185583114624, 0.00658739185333252, 0.006556672096252441, 0.006518784046173095, 0.006518784046173095, 0.006492159843444824, 0.00658841609954834, 0.006502399921417237, 0.006502399921417237, 0.006535168170928955, 0.006529024124145508, 0.006554624080657959, 0.006508543968200684, 0.006511616230010986, 0.0064849920272827145, 0.006505504131317139, 0.00649724817276001, 0.006583295822143555, 0.006526976108551025, 0.0064849920272827145, 0.006533120155334473, 0.006525951862335205, 0.006479872226715088, 0.006542335987091064, 0.006512639999389648, 0.0064778242111206055, 0.0065177597999572755, 0.006503424167633057, 0.006551551818847656, 0.006511616230010986, 0.006508543968200684, 0.014226431846618653, 0.0066979842185974124, 0.006638591766357422, 0.006646783828735352, 0.0066007041931152345, 0.006658048152923584, 0.006615039825439453, 0.006631423950195312, 0.006619135856628418, 0.006662144184112549, 0.0065812478065490725, 0.006553599834442139, 0.006510591983795166, 0.00658739185333252, 0.006773759841918945, 0.006754303932189941, 0.006670335769653321, 0.0066979842185974124, 0.006624256134033203, 0.006711296081542969, 0.006715392112731934, 0.006631423950195312, 0.006546432018280029, 0.006619135856628418, 0.006663167953491211, 0.0066406397819519045, 0.006766592025756836, 0.006685696125030518, 0.0066826238632202144, 0.006653952121734619, 0.006616064071655273, 0.00662937593460083, 0.006627327919006347, 0.006665215969085693, 0.006620160102844238, 0.0065833601951599124, 0.00659449577331543, 0.0064839677810668945, 0.006452223777770996, 0.00648089599609375, 0.0064778242111206055, 
0.006474751949310303, 0.006462463855743408, 0.006458367824554443, 0.00648089599609375, 0.006453248023986816, 0.006493184089660644, 0.006461440086364746, 0.006519807815551758, 0.006486015796661377, 0.00652288007736206, 0.006569983959197998, 0.006503424167633057, 0.006456319808959961, 0.006512639999389648, 0.006496255874633789, 0.006542335987091064, 0.006493184089660644, 0.006486015796661377, 0.006561791896820069, 0.006503424167633057, 0.006489120006561279, 0.00658735990524292, 0.013493247985839844, 0.006289408206939697, 0.006296576023101807, 0.006315008163452148, 0.006300672054290772, 0.006293504238128662, 0.006275072097778321, 0.0062269439697265625, 0.006235136032104492, 0.006207488059997559, 0.006214655876159668, 0.006227968215942382, 0.006205440044403076, 0.006254591941833496, 0.006221824169158936, 0.006329343795776367, 0.006291456222534179, 0.006296576023101807, 0.006258687973022461, 0.0063283519744873045, 0.006334432125091553, 0.006309887886047363, 0.006434815883636475, 0.006309887886047363, 0.006338560104370118, 0.006328320026397705, 0.006333439826965332, 0.006348800182342529, 0.006306816101074219, 0.006341631889343262, 0.006312960147857666, 0.0063170561790466305, 0.006302720069885254, 0.006285312175750732, 0.006295551776885986, 0.006289472103118896, 0.006285247802734375, 0.0062791681289672855, 0.006329343795776367, 0.0064143362045288085, 0.0063508481979370115, 0.006376448154449463, 0.0063201279640197755, 0.006395904064178467, 0.006427648067474365, 0.006301695823669433, 0.006292479991912842, 0.006301695823669433, 0.006306816101074219, 0.006335487842559814, 0.006315008163452148, 0.0063201279640197755, 0.0062791681289672855, 0.006328320026397705, 0.006266880035400391, 0.006331391811370849, 0.006337535858154297, 0.006280191898345947, 0.006295551776885986, 0.006269951820373535, 0.006280191898345947, 0.006284287929534912, 0.006262784004211426, 0.013628416061401367, 0.006308864116668702, 0.0063211522102355954, 0.0063201279640197755, 0.0062863359451293946, 0.006301695823669433, 0.006296576023101807, 0.006576128005981445, 0.0066447358131408694, 0.006652927875518798, 0.006668288230895996, 0.006708223819732666, 0.006662144184112549, 0.006710271835327148, 0.006683648109436035, 0.006653952121734619, 0.006662144184112549, 0.006675456047058105, 0.0066641921997070315, 0.006755328178405762, 0.006732800006866455, 0.006658048152923584, 0.0067276802062988285, 0.006668288230895996, 0.006662144184112549, 0.0067276802062988285, 0.006647808074951172, 0.007676928043365478, 0.007031807899475098, 0.006772736072540283, 0.0066826238632202144, 0.0067010560035705566, 0.006714367866516113, 0.006654975891113281, 0.006670335769653321, 0.006653952121734619, 0.0066119680404663084, 0.006774784088134766, 0.00667955207824707, 0.006646783828735352, 0.0069621758460998535, 0.006809599876403808, 0.006687744140625, 0.006675456047058105, 0.0069008002281188965, 0.006768576145172119, 0.006726655960083008, 0.006666240215301514, 0.0066416640281677245, 0.006711296081542969, 0.006719488143920899, 0.006622208118438721, 0.006683648109436035, 0.0066744318008422855, 0.006653952121734619, 0.0066744318008422855, 0.006677504062652588, 0.0066406397819519045, 0.00667852783203125, 0.0067338237762451176, 0.0066713600158691405, 0.006665215969085693, 0.006628352165222168, 0.013584383964538574, 0.006291456222534179, 0.006294528007507324, 0.006313983917236328, 0.006647808074951172, 0.006662176132202148, 0.006650911808013916, 0.0066631040573120115, 0.006639616012573242, 0.006676479816436768, 0.006642687797546387, 0.00672051191329956, 
0.006683648109436035, 0.0066344962120056155, 0.006663167953491211, 0.006652927875518798, 0.006658048152923584, 0.006625279903411865, 0.0066416640281677245, 0.006654975891113281, 0.0066447358131408694, 0.006648928165435791, 0.006696864128112793, 0.00672156810760498, 0.00659555196762085, 0.006661119937896728, 0.0065474557876586915, 0.006543360233306885, 0.006553599834442139, 0.006553599834442139, 0.006569983959197998, 0.0065484800338745115, 0.0065484800338745115, 0.006510591983795166, 0.006535168170928955, 0.006557695865631104, 0.006568960189819336, 0.006615039825439453, 0.006576128005981445, 0.006526976108551025, 0.0065413122177124024, 0.006532095909118653, 0.006518784046173095, 0.006553599834442139, 0.006540287971496582, 0.0065382399559021, 0.00653926420211792, 0.0065372161865234375, 0.006543360233306885, 0.006755328178405762, 0.006635519981384277, 0.006667263984680176, 0.006663167953491211, 0.006659071922302246, 0.006658048152923584, 0.006626304149627686, 0.00653926420211792, 0.006536191940307618, 0.006534143924713135, 0.006536191940307618, 0.006558720111846924, 0.006525951862335205, 0.006555647850036621, 0.014024703979492188, 0.006663167953491211, 0.0066938881874084475, 0.006687744140625, 0.006643712043762207, 0.0067420158386230465, 0.006636544227600098, 0.006662144184112549, 0.0066304001808166506, 0.006636544227600098, 0.006628352165222168, 0.006694911956787109, 0.006685696125030518, 0.006700032234191895, 0.006662144184112549, 0.006724607944488525, 0.006725632190704346, 0.006654975891113281, 0.00667955207824707, 0.006652927875518798, 0.006609920024871826, 0.006743040084838867, 0.006716415882110595, 0.006688767910003662, 0.006670335769653321, 0.006663167953491211, 0.006626304149627686, 0.006616064071655273, 0.006737919807434082, 0.006636544227600098, 0.006661119937896728, 0.006651904106140137, 0.006619135856628418, 0.006636544227600098, 0.006639616012573242, 0.006668288230895996, 0.006621183872222901, 0.0066375679969787596, 0.006575104236602783, 0.006642687797546387, 0.006590464115142822, 0.006633471965789795, 0.006618112087249756, 0.006599679946899414, 0.006623231887817383, 0.006322175979614258, 0.006309887886047363, 0.006273024082183838, 0.006336512088775635, 0.006567935943603515, 0.0063907837867736815, 0.006482944011688232, 0.006322175979614258, 0.006307839870452881, 0.006502399921417237, 0.006603775978088379, 0.0066119680404663084, 0.006677504062652588, 0.006618112087249756, 0.006677504062652588, 0.006624256134033203, 0.006617087841033936, 0.006584320068359375, 0.01439641571044922, 0.0066304001808166506, 0.006607935905456543, 0.006634431838989258, 0.00667955207824707, 0.006647808074951172, 0.00660588788986206, 0.006608831882476807, 0.006612991809844971, 0.006631487846374511, 0.006518720149993896, 0.006568960189819336, 0.006631423950195312, 0.006667263984680176, 0.0066007041931152345, 0.006632448196411133, 0.006626304149627686, 0.00659660816192627, 0.00658841609954834, 0.006624256134033203, 0.006594560146331787, 0.0066641921997070315, 0.006632448196411133, 0.006619135856628418, 0.006649951934814453, 0.006619040012359619, 0.006639616012573242, 0.0066416640281677245, 0.00674508810043335, 0.006648831844329834, 0.0066344962120056155, 0.006591487884521485, 0.006621183872222901, 0.006604800224304199, 0.006652927875518798, 0.006602784156799316, 0.006627295970916748, 0.006569983959197998, 0.006623231887817383, 0.006626304149627686, 0.006623231887817383, 0.006616064071655273, 0.0066119680404663084, 0.006627327919006347, 0.00662937593460083, 0.006601727962493896, 0.006666240215301514, 
0.006606847763061524, 0.006593535900115967, 0.006656000137329102, 0.006329343795776367, 0.0062904319763183595, 0.006337535858154297, 0.006302720069885254, 0.006250495910644531, 0.006260799884796143, 0.006354879856109619, 0.006252543926239014, 0.006238207817077636, 0.006238207817077636, 0.006223872184753418, 0.006252543926239014, 0.006278143882751465, 0.013460479736328124, 0.006302720069885254, 0.006312960147857666, 0.006304768085479737, 0.006268928050994873, 0.006306816101074219, 0.006271999835968017, 0.006340608119964599, 0.006277120113372803, 0.006309887886047363, 0.006330368041992188, 0.006273024082183838, 0.006296576023101807, 0.006294528007507324, 0.006270976066589356, 0.006304768085479737, 0.006306816101074219, 0.006336512088775635, 0.006303743839263916, 0.00628326416015625, 0.006303743839263916, 0.0063211522102355954, 0.00628223991394043, 0.006302720069885254, 0.006285312175750732, 0.0064143362045288085, 0.00633241605758667, 0.006295551776885986, 0.006328320026397705, 0.006300672054290772, 0.006343679904937744, 0.0063211522102355954, 0.006278143882751465, 0.006346752166748047, 0.006318079948425293, 0.006295551776885986, 0.006316031932830811, 0.006299647808074951, 0.006298624038696289, 0.006311935901641846, 0.006319104194641113, 0.006296576023101807, 0.006323200225830078, 0.006259712219238281, 0.006307839870452881, 0.006273024082183838, 0.0062679038047790524, 0.006304800033569336, 0.006276063919067383, 0.006398975849151611, 0.006325247764587402, 0.006411263942718506, 0.00658841609954834, 0.0065443840026855465, 0.0065710082054138185, 0.006657023906707763, 0.0066375679969787596, 0.006677504062652588, 0.006666240215301514, 0.006936575889587402, 0.006621183872222901, 0.006707200050354004, 0.006597631931304931, 0.014204928398132324, 0.006622208118438721, 0.006638591766357422, 0.006658048152923584, 0.006616064071655273, 0.006595583915710449, 0.006636544227600098, 0.006509568214416504, 0.0065064959526062015, 0.0065474557876586915, 0.0064839677810668945, 0.006519807815551758, 0.006601727962493896, 0.006621183872222901, 0.006665215969085693, 0.006653952121734619, 0.006594560146331787, 0.006592512130737305, 0.0066119680404663084, 0.006568960189819336, 0.006620160102844238, 0.006583295822143555, 0.00658841609954834, 0.006656000137329102, 0.006696959972381592, 0.006711296081542969, 0.006905856132507324, 0.006700032234191895, 0.006670335769653321, 0.006672383785247803, 0.006706175804138184, 0.006722559928894043, 0.006874112129211426, 0.0066938881874084475, 0.00675328016281128, 0.006690815925598144, 0.0067010560035705566, 0.006730751991271973, 0.006758399963378906, 0.0066344962120056155, 0.006667263984680176, 0.006636544227600098, 0.006665215969085693, 0.00667852783203125, 0.006659071922302246, 0.0066375679969787596, 0.006654975891113281, 0.006657023906707763, 0.006676479816436768, 0.006707200050354004, 0.0067573761940002445, 0.00667852783203125, 0.00672979211807251, 0.007108543872833252, 0.006815743923187256, 0.006897664070129395, 0.007226367950439453, 0.008385536193847656, 0.006814720153808594, 0.006776832103729248, 0.0067123198509216305, 0.006680575847625733, 0.006706175804138184, 0.014495743751525878, 0.006643712043762207, 0.006730751991271973, 0.006677504062652588, 0.006604800224304199, 0.006708223819732666, 0.0066713600158691405, 0.0067123198509216305, 0.006663167953491211, 0.006658048152923584, 0.006615039825439453, 0.006650879859924317, 0.006648831844329834, 0.006659071922302246, 0.006642687797546387, 0.006676479816436768, 0.006657023906707763, 0.006666240215301514, 0.006651904106140137, 
0.006662144184112549, 0.006642687797546387, 0.006657023906707763, 0.0066344962120056155, 0.0066938881874084475, 0.006617087841033936, 0.006643712043762207, 0.006646783828735352, 0.006635519981384277, 0.006748159885406494, 0.006575104236602783, 0.006512639999389648, 0.0065136637687683106, 0.006534143924713135, 0.006594560146331787, 0.00659660816192627, 0.006670335769653321, 0.006673408031463623, 0.0066304001808166506, 0.006646783828735352, 0.00669593620300293, 0.0066416640281677245, 0.006632448196411133, 0.006654975891113281, 0.006638591766357422, 0.006643712043762207, 0.006639616012573242, 0.006602752208709717, 0.0066416640281677245, 0.006799359798431396, 0.006619135856628418, 0.0066202239990234375, 0.006612927913665772, 0.006662144184112549, 0.006647808074951172, 0.0066304001808166506, 0.0066744318008422855, 0.006666240215301514, 0.006662144184112549, 0.00667955207824707, 0.006737919807434082, 0.0067358717918396, 0.006633471965789795, 0.006627327919006347, 0.013528063774108886, 0.006284287929534912, 0.006270976066589356, 0.006296576023101807, 0.00624128007888794, 0.006300672054290772, 0.006276095867156982, 0.006313983917236328, 0.006309887886047363, 0.006268928050994873, 0.006355967998504639, 0.006330368041992188, 0.006269951820373535, 0.006284287929534912, 0.006298624038696289, 0.0063170561790466305, 0.006275072097778321, 0.006264832019805908, 0.0062679038047790524, 0.006275072097778321, 0.006355967998504639, 0.0062679038047790524, 0.006281216144561768, 0.006277120113372803, 0.006293504238128662, 0.006310912132263183, 0.006312960147857666, 0.006310912132263183, 0.0064440321922302245, 0.006316031932830811, 0.006293536186218262, 0.006313951969146728, 0.006285312175750732, 0.006295551776885986, 0.0064245758056640625, 0.006555647850036621, 0.006355967998504639, 0.00633241605758667, 0.006273024082183838, 0.006291456222534179, 0.006294528007507324, 0.006291456222534179, 0.006272031784057617, 0.006305759906768799, 0.006288383960723877, 0.0062863359451293946, 0.006309887886047363, 0.006262784004211426, 0.006276095867156982, 0.006325247764587402, 0.006336512088775635, 0.006355967998504639, 0.006593535900115967, 0.0066979842185974124, 0.006696959972381592, 0.00667955207824707, 0.00658739185333252, 0.006660096168518067, 0.006658048152923584, 0.006604800224304199, 0.006663167953491211, 0.006598656177520752, 0.006688767910003662, 0.014336000442504883, 0.006606847763061524, 0.006635519981384277, 0.0066344962120056155, 0.006615039825439453, 0.006618112087249756, 0.006616064071655273, 0.006873087882995605, 0.007664639949798584, 0.0074065918922424315, 0.006867968082427979, 0.0067338237762451176, 0.006797311782836914, 0.006686719894409179, 0.006696959972381592, 0.006700032234191895, 0.006801407814025879, 0.0066744318008422855, 0.0068280320167541505, 0.007235583782196045, 0.007131135940551757, 0.006710271835327148, 0.0066938881874084475, 0.006676479816436768, 0.006687744140625, 0.006663167953491211, 0.006716415882110595, 0.006707200050354004, 0.006669312000274658, 0.006708223819732666, 0.0066344962120056155, 0.00667955207824707, 0.006959104061126709, 0.006726655960083008, 0.006583295822143555, 0.0066447358131408694, 0.006635519981384277, 0.006618112087249756, 0.006632448196411133, 0.006635519981384277, 0.0066375679969787596, 0.006687744140625, 0.006594560146331787, 0.006642687797546387, 0.006676479816436768, 0.0065812478065490725, 0.006656000137329102, 0.006635519981384277, 0.0064737281799316405, 0.006524928092956543, 0.006605823993682861, 0.006601727962493896, 0.006645760059356689, 0.006675456047058105, 
0.006601727962493896, 0.0066488637924194335, 0.00667849588394165, 0.006636544227600098, 0.006590464115142822, 0.006685696125030518, 0.006595583915710449, 0.006690815925598144, 0.006801407814025879, 0.014134271621704102, 0.006654975891113281, 0.006591487884521485, 0.006643712043762207, 0.0066447358131408694, 0.0066375679969787596, 0.006680575847625733, 0.006661119937896728, 0.0066078720092773435, 0.006665215969085693, 0.006608895778656006, 0.006653952121734619, 0.006666240215301514, 0.006605823993682861, 0.006661151885986328, 0.006680543899536133, 0.006620160102844238, 0.0066304001808166506, 0.006690815925598144, 0.006731776237487793, 0.00669593620300293, 0.0066744318008422855, 0.006615039825439453, 0.006669312000274658, 0.0066447358131408694, 0.006638591766357422, 0.006669312000274658, 0.006659071922302246, 0.006611008167266846, 0.006669248104095459, 0.006669312000274658, 0.0066406397819519045, 0.006665215969085693, 0.006619135856628418, 0.006647808074951172, 0.006700032234191895, 0.006636544227600098, 0.006626304149627686, 0.006731776237487793, 0.006645760059356689, 0.006620160102844238, 0.006659071922302246, 0.006632448196411133, 0.006608895778656006, 0.00657919979095459, 0.006495232105255127, 0.006553599834442139, 0.006552576065063476, 0.00659660816192627, 0.006731776237487793, 0.006663167953491211, 0.006635519981384277, 0.006714367866516113, 0.006703104019165039, 0.006657023906707763, 0.006699007987976074, 0.006661119937896728, 0.006847487926483154, 0.006709248065948486, 0.006659071922302246, 0.006594560146331787, 0.006659071922302246, 0.006639616012573242, 0.014132224082946777, 0.006639616012573242, 0.006605823993682861, 0.006700032234191895, 0.0070563840866088865, 0.006667263984680176, 0.006658080101013183, 0.006961120128631592, 0.006503424167633057, 0.006562816143035889, 0.0064880638122558594, 0.006516736030578613, 0.0066304001808166506, 0.0065075201988220215, 0.0065136637687683106, 0.006653952121734619, 0.006619135856628418, 0.006621183872222901, 0.0066375679969787596, 0.006589439868927002, 0.006619135856628418, 0.0066416640281677245, 0.0066007041931152345, 0.0066109437942504885, 0.006642687797546387, 0.0066109437942504885, 0.006598656177520752, 0.006634528160095215, 0.006590432167053223, 0.006624256134033203, 0.006589439868927002, 0.006617087841033936, 0.006636544227600098, 0.006554624080657959, 0.006609920024871826, 0.006616064071655273, 0.006788095951080322, 0.006667263984680176, 0.0066447358131408694, 0.006618112087249756, 0.006668288230895996, 0.00662937593460083, 0.0065710082054138185, 0.006667263984680176, 0.006648831844329834, 0.006560800075531006, 0.006664159774780273, 0.0066344962120056155, 0.006592512130737305, 0.006692863941192627, 0.0066979842185974124, 0.006448128223419189, 0.006543360233306885, 0.006525951862335205, 0.006505472183227539, 0.00658739185333252, 0.006567935943603515, 0.006686719894409179, 0.006691840171813965, 0.006684671878814697, 0.006652927875518798, 0.0066744318008422855, 0.006646783828735352]",tokens/s,149.36440946007178,,,,,,,, -4bit-awq-gemm-sdpa,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,Qwen/Qwen1.5-4B,Qwen/Qwen1.5-4B,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,,spawn, AMD EPYC 
7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.40.2,,0.30.1,,,,1.19.2,,,,0.11.1,,,,,,,,,,,,,,,,,,,,,,,,,,,MB,3149.217792,5128.060928,0.0,4481.613824,4276.256768,s,10,3.149311706542969,0.3149311706542969,0.0015717060185626956,0.31493319702148437,0.3165461700439453,0.3169147476196289,0.31720960968017575,"[0.3172833251953125, 0.3164642639160156, 0.3132740478515625, 0.3144434814453125, 0.31399490356445314, 0.3119559020996094, 0.3140972900390625, 0.31542291259765626, 0.3162637023925781, 0.31611187744140623]",tokens/s,812.8760308741042,kWh,3.694201881686847e-06,2.024221956560268e-06,1.6665230346062446e-05,2.238365418430956e-05,tokens/kWh,11436917.220578322,MB,3149.217792,5128.060928,0.0,4481.613824,4465.661952,s,10,184.204947265625,18.420494726562502,0.008042192397515377,18.4165078125,18.4298185546875,18.432060644531248,18.433854316406247,"[18.41758203125, 18.4293203125, 18.415013671875, 18.434302734375, 18.41543359375, 18.427755859375, 18.427966796875, 18.41258203125, 18.409763671875, 18.4152265625]",tokens/s,3.420103582188457,kWh,0.0002180770655886995,0.00011952181134027568,0.0009648250270677349,0.00130242390399671,tokens/kWh,48371.3480739057,,s,629,186.7401061401367,0.2968841115105512,0.03741416455160097,0.29225473022460935,0.29295676879882815,0.29340855712890623,0.6063264038085938,"[0.29236837768554685, 0.29334426879882813, 0.29221682739257815, 0.29205914306640623, 0.2920222778320313, 0.2922946472167969, 0.2924492797851562, 0.2926929931640625, 0.29193624877929686, 0.2920509338378906, 0.29204376220703127, 0.2921123962402344, 0.291873779296875, 0.2915205078125, 0.29172222900390626, 0.29199871826171875, 0.2923089904785156, 0.2930401306152344, 0.2921902160644531, 0.29266842651367186, 0.29187994384765625, 0.2924564514160156, 0.2924277648925781, 0.2920263671875, 0.29210009765625, 0.2922608642578125, 0.2919096374511719, 0.29193624877929686, 0.29192294311523437, 0.29198541259765626, 0.29212261962890623, 0.29261724853515625, 0.2921973571777344, 0.29344256591796875, 0.2920785827636719, 0.2922250366210937, 0.29242059326171876, 0.2925332336425781, 0.2925537414550781, 0.29240524291992187, 0.2921902160644531, 0.2919741516113281, 0.29227825927734374, 0.2922506103515625, 0.2918768615722656, 0.29220248413085936, 0.29238067626953124, 0.29187481689453126, 0.29204788208007815, 0.2923376770019531, 0.2920396728515625, 0.29325619506835937, 0.2925700988769531, 0.29266226196289064, 0.29220452880859377, 0.2922823791503906, 0.2919710693359375, 0.29259878540039064, 0.2924472351074219, 0.29214208984375, 0.2923765869140625, 0.2920519714355469, 0.6092022705078125, 0.29223724365234377, 0.2937958374023438, 0.29247589111328126, 0.29319781494140623, 0.2925875244140625, 0.2924288024902344, 0.29230181884765627, 0.29243902587890624, 0.2923919372558594, 0.2927564697265625, 0.29259161376953124, 0.29243084716796874, 0.29296743774414064, 0.2923427734375, 0.29186456298828123, 0.2922250366210937, 0.29213082885742186, 0.29205810546875, 0.2923417663574219, 0.2929213562011719, 0.2921850891113281, 0.2936012878417969, 0.29257931518554686, 0.29359002685546876, 0.29242059326171876, 0.2921574401855469, 0.2920642700195312, 0.2926080017089844, 0.29214617919921876, 0.2923816833496094, 0.29216973876953123, 0.29204684448242185, 0.2921922607421875, 0.2923284606933594, 0.2922137451171875, 0.2923929748535156, 0.292546630859375, 0.2926417236328125, 0.2925137939453125, 0.2924062805175781, 0.2922659912109375, 0.29193319702148435, 0.29348370361328124, 
0.29205792236328126, 0.2921850891113281, 0.2963548278808594, 0.292305908203125, 0.2925137939453125, 0.2923642883300781, 0.2921707458496094, 0.2921430969238281, 0.2926448669433594, 0.2927411193847656, 0.29220761108398435, 0.292232177734375, 0.29215130615234375, 0.2934067077636719, 0.29257626342773435, 0.29231924438476564, 0.2922465209960938, 0.2922557373046875, 0.2923213195800781, 0.6058434448242187, 0.2923243408203125, 0.29236737060546875, 0.2920919189453125, 0.2922966918945312, 0.2922536926269531, 0.2924298095703125, 0.29252813720703125, 0.29234893798828127, 0.2925455322265625, 0.2922403869628906, 0.29219635009765627, 0.2920130615234375, 0.29205810546875, 0.2921932678222656, 0.2922680358886719, 0.2919772033691406, 0.29201919555664063, 0.29235302734375, 0.2921318359375, 0.29199154663085936, 0.2918656005859375, 0.29201611328125, 0.294361083984375, 0.2921062316894531, 0.29225473022460935, 0.2920929260253906, 0.2933616638183594, 0.29293466186523437, 0.29222708129882813, 0.292073486328125, 0.29259878540039064, 0.2922178649902344, 0.29183078002929685, 0.29233355712890624, 0.2924349365234375, 0.291989501953125, 0.29248818969726564, 0.29223526000976563, 0.29176217651367187, 0.2918082580566406, 0.2931097717285156, 0.29253836059570315, 0.29301043701171875, 0.2920130615234375, 0.2920407409667969, 0.2919710388183594, 0.29457406616210935, 0.2923519897460938, 0.2923028564453125, 0.2920929260253906, 0.2918901672363281, 0.29224755859375, 0.2919526672363281, 0.291937255859375, 0.29211032104492185, 0.29193011474609376, 0.29155224609375, 0.2918410339355469, 0.2920980529785156, 0.292168701171875, 0.29260595703125, 0.292453369140625, 0.6075504760742187, 0.2925127563476563, 0.2923765869140625, 0.29210931396484374, 0.29227825927734374, 0.2921656188964844, 0.29187890625, 0.29214105224609377, 0.29254144287109374, 0.29202023315429687, 0.29206732177734374, 0.293718017578125, 0.2936227722167969, 0.29316915893554685, 0.29292236328125, 0.29260595703125, 0.292431884765625, 0.2927585144042969, 0.2929725341796875, 0.29310772705078125, 0.29271142578125, 0.2924615783691406, 0.29305242919921876, 0.29276776123046877, 0.2925915832519531, 0.29245440673828127, 0.29250765991210936, 0.2925107116699219, 0.29247589111328126, 0.29257830810546875, 0.29249331665039063, 0.2926243896484375, 0.2935541687011719, 0.2926243896484375, 0.29202532958984373, 0.2919198608398437, 0.292021240234375, 0.29210418701171875, 0.2919884948730469, 0.2921758728027344, 0.2921983947753906, 0.292052978515625, 0.2918523254394531, 0.2925229797363281, 0.2951219177246094, 0.2927698059082031, 0.2923653259277344, 0.29271551513671873, 0.2925813903808594, 0.29277389526367187, 0.2923816833496094, 0.2925250549316406, 0.2931272277832031, 0.2930205993652344, 0.29253631591796875, 0.2923786315917969, 0.2930882568359375, 0.2928087158203125, 0.29209906005859376, 0.2928845520019531, 0.29307998657226564, 0.2928609313964844, 0.2925823974609375, 0.6065142211914063, 0.2926080322265625, 0.29234066772460937, 0.29249432373046874, 0.2920980529785156, 0.2918707275390625, 0.2923858032226562, 0.2918133850097656, 0.2919024963378906, 0.2923048706054687, 0.29231924438476564, 0.29207757568359377, 0.29218612670898436, 0.29211032104492185, 0.29206936645507814, 0.29310873413085936, 0.2925721740722656, 0.2921850891113281, 0.292738037109375, 0.2921891784667969, 0.2920550537109375, 0.2918143920898438, 0.2921983947753906, 0.29208370971679687, 0.29210214233398435, 0.29210214233398435, 0.2925189208984375, 0.2919208984375, 0.29203353881835936, 0.2927615966796875, 0.291736572265625, 
0.29209701538085936, 0.2920550537109375, 0.2918656005859375, 0.2918370056152344, 0.2924051818847656, 0.2920263671875, 0.2931199951171875, 0.29166488647460936, 0.29208370971679687, 0.2922158203125, 0.2921277465820312, 0.2917294006347656, 0.29210012817382813, 0.29211541748046876, 0.29188607788085935, 0.29205810546875, 0.29206732177734374, 0.29190142822265625, 0.2926776428222656, 0.29242471313476565, 0.2921656188964844, 0.29246054077148437, 0.2923315124511719, 0.29213287353515627, 0.2920325012207031, 0.2924195861816406, 0.29780581665039063, 0.29234072875976563, 0.2921574401855469, 0.29292031860351564, 0.2927329406738281, 0.2922823791503906, 0.6048440551757812, 0.29242266845703124, 0.29194342041015625, 0.2922823791503906, 0.29246875, 0.2921922607421875, 0.2924472351074219, 0.2927779846191406, 0.29216461181640624, 0.29211953735351565, 0.29237759399414065, 0.29223526000976563, 0.2919372863769531, 0.2918973388671875, 0.2923294982910156, 0.2918020935058594, 0.29216152954101565, 0.2923212890625, 0.29266021728515623, 0.2927503356933594, 0.2927216491699219, 0.29210418701171875, 0.2921973876953125, 0.29242471313476565, 0.29217279052734374, 0.29210931396484374, 0.2921912231445313, 0.292021240234375, 0.29214617919921876, 0.29203866577148435, 0.29192190551757813, 0.2918973388671875, 0.29224346923828126, 0.291884033203125, 0.291857421875, 0.29202532958984373, 0.2921379699707031, 0.2925025329589844, 0.2959308776855469, 0.2924974060058594, 0.2935582580566406, 0.2933534851074219, 0.2929541015625, 0.29257931518554686, 0.2927984619140625, 0.29274725341796876, 0.29252197265625, 0.29275750732421874, 0.2934097900390625, 0.29243289184570315, 0.2923284606933594, 0.2925189208984375, 0.2929377136230469, 0.29240524291992187, 0.2938306579589844, 0.29293670654296877, 0.29282302856445314, 0.29310772705078125, 0.2923100280761719, 0.2923991088867188, 0.29276568603515624, 0.2923294677734375, 0.2927698059082031, 0.6069278564453126, 0.2922486267089844, 0.29226181030273435, 0.29200384521484374, 0.29218099975585937, 0.2922465209960938, 0.29204376220703127, 0.2927329406738281, 0.2925977478027344, 0.292210693359375, 0.29327462768554685, 0.2930093994140625, 0.29313638305664064, 0.29303192138671874, 0.292389892578125, 0.29288653564453127, 0.29291519165039065, 0.29289266967773436, 0.2928721923828125, 0.292701171875, 0.29249331665039063, 0.29188607788085935, 0.29253631591796875, 0.29483929443359375, 0.2933084106445312, 0.29292849731445314, 0.2923530883789062, 0.29386029052734375, 0.29232742309570314, 0.2925465698242187, 0.29262130737304687, 0.292274169921875, 0.2924564514160156, 0.292242431640625, 0.2929407958984375, 0.2920704040527344, 0.2924810791015625, 0.292454345703125, 0.2922188720703125, 0.29238885498046874, 0.29204888916015626, 0.2919598083496094, 0.2920232849121094, 0.2920048522949219, 0.29257220458984373, 0.29197512817382815, 0.2930882568359375, 0.29200384521484374, 0.292384765625, 0.2927626342773437, 0.2920519714355469, 0.2919045104980469, 0.2923561096191406, 0.29254348754882814, 0.2925537414550781, 0.2922148132324219, 0.292211669921875, 0.2919024658203125, 0.29274008178710936, 0.29211032104492185, 0.29219430541992186, 0.2925619201660156, 0.2921574401855469, 0.6078638305664062, 0.2920642700195312, 0.29204171752929686, 0.29206219482421875, 0.2920550537109375, 0.2921185302734375, 0.29250149536132813, 0.29204888916015626, 0.29186456298828123, 0.2920867919921875, 0.29199871826171875, 0.29200689697265625, 0.29195059204101564, 0.2919956359863281, 0.2926612548828125, 0.291999755859375, 0.29500723266601564, 0.2922413940429687, 
0.2919342041015625, 0.2922650146484375, 0.2921286926269531, 0.29177548217773436, 0.2920263671875, 0.29223629760742187, 0.2919342041015625, 0.2920151062011719, 0.29193624877929686, 0.29171505737304687, 0.29255679321289063, 0.29226290893554685, 0.29204888916015626, 0.29261932373046873, 0.2921389465332031, 0.2924472351074219, 0.292569091796875, 0.29272988891601565, 0.2924666442871094, 0.29255474853515623, 0.2924288024902344, 0.29235711669921877, 0.29266842651367186, 0.2926305236816406, 0.292279296875, 0.29281585693359374, 0.29269195556640626, 0.2922127380371094, 0.2923765869140625, 0.29226190185546874, 0.29332275390625, 0.29225677490234375, 0.2925455322265625, 0.29204684448242185, 0.2919465026855469, 0.29239501953125, 0.292094970703125, 0.2916546630859375, 0.29185330200195314, 0.2919178161621094, 0.29201202392578124, 0.2919045104980469, 0.2917683715820312, 0.29191879272460936, 0.29268173217773436, 0.609132568359375, 0.2924984436035156, 0.2927001647949219, 0.29203353881835936, 0.2919598083496094, 0.29245440673828127, 0.29231411743164065, 0.2920304565429688, 0.29216973876953123, 0.2918604736328125, 0.2920816650390625, 0.29198541259765626, 0.29182769775390627, 0.29221478271484375, 0.29226290893554685, 0.29183078002929685, 0.2922137451171875, 0.2939207763671875, 0.29266226196289064, 0.292632568359375, 0.2929541015625, 0.29197210693359377, 0.2920867919921875, 0.29224346923828126, 0.2920704040527344, 0.292126708984375, 0.2921769104003906, 0.2920243225097656, 0.292173828125, 0.2919126892089844, 0.2919823303222656, 0.2920867919921875, 0.2925608825683594, 0.29213287353515627, 0.29228546142578127, 0.29201608276367186, 0.2929305725097656, 0.2924963989257813, 0.2920181884765625, 0.2923519897460938, 0.29213082885742186, 0.2921359252929687, 0.29183795166015625, 0.29199258422851565, 0.2920724487304687, 0.29192807006835936, 0.292031494140625, 0.2920294494628906, 0.292173828125, 0.2920765380859375, 0.29200689697265625, 0.2918758544921875, 0.2921349182128906, 0.29268896484375, 0.2934025573730469, 0.292505615234375, 0.2919270324707031, 0.29225164794921876, 0.292105224609375, 0.29204071044921875, 0.2915840148925781, 0.2917908630371094, 0.2920570983886719, 0.6085621948242188, 0.2921451416015625, 0.29239706420898437, 0.29239501953125, 0.29228955078125, 0.2920243225097656, 0.2924892272949219, 0.2920345458984375, 0.2921123962402344, 0.291962890625, 0.2920796203613281, 0.29241549682617185, 0.29221682739257815, 0.2918553466796875, 0.291915771484375, 0.29186663818359376, 0.2920427551269531, 0.2920980529785156, 0.29218817138671876, 0.29189529418945315, 0.29189837646484373, 0.29198028564453127, 0.29211953735351565, 0.29269195556640626, 0.2930841674804687, 0.2925148010253906, 0.29509222412109376, 0.29249127197265623, 0.2924369812011719, 0.29232537841796874, 0.2920162048339844, 0.29203448486328126, 0.29213900756835937, 0.2920243225097656, 0.29217279052734374, 0.2922342529296875, 0.2917130126953125, 0.2923991088867188, 0.29214718627929687, 0.2926458740234375, 0.2934947814941406, 0.2932357177734375, 0.2925066223144531, 0.2929162292480469, 0.29218817138671876, 0.2923735046386719, 0.2922332153320312, 0.2920325012207031, 0.2919761962890625, 0.29228543090820314, 0.29216461181640624, 0.29214718627929687, 0.29235302734375, 0.292274169921875, 0.2920376281738281, 0.29199154663085936, 0.2923724670410156, 0.2924646301269531, 0.29217279052734374, 0.29284454345703126, 0.29187277221679686, 0.2919321594238281, 0.29226495361328125]",tokens/s,3.368317674233167,,,,,,,, 
-4bit-awq-gemm-sdpa,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,Qwen/Qwen1.5-0.5B,Qwen/Qwen1.5-0.5B,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.40.2,,0.30.1,,,,1.19.2,,,,0.11.1,,,,,,,,,,,,,,,,,,,,,,,,,,,MB,1441.677312,1709.703168,0.0,1063.256064,942.605312,s,10,0.8730960388183594,0.08730960388183594,0.002035304872210584,0.08713673782348633,0.08910322875976563,0.09054942092895508,0.09170637466430664,"[0.09199561309814454, 0.0843604507446289, 0.08696444702148437, 0.08719612884521484, 0.08721517181396485, 0.08707734680175781, 0.08465267181396484, 0.0866138916015625, 0.08823846435546875, 0.08878185272216797]",tokens/s,2932.0943930345643,kWh,1.0099359104323139e-06,5.533989598310048e-07,2.5805410008034e-06,4.143875871066718e-06,tokens/kWh,61777912.265045814,MB,1441.677312,1709.703168,0.0,1063.256064,942.607872,s,10,53.98236181640625,5.398236181640624,0.0409841937132303,5.390563720703125,5.449904833984375,5.452959252929687,5.455402788085937,"[5.38043359375, 5.40069384765625, 5.41887353515625, 5.44691552734375, 5.456013671875, 5.44922607421875, 5.37513134765625, 5.35530859375, 5.36842431640625, 5.33134130859375]",tokens/s,11.670478630457612,kWh,6.202919682701882e-05,3.3993588802392576e-05,0.00015045469253839705,0.00024647747816780845,tokens/kWh,255601.4467054386,,s,629,54.67969024658198,0.08693114506610816,0.010438416792585154,0.08622489929199219,0.08683417816162109,0.08736747589111328,0.17189485961914075,"[0.08353689575195312, 0.08669286346435547, 0.08684236907958984, 0.08664883422851563, 0.08732160186767578, 0.0871107177734375, 0.08675321960449218, 0.08646348571777343, 0.08578867340087891, 0.08561151885986328, 0.08533920288085937, 0.08318355560302734, 0.08327986907958984, 0.08331366729736328, 0.08347750091552734, 0.08323891448974609, 0.08337407684326172, 0.08320102691650391, 0.08308223724365234, 0.08320921325683593, 0.08336589050292968, 0.08362290954589843, 0.08334028625488281, 0.08379084777832031, 0.08326246643066407, 0.08351641845703126, 0.08637849426269531, 0.0860948486328125, 0.08636211395263672, 0.0862208023071289, 0.0863825912475586, 0.08643379211425781, 0.08343961334228515, 0.08492339324951172, 0.08729497528076172, 0.08630579376220703, 0.08625049591064453, 0.08637340545654297, 0.08612451171875, 0.08631295776367187, 0.0867215347290039, 0.08637235260009765, 0.08630169677734376, 0.08631193542480468, 0.08447795104980468, 0.08495410919189453, 0.08645938873291016, 0.08668978881835937, 0.08635699462890625, 0.08412057495117188, 0.0861143035888672, 0.08629350280761719, 0.08610099029541016, 0.0859535369873047, 0.086508544921875, 0.08645426940917969, 0.0861286392211914, 0.08638976287841797, 0.08641228485107422, 0.08600883483886719, 0.08566067504882813, 0.08624947357177734, 0.16856166076660156, 0.08411341094970703, 0.0832890853881836, 0.08320819091796874, 0.08320409393310547, 0.08308633422851562, 0.08329011535644532, 0.08373766326904297, 0.08319993591308594, 0.08375193786621093, 0.08421683502197265, 0.08632524871826172, 0.0862371826171875, 0.08634674835205078, 0.08365670776367187, 0.0833986587524414, 0.08349798583984375, 
0.08350003051757812, 0.08335769653320313, 0.08395059204101563, 0.0839208984375, 0.08677375793457032, 0.0867154541015625, 0.08665900421142578, 0.0864194564819336, 0.08657305908203125, 0.08616550445556641, 0.08598429107666015, 0.08726729583740235, 0.08673792266845703, 0.08624230194091796, 0.0869222412109375, 0.08644096374511719, 0.0866344985961914, 0.08689356994628906, 0.08668672180175781, 0.08644608306884766, 0.08659046173095702, 0.08682291412353516, 0.08598323059082032, 0.08748851013183594, 0.08676249694824219, 0.08683213043212891, 0.08647679901123047, 0.08678195190429687, 0.08769843292236328, 0.08739942169189453, 0.08640614318847656, 0.08639897918701171, 0.08644403076171875, 0.08633344268798829, 0.08639488220214844, 0.08622694396972656, 0.0867215347290039, 0.0865638427734375, 0.08646451568603515, 0.08639078521728516, 0.08660889434814453, 0.08628224182128906, 0.08633753967285156, 0.08671437072753906, 0.08621875, 0.08635084533691406, 0.17366220092773438, 0.08666828918457031, 0.08657817840576172, 0.08662118530273437, 0.08642969512939454, 0.08634265899658203, 0.0862525405883789, 0.08606208038330078, 0.08631501007080078, 0.08632012939453125, 0.08620851135253907, 0.08628530883789062, 0.08648499298095703, 0.08680038452148438, 0.08667135620117188, 0.08662732696533203, 0.08640409851074218, 0.08657817840576172, 0.08667033386230469, 0.08634060668945312, 0.08654131317138672, 0.08610816192626954, 0.08561766052246093, 0.08718950653076171, 0.08702668762207032, 0.08669388580322265, 0.08728883361816406, 0.08666214752197265, 0.08715264129638672, 0.08645017242431641, 0.08640409851074218, 0.08617984008789062, 0.08535756683349609, 0.0866324462890625, 0.08659967803955078, 0.08661299133300782, 0.08663346862792969, 0.08641843414306641, 0.08643379211425781, 0.08636006164550782, 0.08653209686279296, 0.0865771484375, 0.08655257415771485, 0.08642047882080078, 0.08629657745361329, 0.08657100677490234, 0.0862955551147461, 0.08327884674072265, 0.08376831817626954, 0.08357273864746094, 0.08349286651611328, 0.08337715148925781, 0.0860549087524414, 0.08430796813964844, 0.08347443389892578, 0.08344064331054687, 0.08356454467773437, 0.08309248352050781, 0.08637235260009765, 0.08680242919921875, 0.0863078384399414, 0.08653311920166015, 0.08646041870117188, 0.1741107177734375, 0.08636214447021484, 0.08619312286376953, 0.08634162902832031, 0.08638976287841797, 0.08647885131835938, 0.08649215698242188, 0.08664064025878906, 0.08642771148681641, 0.08624428558349609, 0.08617369842529297, 0.08573542022705079, 0.08636109161376954, 0.08620134735107422, 0.086329345703125, 0.08637644958496093, 0.08652902221679687, 0.08682086181640625, 0.08738508605957031, 0.0865249252319336, 0.0863815689086914, 0.0862894058227539, 0.08674918365478515, 0.08637337493896484, 0.08735027313232421, 0.08632422637939453, 0.08617984008789062, 0.08628428649902344, 0.08778956604003907, 0.08680754852294922, 0.08660582733154297, 0.08646348571777343, 0.08672358703613281, 0.0874486083984375, 0.08708297729492187, 0.08639078521728516, 0.08637337493896484, 0.08624742126464843, 0.08626278686523438, 0.08637651062011718, 0.08677881622314453, 0.08631501007080078, 0.08682803344726563, 0.0861112289428711, 0.08643583679199218, 0.08626380920410157, 0.08634572601318359, 0.086181884765625, 0.08633036804199219, 0.08633856201171874, 0.08504627227783203, 0.08628121948242187, 0.08638771057128906, 0.08684748840332031, 0.0863825912475586, 0.08656588745117187, 0.08662528228759765, 0.08654131317138672, 0.08605388641357421, 0.08622694396972656, 0.0862955551147461, 0.08632524871826172, 
0.08606412506103515, 0.1738434600830078, 0.0866170883178711, 0.08668978881835937, 0.0868823013305664, 0.08633548736572266, 0.08676761627197266, 0.08665599822998046, 0.08651468658447266, 0.08632012939453125, 0.08640306854248046, 0.08641433715820312, 0.0875315170288086, 0.08692838287353516, 0.0863477783203125, 0.0873861083984375, 0.08702054595947266, 0.08652390289306641, 0.08673382568359375, 0.08648191833496094, 0.0868485107421875, 0.08659967803955078, 0.0865249252319336, 0.08798515319824218, 0.08668672180175781, 0.0869713897705078, 0.08641024017333984, 0.08619417572021484, 0.08626278686523438, 0.08664371490478516, 0.0865577621459961, 0.08645420837402344, 0.08650342559814453, 0.08614502716064452, 0.0860057601928711, 0.08658636474609376, 0.08687513732910156, 0.08634572601318359, 0.08673484802246094, 0.086614013671875, 0.08654745483398438, 0.08647782135009766, 0.08652288055419922, 0.08698777770996094, 0.08646348571777343, 0.08669286346435547, 0.08713728332519531, 0.08633344268798829, 0.08693965148925781, 0.0867000961303711, 0.08665286254882812, 0.08626483154296875, 0.0867430419921875, 0.08634982299804687, 0.08623616027832032, 0.0861685791015625, 0.08641228485107422, 0.08606719970703125, 0.08660889434814453, 0.08628736114501953, 0.08668057250976563, 0.08639794921875, 0.08680242919921875, 0.08641741180419922, 0.1737769012451172, 0.08605696105957031, 0.08600371551513672, 0.08643071746826173, 0.08781107330322266, 0.0866519012451172, 0.08646246337890626, 0.08642668914794922, 0.08613164520263672, 0.08653823852539062, 0.08642047882080078, 0.0866324462890625, 0.08596377563476562, 0.0863815689086914, 0.08631398773193359, 0.08695295715332031, 0.08793907165527344, 0.08786227416992187, 0.08662630462646484, 0.08779571533203125, 0.08724275207519532, 0.08729190063476562, 0.0877844467163086, 0.08636006164550782, 0.0858818588256836, 0.08638771057128906, 0.086949951171875, 0.08719251251220703, 0.08616550445556641, 0.08660377502441406, 0.08642457580566407, 0.08629452514648438, 0.0865054702758789, 0.08606105804443359, 0.08653311920166015, 0.08622796630859375, 0.0860200958251953, 0.08642253112792969, 0.08642355346679688, 0.0863641586303711, 0.08620851135253907, 0.0867583999633789, 0.08634880065917969, 0.08620851135253907, 0.08670003509521484, 0.08732057952880859, 0.08644608306884766, 0.08616960144042969, 0.08388813018798828, 0.08584806060791016, 0.08618905639648437, 0.0861788787841797, 0.0874474868774414, 0.08617574310302735, 0.08607027435302735, 0.0862064666748047, 0.08648703765869141, 0.08633241271972657, 0.08608153533935547, 0.08628838348388672, 0.08599142456054687, 0.08555622100830078, 0.08707379150390625, 0.1688248291015625, 0.08354815673828125, 0.08360345458984375, 0.08642150115966797, 0.08649830627441406, 0.0861808624267578, 0.08627609252929687, 0.08619622039794922, 0.0862033920288086, 0.08743424224853516, 0.08643276977539062, 0.08662425231933593, 0.08616754913330078, 0.08617164611816407, 0.0859135971069336, 0.08686489868164063, 0.08547532653808594, 0.08608972930908203, 0.08635596466064453, 0.08650649261474609, 0.08669593811035156, 0.0856258544921875, 0.08557164764404297, 0.08602413177490234, 0.08610304260253906, 0.08613478088378906, 0.0858818588256836, 0.08467046356201172, 0.08328396606445312, 0.08320614624023437, 0.08348467254638672, 0.0833433609008789, 0.08343142700195312, 0.08517120361328125, 0.08331775665283203, 0.0835225601196289, 0.08325939178466797, 0.0829675521850586, 0.08333618927001953, 0.0835962905883789, 0.08292864227294922, 0.08303923034667969, 0.0840273895263672, 0.08592588806152343, 
0.08553369903564453, 0.08563097381591797, 0.08609180450439453, 0.08550192260742187, 0.08622489929199219, 0.08613069152832031, 0.08599961853027344, 0.08535244750976563, 0.0853544921875, 0.08618495941162109, 0.08482406616210937, 0.08524288177490234, 0.08417894744873047, 0.08835072326660157, 0.08645426940917969, 0.08594847869873047, 0.08594425964355469, 0.08639794921875, 0.0862003173828125, 0.17308876037597656, 0.0861470718383789, 0.08535763549804687, 0.08573228454589844, 0.08777318572998047, 0.0860579833984375, 0.08572621154785157, 0.086181884765625, 0.08619213104248047, 0.08773222351074218, 0.08648397064208985, 0.08551526641845703, 0.08338022613525391, 0.08344371032714844, 0.08378880310058594, 0.08320511627197266, 0.08437350463867188, 0.08477490997314453, 0.08600780487060547, 0.08639385223388672, 0.08647065734863281, 0.08623411560058594, 0.08597503662109375, 0.08415744018554687, 0.08344166564941406, 0.08589516448974609, 0.08594534301757813, 0.08557977294921874, 0.08637545776367188, 0.08613065338134765, 0.08599756622314453, 0.08644300842285156, 0.08653823852539062, 0.08349593353271484, 0.08332083129882813, 0.08336589050292968, 0.08353075408935547, 0.08340172576904296, 0.08323072052001954, 0.08343756866455078, 0.08349286651611328, 0.0832890853881836, 0.0835041275024414, 0.08353897857666015, 0.08367203521728515, 0.08369664001464844, 0.08330963134765625, 0.08333510589599609, 0.08366182708740234, 0.08354617309570313, 0.08324396514892578, 0.08303308868408203, 0.08764422607421875, 0.084233154296875, 0.08548761749267578, 0.08656588745117187, 0.08594841766357422, 0.08611126708984375, 0.08648700714111328, 0.08606719970703125, 0.08635289764404297, 0.08466329956054687, 0.08477286529541016, 0.17457868957519532, 0.08449132537841797, 0.08596371459960937, 0.08606515502929687, 0.08642355346679688, 0.08605696105957031, 0.08621673583984375, 0.08608969879150391, 0.08441241455078125, 0.08336179351806641, 0.08336179351806641, 0.08567501068115234, 0.08582860565185547, 0.083346435546875, 0.08477388763427735, 0.0841707534790039, 0.08664268493652344, 0.08617881774902343, 0.0860057601928711, 0.08586239624023438, 0.08551219177246094, 0.08600678253173828, 0.08574361419677734, 0.08587980651855469, 0.0835389404296875, 0.08575692749023438, 0.08598118591308594, 0.085970947265625, 0.08765235137939453, 0.08452095794677734, 0.08327577972412109, 0.08366387176513672, 0.08551423645019532, 0.08583475494384765, 0.08568831634521484, 0.08532889556884765, 0.0860057601928711, 0.08593408203125, 0.08623308563232422, 0.08616754913330078, 0.08630989074707031, 0.08582454681396484, 0.08615420532226563, 0.08506265258789063, 0.08292249298095702, 0.08326246643066407, 0.08425062561035156, 0.08737894439697266, 0.08324813079833984, 0.08346521759033203, 0.08319385528564453, 0.08310169219970703, 0.08324198150634765, 0.08323072052001954, 0.08317030334472657, 0.08586962890625, 0.08609273529052734, 0.08769741058349609, 0.08657305908203125, 0.08599244689941406, 0.08583679962158203, 0.08583782196044921, 0.08327680206298828, 0.17533644104003906, 0.08587776184082031, 0.08605081939697265, 0.0857548828125, 0.08574873352050781, 0.0859535369873047, 0.08613581085205078, 0.08587161254882812, 0.08592998504638671, 0.08673795318603515, 0.08629654693603515, 0.08595455932617188, 0.08510157012939454, 0.08576204681396485, 0.08615424346923828, 0.08628736114501953, 0.08378880310058594, 0.08452607727050782, 0.08607129669189453, 0.0862586898803711, 0.08617062377929688, 0.08422502136230468, 0.08376422119140625, 0.0832890853881836, 0.08342630767822265, 0.0846397476196289, 
0.08346630096435546, 0.08343545532226562, 0.08319590759277344, 0.08344371032714844, 0.08367001342773438, 0.0833259506225586, 0.0832511978149414, 0.08327372741699218, 0.0835389404296875, 0.0833966064453125, 0.08333004760742188, 0.0835758056640625, 0.08591462707519532, 0.08338739013671875, 0.08385945892333985, 0.08679219055175781, 0.0835594253540039, 0.08360345458984375, 0.08362598419189453, 0.08332288360595703, 0.08334137725830078, 0.08309037017822266, 0.08337209320068359, 0.08316102600097657, 0.08313855743408204, 0.08323993682861328, 0.08327168273925781, 0.08664064025878906, 0.08615526580810547, 0.08606719970703125, 0.08610099029541016, 0.08608665466308593, 0.08620543670654297, 0.0832573471069336, 0.08329011535644532, 0.08356147003173828, 0.08407449340820312]",tokens/s,11.50335704469939,,,,,,,, -4bit-awq-gemm-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,EleutherAI/pythia-1.3b,EleutherAI/pythia-1.3b,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.42.3,,0.31.0,,,,1.20.0,,,,0.11.1,,,,,,,,,,,,,,,,,,,,,,,,,,,MB,1587.982336,2204.63104,0.0,1558.183936,1460.94592,s,10,1.2122179565429687,0.12122179565429687,0.0013469234018574053,0.12083382415771485,0.12219305953979492,0.12357574729919434,0.12468189750671387,"[0.12495843505859375, 0.12104054260253906, 0.12057548522949219, 0.12037449645996094, 0.12039497375488281, 0.11999222564697265, 0.12062710571289062, 0.12108892822265625, 0.12127996826171875, 0.12188579559326172]",tokens/s,2111.8314459725275,kWh,1.4224020360165804e-06,7.790973572291746e-07,6.068685410500128e-06,8.270184803745883e-06,tokens/kWh,30954568.25632818,MB,1587.982336,2204.63104,0.0,1558.183936,1532.626432,s,10,69.92014404296876,6.992014404296876,0.006727859419007241,6.99023779296875,7.000593994140624,7.004389770507812,7.007426391601562,"[6.99975048828125, 7.008185546875, 6.98858837890625, 6.986208984375, 6.9851357421875, 6.99374658203125, 6.98664404296875, 6.99031103515625, 6.99016455078125, 6.99140869140625]",tokens/s,9.010278920661827,kWh,8.274091382308927e-05,4.5348256432790435e-05,0.0003488542096386977,0.00047694337989457746,tokens/kWh,132091.15097461964,,s,629,70.89538343048102,0.112711261415709,0.014361343558964127,0.11091452789306641,0.11135979766845704,0.11152422943115234,0.2312941162109375,"[0.11260415649414063, 0.11204608154296875, 0.11146956634521485, 0.11088588714599609, 0.1111009292602539, 0.11083468627929688, 0.11089715576171875, 0.11064217376708985, 0.11076403045654297, 0.11082137298583984, 0.1107609634399414, 0.11087155151367188, 0.11063091278076172, 0.11059814453125, 0.11085517120361328, 0.11080703735351563, 0.11068621063232421, 0.11079782104492188, 0.11078246307373046, 0.11077222442626954, 0.11091353607177734, 0.11159961700439452, 0.11078144073486328, 0.11073638153076172, 0.11068006134033204, 0.11083161926269532, 0.11083673858642579, 0.11088690948486328, 0.11075276947021484, 0.11083673858642579, 0.11077632141113282, 0.11068621063232421, 0.11071183776855469, 0.11085411071777344, 0.1114419174194336, 0.11082854461669922, 0.11074559783935548, 0.11080089569091797, 0.1116968994140625, 0.11094118499755859, 
0.11080806732177734, 0.11078758239746093, 0.11085107421875, 0.11109375762939454, 0.11160063934326171, 0.11135692596435547, 0.11146137237548828, 0.1114777603149414, 0.11146444702148438, 0.11149005126953125, 0.11145423889160157, 0.1115258560180664, 0.11139177703857422, 0.11129033660888672, 0.11130060577392578, 0.11138253021240234, 0.111531005859375, 0.11130368041992188, 0.11148185729980468, 0.11152178955078125, 0.11143679809570313, 0.11145728302001953, 0.23240191650390624, 0.11125657653808593, 0.11134259033203125, 0.11128422546386718, 0.11129036712646484, 0.1112063980102539, 0.111388671875, 0.111494140625, 0.11130060577392578, 0.11108249664306641, 0.11072716522216797, 0.11124531555175782, 0.11140300750732422, 0.11135897827148437, 0.11131391906738282, 0.11140505981445313, 0.11139686584472656, 0.11118386840820313, 0.11124326324462891, 0.11125350189208984, 0.11120230102539062, 0.11123506927490234, 0.1113855972290039, 0.11118592071533204, 0.11209420776367188, 0.11132621002197265, 0.11162521362304688, 0.11137433624267579, 0.11121459197998047, 0.11121868896484376, 0.11114701080322266, 0.11115007781982422, 0.11130572509765625, 0.1112442855834961, 0.11130060577392578, 0.11130879974365235, 0.11134054565429688, 0.11200409698486329, 0.11145523071289062, 0.11124121856689453, 0.1113733139038086, 0.11126783752441406, 0.11100364685058593, 0.11117977905273438, 0.11108147430419922, 0.111246337890625, 0.11108761596679688, 0.11105996704101563, 0.11184435272216797, 0.1111009292602539, 0.11088896179199219, 0.11092991638183594, 0.11088076782226562, 0.110919677734375, 0.11087155151367188, 0.11093504333496093, 0.11084390258789062, 0.1109719009399414, 0.11107532501220703, 0.11094834899902344, 0.11092889404296875, 0.11180748748779297, 0.11107635498046875, 0.23147418212890625, 0.11071794891357421, 0.11122892761230468, 0.11095040130615234, 0.11085721588134766, 0.11081318664550781, 0.11099545288085938, 0.11087974548339843, 0.11081625366210937, 0.11067084503173828, 0.11092377471923828, 0.1108449249267578, 0.1108848648071289, 0.11079167938232422, 0.11096473693847657, 0.11096063995361329, 0.1108705291748047, 0.11078348541259765, 0.11086438751220704, 0.11078860473632812, 0.1108479995727539, 0.110814208984375, 0.11086540985107422, 0.11114393615722656, 0.11088076782226562, 0.11085004425048828, 0.11089305877685547, 0.11083161926269532, 0.11099545288085938, 0.11088697814941406, 0.11098617553710938, 0.11086438751220704, 0.11098419189453125, 0.11145216369628906, 0.1108828125, 0.11083366394042969, 0.11092684936523438, 0.11094425964355469, 0.110814208984375, 0.11094528198242187, 0.1110087661743164, 0.1107927017211914, 0.11096678161621094, 0.11096678161621094, 0.11110297393798828, 0.11113471984863281, 0.11093913269042968, 0.11126271820068359, 0.11096371459960938, 0.11096166229248047, 0.11107737731933594, 0.11087872314453125, 0.11096985626220703, 0.11090943908691406, 0.11095859527587891, 0.110887939453125, 0.11089417266845703, 0.11090525054931641, 0.11086540985107422, 0.11097702026367187, 0.11102207946777344, 0.11098214721679688, 0.11105177307128906, 0.23123660278320313, 0.1107066879272461, 0.11069132995605468, 0.11077017974853516, 0.1106851806640625, 0.11081011199951171, 0.1107988510131836, 0.11065446472167968, 0.11082444763183594, 0.11065446472167968, 0.1107558364868164, 0.11116544342041015, 0.11103641510009765, 0.11059302520751953, 0.1107240982055664, 0.11067903900146485, 0.11058995056152343, 0.11075788879394531, 0.11065856170654297, 0.11070771026611329, 0.11082342529296875, 0.11079065704345703, 0.11078144073486328, 
0.11115724945068359, 0.1109401626586914, 0.11073126220703125, 0.11080089569091797, 0.11085004425048828, 0.11101388549804687, 0.11086131286621094, 0.11091865539550781, 0.11091251373291015, 0.11143782043457032, 0.11103030395507812, 0.11083977508544922, 0.1108449249267578, 0.11086438751220704, 0.11087155151367188, 0.11083570861816407, 0.11086847686767579, 0.11088690948486328, 0.1110804443359375, 0.11092889404296875, 0.11091046142578125, 0.1115863037109375, 0.11088998413085938, 0.11093504333496093, 0.11090739440917968, 0.11090636444091796, 0.11097702026367187, 0.11095142364501953, 0.11091046142578125, 0.110919677734375, 0.11080191802978516, 0.11134054565429688, 0.11100774383544922, 0.11119206237792968, 0.11088690948486328, 0.11100364685058593, 0.11100057220458984, 0.11100057220458984, 0.11090841674804687, 0.11100466918945312, 0.23131648254394532, 0.11071078491210938, 0.11071897888183593, 0.11086233520507813, 0.110740478515625, 0.11082444763183594, 0.11087564849853515, 0.1107240982055664, 0.11075993347167969, 0.1107589111328125, 0.11076914978027344, 0.11082649230957031, 0.11070873260498047, 0.11081728363037109, 0.1108111343383789, 0.11080601501464844, 0.11072306823730468, 0.11097907257080078, 0.1107435531616211, 0.11080703735351563, 0.11061555480957032, 0.11081215667724609, 0.11066265869140625, 0.11081523132324218, 0.11085926055908203, 0.11076300811767578, 0.1107609634399414, 0.11074457550048829, 0.1105950698852539, 0.11098009490966797, 0.11077836608886718, 0.11077632141113282, 0.11076812744140625, 0.11072614288330078, 0.11080397033691407, 0.11082240295410156, 0.11096166229248047, 0.11143270111083985, 0.11085209655761719, 0.11084595489501953, 0.11080191802978516, 0.11091046142578125, 0.11078758239746093, 0.1108746566772461, 0.11085001373291016, 0.11092991638183594, 0.1108111343383789, 0.11094834899902344, 0.1108695068359375, 0.11100774383544922, 0.11099954986572266, 0.1109381103515625, 0.1110487060546875, 0.11092173004150391, 0.11100262451171874, 0.11102413177490235, 0.11090431976318359, 0.11215052795410156, 0.11126271820068359, 0.11105894470214844, 0.11091763305664062, 0.11097087860107421, 0.110919677734375, 0.23087001037597657, 0.11094732666015625, 0.11078963470458984, 0.11091149139404297, 0.11068927764892578, 0.11086438751220704, 0.11078656005859375, 0.11087872314453125, 0.11075993347167969, 0.11079782104492188, 0.11075071716308593, 0.11108659362792969, 0.11072306823730468, 0.11085414123535156, 0.1107558364868164, 0.11082649230957031, 0.11118899536132812, 0.11099954986572266, 0.1111203842163086, 0.11085107421875, 0.11103948974609375, 0.11162419128417969, 0.11094528198242187, 0.11086438751220704, 0.1111756820678711, 0.11122380828857421, 0.1109381103515625, 0.11092582702636719, 0.11080806732177734, 0.11080191802978516, 0.11078758239746093, 0.11109792327880859, 0.11111417388916016, 0.11162726593017579, 0.11107225799560547, 0.11119206237792968, 0.11101081848144531, 0.11101907348632813, 0.11168351745605469, 0.11117772674560547, 0.11115007781982422, 0.11107328033447265, 0.11089510345458985, 0.11141017913818359, 0.11118694305419922, 0.11109478759765624, 0.11088690948486328, 0.11098111724853515, 0.1110149154663086, 0.11100774383544922, 0.11092173004150391, 0.11092991638183594, 0.11092889404296875, 0.11095552062988281, 0.1108848648071289, 0.11096166229248047, 0.11103743743896484, 0.11111219024658203, 0.11098419189453125, 0.11095552062988281, 0.11087974548339843, 0.1110302734375, 0.11111424255371094, 0.23252377319335937, 0.11073945617675782, 0.11071078491210938, 0.1107619857788086, 
0.11067596435546875, 0.11072512054443359, 0.11079167938232422, 0.11081215667724609, 0.11079167938232422, 0.11069951629638672, 0.11086643218994141, 0.11136307525634766, 0.11100774383544922, 0.1108479995727539, 0.11097293090820312, 0.11082752227783203, 0.1111695327758789, 0.11093408203125, 0.11092269134521485, 0.11094118499755859, 0.11074457550048829, 0.11095040130615234, 0.1107957763671875, 0.11132006072998046, 0.11097805023193359, 0.11088588714599609, 0.11095449829101563, 0.1108828125, 0.11095040130615234, 0.11084185791015624, 0.1109032974243164, 0.11080300903320313, 0.11090118408203126, 0.1108479995727539, 0.11090022277832032, 0.11081932830810547, 0.11081932830810547, 0.11082035064697265, 0.11080806732177734, 0.11085721588134766, 0.11085619354248047, 0.11086335754394532, 0.11097087860107421, 0.11086438751220704, 0.11089817810058594, 0.11083878326416016, 0.11082342529296875, 0.11165081787109375, 0.1110835189819336, 0.1108828125, 0.11089202880859375, 0.11087769317626953, 0.11091353607177734, 0.11080499267578126, 0.11088690948486328, 0.11074150085449219, 0.11088690948486328, 0.11083776092529297, 0.110993408203125, 0.11085004425048828, 0.11094834899902344, 0.11091661071777344, 0.11094937896728516, 0.23204966735839844, 0.1106872329711914, 0.11069849395751953, 0.11082956695556641, 0.11078144073486328, 0.11082342529296875, 0.11082854461669922, 0.11079376220703124, 0.11085718536376953, 0.11064422607421875, 0.11098521423339844, 0.11083878326416016, 0.11085926055908203, 0.11075686645507812, 0.11085311889648437, 0.11100364685058593, 0.11110502624511719, 0.11081011199951171, 0.11087872314453125, 0.11080499267578126, 0.11087974548339843, 0.1108326416015625, 0.11086335754394532, 0.11092889404296875, 0.11091558074951172, 0.1112279052734375, 0.11199488067626953, 0.11085414123535156, 0.11087462615966796, 0.11084902191162109, 0.11083673858642579, 0.11081523132324218, 0.1108479995727539, 0.11087769317626953, 0.11084185791015624, 0.11078656005859375, 0.11094732666015625, 0.11140509033203125, 0.11115721893310547, 0.11089920043945313, 0.11119821166992187, 0.11126681518554687, 0.11108249664306641, 0.11089612579345703, 0.11095552062988281, 0.11087155151367188, 0.11136819458007813, 0.11114701080322266, 0.11097907257080078, 0.1109227523803711, 0.11113369750976562, 0.11089612579345703, 0.11105280303955078, 0.11134566497802735, 0.11094322967529296, 0.11105689239501954, 0.11099750518798829, 0.11082035064697265, 0.11092377471923828, 0.11092173004150391, 0.11097705841064454, 0.110930908203125, 0.11121868896484376, 0.2321387481689453, 0.11072819519042969, 0.11069235229492187, 0.11076505279541016, 0.11068006134033204, 0.1108674545288086, 0.11114189147949219, 0.11086029052734375, 0.11097805023193359, 0.11142041778564453, 0.11087257385253907, 0.11068313598632812, 0.11082854461669922, 0.11079782104492188, 0.11100057220458984, 0.11087974548339843, 0.11062271881103515, 0.1111910400390625, 0.11069951629638672, 0.11069747161865234, 0.11072000122070312, 0.11083570861816407, 0.11088998413085938, 0.11100978851318359, 0.11074253082275391, 0.11108249664306641, 0.11088896179199219, 0.11087155151367188, 0.11092787170410157, 0.1107558364868164, 0.11087059020996094, 0.11099231719970704, 0.11096268463134766, 0.11093504333496093, 0.11110399627685547, 0.11086029052734375, 0.1108111343383789, 0.11096268463134766, 0.11103948974609375, 0.11202969360351563, 0.11105792236328126, 0.1110292510986328, 0.11090841674804687, 0.11127295684814453, 0.11102105712890625, 0.1109012451171875, 0.11093401336669922, 0.11088998413085938, 0.11083478546142578, 
0.1109031982421875, 0.11108454132080078, 0.111067138671875, 0.11216793823242187, 0.11090943908691406, 0.1109012451171875, 0.11096268463134766, 0.11092070770263672, 0.11093299102783204, 0.11096575927734376, 0.11093401336669922, 0.11090841674804687, 0.11118182373046875, 0.11094528198242187, 0.23299072265625, 0.1107957763671875, 0.11074969482421874, 0.11083776092529297, 0.11094118499755859, 0.11186585235595703, 0.11088690948486328, 0.11068211364746093, 0.11145830535888672, 0.11130879974365235, 0.11078348541259765, 0.11109069061279297, 0.11089408111572266, 0.11126681518554687, 0.1109381103515625, 0.1113169937133789, 0.11098009490966797, 0.11079065704345703, 0.11102003479003907, 0.11084902191162109, 0.11073535919189453, 0.11083878326416016, 0.11077120208740235, 0.11114393615722656, 0.11094528198242187, 0.11090943908691406, 0.11164876556396484, 0.11096575927734376, 0.11078656005859375, 0.11130572509765625, 0.11101900482177735, 0.11106201934814453, 0.11099852752685548, 0.11108454132080078, 0.11094118499755859, 0.11127295684814453, 0.11090431976318359, 0.11078656005859375, 0.11078451538085937, 0.11092889404296875, 0.1109032974243164, 0.11096575927734376, 0.11088179016113281, 0.1108848648071289, 0.11105996704101563, 0.11092684936523438, 0.11082546997070312, 0.11094528198242187, 0.11090022277832032, 0.11093401336669922, 0.11086137390136719, 0.11093910217285156, 0.11091452789306641, 0.1108674545288086, 0.11074150085449219, 0.11088384246826172, 0.1108705291748047, 0.11097907257080078, 0.11091149139404297, 0.11094118499755859, 0.11102003479003907, 0.11086335754394532, 0.11101081848144531]",tokens/s,8.872227916177204,,,,,,,, -4bit-awq-gemm-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,Qwen/Qwen1.5-72B,Qwen/Qwen1.5-72B,cuda,0,42,,,True,,,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run - report = scenario.run(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run - self.run_model_loading_tracking(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking - backend.load() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load - self.load_transformers_model() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model - 
self.load_transformers_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights - self.load_transformers_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained - self.pretrained_model = self.automodel_loader.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4034, in from_pretrained - dispatch_model(model, **device_map_kwargs) - File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 494, in dispatch_model - model.to(device) - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 2905, in to - return super().to(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1174, in to - return self._apply(convert) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply - module._apply(fn) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply - module._apply(fn) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply - module._apply(fn) - [Previous line repeated 2 more times] - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 854, in _apply - self._buffers[key] = fn(buf) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1160, in convert - return t.to( -torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 96.00 MiB. GPU 0 has a total capacity of 22.18 GiB of which 68.50 MiB is free. Process 67696 has 22.11 GiB memory in use. Of the allocated memory 21.86 GiB is allocated by PyTorch, and 10.74 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) - -",qwen2,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemm-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,EleutherAI/pythia-1.4b,EleutherAI/pythia-1.4b,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.42.3,,0.31.0,,,,1.20.0,,,,0.11.1,,,,,,,,,,,,,,,,,,,,,,,,,,,MB,1610.30144,2204.63104,0.0,1558.183936,1460.94592,s,10,1.21095556640625,0.12109555664062499,0.0010641547634299327,0.12088590621948242,0.12162080078125001,0.1228196014404297,0.12377864196777344,"[0.12401840209960938, 0.12120333099365234, 0.120457763671875, 0.12045849609375, 0.12041088104248047, 0.12001967620849609, 0.12061344146728516, 0.12115837097167968, 0.12126080322265625, 0.12135440063476563]",tokens/s,2114.032976120921,kWh,1.4179196839452421e-06,7.768791417479628e-07,6.063890433118708e-06,8.258689258811912e-06,tokens/kWh,30997654.951946687,MB,1610.30144,2204.63104,0.0,1558.183936,1532.626432,s,10,69.89488818359375,6.989488818359375,0.004243103217668666,6.988570068359375,6.994356689453125,6.996513305664062,6.998238598632812,"[6.9874970703125, 6.99032666015625, 6.998669921875, 6.98964306640625, 6.98713720703125, 6.99221142578125, 6.99387744140625, 6.98657861328125, 6.9846640625, 6.98428271484375]",tokens/s,9.01353470006521,kWh,8.264319939233324e-05,4.5294450197869364e-05,0.00034850786544628274,0.0004764455150364854,tokens/kWh,132229.18048703967,,s,629,70.86934743499755,0.11266986873608516,0.014351668547378101,0.11087564849853515,0.11130736846923828,0.11157073669433594,0.2314320635986328,"[0.11316531372070313, 0.11251097869873047, 0.11209014129638672, 0.1110199966430664, 0.11069747161865234, 0.11074562835693359, 0.11080188751220703, 0.11064524841308594, 0.11066162872314453, 0.11064832305908204, 0.11070259094238281, 0.11060224151611328, 0.11069235229492187, 0.11098521423339844, 0.11078963470458984, 0.11066470336914062, 0.11059200286865234, 0.1106851806640625, 0.11073945617675782, 0.11071590423583984, 0.11070976257324219, 0.11080191802978516, 0.11061965179443359, 0.11073126220703125, 0.1108305892944336, 0.11098111724853515, 0.11077222442626954, 0.11084595489501953, 0.1107240982055664, 0.11080294036865235, 0.11078144073486328, 0.1107988510131836, 0.11058589172363281, 0.1108244171142578, 0.11086029052734375, 0.11067801666259766, 0.11069136047363282, 0.11075682830810547, 0.11072306823730468, 0.11083468627929688, 0.11062067413330078, 0.1107927017211914, 0.11077324676513672, 0.1107435531616211, 0.11105484771728516, 0.11073535919189453, 0.11071078491210938, 0.11087257385253907, 0.1108848648071289, 0.11094322967529296, 0.11104768371582031, 0.11100262451171874, 0.11098623657226563, 0.11090943908691406, 0.11087564849853515, 0.110814208984375, 0.11086438751220704, 0.11091763305664062, 0.11087155151367188, 0.11090636444091796, 0.11095859527587891, 0.11091661071777344, 0.232015869140625, 0.1105264663696289, 0.11075379180908203, 0.11073433685302735, 0.11063603210449219, 0.11060736083984375, 0.1107957763671875, 
0.11059814453125, 0.1106513900756836, 0.1106165771484375, 0.11049267578125, 0.11155967712402344, 0.11085414123535156, 0.11077120208740235, 0.11081728363037109, 0.11089510345458985, 0.11144294738769531, 0.11074559783935548, 0.11073126220703125, 0.11089100646972656, 0.11055308532714844, 0.11082546997070312, 0.11138662719726562, 0.11091353607177734, 0.11087462615966796, 0.11142348480224609, 0.11084697723388671, 0.11085517120361328, 0.11074253082275391, 0.11189965057373047, 0.11119615936279297, 0.11109990692138672, 0.11119411468505859, 0.11075379180908203, 0.11090431976318359, 0.11084697723388671, 0.1111357421875, 0.11090739440917968, 0.11068109130859374, 0.11095552062988281, 0.11090841674804687, 0.11090636444091796, 0.11058073425292969, 0.11056639862060547, 0.11075788879394531, 0.1106534423828125, 0.11183309173583984, 0.11085107421875, 0.11081318664550781, 0.11086335754394532, 0.11153305816650391, 0.11171635437011719, 0.11179724884033203, 0.11118284606933594, 0.11113676452636718, 0.11101900482177735, 0.11095142364501953, 0.1110282211303711, 0.11115731048583985, 0.11110700988769531, 0.11107020568847656, 0.11094425964355469, 0.11104051208496094, 0.23170252990722656, 0.11086438751220704, 0.11075379180908203, 0.11077324676513672, 0.11128524780273437, 0.11143679809570313, 0.11095244598388672, 0.11065650939941406, 0.11109990692138672, 0.1110149154663086, 0.1110282211303711, 0.11082240295410156, 0.11086847686767579, 0.11093196868896485, 0.11128832244873046, 0.11095040130615234, 0.11104972839355469, 0.11132723236083984, 0.11132621002197265, 0.11095244598388672, 0.11086233520507813, 0.11052748870849609, 0.11109990692138672, 0.11152793884277344, 0.11081523132324218, 0.11071488189697265, 0.11085209655761719, 0.1112074203491211, 0.11076710510253907, 0.11094220733642578, 0.11125657653808593, 0.11104153442382812, 0.11101593780517578, 0.1107589111328125, 0.1108828125, 0.11091865539550781, 0.11094118499755859, 0.11155353546142578, 0.11141836547851562, 0.11086131286621094, 0.11138457489013671, 0.11153817749023437, 0.11202867126464844, 0.11150035095214844, 0.11127289581298828, 0.110814208984375, 0.11091455841064453, 0.11140914916992188, 0.11102617645263672, 0.11109072113037109, 0.11095037078857421, 0.1117276153564453, 0.11129241943359375, 0.11085517120361328, 0.11174604797363281, 0.11111321258544922, 0.11094118499755859, 0.11088076782226562, 0.11153305816650391, 0.11111219024658203, 0.11100364685058593, 0.11089612579345703, 0.11108659362792969, 0.23120793151855468, 0.11064934539794923, 0.11062374114990234, 0.11083673858642579, 0.11067494201660157, 0.11077836608886718, 0.11070771026611329, 0.11060428619384766, 0.11080601501464844, 0.11074969482421874, 0.11075379180908203, 0.11072819519042969, 0.11140505981445313, 0.11159654235839844, 0.11136716461181641, 0.11071282958984376, 0.1108705291748047, 0.11113164520263671, 0.11083161926269532, 0.11081830596923828, 0.11085721588134766, 0.1109719009399414, 0.1109401626586914, 0.11103539276123046, 0.1110149154663086, 0.11100057220458984, 0.11097395324707031, 0.11096781158447265, 0.110993408203125, 0.11080806732177734, 0.11181362915039063, 0.11124121856689453, 0.11092582702636719, 0.11085517120361328, 0.11085311889648437, 0.11080294036865235, 0.11073638153076172, 0.11090431976318359, 0.110919677734375, 0.11091558074951172, 0.11097395324707031, 0.11094528198242187, 0.11098316955566406, 0.11123609924316406, 0.11135488128662109, 0.11103846740722656, 0.11125350189208984, 0.11107635498046875, 0.11111219024658203, 0.11118284606933594, 0.11092479705810547, 0.11090431976318359, 
0.11093401336669922, 0.1108111343383789, 0.11092070770263672, 0.11098214721679688, 0.11090943908691406, 0.11086847686767579, 0.11084595489501953, 0.11083980560302735, 0.1109381103515625, 0.11087462615966796, 0.11084185791015624, 0.23191654968261718, 0.11074253082275391, 0.1107558364868164, 0.11099954986572266, 0.11119821166992187, 0.11085721588134766, 0.11090022277832032, 0.1106175994873047, 0.11069951629638672, 0.11075174713134765, 0.11111014556884766, 0.11087564849853515, 0.11088384246826172, 0.11175116729736329, 0.11096575927734376, 0.11088896179199219, 0.11072512054443359, 0.1114224624633789, 0.11089100646972656, 0.11084390258789062, 0.11089408111572266, 0.11085004425048828, 0.1106012191772461, 0.11063295745849609, 0.11075276947021484, 0.11079065704345703, 0.11080397033691407, 0.11073945617675782, 0.11084902191162109, 0.11068621063232421, 0.11091661071777344, 0.11076300811767578, 0.11077638244628907, 0.11071379089355468, 0.11078246307373046, 0.11075276947021484, 0.11077120208740235, 0.11068621063232421, 0.11095654296875, 0.11124121856689453, 0.11106816101074218, 0.11082854461669922, 0.11157810974121093, 0.11099852752685548, 0.11090943908691406, 0.11087564849853515, 0.11085926055908203, 0.11091251373291015, 0.11085823822021484, 0.11085517120361328, 0.11084390258789062, 0.11089920043945313, 0.110887939453125, 0.1107927017211914, 0.11096883392333984, 0.11088697814941406, 0.11091756439208984, 0.11122278594970703, 0.11093606567382812, 0.1109227523803711, 0.11093094635009766, 0.1110118408203125, 0.11085721588134766, 0.23100518798828126, 0.11085721588134766, 0.11103437042236328, 0.11103846740722656, 0.11079373168945313, 0.11100672149658203, 0.11120025634765625, 0.11079373168945313, 0.11077632141113282, 0.11078246307373046, 0.11091251373291015, 0.111067138671875, 0.11082035064697265, 0.11113983917236328, 0.11079167938232422, 0.11095449829101563, 0.11072921752929688, 0.11073945617675782, 0.111098876953125, 0.11083161926269532, 0.11117874908447266, 0.1109964828491211, 0.11097702026367187, 0.11084185791015624, 0.11079475402832031, 0.11084902191162109, 0.11168972778320313, 0.11109683227539062, 0.111351806640625, 0.11110707092285156, 0.11155865478515625, 0.11136409759521484, 0.11103641510009765, 0.1109381103515625, 0.11078348541259765, 0.11093299102783204, 0.11105177307128906, 0.11092684936523438, 0.11187814331054688, 0.11165286254882813, 0.11094220733642578, 0.11094630432128906, 0.11073843383789063, 0.11088690948486328, 0.110993408203125, 0.11093299102783204, 0.11133235168457031, 0.11102003479003907, 0.1108674545288086, 0.11093196868896485, 0.11087974548339843, 0.11092377471923828, 0.11084390258789062, 0.11086029052734375, 0.11091865539550781, 0.11089202880859375, 0.11088384246826172, 0.11085619354248047, 0.1107988510131836, 0.1108674545288086, 0.11078041839599609, 0.11094834899902344, 0.11084390258789062, 0.23151922607421874, 0.11238092803955078, 0.11110195159912109, 0.11081215667724609, 0.11071180725097657, 0.11068927764892578, 0.11093196868896485, 0.1107957763671875, 0.11103743743896484, 0.11083062744140625, 0.11077935791015625, 0.11092377471923828, 0.1107988510131836, 0.11103437042236328, 0.11119308471679687, 0.11070873260498047, 0.11073433685302735, 0.11069439697265625, 0.11098623657226563, 0.11105382537841797, 0.11100466918945312, 0.11083468627929688, 0.11070259094238281, 0.1114286117553711, 0.11100672149658203, 0.11114701080322266, 0.11124531555175782, 0.11091558074951172, 0.11108659362792969, 0.11109785461425781, 0.11102003479003907, 0.11099033355712891, 0.11115113830566406, 
0.11081520080566407, 0.11091558074951172, 0.1108695068359375, 0.11087974548339843, 0.11102719879150391, 0.11108147430419922, 0.11098009490966797, 0.11090227508544923, 0.11093094635009766, 0.11104467010498047, 0.11085004425048828, 0.11133433532714844, 0.11109785461425781, 0.11109273529052735, 0.1115494384765625, 0.11117158508300781, 0.11097395324707031, 0.11107430267333984, 0.11089510345458985, 0.11159347534179688, 0.11095654296875, 0.11123916625976563, 0.11102003479003907, 0.11120950317382812, 0.11099440002441406, 0.1108479995727539, 0.11085107421875, 0.11095040130615234, 0.11082444763183594, 0.1111900177001953, 0.23210704040527344, 0.11165692901611328, 0.11079167938232422, 0.11076812744140625, 0.11082649230957031, 0.11084902191162109, 0.11091149139404297, 0.11082137298583984, 0.11113267517089843, 0.11084697723388671, 0.11103641510009765, 0.11080089569091797, 0.1109227523803711, 0.11097087860107421, 0.11118284606933594, 0.11100569915771484, 0.11142144012451172, 0.11084902191162109, 0.11086233520507813, 0.11098419189453125, 0.11085517120361328, 0.11071590423583984, 0.11084595489501953, 0.11079065704345703, 0.11075788879394531, 0.11074969482421874, 0.11114905548095703, 0.11111116790771484, 0.11128524780273437, 0.11088896179199219, 0.11084185791015624, 0.11085414123535156, 0.11084083557128906, 0.11082649230957031, 0.11088076782226562, 0.11086847686767579, 0.11084902191162109, 0.11075379180908203, 0.11080703735351563, 0.1108326416015625, 0.11081625366210937, 0.11082854461669922, 0.1107988510131836, 0.11076300811767578, 0.11106098937988282, 0.11079065704345703, 0.11085107421875, 0.11082752227783203, 0.11089715576171875, 0.11078041839599609, 0.11082240295410156, 0.11080294036865235, 0.11087462615966796, 0.11081625366210937, 0.11089202880859375, 0.11096883392333984, 0.11079475402832031, 0.11079167938232422, 0.1108695068359375, 0.11085517120361328, 0.1108848648071289, 0.11083878326416016, 0.1109012451171875, 0.23219711303710938, 0.11054796600341797, 0.11063091278076172, 0.11097907257080078, 0.1107220458984375, 0.11073228454589844, 0.11069644927978516, 0.11064835357666016, 0.1108274917602539, 0.11064934539794923, 0.11062783813476562, 0.11064729309082032, 0.11074867248535156, 0.11066880035400391, 0.11075686645507812, 0.11128934478759765, 0.11109171295166016, 0.1118392333984375, 0.11117874908447266, 0.11070054626464844, 0.11073843383789063, 0.11083673858642579, 0.11082240295410156, 0.11079475402832031, 0.11125965118408203, 0.11074662780761718, 0.11078041839599609, 0.11130265808105469, 0.11090227508544923, 0.11071590423583984, 0.11079065704345703, 0.1107589111328125, 0.1107589111328125, 0.11080908966064452, 0.11087974548339843, 0.11083366394042969, 0.11086847686767579, 0.11106816101074218, 0.11083673858642579, 0.11082444763183594, 0.11081932830810547, 0.11074559783935548, 0.11097805023193359, 0.11082854461669922, 0.11081830596923828, 0.11076403045654297, 0.11080191802978516, 0.1108111343383789, 0.1107619857788086, 0.11075379180908203, 0.11113471984863281, 0.11089510345458985, 0.1109964828491211, 0.1112616958618164, 0.11135692596435547, 0.11087667083740234, 0.11085209655761719, 0.11084902191162109, 0.11087564849853515, 0.11095756530761719, 0.11090022277832032, 0.11075174713134765, 0.11066777801513672, 0.23221554565429686, 0.11071282958984376, 0.11071078491210938, 0.11081011199951171, 0.11064627075195313, 0.11077938842773437, 0.11075587463378907, 0.11070563507080078, 0.11075174713134765, 0.11071692657470703, 0.11072819519042969, 0.11062989044189453, 0.11059712219238281, 0.11076300811767578, 
0.11081932830810547, 0.11082035064697265, 0.11069747161865234, 0.11060018920898437, 0.11116134643554687, 0.11085107421875, 0.1106175994873047, 0.11080191802978516, 0.11060838317871094, 0.11082956695556641, 0.1108674545288086, 0.1106851806640625, 0.11128422546386718, 0.11070361328125, 0.11062579345703125, 0.11066880035400391, 0.11061248016357422, 0.11078656005859375, 0.11088902282714844, 0.11059398651123047, 0.11090943908691406, 0.11082956695556641, 0.11072614288330078, 0.11071488189697265, 0.11071180725097657, 0.11069747161865234, 0.11098419189453125, 0.1116354522705078, 0.11094118499755859, 0.11096781158447265, 0.11192729949951172, 0.11095654296875, 0.11084390258789062, 0.11088384246826172, 0.1108326416015625, 0.11087155151367188, 0.11092070770263672, 0.11140300750732422, 0.11090022277832032, 0.11150950622558593, 0.11123712158203125, 0.11087872314453125, 0.11086438751220704, 0.11093094635009766, 0.11086029052734375, 0.11086438751220704, 0.11089817810058594, 0.1110292510986328, 0.11088896179199219]",tokens/s,8.875487397099688,,,,,,,, -4bit-awq-gemm-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,facebook/xglm-7.5B,facebook/xglm-7.5B,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - self.load_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3704, in from_pretrained - config = cls._autoset_attn_implementation( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1490, in _autoset_attn_implementation - config = cls._check_and_enable_sdpa( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1656, in _check_and_enable_sdpa - raise 
ValueError( -ValueError: XGLMForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemm-sdpa,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,huggyllama/llama-30b,huggyllama/llama-30b,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.0,,0.30.1,,,,1.19.2,,,,0.11.1,,,,,,,,,,,,,,,,,,,,,,,,,,,MB,6397.865984,20902.838272,0.0,20256.391168,19273.711616,s,10,26.880439941406248,2.6880439941406253,0.0033477874655363365,2.6871883544921875,2.692009619140625,2.6933998291015624,2.6945119970703124,"[2.685803955078125, 2.6947900390625, 2.683811767578125, 2.686383544921875, 2.68633642578125, 2.68401416015625, 2.6879931640625, 2.68876318359375, 2.69170068359375, 2.690843017578125]",tokens/s,95.23653651429312,kWh,3.17124683658282e-05,1.7378374240543054e-05,0.0001501383978884052,0.00019922924049477648,tokens/kWh,1284951.9446253772,MB,6401.134592,20902.838272,0.0,20256.391168,19862.692352,s,10,1586.032109375,158.60321093750002,0.015321928299545614,158.5981796875,158.6242765625,158.62938828125,158.63347765625,"[158.6345, 158.613953125, 158.592734375, 158.623140625, 158.5865, 158.597484375, 158.605109375, 158.59528125, 158.598875, 158.58453125]",tokens/s,0.3972176832209664,kWh,0.0018724184694555073,0.0010262522225316024,0.008901508065645191,0.0118001787576323,tokens/kWh,5338.902172075308,,s,629,1607.7030268554693,2.555966656367995,0.3193652798169538,2.51726953125,2.51903720703125,2.519619970703125,5.2035773632812505,"[2.5194208984375, 2.518276123046875, 2.516651123046875, 2.517525390625, 2.51843896484375, 2.517624755859375, 2.52004150390625, 2.52022265625, 2.521489501953125, 2.5164267578125, 2.5175634765625, 2.517775390625, 2.519953369140625, 2.51852197265625, 2.5173544921875, 2.519920654296875, 2.52021044921875, 2.516950927734375, 2.5166611328125, 2.518200439453125, 2.52010693359375, 2.518486083984375, 2.519525390625, 2.516905029296875, 2.517927001953125, 2.5164677734375, 2.517693359375, 2.518721435546875, 2.518928466796875, 2.51907470703125, 2.519510986328125, 2.518275146484375, 2.517242919921875, 2.516676513671875, 2.5165732421875, 2.5177783203125, 2.517148681640625, 2.51657421875, 2.516912109375, 2.518000732421875, 2.517034912109375, 2.516802490234375, 2.51715185546875, 2.51765966796875, 2.517441650390625, 2.51902783203125, 2.518970458984375, 2.5190830078125, 2.518890380859375, 2.51803857421875, 2.518179931640625, 2.51873486328125, 2.517571533203125, 2.517244873046875, 2.516959228515625, 2.516928466796875, 2.5168681640625, 2.5169306640625, 2.517191650390625, 2.517224365234375, 2.517115966796875, 
2.516737060546875, 5.213111328125, 2.516869140625, 2.517271484375, 2.516408203125, 2.51673486328125, 2.51637255859375, 2.5162373046875, 2.516429931640625, 2.5193134765625, 2.518941650390625, 2.519627685546875, 2.5193779296875, 2.51966064453125, 2.51765966796875, 2.51892333984375, 2.516537353515625, 2.517127197265625, 2.519396240234375, 2.5196083984375, 2.518447021484375, 2.51694091796875, 2.517104736328125, 2.516771728515625, 2.51951318359375, 2.519360595703125, 2.519175048828125, 2.5198193359375, 2.5188720703125, 2.516715576171875, 2.51652001953125, 2.516981689453125, 2.51901025390625, 2.517675048828125, 2.516708251953125, 2.51647998046875, 2.51678515625, 2.517172119140625, 2.516953125, 2.51710986328125, 2.517333984375, 2.5174814453125, 2.517621826171875, 2.5172724609375, 2.517373046875, 2.5174833984375, 2.517396484375, 2.51939111328125, 2.517060546875, 2.5173310546875, 2.517473388671875, 2.51749169921875, 2.517325927734375, 2.517126220703125, 2.517371826171875, 2.517205078125, 2.51726953125, 2.51808056640625, 2.517114990234375, 2.51685986328125, 2.516756591796875, 2.517073974609375, 2.51732275390625, 2.51730224609375, 5.20315478515625, 2.5170166015625, 2.517244873046875, 2.517020751953125, 2.517108642578125, 2.51638671875, 2.51642578125, 2.517214111328125, 2.516769775390625, 2.51730029296875, 2.51892529296875, 2.519654296875, 2.518119384765625, 2.5167421875, 2.51654150390625, 2.5173125, 2.51736669921875, 2.51793310546875, 2.5171240234375, 2.51818603515625, 2.51892431640625, 2.5187666015625, 2.51812353515625, 2.51782763671875, 2.5180478515625, 2.517296142578125, 2.516853759765625, 2.517425048828125, 2.5174833984375, 2.5169951171875, 2.516545654296875, 2.517794921875, 2.5171640625, 2.51698388671875, 2.51639501953125, 2.51702685546875, 2.516306884765625, 2.5166806640625, 2.51652001953125, 2.517170166015625, 2.516893798828125, 2.516665283203125, 2.516609130859375, 2.516989013671875, 2.51647998046875, 2.516822021484375, 2.51631201171875, 2.51747216796875, 2.517741455078125, 2.518084716796875, 2.517223388671875, 2.51799951171875, 2.51795654296875, 2.518109130859375, 2.51673388671875, 2.517454833984375, 2.51732177734375, 2.51715283203125, 2.516638671875, 2.517203857421875, 2.516884521484375, 2.51759912109375, 2.519858154296875, 5.20646044921875, 2.519667724609375, 2.520236083984375, 2.519354248046875, 2.519604248046875, 2.51765771484375, 2.518856689453125, 2.5197373046875, 2.51866015625, 2.51622802734375, 2.517433349609375, 2.517887939453125, 2.517296142578125, 2.519244873046875, 2.51947509765625, 2.51691015625, 2.517485595703125, 2.51801708984375, 2.51822998046875, 2.518202392578125, 2.518054931640625, 2.517960693359375, 2.518828125, 2.517103515625, 2.516676513671875, 2.51656494140625, 2.5168916015625, 2.516701171875, 2.51679541015625, 2.516830322265625, 2.517245849609375, 2.51702685546875, 2.517789794921875, 2.5177548828125, 2.517986328125, 2.519099365234375, 2.51782666015625, 2.517256103515625, 2.51816845703125, 2.5177353515625, 2.517129150390625, 2.517339111328125, 2.51709130859375, 2.517085205078125, 2.51757568359375, 2.517266357421875, 2.51751123046875, 2.518306884765625, 2.518222900390625, 2.51692236328125, 2.517740478515625, 2.517210205078125, 2.5171865234375, 2.51708935546875, 2.517200927734375, 2.51718359375, 2.51808349609375, 2.517579833984375, 2.517981201171875, 2.518381591796875, 2.517424072265625, 2.517716064453125, 2.517458984375, 5.20374169921875, 2.517011474609375, 2.51717333984375, 2.51589111328125, 2.516104248046875, 2.51622705078125, 2.51666748046875, 2.51635400390625, 
2.516790283203125, 2.516591552734375, 2.517108642578125, 2.5161943359375, 2.516461669921875, 2.515986328125, 2.516514892578125, 2.51587890625, 2.516221923828125, 2.516361328125, 2.517339111328125, 2.51677490234375, 2.517726318359375, 2.519022705078125, 2.518340576171875, 2.516381591796875, 2.5166611328125, 2.5175654296875, 2.51858544921875, 2.518182861328125, 2.519140380859375, 2.516368408203125, 2.5173974609375, 2.516633544921875, 2.51689990234375, 2.516451416015625, 2.51803759765625, 2.519834716796875, 2.518279052734375, 2.516961181640625, 2.5182822265625, 2.51641748046875, 2.51694482421875, 2.5169111328125, 2.51719580078125, 2.5168271484375, 2.516947998046875, 2.5164697265625, 2.5166357421875, 2.51717724609375, 2.516738037109375, 2.5174794921875, 2.5177138671875, 2.51707080078125, 2.517907470703125, 2.51693359375, 2.518024169921875, 2.51669921875, 2.517652587890625, 2.51960009765625, 2.51852294921875, 2.5182841796875, 2.517738525390625, 2.518305908203125, 2.51839794921875, 5.20286328125, 2.518064208984375, 2.5179228515625, 2.51654052734375, 2.517392333984375, 2.517768310546875, 2.517991455078125, 2.516991943359375, 2.51664697265625, 2.516994140625, 2.516967529296875, 2.51681689453125, 2.516999267578125, 2.517242919921875, 2.51738427734375, 2.5165751953125, 2.516220947265625, 2.516633544921875, 2.516949951171875, 2.517053466796875, 2.5165966796875, 2.51759716796875, 2.516681640625, 2.516168701171875, 2.5164677734375, 2.51702978515625, 2.5176904296875, 2.5161298828125, 2.51717333984375, 2.518159423828125, 2.51761962890625, 2.5179013671875, 2.517589111328125, 2.5173984375, 2.516906005859375, 2.516503662109375, 2.516475830078125, 2.51711279296875, 2.519480224609375, 2.5183896484375, 2.5181982421875, 2.518749267578125, 2.518531005859375, 2.518134765625, 2.51865087890625, 2.518466552734375, 2.5166396484375, 2.516759521484375, 2.516642822265625, 2.518162353515625, 2.517697509765625, 2.516461669921875, 2.516862060546875, 2.5173310546875, 2.517142578125, 2.516802490234375, 2.51664599609375, 2.51910546875, 2.520224853515625, 2.518654052734375, 2.517782470703125, 2.517315673828125, 2.517593017578125, 5.2058564453125, 2.517256103515625, 2.516883544921875, 2.516770751953125, 2.517991455078125, 2.51818505859375, 2.517248046875, 2.517381103515625, 2.517410888671875, 2.518013916015625, 2.51770263671875, 2.517663818359375, 2.51745068359375, 2.5174189453125, 2.5167841796875, 2.51723779296875, 2.5164462890625, 2.517445556640625, 2.516367431640625, 2.5164462890625, 2.51627734375, 2.51877685546875, 2.517401611328125, 2.51765771484375, 2.516915283203125, 2.517138427734375, 2.516568115234375, 2.516221923828125, 2.516367431640625, 2.517359619140625, 2.519510986328125, 2.51938720703125, 2.518856689453125, 2.519548828125, 2.51677392578125, 2.5177978515625, 2.517477294921875, 2.518878173828125, 2.5189775390625, 2.518950927734375, 2.518413330078125, 2.5166806640625, 2.516497314453125, 2.517235595703125, 2.518279052734375, 2.5198427734375, 2.51673486328125, 2.516912109375, 2.516906005859375, 2.51755517578125, 2.517419921875, 2.517097412109375, 2.516971435546875, 2.51842138671875, 2.517538818359375, 2.5178203125, 2.5180068359375, 2.517959716796875, 2.517367919921875, 2.517199951171875, 2.5175673828125, 2.517475341796875, 2.51647998046875, 5.206263671875, 2.51768212890625, 2.518013916015625, 2.51671240234375, 2.516947021484375, 2.51635302734375, 2.5160029296875, 2.516082763671875, 2.5166357421875, 2.519185302734375, 2.520660888671875, 2.520238037109375, 2.518698974609375, 2.5163837890625, 2.516863037109375, 
2.51846142578125, 2.518802490234375, 2.518381591796875, 2.516673583984375, 2.516651123046875, 2.517413818359375, 2.51720703125, 2.516526123046875, 2.516347900390625, 2.516926513671875, 2.51696630859375, 2.516371337890625, 2.51662548828125, 2.517037109375, 2.518067138671875, 2.51704833984375, 2.517189697265625, 2.516686767578125, 2.516638671875, 2.516578369140625, 2.51708935546875, 2.517098388671875, 2.5169755859375, 2.5169140625, 2.51700830078125, 2.517178466796875, 2.51765771484375, 2.517274658203125, 2.516906982421875, 2.51740576171875, 2.51774462890625, 2.51700830078125, 2.517297119140625, 2.51739453125, 2.518012939453125, 2.516905029296875, 2.51756640625, 2.51799853515625, 2.51736669921875, 2.517075927734375, 2.51706982421875, 2.517170166015625, 2.51795556640625, 2.516641845703125, 2.5179892578125, 2.519339111328125, 2.519236572265625, 2.517425048828125, 5.20979345703125, 2.516677734375, 2.51662939453125, 2.516441162109375, 2.51880029296875, 2.516022216796875, 2.51607763671875, 2.516926513671875, 2.5178603515625, 2.516306884765625, 2.5163828125, 2.51637353515625, 2.517161865234375, 2.516613037109375, 2.516834228515625, 2.51662548828125, 2.517593994140625, 2.515980224609375, 2.5162177734375, 2.516642822265625, 2.517887939453125, 2.51702587890625, 2.517031005859375, 2.517222412109375, 2.517981201171875, 2.517895263671875, 2.51740576171875, 2.517854248046875, 2.5180498046875, 2.51757373046875, 2.518212646484375, 2.517984375, 2.518205322265625, 2.51793408203125, 2.51681689453125, 2.5167216796875, 2.517544921875, 2.516486083984375, 2.5170185546875, 2.517210205078125, 2.517508056640625, 2.51639697265625, 2.516465576171875, 2.517015625, 2.5180283203125, 2.51659375, 2.516681640625, 2.51827099609375, 2.518345703125, 2.5169111328125, 2.51719580078125, 2.51778759765625, 2.520734619140625, 2.519320556640625, 2.5171875, 2.517358642578125, 2.51997900390625, 2.518500244140625, 2.516546630859375, 2.517432373046875, 2.5194833984375, 2.519772216796875, 2.519079833984375, 5.2087294921875, 2.517329833984375, 2.5174794921875, 2.516798583984375, 2.51810302734375, 2.5172890625, 2.51738818359375, 2.5166357421875, 2.51726953125, 2.516717529296875, 2.5166181640625, 2.517689453125, 2.51707177734375, 2.516453369140625, 2.516779052734375, 2.516828125, 2.516937744140625, 2.517130126953125, 2.516748291015625, 2.516958251953125, 2.517098388671875, 2.517117919921875, 2.516738037109375, 2.51713330078125, 2.516843505859375, 2.516346923828125, 2.516989013671875, 2.517284912109375, 2.516630615234375, 2.516404296875, 2.516778076171875, 2.51865185546875, 2.518513671875, 2.51744677734375, 2.517791748046875, 2.51757470703125, 2.516971435546875, 2.51658740234375, 2.51664794921875, 2.5177119140625, 2.517885986328125, 2.51691015625, 2.51734326171875, 2.516971435546875, 2.517379150390625, 2.5163837890625, 2.516974609375, 2.516904052734375, 2.517392333984375, 2.51668798828125, 2.517559326171875, 2.51702587890625, 2.517056396484375, 2.5166611328125, 2.517138427734375, 2.517266357421875, 2.517547119140625, 2.51685693359375, 2.517096435546875, 2.5175234375, 2.51769140625, 2.51662744140625, 2.51719580078125]",tokens/s,0.3912414105671436,,,,,,,, 
-4bit-awq-gemm-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,openai-community/gpt2-large,openai-community/gpt2-large,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.42.3,,0.31.0,,,,1.20.0,,,,0.11.1,,,,,,,,,,,,,,,,,,,,,,,,,,,MB,1263.828992,2645.03296,0.0,1998.585856,1692.386816,s,10,0.1855360298156738,0.018553602981567382,0.00043875123267479884,0.018496671676635743,0.018861865425109862,0.019271300983428954,0.019598849430084227,"[0.019680736541748046, 0.018462047576904297, 0.018017887115478515, 0.018770879745483398, 0.01853129577636719, 0.018288383483886717, 0.018085119247436523, 0.018451072692871093, 0.018648672103881835, 0.01859993553161621]",tokens/s,13797.859114174787,kWh,2.114928848604586e-07,1.1588762332417353e-07,6.19279465806741e-07,9.46659973991373e-07,tokens/kWh,270424447.03839666,MB,1264.156672,2645.03296,0.0,1998.585856,1714.454528,s,10,11.12458154296875,1.1124581542968752,0.01406764968643287,1.1127079467773437,1.1250791748046873,1.1330105346679686,1.1393556225585937,"[1.14094189453125, 1.0845853271484376, 1.10562890625, 1.1158067626953125, 1.1171712646484375, 1.101681884765625, 1.123316650390625, 1.1187562255859376, 1.109609130859375, 1.10708349609375]",tokens/s,56.63134362102716,kWh,1.3564222330478142e-05,7.43279469964388e-06,2.677221466959136e-05,4.776923169971338e-05,tokens/kWh,1318840.5540208428,,s,629,11.272059902191153,0.017920603978046363,0.0022541365398613443,0.01777663993835449,0.018067455291748045,0.018251570892333986,0.03606495193481445,"[0.01827123260498047, 0.018265087127685545, 0.01800396728515625, 0.0180633602142334, 0.018020351409912108, 0.017958911895751953, 0.017951744079589844, 0.018018304824829103, 0.01802342414855957, 0.017916927337646483, 0.01800601577758789, 0.018231296539306642, 0.017929216384887696, 0.01817804718017578, 0.01799782371520996, 0.018084863662719726, 0.01784832000732422, 0.01869824028015137, 0.019519487380981446, 0.019140607833862306, 0.018869247436523438, 0.018712575912475587, 0.018748416900634765, 0.018544639587402344, 0.018654207229614257, 0.019080192565917968, 0.018343936920166014, 0.018095104217529297, 0.018095104217529297, 0.018199552536010744, 0.018050048828125, 0.01787494468688965, 0.017922048568725587, 0.017927167892456054, 0.017872896194458008, 0.017913856506347657, 0.017844224929809572, 0.017917951583862304, 0.017910783767700195, 0.017952768325805665, 0.017810432434082032, 0.018069503784179687, 0.01785139274597168, 0.017900543212890627, 0.01786675262451172, 0.01785139274597168, 0.017785856246948242, 0.017811456680297853, 0.017880064010620117, 0.01778483200073242, 0.01782374382019043, 0.017967103958129883, 0.017938432693481447, 0.017829887390136717, 0.017932287216186525, 0.018110464096069336, 0.0179814395904541, 0.01801625633239746, 0.018017280578613282, 0.017952768325805665, 0.018096128463745118, 0.017328128814697266, 0.03599564743041992, 0.017175552368164062, 0.01725132751464844, 0.017185792922973633, 0.017250303268432618, 0.01722060775756836, 0.01719193649291992, 0.017125375747680666, 0.017138687133789063, 0.01719705581665039, 0.01806438446044922, 
0.01765990447998047, 0.01723084831237793, 0.017154048919677735, 0.017120256423950195, 0.01719398307800293, 0.017345535278320313, 0.017192960739135742, 0.017192960739135742, 0.017153024673461914, 0.017183744430541992, 0.017127424240112304, 0.017128448486328125, 0.017132543563842775, 0.017188863754272463, 0.017141759872436522, 0.017131519317626954, 0.017175552368164062, 0.017119232177734374, 0.01718988800048828, 0.01719705581665039, 0.01716223907470703, 0.017142784118652343, 0.017192960739135742, 0.01719808006286621, 0.017147903442382813, 0.017124351501464845, 0.017116159439086915, 0.017127424240112304, 0.017111040115356444, 0.017142784118652343, 0.01779302406311035, 0.01723904037475586, 0.017245183944702147, 0.017126399993896483, 0.01740185546875, 0.01724928092956543, 0.017155071258544922, 0.01721036720275879, 0.017176576614379883, 0.01716223907470703, 0.017187839508056642, 0.017137664794921875, 0.017185792922973633, 0.017144832611083984, 0.01717862319946289, 0.01720217514038086, 0.017183744430541992, 0.017167360305786132, 0.01720729637145996, 0.017152000427246093, 0.01714995193481445, 0.017188863754272463, 0.035544063568115236, 0.01720729637145996, 0.017348608016967772, 0.017148960113525392, 0.01784623908996582, 0.017950719833374023, 0.018066432952880858, 0.017897472381591797, 0.01788313674926758, 0.017346559524536134, 0.01720217514038086, 0.017344512939453126, 0.018008064270019532, 0.01804287910461426, 0.018095104217529297, 0.017966079711914062, 0.017930240631103517, 0.017811456680297853, 0.01784217643737793, 0.01785241508483887, 0.01826918411254883, 0.017861631393432616, 0.017954816818237306, 0.017864704132080078, 0.017854463577270507, 0.01786777687072754, 0.017949695587158202, 0.017916927337646483, 0.017961984634399415, 0.018075647354125975, 0.017930240631103517, 0.01799782371520996, 0.01720012855529785, 0.01722163200378418, 0.017177600860595704, 0.017258495330810548, 0.01720217514038086, 0.01723187255859375, 0.017426431655883787, 0.01724415969848633, 0.017171455383300782, 0.0171428165435791, 0.01711510467529297, 0.017171455383300782, 0.017632255554199217, 0.017307647705078123, 0.01725132751464844, 0.017209344863891602, 0.01720012855529785, 0.01724928092956543, 0.017460224151611328, 0.017337343215942384, 0.01716739273071289, 0.017188863754272463, 0.0171048641204834, 0.017150976181030272, 0.017136640548706054, 0.01721343994140625, 0.017156095504760743, 0.017252351760864256, 0.01720729637145996, 0.017108991622924806, 0.018921472549438476, 0.03711283111572266, 0.017960960388183594, 0.01794047927856445, 0.017308671951293944, 0.0172677116394043, 0.01714995193481445, 0.01721548843383789, 0.01720729637145996, 0.01716531181335449, 0.017163263320922852, 0.017517568588256836, 0.017304576873779298, 0.018160640716552736, 0.018274303436279296, 0.017976320266723633, 0.017821695327758787, 0.01784012794494629, 0.017701887130737306, 0.017527807235717775, 0.017163263320922852, 0.01739571189880371, 0.017712127685546874, 0.017694719314575197, 0.01780838394165039, 0.017533952713012696, 0.01719603157043457, 0.017132543563842775, 0.01741004753112793, 0.017710079193115236, 0.017952768325805665, 0.01789952087402344, 0.017967103958129883, 0.018033664703369142, 0.017896448135375977, 0.017869823455810546, 0.017712127685546874, 0.017709056854248048, 0.01777561569213867, 0.017909759521484374, 0.017778688430786133, 0.017898496627807618, 0.017753087997436523, 0.01782067108154297, 0.017892351150512697, 0.017910783767700195, 0.01777663993835449, 0.017760255813598632, 0.017971200942993162, 0.017833984375, 
0.017648639678955077, 0.017754112243652344, 0.017796096801757814, 0.017781759262084963, 0.017777664184570312, 0.017855487823486327, 0.01795686340332031, 0.01784832000732422, 0.017781759262084963, 0.017789951324462892, 0.017721343994140625, 0.01779302406311035, 0.017879039764404296, 0.017757183074951173, 0.03669913482666016, 0.017513471603393553, 0.01776742362976074, 0.01776639938354492, 0.017827840805053712, 0.01779097557067871, 0.01787494468688965, 0.01782579231262207, 0.017917951583862304, 0.017985536575317384, 0.017950719833374023, 0.017716224670410157, 0.017673215866088866, 0.01776639938354492, 0.017821695327758787, 0.017810432434082032, 0.01782579231262207, 0.018156543731689453, 0.01783305549621582, 0.017725343704223632, 0.017736703872680663, 0.017726463317871095, 0.017276927947998046, 0.017566719055175782, 0.017895423889160156, 0.017763328552246094, 0.01784217643737793, 0.017838079452514647, 0.01784524726867676, 0.017864704132080078, 0.017789951324462892, 0.01780531120300293, 0.017785856246948242, 0.017544191360473634, 0.01722572708129883, 0.01796505546569824, 0.017736703872680663, 0.017722368240356445, 0.018108415603637695, 0.017870847702026366, 0.01782579231262207, 0.01781760025024414, 0.017765375137329103, 0.01779916763305664, 0.017942527770996093, 0.01783705520629883, 0.017838079452514647, 0.017971200942993162, 0.017773567199707033, 0.017757183074951173, 0.017721343994140625, 0.017209344863891602, 0.017476640701293945, 0.017805280685424803, 0.01741414451599121, 0.017085439682006837, 0.01697177505493164, 0.01739263916015625, 0.017143808364868163, 0.01762099266052246, 0.017771520614624024, 0.017876991271972655, 0.017740800857543947, 0.036397056579589845, 0.017701887130737306, 0.017744895935058593, 0.017725439071655275, 0.017699840545654297, 0.01720012855529785, 0.019172351837158205, 0.01902387237548828, 0.018176000595092775, 0.017833984375, 0.01785036849975586, 0.017771520614624024, 0.0178606071472168, 0.017293312072753905, 0.017302528381347656, 0.01785651206970215, 0.01821388816833496, 0.017930240631103517, 0.01722675132751465, 0.017116159439086915, 0.01722060775756836, 0.017083391189575196, 0.017130495071411133, 0.017120256423950195, 0.017187839508056642, 0.01747148895263672, 0.017762304306030274, 0.017780736923217775, 0.017702911376953127, 0.01777459144592285, 0.017750015258789064, 0.01780633544921875, 0.017605632781982423, 0.017183744430541992, 0.017228799819946287, 0.01721855926513672, 0.017169408798217774, 0.01724723243713379, 0.017786880493164063, 0.01722470474243164, 0.017300479888916014, 0.01742438316345215, 0.017140735626220704, 0.01724006462097168, 0.01720627212524414, 0.017283071517944337, 0.017316864013671874, 0.01748684883117676, 0.01722060775756836, 0.01726361656188965, 0.01724006462097168, 0.01721855926513672, 0.01725644874572754, 0.017179647445678712, 0.0171909122467041, 0.01718272018432617, 0.01720012855529785, 0.01717350387573242, 0.017169408798217774, 0.01717043113708496, 0.017176576614379883, 0.01721036720275879, 0.01722777557373047, 0.03609190368652344, 0.018330623626708984, 0.01841971206665039, 0.01794047927856445, 0.017928192138671875, 0.017904640197753906, 0.017781759262084963, 0.01782067108154297, 0.017748992919921876, 0.01779097557067871, 0.0178155517578125, 0.01762713623046875, 0.01741004753112793, 0.0172359676361084, 0.01724928092956543, 0.017926143646240233, 0.01780531120300293, 0.017581056594848633, 0.01721446418762207, 0.017269760131835937, 0.01724825668334961, 0.017524768829345703, 0.017801183700561524, 0.018189311981201172, 0.01789132881164551, 
0.017922048568725587, 0.01785139274597168, 0.017810432434082032, 0.01783296012878418, 0.01780019187927246, 0.017740800857543947, 0.017821695327758787, 0.01780735969543457, 0.017889280319213868, 0.017743871688842772, 0.017833984375, 0.017904640197753906, 0.01785036849975586, 0.017888256072998047, 0.018562047958374024, 0.018885631561279297, 0.018080799102783204, 0.017959903717041016, 0.01780940818786621, 0.01804595184326172, 0.017960960388183594, 0.01778179168701172, 0.017766368865966796, 0.01782374382019043, 0.017957887649536132, 0.01789132881164551, 0.017932287216186525, 0.01779916763305664, 0.01784012794494629, 0.017949695587158202, 0.018008064270019532, 0.017871871948242187, 0.017836032867431642, 0.01779916763305664, 0.0178606071472168, 0.01785036849975586, 0.01765376091003418, 0.017263647079467773, 0.03623728179931641, 0.01822412872314453, 0.018000896453857423, 0.017853439331054686, 0.01784832000732422, 0.017847295761108398, 0.017924095153808595, 0.01782681655883789, 0.017847295761108398, 0.017869823455810546, 0.017503231048583985, 0.017280000686645508, 0.017565696716308594, 0.017952768325805665, 0.017887231826782226, 0.017903615951538086, 0.018059263229370116, 0.017936384201049805, 0.017829887390136717, 0.017869823455810546, 0.017743871688842772, 0.017771520614624024, 0.017746944427490235, 0.01781657600402832, 0.017921024322509766, 0.017885183334350584, 0.01783705520629883, 0.01779097557067871, 0.017946624755859376, 0.017881120681762695, 0.017975263595581055, 0.01784320068359375, 0.017888256072998047, 0.01746329689025879, 0.017374208450317383, 0.0176312313079834, 0.01767731285095215, 0.01780531120300293, 0.017738752365112305, 0.017762304306030274, 0.017156095504760743, 0.01746227264404297, 0.017780736923217775, 0.017772544860839845, 0.017268735885620116, 0.01719910430908203, 0.017176576614379883, 0.017605632781982423, 0.01780531120300293, 0.017844224929809572, 0.017950719833374023, 0.017886207580566405, 0.017855487823486327, 0.017879039764404296, 0.01777459144592285, 0.017949695587158202, 0.01784012794494629, 0.017901567459106444, 0.017992704391479493, 0.017946624755859376, 0.017821695327758787, 0.017325056076049804, 0.01728102493286133, 0.036772865295410156, 0.01763020706176758, 0.017847295761108398, 0.017917951583862304, 0.017923072814941408, 0.017935359954833984, 0.01785241508483887, 0.017862655639648437, 0.017925119400024413, 0.01788313674926758, 0.017318912506103516, 0.017284095764160155, 0.01722572708129883, 0.017179647445678712, 0.01747865676879883, 0.017236991882324217, 0.017275903701782228, 0.01717350387573242, 0.017317888259887695, 0.017160192489624023, 0.01720729637145996, 0.017201152801513672, 0.017261568069458007, 0.01717452812194824, 0.017154048919677735, 0.01721651268005371, 0.017131519317626954, 0.017114112854003907, 0.017138687133789063, 0.01740083122253418, 0.01738444709777832, 0.017150976181030272, 0.017217567443847656, 0.01723798370361328, 0.01720524787902832, 0.017177600860595704, 0.017168384552001953, 0.01715814399719238, 0.017143808364868163, 0.018020351409912108, 0.018546688079833985, 0.019092479705810548, 0.018093055725097656, 0.017953792572021485, 0.01799065589904785, 0.017950719833374023, 0.017655839920043947, 0.017588191986083985, 0.01784524726867676, 0.017939456939697264, 0.017925119400024413, 0.01807155227661133, 0.017925119400024413, 0.01785958480834961, 0.017993728637695314, 0.017932287216186525, 0.017968128204345703, 0.017968128204345703, 0.017861631393432616, 0.017893375396728514, 0.017958911895751953, 0.017920000076293945, 0.017286144256591796, 
0.036160511016845705, 0.017304576873779298, 0.017262592315673828, 0.01722470474243164, 0.01724928092956543, 0.01740287971496582, 0.017313823699951172, 0.0172205753326416, 0.017254400253295898, 0.0172728328704834, 0.01722470474243164, 0.017352703094482422, 0.017209344863891602, 0.017320959091186524, 0.0172728328704834, 0.017324031829833983, 0.017306623458862306, 0.017386495590209963, 0.01739571189880371, 0.01725542449951172, 0.017260543823242186, 0.017144832611083984, 0.01720627212524414, 0.017313791275024415, 0.0172359676361084, 0.0172677116394043, 0.018152448654174806, 0.018092031478881835, 0.01797324752807617, 0.01804595184326172, 0.018120704650878908, 0.018067455291748045, 0.018136064529418947, 0.01801113510131836, 0.017984512329101563, 0.01804287910461426, 0.017991680145263672, 0.01822003173828125, 0.01803264045715332, 0.018051071166992186, 0.018081792831420897, 0.017947647094726564, 0.01801420783996582, 0.018067455291748045, 0.018125823974609375, 0.01804083251953125, 0.017975296020507812, 0.01802137565612793, 0.018163711547851562, 0.018093055725097656, 0.017265663146972657, 0.01704140853881836, 0.017209344863891602, 0.01724723243713379, 0.01724415969848633, 0.01721139144897461, 0.017262592315673828, 0.017277952194213866, 0.017246208190917968, 0.017375232696533204, 0.017257471084594727, 0.017229824066162108, 0.017253376007080077]",tokens/s,55.80169068102002,,,,,,,, -4bit-awq-gemm-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,tiiuae/falcon-180B,tiiuae/falcon-180B,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 304, in hf_raise_for_status - response.raise_for_status() - File ""/usr/local/lib/python3.10/dist-packages/requests/models.py"", line 1024, in raise_for_status - raise HTTPError(http_error_msg, response=self) -requests.exceptions.HTTPError: 403 Client Error: Forbidden for url: https://huggingface.co/tiiuae/falcon-180B/resolve/main/config.json - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1722, in _get_metadata_or_catch_error - metadata = get_hf_file_metadata(url=url, proxies=proxies, timeout=etag_timeout, headers=headers) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File 
""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1645, in get_hf_file_metadata - r = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 372, in _request_wrapper - response = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 396, in _request_wrapper - hf_raise_for_status(response) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 367, in hf_raise_for_status - raise HfHubHTTPError(message, response=response) from e -huggingface_hub.utils._errors.HfHubHTTPError: (Request ID: Root=1-66949924-4e03e7531ba0feba6dc8a9ed;57175d6d-b9ff-4445-9bdc-e97db273007e) - -403 Forbidden: Please enable access to public gated repositories in your fine-grained token settings to view this repository.. -Cannot access content at: https://huggingface.co/tiiuae/falcon-180B/resolve/main/config.json. -If you are trying to create or update content,make sure you have a token with the `write` role. - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1221, in hf_hub_download - return _hf_hub_download_to_cache_dir( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1325, in _hf_hub_download_to_cache_dir - _raise_on_head_call_error(head_call_error, force_download, local_files_only) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1826, in _raise_on_head_call_error - raise LocalEntryNotFoundError( -huggingface_hub.utils._errors.LocalEntryNotFoundError: An error happened while trying to locate the file on the Hub and we cannot find the requested files in the local cache. Please check your connection and try again or make sure your Internet connection is on. 
- -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 445, in cached_file - raise EnvironmentError( -OSError: We couldn't connect to 'https://huggingface.co' to load this file, couldn't find it in the cached files and it looks like tiiuae/falcon-180B is not the path to a directory containing a file named config.json. -Checkout your internet connection or see how to run the library in offline mode at 'https://huggingface.co/docs/transformers/installation#offline-mode'. 
- -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemm-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,Deci/DeciLM-7B,Deci/DeciLM-7B,cuda,0,42,,,True,,,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run - report = scenario.run(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run - self.run_model_loading_tracking(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking - backend.load() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load - self.load_transformers_model() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model - self.load_transformers_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights - self.load_transformers_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained - self.pretrained_model = self.automodel_loader.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 559, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3826, in from_pretrained - config = cls._autoset_attn_implementation( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1565, in _autoset_attn_implementation - config = cls._check_and_enable_sdpa( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1731, in _check_and_enable_sdpa - raise ValueError( -ValueError: DeciLMForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. 
Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` - -",deci,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemm-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,facebook/opt-2.7b,facebook/opt-2.7b,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - self.load_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3704, in from_pretrained - config = cls._autoset_attn_implementation( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1490, in _autoset_attn_implementation - config = cls._check_and_enable_sdpa( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1656, in _check_and_enable_sdpa - raise ValueError( -ValueError: OPTForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. 
Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemm-sdpa,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,huggyllama/llama-7b,huggyllama/llama-7b,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.0,,0.30.1,,,,1.19.2,,,,0.11.1,,,,,,,,,,,,,,,,,,,,,,,,,,,MB,2013.822976,5480.382464,0.0,4833.93536,4503.282688,s,10,5.711301574707031,0.5711301574707031,0.0015604684158887843,0.5710088500976562,0.5724867248535156,0.573356430053711,0.5740521942138671,"[0.5715228271484375, 0.5742261352539062, 0.5684718017578125, 0.5703966064453125, 0.570494873046875, 0.5693699340820313, 0.5704375610351563, 0.5720609130859375, 0.57229345703125, 0.5720274658203125]",tokens/s,448.2340787846278,kWh,6.721677934681928e-06,3.683185084507083e-06,3.147118258432934e-05,4.187604560351835e-05,tokens/kWh,6113280.189438215,MB,2014.928896,5480.382464,0.0,4833.93536,4688.699392,s,10,334.74296874999993,33.474296875,0.0037315247196552594,33.473017578124995,33.479158984375,33.4801732421875,33.4809846484375,"[33.47309765625, 33.47263671875, 33.4715390625, 33.4729375, 33.47534765625, 33.47058984375, 33.47893359375, 33.4811875, 33.4778203125, 33.46887890625]",tokens/s,1.8820410249468458,kWh,0.00039524016447641235,0.00021662581418956808,0.0018242358853134645,0.002436101863979445,tokens/kWh,25860.987560301615,,s,629,339.3508396606445,0.5395084891266209,0.06790528508400501,0.531294189453125,0.5317521484375,0.5319190551757812,1.1017576318359374,"[0.5316085815429688, 0.5315625, 0.5310873413085937, 0.5312276611328125, 0.5310341186523437, 0.5311795043945312, 0.5312860107421875, 0.53180517578125, 0.5313863525390625, 0.5315205078125, 0.5309071655273437, 0.5309808349609375, 0.5307955322265625, 0.5312429809570313, 0.530924560546875, 0.5313556518554687, 0.5308876953125, 0.531230712890625, 0.5310802001953125, 0.5317345581054688, 0.5312921752929688, 0.5313382568359375, 0.5308221435546875, 0.5310607299804687, 0.5308549194335938, 0.5315635375976563, 0.5311006469726562, 0.5312788696289062, 0.5312388916015625, 0.5312849731445313, 0.5308907470703125, 0.5311416015625, 0.5308528442382813, 0.5311743774414063, 0.531367919921875, 0.5315932006835937, 0.5311682739257813, 0.5317539672851562, 0.5317908325195313, 0.531473388671875, 0.53108837890625, 0.53146728515625, 0.531220458984375, 0.5315338134765625, 0.5310525512695312, 0.531900390625, 0.5317529296875, 0.5317069091796875, 0.53103515625, 0.53153076171875, 0.5311344604492187, 0.5311918334960938, 0.5308630981445313, 0.5311641845703124, 0.5309531860351563, 0.531577880859375, 0.5316065063476563, 0.5314017333984375, 0.5316864013671875, 0.5318963012695312, 0.5315604248046875, 0.53161279296875, 1.1062353515625, 0.5315419921875, 0.5316771850585937, 0.5312737426757812, 0.5311528930664062, 0.5314109497070313, 0.5312112426757812, 0.5310986328125, 0.5311057739257813, 0.53089892578125, 0.5317980346679687, 0.531167236328125, 0.53102490234375, 0.5308078002929687, 0.5314508666992187, 0.5309224853515625, 
0.5310689086914062, 0.5308067626953125, 0.5315430297851562, 0.5309972534179688, 0.5317294311523437, 0.5312696533203125, 0.5314498291015625, 0.5310535888671875, 0.5314406127929687, 0.5309706420898438, 0.5315850219726562, 0.5310156860351563, 0.5313699951171875, 0.5310637817382813, 0.5316474609375, 0.5311948852539062, 0.53110888671875, 0.53115185546875, 0.531330078125, 0.5310965576171875, 0.5311754150390625, 0.5309706420898438, 0.5319178466796874, 0.532337646484375, 0.5315972900390625, 0.531103759765625, 0.5311692504882812, 0.5309296875, 0.5313453979492188, 0.53089794921875, 0.532917236328125, 0.5309666137695312, 0.5310463256835938, 0.5309081420898437, 0.5315809326171875, 0.5311968994140625, 0.5312440185546875, 0.5310320434570313, 0.5312368774414062, 0.5308692626953125, 0.5317509155273438, 0.5314529418945313, 0.5314232177734375, 0.5311610717773437, 0.531356689453125, 0.5314232177734375, 0.5313045043945313, 1.1011102294921875, 0.5311477661132813, 0.5314263305664062, 0.531145751953125, 0.5313720092773437, 0.5311477661132813, 0.5315952758789062, 0.5309235229492187, 0.5311928100585938, 0.531162109375, 0.5311211547851562, 0.53140478515625, 0.5317939453125, 0.53121435546875, 0.5318656005859375, 0.531483642578125, 0.5315000610351562, 0.5312716674804687, 0.5315645141601563, 0.531324951171875, 0.5316557006835938, 0.5313607788085938, 0.5315901489257813, 0.5309573364257812, 0.5312061157226563, 0.5308528442382813, 0.5311078491210938, 0.5310279541015624, 0.5312245483398438, 0.5308385009765625, 0.5313310546875, 0.53131982421875, 0.5318225708007812, 0.5312051391601562, 0.5311702880859375, 0.5310709838867187, 0.5313167114257813, 0.5311600341796875, 0.5312184448242188, 0.531252197265625, 0.5313607788085938, 0.5311314086914063, 0.5312163696289063, 0.5310596923828125, 0.531451904296875, 0.5310115966796874, 0.5313280029296875, 0.53153076171875, 0.5314334716796875, 0.5314508666992187, 0.5315317993164063, 0.5316935424804687, 0.5310279541015624, 0.5312266235351563, 0.5312102661132813, 0.531051513671875, 0.5313505249023438, 0.5309439697265625, 0.53108837890625, 0.5311426391601562, 0.5314908447265625, 0.5311590576171875, 0.5316188354492187, 1.1020093994140625, 0.5310126342773438, 0.5312286987304687, 0.5310289916992188, 0.531694580078125, 0.5315460815429688, 0.5315625, 0.5309337768554687, 0.5313526000976563, 0.5313218383789062, 0.5313914794921875, 0.53136279296875, 0.5317498779296875, 0.5313587036132813, 0.5314078979492187, 0.531162109375, 0.5312286987304687, 0.531251220703125, 0.5314600830078124, 0.5311160278320313, 0.53119384765625, 0.5310310668945313, 0.5316546630859375, 0.531409912109375, 0.531567626953125, 0.5313894653320312, 0.5315020751953125, 0.531135498046875, 0.5310525512695312, 0.5308528442382813, 0.5313935546875, 0.5313812255859375, 0.5312890625, 0.530966552734375, 0.531019775390625, 0.5310494995117188, 0.5313024291992188, 0.5313894653320312, 0.531294189453125, 0.5311016845703125, 0.5310525512695312, 0.5311856689453125, 0.531736572265625, 0.531641357421875, 0.5321881713867187, 0.5313126220703125, 0.5313751220703125, 0.5310965576171875, 0.531794921875, 0.5309716186523438, 0.5311610717773437, 0.5310167236328125, 0.5313290405273438, 0.5308538818359375, 0.5310453491210938, 0.5313873901367188, 0.5312819213867187, 0.5309849853515625, 0.531398681640625, 0.5310084838867187, 0.5321144409179688, 0.531578857421875, 0.5316700439453125, 1.10214453125, 0.530830322265625, 0.5315020751953125, 0.5308692626953125, 0.5310894165039063, 0.5316024169921875, 0.5316566772460938, 0.5311129760742187, 0.5313003540039063, 
0.5309224853515625, 0.5312870483398437, 0.5310771484375, 0.5316566772460938, 0.5308528442382813, 0.5312696533203125, 0.5311334228515625, 0.5315010375976562, 0.5314529418945313, 0.5316433715820312, 0.5310289916992188, 0.5314703369140625, 0.5313914794921875, 0.5314805908203125, 0.5312102661132813, 0.5313157348632812, 0.5312808837890625, 0.5316044921875, 0.5312051391601562, 0.5314078979492187, 0.5316720581054688, 0.5313822631835937, 0.5311006469726562, 0.53110888671875, 0.5308703002929688, 0.5316167602539063, 0.5314221801757812, 0.531800048828125, 0.5311160278320313, 0.5314561157226563, 0.531411865234375, 0.5317734375, 0.5317805786132812, 0.5310750732421875, 0.5310975952148438, 0.53131982421875, 0.53121435546875, 0.531240966796875, 0.530924560546875, 0.5315112915039063, 0.5309634399414063, 0.53119384765625, 0.5311242065429688, 0.5314119873046875, 0.5314744262695312, 0.5316505737304688, 0.5320745239257813, 0.531435546875, 0.53260595703125, 0.5313802490234375, 0.5314990234375, 0.531493896484375, 0.531061767578125, 0.5313065185546875, 1.1010294189453125, 0.5310013427734375, 0.53124609375, 0.5309849853515625, 0.5314959106445313, 0.5311447143554687, 0.531198974609375, 0.531135498046875, 0.5317611694335938, 0.5310453491210938, 0.5312368774414062, 0.5310105590820312, 0.5315205688476563, 0.5312000122070313, 0.531442626953125, 0.5309010009765625, 0.53104638671875, 0.5309081420898437, 0.5313760986328125, 0.5308211059570312, 0.5312214965820312, 0.5310167236328125, 0.5315963134765626, 0.531188720703125, 0.5312471313476562, 0.5308661499023437, 0.5318123779296875, 0.5314027709960938, 0.5315277099609375, 0.5311959228515625, 0.5315491943359375, 0.5311867065429687, 0.5311385498046876, 0.5308538818359375, 0.5313402709960937, 0.5308897094726562, 0.53110888671875, 0.5310873413085937, 0.532853759765625, 0.5313351440429688, 0.531751953125, 0.5309685668945312, 0.5313668823242188, 0.5309788208007813, 0.5314406127929687, 0.5319198608398438, 0.5315972900390625, 0.5316341552734375, 0.531399658203125, 0.5313290405273438, 0.5312604370117188, 0.531052490234375, 0.5311856689453125, 0.5311262817382812, 0.5311273193359375, 0.5309450073242188, 0.5313003540039063, 0.5310310668945313, 0.531162109375, 0.5316484985351563, 0.5313546142578125, 0.531051513671875, 0.5314641723632813, 1.10335595703125, 0.5312788696289062, 0.5317376098632812, 0.5317816162109374, 0.5310443725585937, 0.5310945434570312, 0.531515380859375, 0.5312010498046875, 0.5312890625, 0.5315604248046875, 0.5310709838867187, 0.5312757568359375, 0.5313177490234375, 0.5313668823242188, 0.5315430297851562, 0.5313382568359375, 0.531462158203125, 0.5310822143554688, 0.5318031616210938, 0.5317324829101563, 0.5316137084960938, 0.5309522094726562, 0.5314713745117188, 0.531346435546875, 0.5311826171875, 0.5308856201171875, 0.5316904907226563, 0.5310525512695312, 0.5313648681640625, 0.5309481201171875, 0.5315020751953125, 0.5309798583984375, 0.5310975952148438, 0.5311068115234375, 0.53123583984375, 0.5308887329101563, 0.5311610717773437, 0.5312593994140625, 0.5315113525390625, 0.5320181274414062, 0.53134130859375, 0.5311498413085938, 0.5313812255859375, 0.5318276977539063, 0.5313341674804688, 0.5315061645507813, 0.5313771362304688, 0.5312665405273438, 0.53157373046875, 0.5311734008789063, 0.5318348999023438, 0.5309931640625, 0.5319618530273438, 0.5310545654296875, 0.531431396484375, 0.5314805908203125, 0.5322670288085938, 0.5313526000976563, 0.5318143920898437, 0.5314058227539062, 0.5316392822265625, 0.5314242553710937, 0.5316956176757812, 1.1039549560546875, 
0.5310596923828125, 0.531430419921875, 0.5309603881835937, 0.5313013916015625, 0.5310545654296875, 0.5314334716796875, 0.5311129760742187, 0.5314866943359375, 0.5309327392578125, 0.5313535766601563, 0.5317621459960937, 0.5320017700195312, 0.5313474731445312, 0.5321390380859375, 0.5313034057617188, 0.5315491943359375, 0.5315614624023437, 0.5321195678710937, 0.5317386474609375, 0.531430419921875, 0.53125732421875, 0.5315266723632812, 0.5314437255859376, 0.5318656005859375, 0.5314652099609375, 0.5318246459960938, 0.5309419555664062, 0.5313320922851562, 0.5310873413085937, 0.5319567260742187, 0.5315020751953125, 0.5317171020507813, 0.5314866943359375, 0.531135498046875, 0.5316986694335938, 0.5315419921875, 0.531198974609375, 0.5315286865234375, 0.5312481079101562, 0.53157373046875, 0.5313710327148438, 0.531556396484375, 0.5311170043945312, 0.5320908813476563, 0.5312860107421875, 0.5311273193359375, 0.53108837890625, 0.5317069091796875, 0.5314140014648437, 0.5318666381835937, 0.5313013916015625, 0.5315419921875, 0.5312225341796875, 0.531567626953125, 0.5311692504882812, 0.53146728515625, 0.5312901000976562, 0.5316116333007812, 0.5311057739257813, 0.5314898071289063, 0.5310587158203125, 0.5314539794921875, 1.1042969970703125, 0.5312860107421875, 0.5315072021484375, 0.5316034545898437, 0.5322137451171876, 0.5317847290039063, 0.5316690063476562, 0.5311959228515625, 0.531631103515625, 0.5317171020507813, 0.531820556640625, 0.5314273071289063, 0.5316249389648438, 0.5310904541015625, 0.531863525390625, 0.5312501831054688, 0.53151025390625, 0.531252197265625, 0.5320970458984375, 0.5310699462890625, 0.5312184448242188, 0.5309183959960937, 0.5319987182617187, 0.531577880859375, 0.5319649047851562, 0.5313546142578125, 0.53127783203125, 0.5312000122070313, 0.5314058227539062, 0.5309552612304688, 0.5314765014648437, 0.5311477661132813, 0.5315399780273438, 0.5311395874023438, 0.5313935546875, 0.5317590942382813, 0.531430419921875, 0.5314949340820313, 0.53096240234375, 0.5309450073242188, 0.5312819213867187, 0.5316137084960938, 0.5311641845703124, 0.5311508178710938, 0.5317099609375, 0.531009521484375, 0.5310576782226563, 0.530977783203125, 0.5313966064453125, 0.531272705078125, 0.531178466796875, 0.53085693359375, 0.5311764526367188, 0.5310115966796874, 0.531294189453125, 0.5309757690429687, 0.5316322021484375, 0.5310668334960937, 0.531282958984375, 0.531072021484375, 0.5319813232421875, 0.5315072021484375, 0.5314345092773437, 1.1038515625, 0.5315747680664062, 0.5315563354492188, 0.5314283447265625, 0.5311314086914063, 0.5308887329101563, 0.5313218383789062, 0.5309593505859375, 0.5309398803710937, 0.5308262329101563, 0.5310136108398438, 0.5309296875, 0.5314590454101562, 0.5310699462890625, 0.5316587524414063, 0.5311928100585938, 0.5312696533203125, 0.5310955810546875, 0.5310802001953125, 0.5312481079101562, 0.5316751098632813, 0.5312921752929688, 0.5321625366210937, 0.531294189453125, 0.5315594482421875, 0.5310873413085937, 0.5322465209960937, 0.5310218505859375, 0.5312512817382813, 0.5308609619140625, 0.531178466796875, 0.5309429931640625, 0.5310105590820312, 0.5310392456054688, 0.5313372192382813, 0.5314283447265625, 0.5313710327148438, 0.5309235229492187, 0.5312706298828125, 0.5308815307617187, 0.5310955810546875, 0.5310003051757812, 0.5311764526367188, 0.5315164184570312, 0.53134130859375, 0.5313402709960937, 0.5311385498046876, 0.5311682739257813, 0.53187890625, 0.5310679321289062, 0.5314652099609375, 0.5309286499023438, 0.5317437744140625, 0.5314334716796875, 0.5315911865234375, 0.5309706420898438, 
0.5311815795898438, 0.5312235717773437, 0.5312030639648437, 0.5309204711914063, 0.5312102661132813, 0.5308764038085938, 0.5311703491210937]",tokens/s,1.853538952869569,,,,,,,, -4bit-awq-gemm-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,facebook/opt-13b,facebook/opt-13b,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - self.load_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3704, in from_pretrained - config = cls._autoset_attn_implementation( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1490, in _autoset_attn_implementation - config = cls._check_and_enable_sdpa( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1656, in _check_and_enable_sdpa - raise ValueError( -ValueError: OPTForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. 
Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemm-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,EleutherAI/pythia-70m,EleutherAI/pythia-70m,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.42.3,,0.31.0,,,,1.20.0,,,,0.11.1,,,,,,,,,,,,,,,,,,,,,,,,,,,MB,1263.857664,872.93952,0.0,226.492416,184.528384,s,12,0.17770700836181638,0.014808917363484699,0.0007458121254662319,0.014730735778808594,0.015585615634918212,0.016198500156402586,0.016727724933624268,"[0.01686003112792969, 0.014940928459167481, 0.014185312271118165, 0.014250240325927734, 0.014782591819763184, 0.01479747200012207, 0.014225376129150391, 0.01565724754333496, 0.014866304397583009, 0.014252096176147462, 0.014678879737854004, 0.014210528373718261]",tokens/s,17286.881526615558,kWh,1.717408794130004e-07,9.410611709969618e-08,3.064682684303119e-07,5.723152649430085e-07,tokens/kWh,447305909.3146723,MB,1263.857664,872.93952,0.0,226.492416,184.530944,s,12,10.442449340820314,0.8702041117350262,0.008126167996174669,0.869532958984375,0.87863642578125,0.8844940338134766,0.8896149688720703,"[0.8908952026367187, 0.8730538330078125, 0.8646798095703125, 0.8587084350585937, 0.870618896484375, 0.8792567138671875, 0.8648596801757813, 0.8676231079101563, 0.8708908081054687, 0.868447021484375, 0.8618011474609375, 0.8716146850585937]",tokens/s,72.39680800219347,kWh,1.0212165763734822e-05,5.594490671040206e-06,1.7625837744070347e-05,3.3432494178845375e-05,tokens/kWh,1884394.2561684093,,s,755,10.587394044876108,0.014023038470034567,0.0018357835916675285,0.013768704414367675,0.014138367652893067,0.01426595821380615,0.028149206199645997,"[0.01590272045135498, 0.015169535636901856, 0.013799424171447755, 0.013592576026916504, 0.013566975593566894, 0.013568032264709472, 0.013549535751342774, 0.013544447898864746, 0.013549568176269532, 0.01500057601928711, 0.014542847633361817, 0.015096832275390625, 0.014296064376831055, 0.014089216232299804, 0.014114815711975098, 0.014088191986083985, 0.014008319854736329, 0.014020607948303223, 0.01397862434387207, 0.013996031761169434, 0.013991935729980469, 0.014033920288085937, 0.014502911567687989, 0.014139391899108887, 0.014253055572509766, 0.01410972785949707, 0.014130144119262696, 0.01405951976776123, 0.014032896041870118, 0.014044159889221192, 0.014107647895812989, 0.01409331226348877, 0.014196736335754395, 0.014124032020568847, 0.01380352020263672, 0.013652992248535157, 0.014197759628295899, 0.014205951690673829, 0.014385215759277344, 0.014391231536865234, 0.01419264030456543, 0.014195712089538574, 0.014110719680786133, 0.014142463684082032, 0.014052351951599122, 0.014109696388244629, 0.014038016319274902, 0.013868032455444336, 0.013783040046691895, 0.014055423736572266, 0.014000127792358399, 0.014098464012145996, 0.014048224449157714, 0.014029824256896972, 0.013983743667602539, 0.014000127792358399, 0.014010368347167968, 0.01409945583343506, 0.01408512020111084, 0.014088191986083985, 
0.013898752212524413, 0.013739007949829102, 0.029507583618164062, 0.014137344360351562, 0.014060544013977052, 0.014026752471923828, 0.014106623649597168, 0.014073856353759765, 0.013790207862854004, 0.013739007949829102, 0.013693951606750488, 0.013793279647827148, 0.013578240394592284, 0.013528063774108886, 0.013835264205932616, 0.014154751777648926, 0.013549568176269532, 0.013612031936645508, 0.01356492805480957, 0.013528063774108886, 0.013546496391296388, 0.013410304069519043, 0.01346457576751709, 0.013619199752807617, 0.014065664291381836, 0.014048255920410157, 0.01399500846862793, 0.014000127792358399, 0.014065664291381836, 0.013827072143554688, 0.013361151695251466, 0.013429759979248047, 0.013656064033508301, 0.0135731201171875, 0.013530112266540528, 0.013545472145080567, 0.013538304328918458, 0.013493247985839844, 0.013543423652648925, 0.01358950424194336, 0.013771776199340821, 0.01408614444732666, 0.01386393642425537, 0.01419878387451172, 0.014858240127563477, 0.014154784202575684, 0.014159839630126953, 0.014034943580627441, 0.014146559715270996, 0.014025728225708007, 0.014043135643005371, 0.014113792419433594, 0.014020607948303223, 0.014134271621704102, 0.014055423736572266, 0.01397862434387207, 0.013842432022094727, 0.013796352386474609, 0.013836288452148437, 0.014095359802246094, 0.014161919593811035, 0.014041088104248046, 0.013948927879333496, 0.013541376113891602, 0.013576191902160644, 0.02812518310546875, 0.013533184051513672, 0.013554688453674316, 0.01355571174621582, 0.01355571174621582, 0.013507583618164062, 0.01354751968383789, 0.013560832023620606, 0.01354751968383789, 0.013593600273132325, 0.01355571174621582, 0.013528063774108886, 0.013491231918334961, 0.014089183807373047, 0.01396224021911621, 0.014020607948303223, 0.014097408294677734, 0.014039039611816406, 0.014054400444030762, 0.014026752471923828, 0.014045215606689454, 0.014015487670898438, 0.014002143859863281, 0.01407795238494873, 0.014055423736572266, 0.014014464378356933, 0.014206975936889648, 0.014147583961486816, 0.014009344100952148, 0.014095359802246094, 0.014009344100952148, 0.01458796787261963, 0.013855680465698243, 0.013546496391296388, 0.013480959892272949, 0.01354854393005371, 0.013485055923461914, 0.013527039527893067, 0.01358950424194336, 0.013511712074279786, 0.013557727813720704, 0.013578240394592284, 0.013524991989135742, 0.013562879562377929, 0.01353932762145996, 0.013575167655944824, 0.013516799926757812, 0.013549568176269532, 0.013531135559082032, 0.013541376113891602, 0.01349120044708252, 0.013478912353515626, 0.013517824172973633, 0.013557760238647461, 0.013520895957946777, 0.013494272232055664, 0.013519871711730956, 0.013518848419189454, 0.013554688453674316, 0.013521984100341796, 0.013955007553100586, 0.014013440132141113, 0.013520895957946777, 0.02814566421508789, 0.013528063774108886, 0.013505536079406738, 0.013579263687133789, 0.014011391639709473, 0.01377894401550293, 0.013514752388000489, 0.0134717435836792, 0.013480959892272949, 0.013486080169677735, 0.013496319770812988, 0.013534208297729493, 0.013576191902160644, 0.013510656356811524, 0.013544447898864746, 0.01370419216156006, 0.01357209587097168, 0.013484031677246093, 0.01358233642578125, 0.013533184051513672, 0.013503487586975099, 0.013554688453674316, 0.013522944450378417, 0.013522944450378417, 0.013629440307617188, 0.01337446403503418, 0.013472767829895019, 0.01358233642578125, 0.013496319770812988, 0.013522944450378417, 0.013493247985839844, 0.013541376113891602, 0.013524991989135742, 0.01356390380859375, 0.013475839614868163, 
0.013526016235351563, 0.013596672058105469, 0.013507583618164062, 0.01349120044708252, 0.013430815696716308, 0.013491168022155761, 0.01347481632232666, 0.013444095611572266, 0.013515775680541992, 0.013492223739624023, 0.013515775680541992, 0.013526016235351563, 0.0134901762008667, 0.013553664207458497, 0.013494272232055664, 0.013554688453674316, 0.01418239974975586, 0.014006272315979004, 0.013982720375061035, 0.013683712005615235, 0.014056447982788087, 0.013996031761169434, 0.014000160217285156, 0.013957088470458984, 0.014022656440734863, 0.014030847549438476, 0.014040063858032227, 0.014070783615112305, 0.029281280517578126, 0.014043135643005371, 0.013820927619934082, 0.013874176025390626, 0.013810688018798829, 0.014020607948303223, 0.014101504325866699, 0.014052351951599122, 0.014024703979492188, 0.014104576110839843, 0.014031871795654297, 0.014309375762939454, 0.014042143821716308, 0.014036959648132324, 0.014081024169921874, 0.013678591728210449, 0.013590527534484862, 0.013532159805297851, 0.013518848419189454, 0.013528063774108886, 0.013536255836486816, 0.01349836826324463, 0.01350656032562256, 0.013479935646057128, 0.013532159805297851, 0.013505536079406738, 0.01360588836669922, 0.013565952301025391, 0.013524031639099121, 0.013553600311279296, 0.01353932762145996, 0.014235648155212402, 0.014135295867919923, 0.014071807861328126, 0.014183423995971679, 0.013936639785766602, 0.013384703636169434, 0.013356032371520997, 0.013511679649353027, 0.013528063774108886, 0.013531135559082032, 0.013515775680541992, 0.013584383964538574, 0.013509632110595703, 0.013478912353515626, 0.01356390380859375, 0.013512703895568847, 0.013569024085998535, 0.013583359718322753, 0.014160896301269531, 0.014122048377990722, 0.014047167778015136, 0.014003199577331543, 0.014025728225708007, 0.014043135643005371, 0.014055423736572266, 0.014096384048461913, 0.014112768173217773, 0.014035967826843262, 0.014054400444030762, 0.014088224411010743, 0.01405129623413086, 0.014038016319274902, 0.029211648941040037, 0.014095359802246094, 0.014023679733276367, 0.014079999923706055, 0.014004223823547364, 0.014013440132141113, 0.014130175590515137, 0.014094335556030273, 0.014038016319274902, 0.014025728225708007, 0.014000127792358399, 0.014019583702087402, 0.014023679733276367, 0.014055423736572266, 0.014143487930297852, 0.01405951976776123, 0.014072832107543945, 0.014009344100952148, 0.014002176284790039, 0.014034943580627441, 0.014028800010681153, 0.01406771183013916, 0.014094335556030273, 0.014021632194519042, 0.014028800010681153, 0.014054400444030762, 0.01419264030456543, 0.014105600357055664, 0.01405951976776123, 0.014128128051757812, 0.01420902442932129, 0.014044159889221192, 0.014138367652893067, 0.013905920028686524, 0.013549568176269532, 0.014351360321044922, 0.013802495956420899, 0.0140697603225708, 0.014033920288085937, 0.014023679733276367, 0.014018560409545898, 0.01395199966430664, 0.014052351951599122, 0.014125056266784668, 0.013990912437438965, 0.01408409595489502, 0.014056447982788087, 0.014020607948303223, 0.014041088104248046, 0.014120960235595703, 0.013915200233459473, 0.013513664245605468, 0.013510656356811524, 0.013613056182861329, 0.013559807777404785, 0.01358233642578125, 0.013644800186157227, 0.01355059242248535, 0.013569024085998535, 0.013546496391296388, 0.013520895957946777, 0.013541376113891602, 0.013530112266540528, 0.02814975929260254, 0.01353932762145996, 0.013537280082702637, 0.01354751968383789, 0.013501503944396972, 0.013505472183227538, 0.013517824172973633, 0.013566975593566894, 
0.013507583618164062, 0.01358233642578125, 0.013467647552490235, 0.013643775939941406, 0.013494272232055664, 0.013504511833190918, 0.01354854393005371, 0.013565952301025391, 0.01353932762145996, 0.013536255836486816, 0.013529088020324707, 0.013622271537780761, 0.013521920204162598, 0.013693951606750488, 0.014116864204406738, 0.01409126377105713, 0.01376255989074707, 0.013542400360107423, 0.013549568176269532, 0.013579263687133789, 0.013540351867675781, 0.013524991989135742, 0.013511679649353027, 0.01353932762145996, 0.013500415802001953, 0.01354751968383789, 0.013590527534484862, 0.013543423652648925, 0.013552639961242676, 0.013552639961242676, 0.013570048332214356, 0.013590527534484862, 0.013536288261413574, 0.01353212833404541, 0.013485055923461914, 0.013488127708435058, 0.013817855834960938, 0.014138367652893067, 0.014079999923706055, 0.014411775588989258, 0.014155839920043945, 0.014061504364013672, 0.014072832107543945, 0.014124032020568847, 0.014048255920410157, 0.013993984222412109, 0.014065664291381836, 0.013975584030151367, 0.013948896408081054, 0.014000127792358399, 0.014117888450622559, 0.013784064292907714, 0.013839360237121581, 0.01419878387451172, 0.01437491226196289, 0.030264320373535155, 0.014108672142028808, 0.01397862434387207, 0.014050304412841797, 0.013996031761169434, 0.014062591552734375, 0.013642751693725585, 0.013524031639099121, 0.0135730562210083, 0.01356601619720459, 0.013521856307983398, 0.013789183616638183, 0.013531135559082032, 0.013470720291137696, 0.013502464294433594, 0.013459456443786622, 0.013676544189453126, 0.013512703895568847, 0.013513728141784668, 0.013510656356811524, 0.013524991989135742, 0.0134717435836792, 0.013549599647521973, 0.013559807777404785, 0.013570015907287597, 0.013513728141784668, 0.013535231590270995, 0.013509632110595703, 0.013495295524597169, 0.013510656356811524, 0.013502464294433594, 0.013839360237121581, 0.01392851161956787, 0.013561792373657226, 0.013512703895568847, 0.013534208297729493, 0.013538304328918458, 0.01350656032562256, 0.01354854393005371, 0.01351478385925293, 0.013458399772644044, 0.013501440048217773, 0.013503520011901856, 0.013499360084533691, 0.013486080169677735, 0.013538304328918458, 0.01348300838470459, 0.013515775680541992, 0.014631936073303223, 0.015512576103210449, 0.014542847633361817, 0.01419059181213379, 0.014310400009155273, 0.014002176284790039, 0.014158847808837891, 0.014009344100952148, 0.013933631896972656, 0.01369593620300293, 0.014564352035522461, 0.014092288017272948, 0.014072832107543945, 0.014104576110839843, 0.014041088104248046, 0.029276159286499022, 0.014034943580627441, 0.013997056007385255, 0.014071807861328126, 0.013996031761169434, 0.013958144187927245, 0.01397862434387207, 0.015081472396850586, 0.014701567649841308, 0.014668800354003907, 0.013993984222412109, 0.01417728042602539, 0.014112768173217773, 0.01437388801574707, 0.014112768173217773, 0.014028800010681153, 0.014118911743164063, 0.014029824256896972, 0.014139391899108887, 0.014566399574279786, 0.014102527618408203, 0.014079999923706055, 0.014047231674194336, 0.014014464378356933, 0.014028800010681153, 0.013956095695495606, 0.014009344100952148, 0.014114815711975098, 0.014044159889221192, 0.01359769630432129, 0.013529088020324707, 0.013566975593566894, 0.013596672058105469, 0.013494272232055664, 0.01353932762145996, 0.01356390380859375, 0.013559807777404785, 0.013463552474975587, 0.013546496391296388, 0.013578240394592284, 0.013587455749511718, 0.013480959892272949, 0.013545472145080567, 0.013477888107299805, 
0.013524991989135742, 0.013486080169677735, 0.013546496391296388, 0.013487104415893555, 0.013461503982543945, 0.013484031677246093, 0.013544511795043946, 0.013510592460632324, 0.013520895957946777, 0.013590527534484862, 0.013477888107299805, 0.013493247985839844, 0.013519871711730956, 0.013566975593566894, 0.01355571174621582, 0.013488127708435058, 0.01353932762145996, 0.013544447898864746, 0.013489151954650879, 0.028542015075683595, 0.01350444793701172, 0.01350656032562256, 0.013545503616333008, 0.013851615905761719, 0.013469696044921875, 0.013505536079406738, 0.013469696044921875, 0.013545472145080567, 0.01398476791381836, 0.014100480079650878, 0.014065664291381836, 0.013980671882629395, 0.014014464378356933, 0.014029824256896972, 0.01397657585144043, 0.014020607948303223, 0.013973504066467286, 0.013846528053283692, 0.013362175941467285, 0.013513728141784668, 0.013600768089294434, 0.013557760238647461, 0.0134901762008667, 0.013462528228759766, 0.013511679649353027, 0.013615103721618652, 0.013508607864379883, 0.013500415802001953, 0.013559807777404785, 0.013534208297729493, 0.013486080169677735, 0.01354854393005371, 0.013780991554260253, 0.014002176284790039, 0.013959168434143066, 0.014021632194519042, 0.014008319854736329, 0.013988863945007325, 0.013999103546142578, 0.01400115203857422, 0.013963264465332031, 0.014003199577331543, 0.013998080253601074, 0.014146559715270996, 0.01407590389251709, 0.014002176284790039, 0.014013440132141113, 0.014163968086242675, 0.014083104133605957, 0.013996000289916993, 0.013991935729980469, 0.014025728225708007, 0.013792256355285644, 0.01354854393005371, 0.013505536079406738, 0.013533184051513672, 0.01348198413848877, 0.013488127708435058, 0.013557760238647461, 0.013664256095886231, 0.013599743843078613, 0.014167039871215821, 0.029051904678344728, 0.014051327705383301, 0.014191616058349609, 0.014185471534729004, 0.014052351951599122, 0.014104576110839843, 0.014022720336914063, 0.014039999961853027, 0.013798399925231934, 0.013768704414367675, 0.013875200271606445, 0.013957119941711426, 0.014002176284790039, 0.014022656440734863, 0.013963264465332031, 0.01395206356048584, 0.013903807640075684, 0.013497344017028809, 0.013495295524597169, 0.0134717435836792, 0.0135731201171875, 0.013503487586975099, 0.013528063774108886, 0.0134901762008667, 0.013510656356811524, 0.013513728141784668, 0.013542400360107423, 0.013566975593566894, 0.013360128402709961, 0.013392895698547362, 0.013500415802001953, 0.013462528228759766, 0.013599743843078613, 0.013531135559082032, 0.013561856269836426, 0.013949952125549316, 0.014019583702087402, 0.014110719680786133, 0.013762623786926269, 0.013502400398254394, 0.013551615715026855, 0.013466624259948731, 0.013542400360107423, 0.013495295524597169, 0.013476863861083984, 0.013543423652648925, 0.013522944450378417, 0.013475839614868163, 0.013612031936645508, 0.01353321647644043, 0.013546463966369628, 0.013493247985839844, 0.013521920204162598, 0.01353932762145996, 0.013508607864379883, 0.01353932762145996, 0.013549568176269532, 0.013532159805297851, 0.013557760238647461, 0.013508607864379883, 0.013493247985839844, 0.013567999839782715, 0.013578240394592284, 0.02814873504638672, 0.013509632110595703, 0.01349836826324463, 0.013489151954650879, 0.013528063774108886, 0.013495295524597169, 0.013513824462890626, 0.013567904472351074, 0.013513728141784668, 0.013622271537780761, 0.013479935646057128, 0.013486080169677735, 0.013485055923461914, 0.013629440307617188, 0.013565952301025391, 0.013479935646057128, 0.013392895698547362, 
0.01337446403503418, 0.014511103630065919, 0.014114815711975098, 0.014020607948303223, 0.014053376197814941, 0.014180352210998535, 0.014109696388244629, 0.013961215972900391, 0.014033920288085937, 0.014102527618408203, 0.013990912437438965, 0.014018560409545898, 0.014072832107543945, 0.01418239974975586, 0.014221311569213867, 0.014842880249023438, 0.014110719680786133, 0.014094335556030273, 0.014072832107543945, 0.014045184135437011, 0.013964287757873535, 0.013947903633117676, 0.013545472145080567, 0.013396991729736327, 0.013604864120483399, 0.01397043228149414, 0.014063615798950196, 0.014005248069763183, 0.014004223823547364, 0.014024703979492188, 0.013560832023620606, 0.013507583618164062, 0.013520895957946777, 0.013496383666992188, 0.013994943618774415, 0.013971455574035644, 0.014010368347167968, 0.013985792160034179, 0.014003199577331543, 0.014017536163330077, 0.014043135643005371, 0.014005248069763183, 0.013945856094360352, 0.01354854393005371, 0.013531135559082032, 0.013667327880859375]",tokens/s,71.31122132602515,,,,,,,, -4bit-awq-gemm-sdpa,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,meta-llama/Llama-2-13b-hf,meta-llama/Llama-2-13b-hf,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.0,,0.30.1,,,,1.19.2,,,,0.11.1,,,,,,,,,,,,,,,,,,,,,,,,,,,MB,2986.500096,9259.450368,0.0,8613.003264,8211.364864,s,10,10.947190917968749,1.094719091796875,0.002010850578895603,1.0950899047851563,1.0971980224609374,1.0974085205078123,1.0975769189453124,"[1.0976190185546875, 1.096309326171875, 1.0916474609375, 1.09289697265625, 1.0933328857421876, 1.0920892333984376, 1.0948057861328124, 1.0953740234375, 1.0959649658203126, 1.0971512451171874]",tokens/s,233.8499455415552,kWh,1.291276744670338e-05,7.073689176177139e-06,6.076365972199904e-05,8.075011634487956e-05,tokens/kWh,3170274.070029042,MB,2986.500096,9330.753536,0.0,8684.306432,8503.627264,s,10,640.9193046874999,64.09193046875,0.016181839642145233,64.08713867187501,64.11070859375,64.119698046875,64.126889609375,"[64.0750625, 64.08454296875, 64.0822421875, 64.076171875, 64.07810546875, 64.09387890625, 64.10216796875, 64.089734375, 64.1087109375, 64.1286875]",tokens/s,0.9829630585197867,kWh,0.0007566528167658381,0.00041471410911453853,0.003539824637413,0.004711191563293376,tokens/kWh,13372.413147208052,,s,629,649.7487822875972,1.0329869352743999,0.1300160071749506,1.0172456665039062,1.0179758178710938,1.0183352172851563,2.110224638671875,"[1.016637451171875, 1.0170296020507812, 1.017143310546875, 1.0172344360351562, 1.0165718994140625, 1.016827880859375, 1.0167459716796876, 1.0169722900390625, 1.0170675048828124, 1.01732861328125, 1.0169548950195313, 1.0172835693359374, 1.0170890502929688, 1.01718017578125, 1.016700927734375, 1.0168627319335937, 1.016974365234375, 1.01762353515625, 1.0176194458007812, 1.0171002807617187, 1.0172579956054688, 1.0172610473632813, 1.01688525390625, 1.0172958984375, 1.016853515625, 1.0170009765625, 1.0169313354492187, 1.0170941162109375, 1.0171217651367188, 1.0168176879882813, 1.016890380859375, 1.0176798706054688, 1.0172395629882813, 1.017396240234375, 
1.0169989013671874, 1.0170675048828124, 1.0171791381835937, 1.0177362060546875, 1.0171443481445313, 1.0171986083984375, 1.017133056640625, 1.0175907592773437, 1.01686376953125, 1.0169241333007812, 1.0167664794921876, 1.0166435546875, 1.0166865844726563, 1.01715966796875, 1.0166220703125, 1.0169251708984375, 1.0169763793945312, 1.0173429565429688, 1.016700927734375, 1.0169528198242188, 1.016858642578125, 1.0169600219726562, 1.0173624267578125, 1.0173972778320313, 1.0168811645507811, 1.0170654907226562, 1.0171514892578124, 1.0170183715820313, 2.113563720703125, 1.0165084228515624, 1.0165995483398438, 1.01704296875, 1.0168893432617188, 1.0168678588867188, 1.0165933837890626, 1.01682275390625, 1.0167122192382811, 1.0169712524414063, 1.017650146484375, 1.0175662231445313, 1.0175538940429687, 1.0177986450195313, 1.0178928833007812, 1.016896484375, 1.0167449340820311, 1.0172692260742187, 1.0168688354492188, 1.0166507568359375, 1.0167510986328125, 1.0167859497070313, 1.0173839111328125, 1.0172303466796875, 1.0169999389648436, 1.0167675170898438, 1.0172876586914064, 1.0169815063476562, 1.0168299560546874, 1.0169784545898437, 1.0168616943359374, 1.0168115234375, 1.0175344848632812, 1.0173378295898436, 1.0172190551757812, 1.017038818359375, 1.0170572509765625, 1.01707568359375, 1.0168831787109376, 1.0171309814453124, 1.01711669921875, 1.01686376953125, 1.017228271484375, 1.0172507934570312, 1.01673779296875, 1.0168883056640625, 1.0183065795898438, 1.0170347290039063, 1.0172979125976562, 1.0179051513671875, 1.0176614379882813, 1.017459716796875, 1.017312255859375, 1.0175733642578124, 1.01785498046875, 1.0175313720703125, 1.0179799194335937, 1.0179594116210937, 1.01766552734375, 1.017586669921875, 1.017523193359375, 1.0172456665039062, 1.0174935302734376, 2.110212158203125, 1.01697021484375, 1.0170562744140625, 1.0166824951171876, 1.017027587890625, 1.0168463134765624, 1.0169794311523437, 1.0168719482421875, 1.0174310302734375, 1.0172272338867188, 1.0172548828125, 1.0166978759765626, 1.0173173828125, 1.0170009765625, 1.0174668579101562, 1.0170675048828124, 1.0172139282226562, 1.017228271484375, 1.0174627685546875, 1.0169548950195313, 1.0171443481445313, 1.0174464111328125, 1.0173347778320312, 1.0173265991210938, 1.0174474487304688, 1.0170040283203126, 1.0171893920898438, 1.0172518310546874, 1.0172119140625, 1.0169682006835938, 1.0171197509765626, 1.0175057983398437, 1.017186279296875, 1.017196533203125, 1.0173778076171875, 1.016795166015625, 1.0170050659179688, 1.0167817993164063, 1.017259033203125, 1.0169620361328124, 1.0171812133789062, 1.0173296508789063, 1.0171238403320313, 1.0170531616210938, 1.0173737182617189, 1.0171729736328126, 1.0174576416015626, 1.01732861328125, 1.0171401977539063, 1.0170521850585938, 1.0174976196289063, 1.017228271484375, 1.0171412353515625, 1.0169722900390625, 1.0173092041015626, 1.0171893920898438, 1.0174105834960938, 1.0172794799804687, 1.0174299926757813, 1.0173112182617188, 1.017143310546875, 1.0173870239257812, 1.0169405517578125, 2.1099970703125, 1.0165718994140625, 1.0168822021484376, 1.0171340942382812, 1.0167890014648437, 1.0165309448242188, 1.0165729370117187, 1.0171259155273438, 1.0168145751953126, 1.0167357177734375, 1.0166978759765626, 1.0165995483398438, 1.017038818359375, 1.017069580078125, 1.0168657836914063, 1.0166671142578125, 1.0169508056640626, 1.0168002319335938, 1.0168309936523436, 1.01686376953125, 1.0169978637695312, 1.0169108276367187, 1.0175303955078125, 1.017259033203125, 1.0170203857421876, 1.017275390625, 1.0176215209960937, 
1.0172620849609375, 1.0172682495117187, 1.0177730712890625, 1.017175048828125, 1.0169896850585938, 1.0171904296875, 1.0173450317382813, 1.0171514892578124, 1.017196533203125, 1.0175733642578124, 1.0169241333007812, 1.0169774169921875, 1.016858642578125, 1.0170009765625, 1.01692724609375, 1.0172477416992187, 1.0172241821289063, 1.0172979125976562, 1.017185302734375, 1.0175098876953126, 1.0170951538085937, 1.0170142822265624, 1.016974365234375, 1.0171340942382812, 1.0168934326171875, 1.0171658325195312, 1.0174044189453124, 1.0170419311523438, 1.0170009765625, 1.01764404296875, 1.017259033203125, 1.0177556762695312, 1.0170634155273437, 1.0172395629882813, 1.0169517822265626, 1.0172119140625, 2.11085009765625, 1.0167725830078125, 1.017186279296875, 1.0167930908203124, 1.0171504516601562, 1.0169845581054688, 1.0168934326171875, 1.0167664794921876, 1.0167285766601561, 1.016658935546875, 1.0167992553710938, 1.0168207397460938, 1.0168862915039063, 1.016806396484375, 1.0169343872070313, 1.01682177734375, 1.0170880126953126, 1.0174935302734376, 1.0173870239257812, 1.0168729858398438, 1.016933349609375, 1.0174105834960938, 1.0178846435546876, 1.0173112182617188, 1.017037841796875, 1.0165770263671876, 1.0169405517578125, 1.0169047241210938, 1.0172682495117187, 1.017038818359375, 1.0169456787109374, 1.0170654907226562, 1.0172426147460938, 1.01705419921875, 1.0172129516601562, 1.0170951538085937, 1.0170245361328125, 1.017217041015625, 1.0174197998046874, 1.016669189453125, 1.016816650390625, 1.0169917602539063, 1.0169262084960937, 1.0170654907226562, 1.0176153564453125, 1.0173839111328125, 1.0169313354492187, 1.017007080078125, 1.0170951538085937, 1.0168402099609375, 1.0171781005859375, 1.0170706176757813, 1.017101318359375, 1.0171300048828125, 1.0170357666015626, 1.016890380859375, 1.0173296508789063, 1.0192701416015626, 1.0171207885742188, 1.0169364624023438, 1.0173163452148437, 1.0173982543945312, 1.0175610961914063, 2.1102294921875, 1.0168330078125, 1.0173009643554687, 1.0168422241210937, 1.0167633666992189, 1.016748046875, 1.017270263671875, 1.0175191040039062, 1.0172374877929689, 1.0172579956054688, 1.0169036865234375, 1.01722314453125, 1.0172927856445313, 1.0177249145507812, 1.01745458984375, 1.0174157104492187, 1.0179625244140624, 1.0174013671875, 1.017365478515625, 1.0170818481445312, 1.016953857421875, 1.0169129028320312, 1.01753857421875, 1.0170408935546875, 1.017248779296875, 1.01697021484375, 1.0176993408203125, 1.0169528198242188, 1.0168002319335938, 1.0170091552734375, 1.0168760375976562, 1.0173685913085937, 1.0181334838867186, 1.0179051513671875, 1.0173931274414063, 1.0176051025390624, 1.0173521728515624, 1.0176522216796875, 1.0186475219726563, 1.017776123046875, 1.0168862915039063, 1.0169896850585938, 1.0183987426757812, 1.01768603515625, 1.018186767578125, 1.017185302734375, 1.0171566162109376, 1.0170480346679687, 1.0171371459960938, 1.0170992431640624, 1.0172692260742187, 1.0174402465820314, 1.0176215209960937, 1.0168811645507811, 1.0169937744140625, 1.017322509765625, 1.0178201904296875, 1.0176041259765625, 1.0179737548828125, 1.017111572265625, 1.017365478515625, 1.0178099365234374, 1.0178980102539064, 2.113080322265625, 1.0174884033203124, 1.0172631225585937, 1.0174566650390624, 1.0172620849609375, 1.0167183227539063, 1.0171514892578124, 1.017007080078125, 1.017343994140625, 1.0170880126953126, 1.016985595703125, 1.0176143188476563, 1.0176163940429688, 1.0170951538085937, 1.0172026977539061, 1.0171873168945313, 1.0176123046875, 1.017691162109375, 1.0170316772460937, 
1.0167756958007812, 1.0169569091796875, 1.0173972778320313, 1.0178017578125, 1.0174781494140626, 1.0178662109375, 1.0171248779296875, 1.017670654296875, 1.0176788330078126, 1.0176256103515624, 1.0175354614257812, 1.0175897827148437, 1.0182471923828125, 1.01829736328125, 1.0176644897460938, 1.018271728515625, 1.0174146728515625, 1.0175713500976562, 1.0185062255859374, 1.0173245239257813, 1.0170501098632811, 1.0175221557617187, 1.0181396484375, 1.01762353515625, 1.0171248779296875, 1.0179276733398437, 1.0172764282226563, 1.0176091918945311, 1.0178416748046875, 1.0178385620117187, 1.0172415771484375, 1.0178160400390626, 1.0175958862304688, 1.0175139770507813, 1.0184058837890626, 1.0174474487304688, 1.0172610473632813, 1.0176348266601563, 1.0172989501953125, 1.0170726318359375, 1.0170726318359375, 1.0176849975585938, 1.0169343872070313, 1.0174033813476562, 2.112878662109375, 1.016953857421875, 1.0167767333984374, 1.0172262573242188, 1.017343994140625, 1.0176256103515624, 1.017802734375, 1.0175139770507813, 1.0169364624023438, 1.0173214721679686, 1.0175713500976562, 1.0171924438476563, 1.0175733642578124, 1.01711767578125, 1.0170449829101562, 1.0170040283203126, 1.0178385620117187, 1.0171453247070312, 1.0173972778320313, 1.0172129516601562, 1.0172528686523437, 1.0171781005859375, 1.0176245727539062, 1.01760205078125, 1.0175027465820312, 1.01718017578125, 1.017638916015625, 1.017111572265625, 1.0172241821289063, 1.0173060913085938, 1.017427978515625, 1.0172743530273438, 1.0173153076171875, 1.0171094970703125, 1.0171586303710938, 1.0175293579101563, 1.0171494140625, 1.0172057495117188, 1.0173634643554688, 1.0171945190429688, 1.0169733276367188, 1.0171525268554686, 1.0175272827148438, 1.0170572509765625, 1.0170685424804689, 1.0172467041015625, 1.01697021484375, 1.0168197021484375, 1.0174822387695313, 1.0170521850585938, 1.0171043701171876, 1.0171340942382812, 1.0171760864257813, 1.0169978637695312, 1.01732763671875, 1.0172794799804687, 1.0176839599609375, 1.017469970703125, 1.0180280151367187, 1.0174791870117188, 1.017849853515625, 1.0173368530273437, 1.01711669921875, 2.113290283203125, 1.017007080078125, 1.0168678588867188, 1.01684326171875, 1.0171268920898437, 1.016648681640625, 1.0167285766601561, 1.0171514892578124, 1.0172190551757812, 1.0169129028320312, 1.0168606567382812, 1.0169886474609375, 1.0172241821289063, 1.0173921508789063, 1.0177730712890625, 1.016896484375, 1.0171392211914063, 1.0172139282226562, 1.0171043701171876, 1.0171217651367188, 1.0177310791015626, 1.01746484375, 1.017628662109375, 1.017260009765625, 1.0183670043945312, 1.0173040771484374, 1.0178580322265625, 1.0175846557617187, 1.01725390625, 1.01791845703125, 1.0185062255859374, 1.0173634643554688, 1.0173931274414063, 1.0182645874023437, 1.0179307250976561, 1.0173163452148437, 1.0178989868164063, 1.0175098876953126, 1.0173235473632813, 1.0175590209960939, 1.01777099609375, 1.0175580444335937, 1.0173604125976563, 1.0171300048828125, 1.0181734619140625, 1.0173685913085937, 1.0175887451171874, 1.01719140625, 1.017218017578125, 1.0171586303710938, 1.0180546264648438, 1.0181591186523438, 1.0179061889648438, 1.0179747924804687, 1.0184212646484374, 1.0185728149414062, 1.0190858154296876, 1.0186967163085938, 1.0185779418945313, 1.0188789672851561, 1.0184888305664062, 1.0182215576171876, 1.01819189453125, 2.114828369140625, 1.0173347778320312, 1.0177095947265624, 1.0177669067382813, 1.0172406005859376, 1.0176143188476563, 1.017554931640625, 1.0172661743164062, 1.0174033813476562, 1.0175989990234375, 1.0177607421875, 
1.0175191040039062, 1.0183218994140626, 1.0180669555664061, 1.0179829711914063, 1.0176931762695312, 1.0172927856445313, 1.0174801635742188, 1.0174003295898437, 1.0175467529296875, 1.0174320678710937, 1.0178211669921875, 1.0185543823242187, 1.0174586791992188, 1.0178580322265625, 1.0182103271484375, 1.0180054931640625, 1.0179921875, 1.0187222900390625, 1.0177330932617188, 1.0182236328125, 1.0179921875, 1.0184345703125, 1.0186956787109376, 1.018313720703125, 1.0182072143554688, 1.0180147094726562, 1.0174085083007813, 1.01800244140625, 1.0174351196289062, 1.0173193969726562, 1.0178539428710938, 1.0183413696289063, 1.0178672485351563, 1.0183229370117188, 1.0190479125976561, 1.01767578125, 1.0170265502929687, 1.0175949096679688, 1.0170900268554688, 1.0174924926757813, 1.0183259887695313, 1.0185646362304688, 1.0176317138671875, 1.0183250122070313, 1.0178457641601562, 1.018119140625, 1.01793994140625, 1.0190244140625, 1.0184181518554687, 1.0175703125, 1.0174607543945313, 1.0177362060546875]",tokens/s,0.9680664545233214,,,,,,,, -4bit-awq-gemm-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,x,x,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 304, in hf_raise_for_status - response.raise_for_status() - File ""/usr/local/lib/python3.10/dist-packages/requests/models.py"", line 1024, in raise_for_status - raise HTTPError(http_error_msg, response=self) -requests.exceptions.HTTPError: 404 Client Error: Not Found for url: https://huggingface.co/x/resolve/main/config.json - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1221, in hf_hub_download - return _hf_hub_download_to_cache_dir( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1325, in _hf_hub_download_to_cache_dir - _raise_on_head_call_error(head_call_error, force_download, local_files_only) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1823, in _raise_on_head_call_error - raise head_call_error - File 
""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1722, in _get_metadata_or_catch_error - metadata = get_hf_file_metadata(url=url, proxies=proxies, timeout=etag_timeout, headers=headers) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1645, in get_hf_file_metadata - r = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 372, in _request_wrapper - response = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 396, in _request_wrapper - hf_raise_for_status(response) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status - raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-669492bf-47ca378751a51202361d0c4e;efa62947-c698-40da-8752-a7596e8054bd) - -Repository Not Found for url: https://huggingface.co/x/resolve/main/config.json. -Please make sure you specified the correct `repo_id` and `repo_type`. -If you are trying to access a private or gated repo, make sure you are authenticated. - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 425, in cached_file - raise EnvironmentError( -OSError: x is not a local folder and is not a valid model identifier listed on 'https://huggingface.co/models' -If this is a private repository, make sure to pass a token having permission to this repo either by logging in with `huggingface-cli login` or by passing `token=` - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 
-4bit-awq-gemm-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,/,/,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 373, in cached_file - raise EnvironmentError( -OSError: / does not appear to have a file named config.json. Checkout 'https://huggingface.co///tree/None' for available files. 
- -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemm-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,togethercomputer/RedPajama-INCITE-Base-7B-v0.1,togethercomputer/RedPajama-INCITE-Base-7B-v0.1,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.42.3,,0.31.0,,,,1.20.0,,,,0.11.1,,,,,,,,,,,,,,,,,,,,,,,,,,,MB,1384.353792,6237.454336,0.0,5591.007232,5346.100224,s,10,5.621484252929688,0.5621484252929687,0.0006512231383256937,0.5619280090332031,0.5629722900390625,0.5631148376464843,0.5632288757324219,"[0.5632573852539062, 0.5619976196289063, 0.561502197265625, 0.561581787109375, 0.56184912109375, 0.5611882934570313, 0.5618583984375, 0.5625692138671875, 0.5627396240234375, 0.5629406127929687]",tokens/s,455.39574333341466,kWh,6.6308295009312805e-06,3.633394705801994e-06,3.0662030702449045e-05,4.092625490918232e-05,tokens/kWh,6255153.3378287,MB,1385.762816,6237.454336,0.0,5591.007232,5529.622016,s,10,326.683455078125,32.668345507812504,0.005059921734622061,32.669546875,32.673459570312495,32.67500029296875,32.676232871093745,"[32.676541015625, 32.670173828125, 32.662392578125, 32.6731171875, 32.671205078125, 32.66616796875, 32.67228515625, 32.668919921875, 32.66134765625, 32.6613046875]",tokens/s,1.9284723184078545,kWh,0.0003856536088773498,0.00021137137319951726,0.0017394750242949513,0.0023365000063718185,tokens/kWh,26963.406731518968,,s,629,331.22317260742193,0.5265869198846135,0.06676456858509212,0.5185095825195313,0.5189097534179687,0.5190709106445313,1.080322138671875,"[0.5188095703125, 0.5182166748046875, 0.5186017456054688, 0.5188095703125, 0.5184255981445313, 0.5183231811523438, 0.51841943359375, 0.5183098754882812, 0.5187072143554687, 0.5183672485351563, 0.5182525634765625, 0.5186539306640625, 0.5185064697265624, 0.5181951904296875, 0.5179996337890626, 0.5185115966796875, 0.5183805541992188, 0.5186437377929688, 0.5184778442382812, 0.5189140625, 0.5192980346679688, 0.5191638793945312, 0.518930419921875, 0.5191065673828125, 0.518508544921875, 0.5184798583984375, 0.518361083984375, 0.5188628540039063, 0.518540283203125, 0.5188761596679687, 0.5184215087890625, 0.518645751953125, 0.5184061279296875, 0.5188270263671875, 0.5185966186523437, 0.5184757690429688, 0.5186529541015625, 0.5185863647460938, 0.5191638793945312, 0.5191014404296875, 0.5186867065429688, 0.5188812866210938, 0.5187266845703125, 0.5185075073242188, 0.518697998046875, 0.5185228881835937, 0.5184696044921875, 0.5186682739257813, 0.51854541015625, 0.5184461059570312, 0.5185996704101562, 0.5186078491210937, 0.51859765625, 0.5185884399414062, 0.51877685546875, 0.5190922241210938, 0.5191895141601562, 0.5191741333007812, 0.5192550659179688, 0.5191157836914062, 0.5191659545898437, 0.518845458984375, 1.0806650390625, 0.5187543334960938, 0.5186386108398438, 0.518856689453125, 0.5189160766601563, 0.5187870483398438, 0.5190062255859375, 0.5187553100585938, 0.5186437377929688, 0.51831298828125, 0.5186365356445313, 0.5181327514648437, 0.5182822265625, 0.517855224609375, 0.5185443725585938, 0.5185177612304688, 
0.5186467895507813, 0.5184409790039063, 0.5186969604492188, 0.5182013549804687, 0.5184624633789062, 0.518456298828125, 0.5188157348632813, 0.518245361328125, 0.5183590698242188, 0.51825048828125, 0.5184716796875, 0.5190277099609375, 0.5182545776367188, 0.5186058349609375, 0.5184737548828126, 0.51859765625, 0.518403076171875, 0.5183590698242188, 0.5187256469726562, 0.5184972534179687, 0.51888232421875, 0.5187758178710937, 0.5185167236328125, 0.5184839477539063, 0.5184092407226563, 0.5188699951171875, 0.5185812377929687, 0.5185095825195313, 0.5184573364257813, 0.5184921875, 0.5184737548828126, 0.5184603881835937, 0.5184911499023438, 0.5186386108398438, 0.5184378662109375, 0.5186990356445312, 0.5184532470703125, 0.5188690185546875, 0.5186221923828125, 0.5189376220703125, 0.5190870971679687, 0.5185853271484375, 0.5185474853515625, 0.5184737548828126, 0.5185966186523437, 0.5188659057617188, 0.5187153930664062, 1.08008544921875, 0.5183876953125, 0.5183805541992188, 0.5181194458007813, 0.5184419555664063, 0.5182669067382812, 0.518513671875, 0.5181430053710937, 0.5185761108398438, 0.51825048828125, 0.5185054931640625, 0.5182116088867188, 0.5189078979492188, 0.5183272705078125, 0.5183908081054688, 0.5181583251953125, 0.5181675415039062, 0.5184737548828126, 0.5183364868164062, 0.51837646484375, 0.5182279663085938, 0.51806005859375, 0.5183364868164062, 0.5183334350585938, 0.5184174194335938, 0.5184747314453125, 0.518287353515625, 0.5188116455078124, 0.5184337768554688, 0.518771728515625, 0.5185095825195313, 0.5183733520507813, 0.5180508422851563, 0.5186058349609375, 0.5183743896484375, 0.5189140625, 0.5184409790039063, 0.5186283569335938, 0.5184163818359375, 0.5185833129882812, 0.5184041137695312, 0.5184071655273438, 0.5186764526367188, 0.5181409301757812, 0.5184235229492188, 0.5182443237304688, 0.5182146606445313, 0.5187942504882812, 0.5187543334960938, 0.5190287475585937, 0.5187266845703125, 0.518451171875, 0.5184706420898437, 0.5183651733398438, 0.5186488037109375, 0.51856689453125, 0.5184808959960937, 0.5186990356445312, 0.5183580322265625, 0.5184839477539063, 0.5183866577148437, 0.5184931640625, 0.5186262817382813, 1.0804141845703126, 0.5185863647460938, 0.5185955810546875, 0.5184798583984375, 0.5184952392578125, 0.5181726684570312, 0.5187952880859374, 0.518445068359375, 0.51839794921875, 0.5184686279296875, 0.5185792236328125, 0.5180999755859375, 0.5182177124023437, 0.5181572875976562, 0.5183170776367187, 0.5185464477539062, 0.5185044555664062, 0.5182678833007812, 0.5184010009765625, 0.5184624633789062, 0.518302734375, 0.518582275390625, 0.5185014038085938, 0.5188433837890625, 0.5185966186523437, 0.519103515625, 0.5185044555664062, 0.5182361450195313, 0.5182843017578125, 0.5184716796875, 0.5183252563476562, 0.5183436889648437, 0.5181808471679688, 0.5183488159179688, 0.5182064819335938, 0.5183488159179688, 0.5181931762695312, 0.5184389038085937, 0.5183406372070313, 0.5183948974609375, 0.5183641357421875, 0.5188024291992187, 0.5191905517578125, 0.5192560424804687, 0.5191629028320313, 0.51881982421875, 0.5188444213867187, 0.518962158203125, 0.5190697021484375, 0.5190154418945313, 0.5188935546875, 0.5190338745117188, 0.518972412109375, 0.5189693603515625, 0.5189816284179688, 0.5191700439453125, 0.5193287963867188, 0.518739990234375, 0.5188853759765625, 0.5185628051757812, 0.5189918823242188, 0.5186201782226563, 0.5191505737304688, 1.0808135986328125, 0.51853515625, 0.5181163330078125, 0.5184470825195312, 0.5182996215820312, 0.518171630859375, 0.5183119506835937, 0.51818701171875, 0.518203369140625, 
0.5182802124023438, 0.5185525512695313, 0.5187522583007812, 0.518487060546875, 0.5186611328125, 0.5187286987304688, 0.518908935546875, 0.518830078125, 0.5189468383789062, 0.5185269775390625, 0.5185689697265625, 0.5184174194335938, 0.5183887329101563, 0.518234130859375, 0.5185863647460938, 0.5180579833984374, 0.5184757690429688, 0.5182648315429688, 0.5183539428710937, 0.5185167236328125, 0.5185372314453125, 0.518513671875, 0.51848193359375, 0.5186898193359375, 0.5184696044921875, 0.5185628051757812, 0.51841845703125, 0.5189990234375, 0.5187839965820312, 0.5193861083984375, 0.518898681640625, 0.5187921752929687, 0.5188413696289063, 0.5190901489257812, 0.5189130249023437, 0.5188925170898437, 0.5186703491210938, 0.5184102172851562, 0.5185751342773437, 0.518434814453125, 0.5185751342773437, 0.5184911499023438, 0.5185218505859375, 0.5186570434570312, 0.518592529296875, 0.5186652221679687, 0.5188474731445313, 0.5188054809570313, 0.5184102172851562, 0.5188587646484375, 0.5183876953125, 0.5187327880859375, 0.5192386474609375, 0.518329345703125, 1.0797823486328124, 0.5184645385742187, 0.5180784912109375, 0.5182371826171875, 0.5183170776367187, 0.51822900390625, 0.518414306640625, 0.5182443237304688, 0.5181859741210938, 0.5187860717773437, 0.5186262817382813, 0.5189263305664062, 0.5186529541015625, 0.5186600952148438, 0.518392822265625, 0.51827197265625, 0.5186017456054688, 0.518455322265625, 0.5184215087890625, 0.5184624633789062, 0.5183283081054687, 0.5182945556640625, 0.5183364868164062, 0.5184890747070312, 0.5184890747070312, 0.5183621215820312, 0.5186611328125, 0.5183385620117188, 0.5185679321289063, 0.518445068359375, 0.5188474731445313, 0.5187174682617187, 0.5190717163085937, 0.5187266845703125, 0.5183631591796874, 0.518223876953125, 0.5186600952148438, 0.518667236328125, 0.5187758178710937, 0.518560791015625, 0.51850341796875, 0.5185003662109375, 0.5185853271484375, 0.5183672485351563, 0.5182003173828125, 0.5184706420898437, 0.5184307250976562, 0.5187706909179688, 0.5183969116210938, 0.5187327880859375, 0.5183334350585938, 0.5185341186523438, 0.5186181030273438, 0.5184255981445313, 0.5184542846679687, 0.5187205200195313, 0.518129638671875, 0.5184603881835937, 0.5185167236328125, 0.51881982421875, 0.518624267578125, 0.51871435546875, 0.5189232788085938, 1.0807879638671876, 0.5186129760742187, 0.518371337890625, 0.5185361938476563, 0.518656005859375, 0.5183733520507813, 0.5184470825195312, 0.5185771484375, 0.5185413208007813, 0.5184358520507812, 0.5186365356445313, 0.5184696044921875, 0.5184747314453125, 0.5182965698242188, 0.51846142578125, 0.5183303833007813, 0.5184307250976562, 0.5183743896484375, 0.5185730590820312, 0.518403076171875, 0.5184429931640625, 0.5185464477539062, 0.5187133178710938, 0.518592529296875, 0.5189293823242187, 0.5185494995117188, 0.5188423461914062, 0.5185587158203125, 0.5185433349609375, 0.5187420043945312, 0.5186324462890625, 0.5185761108398438, 0.5185269775390625, 0.5186856689453125, 0.5186160888671875, 0.518697998046875, 0.5186262817382813, 0.5185792236328125, 0.51859765625, 0.5188095703125, 0.5186631469726563, 0.5189130249023437, 0.5186232299804687, 0.51881982421875, 0.5186939086914063, 0.5188731079101563, 0.5187584228515625, 0.5185372314453125, 0.5184962768554687, 0.5189385986328126, 0.5185597534179688, 0.5186795654296875, 0.518572021484375, 0.518687744140625, 0.518813720703125, 0.5186826171875, 0.518513671875, 0.5185904541015625, 0.51853515625, 0.5184655151367188, 0.5187901611328125, 0.5186437377929688, 0.5187153930664062, 1.0812200927734374, 0.5190051879882812, 
0.5185259399414063, 0.5182935180664062, 0.51880859375, 0.5185054931640625, 0.5187153930664062, 0.5184440307617187, 0.5183447265625, 0.5185700073242188, 0.5186478271484375, 0.5184255981445313, 0.5183006591796875, 0.5179422607421875, 0.5181675415039062, 0.5181358032226563, 0.5180006103515625, 0.5183467407226563, 0.51862939453125, 0.5185341186523438, 0.5186846923828125, 0.5184655151367188, 0.5186693115234375, 0.518824951171875, 0.5188341674804687, 0.5186990356445312, 0.5188495483398438, 0.51874609375, 0.5188362426757812, 0.518856689453125, 0.5190697021484375, 0.5187962646484375, 0.5187471313476563, 0.518961181640625, 0.5186170654296876, 0.5190113525390625, 0.5188720703125, 0.5186529541015625, 0.5184112548828125, 0.518487060546875, 0.5186253051757812, 0.51827099609375, 0.5185054931640625, 0.5184163818359375, 0.5184174194335938, 0.5182597045898437, 0.5180006103515625, 0.5184583740234375, 0.5184020385742187, 0.518455322265625, 0.5182986450195313, 0.5184798583984375, 0.5184307250976562, 0.5184706420898437, 0.51846142578125, 0.5185208129882812, 0.5186549682617188, 0.51833447265625, 0.5185966186523437, 0.5187051391601563, 0.5184440307617187, 0.5187573852539062, 0.518919189453125, 1.0813255615234374, 0.5182597045898437, 0.5179361572265625, 0.5181696166992188, 0.5183784790039062, 0.5182894287109375, 0.5182791748046875, 0.51852490234375, 0.518408203125, 0.5189385986328126, 0.518445068359375, 0.5186150512695312, 0.518277099609375, 0.5182412719726562, 0.5184952392578125, 0.5183211669921876, 0.5186611328125, 0.518781982421875, 0.5183262939453125, 0.5183580322265625, 0.518451171875, 0.5184849853515625, 0.5184337768554688, 0.51825048828125, 0.5185095825195313, 0.518381591796875, 0.51833447265625, 0.5182853393554687, 0.5183549194335938, 0.5184962768554687, 0.51846142578125, 0.518513671875, 0.5183303833007813, 0.5186969604492188, 0.5186815795898437, 0.5183436889648437, 0.518614013671875, 0.5184727172851562, 0.5183436889648437, 0.5183866577148437, 0.5183252563476562, 0.518297607421875, 0.5183355102539062, 0.5184061279296875, 0.5183549194335938, 0.5184010009765625, 0.5183662109375, 0.5183590698242188, 0.5187440795898437, 0.5185802001953125, 0.5185177612304688, 0.5186570434570312, 0.5183119506835937, 0.5188116455078124, 0.5184276733398437, 0.5184890747070312, 0.5185904541015625, 0.5183037719726562, 0.518476806640625, 0.518213623046875, 0.51850341796875, 0.51825048828125, 0.5185443725585938, 1.0814249267578124, 0.518656005859375, 0.518150146484375, 0.5185014038085938, 0.51856591796875, 0.5186262817382813, 0.5183231811523438, 0.5181675415039062, 0.5183549194335938, 0.5182146606445313, 0.5182986450195313, 0.5185382690429687, 0.5181614379882813, 0.518223876953125, 0.5182249145507812, 0.5183303833007813, 0.5183180541992187, 0.5182515258789062, 0.518128662109375, 0.51858740234375, 0.5183529052734375, 0.5182423095703125, 0.5184102172851562, 0.5182515258789062, 0.5182013549804687, 0.5184522094726562, 0.5184061279296875, 0.5185044555664062, 0.51863037109375, 0.5185700073242188, 0.5186262817382813, 0.5184317626953125, 0.51835595703125, 0.5185781860351563, 0.5184603881835937, 0.5184542846679687, 0.5183364868164062, 0.518434814453125, 0.5183662109375, 0.5183795166015625, 0.51816552734375, 0.5183355102539062, 0.51835595703125, 0.518350830078125, 0.5186395874023437, 0.5182269287109375, 0.5184327392578125, 0.5185730590820312, 0.5184491577148438, 0.5190123291015625, 0.5186806030273438, 0.5186017456054688, 0.5186570434570312, 0.5187604370117187, 0.5186160888671875, 0.518666259765625, 0.518624267578125, 0.5184112548828125, 
0.5184635009765625, 0.5184808959960937, 0.5184696044921875, 0.5186734008789062, 0.5181480712890625]",tokens/s,1.8990217231736815,,,,,,,, -4bit-awq-gemm-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,facebook/xglm-564M,facebook/xglm-564M,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - self.load_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3704, in from_pretrained - config = cls._autoset_attn_implementation( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1490, in _autoset_attn_implementation - config = cls._check_and_enable_sdpa( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1656, in _check_and_enable_sdpa - raise ValueError( -ValueError: XGLMForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. 
Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemm-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,EleutherAI/gpt-neo-125m,EleutherAI/gpt-neo-125m,cuda,0,42,,,True,,,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run - report = scenario.run(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run - self.run_model_loading_tracking(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking - backend.load() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load - self.load_transformers_model() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model - self.load_transformers_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights - self.load_transformers_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained - self.pretrained_model = self.automodel_loader.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3826, in from_pretrained - config = cls._autoset_attn_implementation( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1565, in _autoset_attn_implementation - config = cls._check_and_enable_sdpa( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1731, in _check_and_enable_sdpa - raise ValueError( -ValueError: GPTNeoForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. 
If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` - -",gpt_neo,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemm-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,EleutherAI/gpt-j-6b,EleutherAI/gpt-j-6b,cuda,0,42,,,True,,,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run - report = scenario.run(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run - self.run_model_loading_tracking(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking - backend.load() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load - self.load_transformers_model() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model - self.load_transformers_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights - self.load_transformers_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained - self.pretrained_model = self.automodel_loader.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3826, in from_pretrained - config = cls._autoset_attn_implementation( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1565, in _autoset_attn_implementation - config = cls._check_and_enable_sdpa( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1731, in _check_and_enable_sdpa - raise ValueError( -ValueError: GPTJForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. 
If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` - -",gptj,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemm-sdpa,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,stabilityai/stablelm-2-12b,stabilityai/stablelm-2-12b,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.1,,0.30.1,,,,1.19.2,,,,0.11.1,,,,,,,,,,,,,,,,,,,,,,,,,,,MB,2286.108672,9584.508928,0.0,8938.061824,8628.937728,s,10,10.131530883789061,1.0131530883789062,0.001088448228780468,1.0127930908203124,1.0144804138183594,1.0150706817626953,1.015542896118164,"[1.0156609497070312, 1.0116715698242187, 1.0127140502929688, 1.012624267578125, 1.0126713256835937, 1.0143492431640626, 1.0128721313476563, 1.0132860717773438, 1.013524658203125, 1.0121566162109374]",tokens/s,252.6765233570105,kWh,1.1963782442940607e-05,6.5549710930463335e-06,5.5387488754404804e-05,7.390624229039174e-05,tokens/kWh,3463848.1414618148,MB,2286.108672,9584.508928,0.0,8938.061824,8715.664896,s,10,592.3277656249999,59.232776562499986,0.006289011500180114,59.23276171875,59.24203125,59.24248828125,59.24285390625,"[59.23234765625, 59.22259375, 59.2419296875, 59.22881640625, 59.23756640625, 59.22535546875, 59.22958984375, 59.23317578125, 59.2429453125, 59.2334453125]",tokens/s,1.0636003182043137,kWh,0.0006992366234295897,0.00038324170626838165,0.0032439279284735946,0.004326406258171565,tokens/kWh,14561.739291359381,,s,629,600.5023488159183,0.9546937183082955,0.12033659124895162,0.9401558837890625,0.9410275512695312,0.9412470825195313,1.95283546875,"[0.94150146484375, 0.940000244140625, 0.9400443115234375, 0.9407887573242187, 0.9415792846679687, 0.9401016235351562, 0.9407989501953125, 0.939236328125, 0.939652099609375, 0.9404119262695313, 0.9412341918945313, 0.9400452880859375, 0.940031982421875, 0.9407703247070313, 0.9399039916992188, 0.9399715576171875, 0.9395128173828124, 0.9409924926757812, 0.9403135986328125, 0.939736083984375, 0.9405081787109375, 0.940558349609375, 0.9399757080078125, 0.9390223388671874, 0.9397933959960938, 0.9407989501953125, 0.9399931030273437, 0.9401292724609375, 0.9413304443359375, 0.9404334106445312, 0.9403258666992188, 0.9405870361328125, 0.9406074829101563, 0.9403986206054687, 0.9400125732421875, 0.941048828125, 0.9410816040039063, 0.9399193725585937, 0.94055322265625, 0.9405183715820312, 0.9404334106445312, 0.9397545166015625, 0.940400634765625, 0.9393602294921874, 0.94025830078125, 0.94036376953125, 0.9395015869140625, 0.9403494262695312, 0.9393715209960938, 0.9401753540039063, 0.939345947265625, 0.9394923706054688, 0.9394974975585938, 0.940368896484375, 0.9394483032226563, 0.939377685546875, 0.9410477905273438, 0.9396592407226563, 0.9396121826171875, 0.9394268188476562, 0.940126220703125, 0.939109375, 1.9525693359375, 0.9394636840820313, 0.9393213500976563, 0.9392977905273437, 0.9397432250976563, 
0.940037109375, 0.9395179443359375, 0.9403463745117188, 0.9393008422851562, 0.93964697265625, 0.9392588500976562, 0.9400934448242187, 0.9392957153320313, 0.9392506713867188, 0.9399244995117187, 0.9402685546875, 0.9394237670898438, 0.9390971069335937, 0.940157958984375, 0.9403064575195312, 0.9402173461914063, 0.939841552734375, 0.9407047729492187, 0.939747314453125, 0.9393909912109375, 0.9412290649414062, 0.940248046875, 0.9398323364257812, 0.9403709716796875, 0.9403074340820312, 0.9406505126953125, 0.940516357421875, 0.9410795288085938, 0.9408665771484375, 0.94008935546875, 0.940015625, 0.940010498046875, 0.9404405517578125, 0.94000537109375, 0.9402777709960938, 0.9398855590820312, 0.9399234619140625, 0.9397626953125, 0.9402910766601562, 0.9401549072265625, 0.940037109375, 0.9408952026367188, 0.9404805297851563, 0.9399664916992188, 0.9401907348632812, 0.94055322265625, 0.9397831420898437, 0.93945751953125, 0.9397196655273438, 0.9400934448242187, 0.9397329711914062, 0.9398640747070313, 0.94013232421875, 0.9406156616210938, 0.9400360717773437, 0.9403074340820312, 0.9402255249023438, 0.9400115356445312, 1.95331689453125, 0.9401005859375, 0.93952099609375, 0.939863037109375, 0.9400064086914063, 0.9403811645507812, 0.9397329711914062, 0.9406719970703125, 0.94033203125, 0.9403596801757812, 0.9412177734375, 0.9406023559570312, 0.9398558959960938, 0.94081640625, 0.9405286254882812, 0.9401968383789062, 0.9398538208007813, 0.9395271606445312, 0.940263427734375, 0.9397442626953125, 0.9402941284179688, 0.941149169921875, 0.9406617431640625, 0.9412741088867187, 0.940669921875, 0.9410816040039063, 0.9408839721679687, 0.939631591796875, 0.9415341796875, 0.9398968505859375, 0.9399746704101563, 0.940179443359375, 0.9405819091796875, 0.9400064086914063, 0.9398374633789063, 0.9410344848632812, 0.9401630859375, 0.9402439575195313, 0.9399183349609375, 0.9410057983398438, 0.9402869873046875, 0.9402777709960938, 0.9403402099609375, 0.941686767578125, 0.9396182861328125, 0.9397001953125, 0.9402654418945312, 0.9400330200195313, 0.940537841796875, 0.9406842651367188, 0.9406279907226562, 0.9398609619140625, 0.9404866333007813, 0.9411369018554687, 0.9399429321289062, 0.939873291015625, 0.9398394775390625, 0.9408798828125, 0.9406709594726562, 0.94021630859375, 0.9408256225585937, 0.940242919921875, 0.9397821655273437, 1.95401318359375, 0.9401282348632812, 0.9397453002929688, 0.9401876220703125, 0.9404436645507812, 0.9402992553710937, 0.9395773315429687, 0.9403228149414062, 0.939809814453125, 0.939509765625, 0.9397462768554687, 0.9422510375976563, 0.9400678100585937, 0.9396019287109375, 0.9398917236328125, 0.9403330688476562, 0.9396807861328125, 0.9395588989257813, 0.9399459838867188, 0.9394462890625, 0.939831298828125, 0.9402726440429687, 0.9402921142578125, 0.9396920166015625, 0.939767822265625, 0.9400340576171875, 0.9404702758789063, 0.9402992553710937, 0.9400657958984375, 0.93994189453125, 0.9394892578125, 0.9398200073242188, 0.9408123168945313, 0.9402921142578125, 0.9403504638671875, 0.940864501953125, 0.9403279418945313, 0.9399326782226562, 0.940169189453125, 0.9409740600585937, 0.9400985717773438, 0.9403341064453125, 0.9408727416992188, 0.9405614013671875, 0.940031005859375, 0.9394452514648437, 0.9399480590820313, 0.9405870361328125, 0.9398425903320312, 0.9403135986328125, 0.9397453002929688, 0.939378662109375, 0.9397616577148438, 0.9410897827148438, 0.9408286743164063, 0.9397012329101563, 0.9395425415039063, 0.9402388305664062, 0.9399132080078125, 0.94078564453125, 0.9406279907226562, 0.9397565307617187, 
0.9397637329101562, 1.9536650390625, 0.9407723388671875, 0.939578369140625, 0.9401507568359375, 0.940353515625, 0.9400514526367187, 0.9401712646484375, 0.9398651123046875, 0.9396582641601563, 0.9398251342773437, 0.9404794921875, 0.9411246337890625, 0.9405941772460937, 0.940052490234375, 0.9404395751953125, 0.941676513671875, 0.9395343627929688, 0.9395845336914063, 0.939747314453125, 0.9404139404296875, 0.9399725952148438, 0.9405081787109375, 0.940210205078125, 0.9395486450195313, 0.9399818115234375, 0.94059521484375, 0.9396868896484375, 0.9403258666992188, 0.9402019653320313, 0.93954150390625, 0.9396776733398438, 0.9398087768554687, 0.9410938720703125, 0.9401641235351562, 0.939953125, 0.9407017211914063, 0.940769287109375, 0.9399070434570312, 0.9406832885742188, 0.9408511962890626, 0.93992041015625, 0.93991015625, 0.9406443481445312, 0.9401129150390625, 0.9394769897460937, 0.9398220825195313, 0.940062744140625, 0.9406146850585938, 0.9397760009765626, 0.94078466796875, 0.9406842651367188, 0.9410416870117188, 0.9400278930664062, 0.9406760864257813, 0.9400023193359375, 0.9400360717773437, 0.9401005859375, 0.9414287109375, 0.9405501708984375, 0.940611572265625, 0.9412608032226563, 0.9403648071289062, 0.94048974609375, 1.9550003662109374, 0.9402306518554687, 0.9403934936523437, 0.9403607177734375, 0.9412402954101563, 0.9396428833007813, 0.9399511108398437, 0.939725830078125, 0.939884521484375, 0.9398927612304687, 0.940263427734375, 0.9413529663085938, 0.940885986328125, 0.9410529174804687, 0.9415485229492188, 0.9412464599609375, 0.940295166015625, 0.939989013671875, 0.9397749633789062, 0.9401620483398437, 0.9402521362304688, 0.940422119140625, 0.9397033081054688, 0.9398681640625, 0.94054296875, 0.9411123046875, 0.9401774291992188, 0.9401016235351562, 0.9397852172851563, 0.9397985229492187, 0.9396900024414062, 0.939810791015625, 0.93952001953125, 0.9393899536132813, 0.9396654052734374, 0.941065185546875, 0.9407026977539062, 0.9396029663085937, 0.9407150268554687, 0.9401466674804687, 0.9398435668945313, 0.9395322875976563, 0.9403463745117188, 0.9394933471679687, 0.9393858642578125, 0.939447265625, 0.9401026611328125, 0.9395661010742188, 0.939377685546875, 0.9397606201171875, 0.939526123046875, 0.9393858642578125, 0.9394186401367187, 0.9398968505859375, 0.9392496337890625, 0.9398272094726563, 0.9395732421875, 0.9405542602539062, 0.93991015625, 0.940548095703125, 0.9398589477539062, 0.9403648071289062, 0.939809814453125, 1.95293896484375, 0.93994189453125, 0.9393387451171875, 0.9408471069335937, 0.9404620971679688, 0.9404794921875, 0.9408706665039063, 0.9407283325195313, 0.9402695922851563, 0.940379150390625, 0.9420646362304688, 0.9404036865234375, 0.9405603637695312, 0.9400238037109375, 0.94124853515625, 0.9406914672851563, 0.9400872802734375, 0.9396541137695312, 0.9395056762695313, 0.9401641235351562, 0.94051123046875, 0.9398302612304688, 0.9397872924804688, 0.939341796875, 0.9398619995117188, 0.9404784545898438, 0.9394800415039063, 0.9401866455078125, 0.93954150390625, 0.9394053344726563, 0.940663818359375, 0.941427734375, 0.9398405151367187, 0.9396613159179688, 0.939568115234375, 0.94137548828125, 0.9404170532226562, 0.93991015625, 0.9405634765625, 0.9408040771484375, 0.9399254760742187, 0.9397933959960938, 0.9401558837890625, 0.9402664794921874, 0.9399244995117187, 0.9395875854492187, 0.9407119140625, 0.9401763916015625, 0.9404497680664062, 0.9396480712890625, 0.939274169921875, 0.9396900024414062, 0.9406156616210938, 0.9394974975585938, 0.9394503784179687, 0.9392230224609375, 
0.939916259765625, 0.9409976196289063, 0.939694091796875, 0.9403258666992188, 0.9398446044921875, 0.9401170043945313, 0.9398978271484375, 1.9538472900390624, 0.9399326782226562, 0.9398046875, 0.9415096435546875, 0.9401118774414062, 0.9396039428710937, 0.9404282836914063, 0.9398599853515625, 0.9395968017578125, 0.9396162719726563, 0.9403279418945313, 0.9400186767578125, 0.9399378051757813, 0.939483154296875, 0.9413427124023438, 0.939953125, 0.9398405151367187, 0.9391646728515625, 0.9401661376953125, 0.9404569702148438, 0.9409935302734375, 0.9405778198242187, 0.9403176879882813, 0.9402091674804688, 0.9410509033203125, 0.9403566284179687, 0.9398660888671875, 0.9406289672851562, 0.9398743286132812, 0.9399254760742187, 0.9395353393554687, 0.9404968872070313, 0.9400115966796875, 0.93969091796875, 0.940168212890625, 0.9411409912109375, 0.940242919921875, 0.9399797973632813, 0.9398435668945313, 0.9398876342773438, 0.939953125, 0.94072216796875, 0.940315673828125, 0.9397380981445312, 0.9401937866210938, 0.9397545166015625, 0.9420257568359375, 0.9404149780273438, 0.94042626953125, 0.94061669921875, 0.9402767333984375, 0.9410713500976563, 0.9403862915039063, 0.9398855590820312, 0.9397760009765626, 0.940189697265625, 0.9404805297851563, 0.9394237670898438, 0.9399490356445312, 0.9412474975585937, 0.93998388671875, 0.939931640625, 0.9399439086914062, 1.9538431396484375, 0.940669921875, 0.939863037109375, 0.9412310791015625, 0.939705322265625, 0.9403146362304687, 0.9399193725585937, 0.9399849243164062, 0.940326904296875, 0.9404139404296875, 0.9400852661132812, 0.9402255249023438, 0.9408552856445312, 0.940031005859375, 0.9410283813476562, 0.940389404296875, 0.940705810546875, 0.9393633422851563, 0.9402235107421875, 0.9409392700195313, 0.941180908203125, 0.9399746704101563, 0.9405009765625, 0.9404282836914063, 0.940821533203125, 0.9403033447265625, 0.9397268676757813, 0.94030029296875, 0.9399244995117187, 0.9394667358398437, 0.9398067016601562, 0.941180908203125, 0.9402276000976563, 0.9401077880859375, 0.940368896484375, 0.9423933715820313, 0.940284912109375, 0.9403238525390625, 0.9398333740234375, 0.9402501220703126, 0.9403607177734375, 0.9407989501953125, 0.9403515014648437, 0.9402286376953125, 0.9402235107421875, 0.9409075317382812, 0.940410888671875, 0.9398958129882813, 0.9406812133789062, 0.940147705078125, 0.9399982299804688, 0.9407620849609375, 0.9411195068359375, 0.940062744140625, 0.9402501220703126, 0.9403064575195312, 0.9409515380859375, 0.939799560546875, 0.9402296142578125, 0.940310546875, 0.9400381469726562, 0.94015283203125, 0.9409638671875, 1.952026611328125, 0.9399254760742187, 0.9394933471679687, 0.9416345825195312, 0.9399859008789062, 0.9406945190429687, 0.9396664428710938, 0.940632080078125, 0.9401558837890625, 0.940590087890625, 0.94102734375, 0.940400634765625, 0.940221435546875, 0.9404784545898438, 0.940031982421875, 0.9396961059570312, 0.9399644165039063, 0.9391165161132813, 0.9401170043945313, 0.9396141967773437, 0.9402808227539062, 0.9396408081054688, 0.94097509765625, 0.9396275024414062, 0.9407098999023438, 0.939484130859375, 0.9403135986328125, 0.940037109375, 0.9396387939453125, 0.9410057983398438, 0.940732421875, 0.9403883666992188, 0.9400033569335937, 0.9398814697265625, 0.9403955078125, 0.9411707153320312, 0.940169189453125, 0.9412464599609375, 0.939968505859375, 0.9397186279296875, 0.9395138549804688, 0.9404170532226562, 0.94005859375, 0.940073974609375, 0.940353515625, 0.9408685913085938, 0.9408317260742187, 0.9399654541015625, 0.9402081298828125, 0.9396746215820313, 
0.9398169555664062, 0.939715576171875, 0.9401446533203125, 0.9400780639648437, 0.9395189819335937, 0.9397626953125, 0.9406863403320312, 0.9393438720703124, 0.9411901245117188, 0.941717529296875, 0.9408173828125, 0.9400023193359375, 0.94002685546875]",tokens/s,1.0474563525692686,,,,,,,, -4bit-awq-gemm-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,google/gemma-2b,google/gemma-2b,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 304, in hf_raise_for_status - response.raise_for_status() - File ""/usr/local/lib/python3.10/dist-packages/requests/models.py"", line 1024, in raise_for_status - raise HTTPError(http_error_msg, response=self) -requests.exceptions.HTTPError: 403 Client Error: Forbidden for url: https://huggingface.co/google/gemma-2b/resolve/main/config.json - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1722, in _get_metadata_or_catch_error - metadata = get_hf_file_metadata(url=url, proxies=proxies, timeout=etag_timeout, headers=headers) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1645, in get_hf_file_metadata - r = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 372, in _request_wrapper - response = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 396, in _request_wrapper - hf_raise_for_status(response) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 367, in hf_raise_for_status - raise HfHubHTTPError(message, response=response) from e -huggingface_hub.utils._errors.HfHubHTTPError: (Request ID: Root=1-6694810d-01e8adbd1d9d990f66604f76;e02ab593-2d28-465f-bce4-90aefffa5f9c) - -403 Forbidden: Please enable access to public gated repositories in your fine-grained token settings to view this repository.. -Cannot access content at: https://huggingface.co/google/gemma-2b/resolve/main/config.json. -If you are trying to create or update content,make sure you have a token with the `write` role. 
- -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1221, in hf_hub_download - return _hf_hub_download_to_cache_dir( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1325, in _hf_hub_download_to_cache_dir - _raise_on_head_call_error(head_call_error, force_download, local_files_only) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1826, in _raise_on_head_call_error - raise LocalEntryNotFoundError( -huggingface_hub.utils._errors.LocalEntryNotFoundError: An error happened while trying to locate the file on the Hub and we cannot find the requested files in the local cache. Please check your connection and try again or make sure your Internet connection is on. - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 445, in cached_file - raise EnvironmentError( -OSError: We couldn't connect to 'https://huggingface.co' to load this file, couldn't find it in the cached files and it looks like google/gemma-2b is not the path to a directory containing a file named config.json. -Checkout your internet connection or see how to run the library in offline mode at 'https://huggingface.co/docs/transformers/installation#offline-mode'. 
- -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemm-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,EleutherAI/polyglot-ko-12.8b,EleutherAI/polyglot-ko-12.8b,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.42.3,,0.31.0,,,,1.20.0,,,,0.11.1,,,,,,,,,,,,,,,,,,,,,,,,,,,MB,1719.15264,9941.024768,0.0,9294.577664,8911.746048,s,10,10.544145263671874,1.0544145263671874,0.0010263685969407418,1.0538491821289062,1.0558189086914063,1.056022979736328,1.0561862365722656,"[1.0548157958984374, 1.055557861328125, 1.0534412841796874, 1.0533297119140625, 1.0538177490234375, 1.05368359375, 1.0536180419921874, 1.05622705078125, 1.053880615234375, 1.0557735595703126]",tokens/s,242.7887643790399,kWh,1.2439715911944707e-05,6.81644635683667e-06,5.731321251720156e-05,7.656937478598295e-05,tokens/kWh,3343373.2574614706,MB,1719.15264,9941.024768,0.0,9294.577664,9207.169536,s,10,625.9803984374998,62.598039843749994,0.0064857424668782184,62.597361328125,62.608881640625,62.6090521484375,62.6091885546875,"[62.5915, 62.5971171875, 62.60001171875, 62.5950546875, 62.60922265625, 62.59760546875, 62.5876328125, 62.59442578125, 62.60884375, 62.598984375]",tokens/s,1.0064212898239837,kWh,0.0007390174119340049,0.000405046892581094,0.0033900267953525963,0.004534091099867695,tokens/kWh,13894.736257469185,,s,629,634.4764850463862,1.0087066534918707,0.1252696003233685,0.9935564575195313,0.9942255615234376,0.994458203125,2.047124262695313,"[0.9932533569335937, 0.993554443359375, 0.9934541015625, 0.9934970703125, 0.993522705078125, 0.9933742065429687, 0.9931765747070312, 0.9935513305664062, 0.9936742553710938, 0.9934131469726563, 0.99338134765625, 0.9935011596679687, 0.9932554321289062, 0.9931090087890625, 0.9931304931640625, 0.9932892456054687, 0.9933352661132813, 0.9934735107421875, 0.9935462646484375, 0.993201171875, 0.9934755859375, 0.9933844604492188, 0.99399169921875, 0.9942271728515625, 0.9945016479492188, 0.9934305419921875, 0.9931151123046875, 0.9933302001953125, 0.9932943115234375, 0.993154052734375, 0.9932830810546875, 0.9939834594726562, 0.9932789916992187, 0.9933536987304687, 0.9933199462890625, 0.9932779541015625, 0.9934039306640625, 0.993575927734375, 0.9934479370117187, 0.9935789794921875, 0.9932236938476563, 0.9937623291015625, 0.9932841186523438, 0.9933956909179688, 0.9934428100585937, 0.9935053100585938, 0.9933148193359375, 0.9936640014648438, 0.9938964233398437, 0.993997802734375, 0.993818603515625, 0.9943306274414062, 0.9934089965820313, 0.9936957397460937, 0.9934878540039063, 0.993517578125, 0.993406982421875, 0.9937264404296875, 0.9937561645507812, 0.9935595703125, 0.993470458984375, 0.994150390625, 2.049156005859375, 0.9941790771484375, 0.9928366088867188, 0.9927720947265625, 0.992954345703125, 0.9932196044921875, 0.992901123046875, 0.9929287719726563, 0.9937110595703125, 0.9937469482421875, 0.99350732421875, 0.993523681640625, 0.9935032348632813, 0.9931919555664063, 0.9934089965820313, 0.993375244140625, 0.9931519775390625, 0.9932297973632812, 0.99370703125, 0.9943889770507812, 
0.9939599609375, 0.9944320068359375, 0.9945589599609375, 0.9943388061523437, 0.9938411865234374, 0.9933147583007812, 0.9931079711914063, 0.993070068359375, 0.993491943359375, 0.9930404052734375, 0.9930618896484374, 0.9932410888671875, 0.9931898803710938, 0.9932615966796875, 0.99338134765625, 0.9934192504882813, 0.9932717895507812, 0.9932431640625, 0.9937141723632813, 0.9942671508789063, 0.994344970703125, 0.9942804565429687, 0.9934131469726563, 0.993133544921875, 0.9934315795898437, 0.9938145141601562, 0.99424560546875, 0.9943848876953125, 0.99461328125, 0.9945005493164063, 0.9941248168945312, 0.9932974243164062, 0.993386474609375, 0.9932850952148438, 0.9934642944335937, 0.9933834228515624, 0.9939906616210937, 0.9948251953125, 0.9943306274414062, 0.9936486206054688, 0.9934315795898437, 0.9933772583007813, 0.9934315795898437, 2.0469842529296876, 0.99340185546875, 0.9938616333007813, 0.993259521484375, 0.9934468994140625, 0.9933701171875, 0.9933557739257812, 0.9932922973632813, 0.993364990234375, 0.993280029296875, 0.9933721313476562, 0.993448974609375, 0.99376025390625, 0.993455078125, 0.9935195922851563, 0.9930925903320312, 0.993028076171875, 0.992974853515625, 0.9931939697265625, 0.9932113647460937, 0.993269775390625, 0.9935667114257812, 0.9935780029296875, 0.9939630126953125, 0.9940654296875, 0.9936220092773438, 0.993196044921875, 0.9931427612304687, 0.99349609375, 0.9934161987304687, 0.9942200317382812, 0.9941903076171875, 0.9942251586914063, 0.9941360473632812, 0.9942907104492188, 0.99439208984375, 0.9937581787109375, 0.993448974609375, 0.993428466796875, 0.9934581909179687, 0.9935697631835938, 0.9937991943359376, 0.9938134765625, 0.9933875122070313, 0.9936414794921875, 0.9939404907226562, 0.993712158203125, 0.9935503540039062, 0.9940111083984375, 0.9935881958007813, 0.9936742553710938, 0.9938831176757813, 0.9940234375, 0.9937172241210938, 0.9938462524414062, 0.9939876098632813, 0.9938677978515625, 0.9941268310546875, 0.9940326538085937, 0.9937705078125, 0.9937592163085938, 0.9941647338867188, 0.9940643920898438, 2.0465888671875, 0.993291259765625, 0.9930659790039063, 0.9929953002929688, 0.9930301513671875, 0.9929625854492188, 0.9931141357421875, 0.9938667602539063, 0.9938483276367187, 0.9934500122070312, 0.9932410888671875, 0.9937684326171875, 0.9930966796875, 0.9929779663085937, 0.9930065307617187, 0.9930025024414062, 0.99308544921875, 0.9939855346679688, 0.9937489624023438, 0.993122314453125, 0.9932236938476563, 0.9935472412109375, 0.9931898803710938, 0.9932113647460937, 0.9934725341796875, 0.9931591796875, 0.9933363037109375, 0.9935697631835938, 0.9940367431640625, 0.9934243774414062, 0.993396728515625, 0.9935011596679687, 0.9935503540039062, 0.9933609008789063, 0.9936404418945313, 0.993280029296875, 0.9932615966796875, 0.993560546875, 0.993607666015625, 0.9932369995117187, 0.9933373413085937, 0.9933363037109375, 0.9936138305664063, 0.9941299438476563, 0.9947095336914062, 0.9946358032226562, 0.9948703002929687, 0.9945211181640625, 0.9942548217773437, 0.9943377685546875, 0.994208740234375, 0.9939722290039062, 0.9937797241210937, 0.9933690795898438, 0.9936834716796875, 0.9937991943359376, 0.9935728759765625, 0.9937192993164062, 0.993850341796875, 0.9936292114257812, 0.993522705078125, 0.9938278198242188, 0.9938964233398437, 2.047667236328125, 0.9933219604492187, 0.993249267578125, 0.9937152099609375, 0.993607666015625, 0.9934929809570312, 0.9934458618164063, 0.9934561157226562, 0.9934202880859375, 0.9931448364257812, 0.9932482299804688, 0.993133544921875, 0.9932185668945313, 
0.9933486328125, 0.9932113647460937, 0.9930239868164062, 0.9931499633789063, 0.993385498046875, 0.993623046875, 0.9931847534179687, 0.9931407470703125, 0.9933311767578125, 0.9933025512695313, 0.9935738525390625, 0.9934878540039063, 0.9931028442382812, 0.993291259765625, 0.9935390625, 0.9940121459960938, 0.9939404907226562, 0.9939844970703124, 0.99399169921875, 0.9939630126953125, 0.99418212890625, 0.9944033203125, 0.99441357421875, 0.9945497436523437, 0.9946542358398438, 0.994677734375, 0.9946644287109375, 0.9946214599609375, 0.9943961791992187, 0.9941852416992187, 0.9934417724609375, 0.993924072265625, 0.9936271362304687, 0.9935360107421874, 0.99349609375, 0.9938565063476562, 0.9935574951171875, 0.9935206298828125, 0.9936732177734375, 0.9935462646484375, 0.9937049560546874, 0.9939435424804688, 0.9951979370117188, 0.9946101684570312, 0.9944935302734375, 0.9945128173828125, 0.9944463500976563, 0.994735107421875, 0.9944514770507813, 0.9943807983398437, 2.048078857421875, 0.993028076171875, 0.9930137329101563, 0.993206298828125, 0.9933762817382813, 0.9939036254882813, 0.9938851928710938, 0.993755126953125, 0.9937858276367187, 0.9940326538085937, 0.9939876098632813, 0.9934868774414063, 0.9936414794921875, 0.9939988403320312, 0.9935923461914062, 0.9933240356445312, 0.9931847534179687, 0.9932779541015625, 0.9934714965820313, 0.9930844116210937, 0.9936578369140625, 0.9938052978515625, 0.993322998046875, 0.9932216186523437, 0.9935308837890625, 0.99378076171875, 0.993306640625, 0.9932584838867188, 0.9932144775390624, 0.9931376342773437, 0.993728515625, 0.993671142578125, 0.9932789916992187, 0.99370703125, 0.9934725341796875, 0.9934192504882813, 0.9932841186523438, 0.9932666625976563, 0.9934161987304687, 0.9933086547851563, 0.994081787109375, 0.9937469482421875, 0.9935452270507813, 0.9935267944335937, 0.9936803588867188, 0.9936640014648438, 0.9933844604492188, 0.993386474609375, 0.9936271362304687, 0.9933690795898438, 0.9941544799804688, 0.9938729248046875, 0.9938770141601563, 0.9938646850585937, 0.9941729125976563, 0.9941309204101563, 0.9941688232421875, 0.9943121948242187, 0.9938104248046875, 0.99384326171875, 0.9937837524414063, 0.9938462524414062, 0.9935820922851563, 2.0471787109375, 0.9933516845703125, 0.993239013671875, 0.9932574462890625, 0.9932349243164063, 0.9929584350585937, 0.9934888916015625, 0.9937141723632813, 0.9931510009765625, 0.9934397583007812, 0.9934541015625, 0.993349609375, 0.9931478881835938, 0.9930997924804688, 0.9930321655273437, 0.9929564208984375, 0.99333837890625, 0.9933394165039062, 0.9931058959960938, 0.9935267944335937, 0.9934131469726563, 0.993406982421875, 0.9932113647460937, 0.9931427612304687, 0.9933762817382813, 0.99306494140625, 0.9933988037109375, 0.9935216674804688, 0.9931868286132812, 0.9932021484375, 0.9934817504882812, 0.993249267578125, 0.9933035278320312, 0.9930823974609375, 0.993165283203125, 0.9931611938476562, 0.9935851440429687, 0.9936312255859375, 0.9937080078125, 0.9935677490234375, 0.993924072265625, 0.9938779907226563, 0.9935370483398438, 0.9933721313476562, 0.9934315795898437, 0.9933414306640626, 0.9936466064453126, 0.9936640014648438, 0.9939773559570313, 0.9937377319335937, 0.9939671020507812, 0.9938831176757813, 0.9938268432617188, 0.9936035766601562, 0.99350732421875, 0.9936937255859375, 0.9939087524414062, 0.9935697631835938, 0.9935523681640624, 0.9934110717773438, 0.99375, 0.9941063842773438, 0.9937572021484375, 2.049478759765625, 0.9928888549804687, 0.9935093994140625, 0.9934356689453125, 0.99312744140625, 0.9929881591796875, 
0.9932820434570313, 0.9932564697265625, 0.99310693359375, 0.9931765747070312, 0.9931530151367187, 0.9932451782226562, 0.9936434936523437, 0.993344482421875, 0.9931212768554688, 0.99290625, 0.9931212768554688, 0.9929718017578125, 0.9932113647460937, 0.9932083129882813, 0.9932031860351562, 0.9932339477539063, 0.9936619873046875, 0.9933270263671875, 0.9931744995117188, 0.9932288208007812, 0.993723388671875, 0.9935257568359375, 0.9934970703125, 0.993238037109375, 0.9932687377929688, 0.9936260986328125, 0.9937100830078125, 0.9936373901367187, 0.9935585327148437, 0.9932482299804688, 0.99365380859375, 0.9933915405273438, 0.9938831176757813, 0.9936926879882813, 0.9938933715820313, 0.9934356689453125, 0.9937561645507812, 0.9936506958007812, 0.9939199829101563, 0.9936773071289062, 0.9938401489257812, 0.99349609375, 0.994255859375, 0.994904052734375, 0.99376953125, 0.9936199340820312, 0.9937572021484375, 0.9941176147460937, 0.9938462524414062, 0.9936619262695312, 0.9941596069335937, 0.9936865234375, 0.99378173828125, 0.9937141723632813, 0.9936803588867188, 0.9935554809570313, 0.9938104248046875, 2.051559326171875, 0.9941268310546875, 0.9940828247070312, 0.994271240234375, 0.9940510864257812, 0.9941145629882813, 0.9936506958007812, 0.9942886352539062, 0.9940695190429687, 0.9944586181640624, 0.9935769653320312, 0.9936854858398437, 0.9932533569335937, 0.99340185546875, 0.9937705078125, 0.9931049194335938, 0.9933690795898438, 0.9936537475585937, 0.9935462646484375, 0.993891357421875, 0.99378173828125, 0.993122314453125, 0.9931417846679688, 0.993490966796875, 0.9935994873046875, 0.9932892456054687, 0.9932687377929688, 0.9935841064453125, 0.9936168823242187, 0.9935554809570313, 0.9935769653320312, 0.993480712890625, 0.993385498046875, 0.99350732421875, 0.9935564575195313, 0.99364453125, 0.9936558227539063, 0.9939107666015625, 0.9939158935546875, 0.9941985473632813, 0.9941432495117187, 0.9936455688476562, 0.9938226928710937, 0.9939691772460938, 0.9941217041015625, 0.9938595581054688, 0.9940776977539062, 0.993892333984375, 0.9935165405273437, 0.994145263671875, 0.9943971557617187, 0.9939005737304687, 0.9939691772460938, 0.9940899658203125, 0.9944524536132813, 0.9939476318359375, 0.994008056640625, 0.9935984497070313, 0.9936322631835938, 0.9937469482421875, 0.9941473388671875, 0.993839111328125, 0.9936773071289062, 2.049173583984375, 0.9932349243164063, 0.9930895385742188, 0.9929584350585937, 0.993112060546875, 0.9935626220703125, 0.9935841674804687, 0.9931212768554688, 0.9931560668945313, 0.9936353149414062, 0.9933639526367187, 0.993328125, 0.9930321655273437, 0.993048583984375, 0.9930782470703124, 0.9932236938476563, 0.9934100341796875, 0.9942405395507813, 0.9938800659179687, 0.9939517211914063, 0.9939046630859375, 0.9933926391601563, 0.9930547485351563, 0.9930239868164062, 0.99312841796875, 0.9933936767578125, 0.9935728759765625, 0.9934264526367188, 0.9932861328125, 0.9937889404296875, 0.9936240844726563, 0.9943818359375, 0.9941749877929688, 0.9940623168945313, 0.9940439453125, 0.9943009033203125, 0.9939138793945312, 0.9936312255859375, 0.9932615966796875, 0.9934940185546876, 0.99378173828125, 0.9934970703125, 0.9934868774414063, 0.9932533569335937, 0.9932605590820313, 0.9939199829101563, 0.994492431640625, 0.9941585693359375, 0.9941483764648438, 0.9944913940429687, 0.9944575805664062, 0.9941248168945312, 0.9935923461914062, 0.9933486328125, 0.9937080078125, 0.9940858764648437, 0.9940858764648437, 0.9938626708984375, 0.9938104248046875, 0.99388623046875, 0.9936803588867188, 0.9938411254882813, 
0.993755126953125]",tokens/s,0.9913684980051132,,,,,,,, -4bit-awq-gemm-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,Qwen/Qwen-72B,Qwen/Qwen-72B,cuda,0,42,,,True,,,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run - report = scenario.run(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run - self.run_model_loading_tracking(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking - backend.load() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load - self.load_transformers_model() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model - self.load_transformers_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights - self.load_transformers_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained - self.pretrained_model = self.automodel_loader.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 551, in from_pretrained - model_class = get_class_from_dynamic_module( - File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 502, in get_class_from_dynamic_module - final_module = get_cached_module_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 327, in get_cached_module_file - modules_needed = check_imports(resolved_module_file) - File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 182, in check_imports - raise ImportError( -ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. 
Run `pip install transformers_stream_generator` - -",qwen,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemm-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,i,i,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 304, in hf_raise_for_status - response.raise_for_status() - File ""/usr/local/lib/python3.10/dist-packages/requests/models.py"", line 1024, in raise_for_status - raise HTTPError(http_error_msg, response=self) -requests.exceptions.HTTPError: 404 Client Error: Not Found for url: https://huggingface.co/i/resolve/main/config.json - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1221, in hf_hub_download - return _hf_hub_download_to_cache_dir( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1325, in _hf_hub_download_to_cache_dir - _raise_on_head_call_error(head_call_error, force_download, local_files_only) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1823, in _raise_on_head_call_error - raise head_call_error - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1722, in _get_metadata_or_catch_error - metadata = get_hf_file_metadata(url=url, proxies=proxies, timeout=etag_timeout, headers=headers) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1645, in get_hf_file_metadata - r = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 372, in _request_wrapper - response = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 396, in _request_wrapper - hf_raise_for_status(response) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status - 
raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-66949012-71fdb0513e56e188783c67f0;b428a105-cecc-4e0c-8b16-5d7a038347de) - -Repository Not Found for url: https://huggingface.co/i/resolve/main/config.json. -Please make sure you specified the correct `repo_id` and `repo_type`. -If you are trying to access a private or gated repo, make sure you are authenticated. - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 425, in cached_file - raise EnvironmentError( -OSError: i is not a local folder and is not a valid model identifier listed on 'https://huggingface.co/models' -If this is a private repository, make sure to pass a token having permission to this repo either by logging in with `huggingface-cli login` or by passing `token=` - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemm-sdpa,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,stabilityai/stablelm-3b-4e1t,stabilityai/stablelm-3b-4e1t,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.1,,0.30.1,,,,1.19.2,,,,0.11.1,,,,,,,,,,,,,,,,,,,,,,,,,,,MB,2104.19712,2911.371264,0.0,2264.92416,2140.72832,s,10,2.4884028778076175,0.2488402877807617,0.0017336823043823078,0.2489057083129883,0.25072755279541015,0.2508263542175293,0.2509053953552246,"[0.2507055969238281, 0.25092515563964846, 0.24757785034179688, 0.24682144165039063, 0.24747581481933595, 0.24631741333007812, 0.24757225036621094, 0.25036245727539064, 0.2504113311767578, 
0.2502335662841797]",tokens/s,1028.7723193181093,kWh,2.9072395831265747e-06,1.5930372833037567e-06,1.325819488838971e-05,1.775847175482004e-05,tokens/kWh,14415654.879227767,MB,2104.19712,2911.371264,0.0,2264.92416,2246.907904,s,10,145.7403466796875,14.57403466796875,0.01212453290098591,14.5695126953125,14.590745703125,14.591192187499999,14.591549375,"[14.590646484375, 14.591638671875, 14.582625, 14.5875322265625, 14.570828125, 14.568197265625, 14.5662099609375, 14.56188671875, 14.5592158203125, 14.56156640625]",tokens/s,4.3227562878290176,kWh,0.00017210086691347241,9.432530317883612e-05,0.0007748077688972132,0.001041233938989522,tokens/kWh,60505.13495664491,,s,629,147.744148284912,0.23488735816361225,0.029563076186171044,0.23116493225097656,0.23200419921874998,0.2323472412109375,0.47893525024414063,"[0.23276133728027343, 0.23156428527832032, 0.23250534057617187, 0.23177215576171875, 0.23175167846679687, 0.2316636199951172, 0.23166464233398437, 0.23181925964355468, 0.2317864990234375, 0.2317178955078125, 0.23165542602539063, 0.23199948120117186, 0.2319114227294922, 0.23230770874023438, 0.2314403839111328, 0.23090074157714843, 0.2311679992675781, 0.23108505249023437, 0.23182847595214845, 0.23120793151855468, 0.2317506561279297, 0.23156838989257814, 0.23112498474121093, 0.23156224060058594, 0.23118540954589845, 0.23118438720703124, 0.230614013671875, 0.23102362060546874, 0.2310082550048828, 0.23108607482910157, 0.230908935546875, 0.23087001037597657, 0.23108709716796874, 0.2310952911376953, 0.23200051879882813, 0.2310277099609375, 0.23204249572753907, 0.2310840301513672, 0.230835205078125, 0.23095603942871093, 0.23102668762207032, 0.23113728332519531, 0.23130213928222657, 0.23161343383789063, 0.2317076416015625, 0.23303167724609375, 0.23183871459960936, 0.2320199737548828, 0.23191552734375, 0.23193702697753907, 0.23194009399414062, 0.23124581909179687, 0.23149977111816405, 0.23219200134277343, 0.2310451202392578, 0.23188479614257812, 0.23178752136230468, 0.23196159362792967, 0.23144857788085937, 0.23166770935058595, 0.23165029907226561, 0.23200358581542968, 0.48229580688476564, 0.2318663635253906, 0.23166464233398437, 0.23162367248535157, 0.23172402954101562, 0.23182745361328125, 0.23196876525878907, 0.2318561248779297, 0.23200358581542968, 0.23229849243164064, 0.23214796447753908, 0.2318919677734375, 0.23183360290527344, 0.23185714721679687, 0.23202919006347655, 0.231910400390625, 0.23255039978027345, 0.23172607421875, 0.23201484680175782, 0.23192678833007813, 0.23147724914550782, 0.23117721557617188, 0.23114137268066406, 0.2309345245361328, 0.2317998046875, 0.23216639709472656, 0.23188581848144532, 0.2317004852294922, 0.23109426879882813, 0.23139736938476563, 0.23195648193359375, 0.23148544311523436, 0.23106661987304689, 0.23104920959472655, 0.23114137268066406, 0.23106048583984376, 0.2310031433105469, 0.23155711364746093, 0.2323671112060547, 0.23183564758300781, 0.23187660217285155, 0.2323937225341797, 0.23180697631835936, 0.23087820434570314, 0.2311065673828125, 0.2314956817626953, 0.2341201934814453, 0.23121510314941407, 0.23109735107421875, 0.23107379150390625, 0.23113011169433595, 0.2308290557861328, 0.2309160919189453, 0.2308720703125, 0.23094578552246095, 0.2308229064941406, 0.2311014404296875, 0.23225958251953124, 0.23146803283691406, 0.232700927734375, 0.23182234191894532, 0.23120179748535155, 0.2311956481933594, 0.47941940307617187, 0.231984130859375, 0.23158067321777343, 0.23134104919433593, 0.23164210510253908, 0.23269786071777343, 0.2318868408203125, 0.23200665283203126, 
0.23196467590332032, 0.23198822021484375, 0.23132159423828125, 0.23144960021972658, 0.23234970092773438, 0.23229849243164064, 0.2326097869873047, 0.23209368896484375, 0.23207942199707032, 0.23165023803710938, 0.2314403839111328, 0.23190937805175782, 0.23209164428710938, 0.23215309143066407, 0.23194931030273438, 0.23258828735351564, 0.23188890075683594, 0.231699462890625, 0.23152946472167968, 0.23133287048339843, 0.23157452392578126, 0.23149261474609376, 0.23182847595214845, 0.23126322937011717, 0.23139430236816405, 0.23153561401367187, 0.23187046813964843, 0.23149977111816405, 0.23232000732421876, 0.2311393280029297, 0.2319052734375, 0.23080345153808593, 0.23089459228515624, 0.23061196899414063, 0.23088230895996092, 0.23082701110839843, 0.23086898803710937, 0.23073382568359374, 0.23112396240234376, 0.2319974365234375, 0.2311065673828125, 0.23078912353515624, 0.23073587036132812, 0.2308495330810547, 0.23100108337402345, 0.23077580261230468, 0.23094578552246095, 0.23075942993164061, 0.23081983947753906, 0.23078399658203125, 0.23087615966796876, 0.23072665405273438, 0.23087615966796876, 0.2308290557861328, 0.23087615966796876, 0.4795392150878906, 0.23177830505371094, 0.2310133819580078, 0.23141786193847655, 0.23111372375488282, 0.23092633056640624, 0.23101951599121093, 0.23092633056640624, 0.23117619323730468, 0.2310963134765625, 0.23151104736328126, 0.23107174682617188, 0.23176499938964842, 0.23153663635253907, 0.2313123779296875, 0.23081983947753906, 0.23273677062988282, 0.23198104858398438, 0.23182540893554687, 0.2313287658691406, 0.23094989013671874, 0.23091506958007812, 0.23098880004882813, 0.23076454162597657, 0.230761474609375, 0.23085772705078125, 0.23132159423828125, 0.23243980407714843, 0.23207936096191406, 0.23211520385742188, 0.23186534118652344, 0.2317998046875, 0.2313912353515625, 0.23109324645996093, 0.23126527404785155, 0.23119667053222656, 0.23178956604003906, 0.2312058868408203, 0.23166157531738282, 0.23158784484863282, 0.23162060546875, 0.23184281921386718, 0.23122227478027343, 0.2323435516357422, 0.2337822723388672, 0.23145062255859375, 0.2319605712890625, 0.2309754943847656, 0.23164108276367187, 0.23121817016601562, 0.23156736755371093, 0.2316451873779297, 0.2317619171142578, 0.23206605529785157, 0.2315130920410156, 0.23114854431152343, 0.2315274200439453, 0.2312376251220703, 0.23179263305664063, 0.23200358581542968, 0.23266099548339844, 0.23178752136230468, 0.2316636199951172, 0.4785776672363281, 0.23102053833007813, 0.2308526153564453, 0.23079936218261718, 0.230835205078125, 0.2308177947998047, 0.2319779815673828, 0.23094578552246095, 0.23088333129882813, 0.23176089477539064, 0.2309969940185547, 0.23143936157226563, 0.23160627746582033, 0.231478271484375, 0.2311690216064453, 0.2314639434814453, 0.23161958312988282, 0.23254835510253907, 0.23204966735839844, 0.2317864990234375, 0.23187455749511718, 0.23142707824707032, 0.232457275390625, 0.2311648712158203, 0.2321274871826172, 0.23180799865722657, 0.2312376251220703, 0.2311690216064453, 0.23100621032714844, 0.23096115112304688, 0.23099392700195312, 0.23096217346191406, 0.23088742065429688, 0.2309969940185547, 0.23093043518066406, 0.23100210571289062, 0.23089971923828126, 0.23066213989257814, 0.23169740295410157, 0.23114239501953124, 0.23260263061523437, 0.23084646606445314, 0.23156736755371093, 0.23112498474121093, 0.23072972106933592, 0.23080447387695313, 0.23094992065429687, 0.23107376098632812, 0.23089260864257813, 0.23205165100097655, 0.23145266723632812, 0.23087309265136718, 0.23119052124023437, 0.2307184600830078, 
0.2308362274169922, 0.23087513732910156, 0.23102566528320312, 0.2333665313720703, 0.23122125244140626, 0.2310635528564453, 0.230940673828125, 0.23090483093261718, 0.23118336486816407, 0.47746356201171875, 0.2322493438720703, 0.2315694122314453, 0.23091404724121095, 0.2314844207763672, 0.231510009765625, 0.23128985595703125, 0.2311628875732422, 0.231077880859375, 0.23118438720703124, 0.23118028259277343, 0.23097445678710937, 0.23111167907714844, 0.23096627807617187, 0.23094784545898436, 0.23088230895996092, 0.23102668762207032, 0.23121714782714844, 0.2314086456298828, 0.23167794799804686, 0.23117721557617188, 0.23124172973632812, 0.2311014404296875, 0.23113113403320312, 0.23106661987304689, 0.23094578552246095, 0.2310102996826172, 0.2310451202392578, 0.23154483032226564, 0.23112704467773437, 0.23111372375488282, 0.2310451202392578, 0.23104716491699218, 0.23118336486816407, 0.2313881530761719, 0.23125094604492188, 0.2312130584716797, 0.23101644897460938, 0.2333429718017578, 0.23087820434570314, 0.2312447967529297, 0.23098573303222655, 0.23105126953125, 0.2310522918701172, 0.23171685791015625, 0.2312447967529297, 0.23110041809082033, 0.23101849365234375, 0.2310399932861328, 0.231014404296875, 0.23103385925292969, 0.23098471069335938, 0.23129804992675781, 0.23095091247558594, 0.23119769287109376, 0.2312857666015625, 0.23109939575195312, 0.2308915252685547, 0.2313236541748047, 0.23161138916015625, 0.23103897094726564, 0.23182643127441407, 0.23234150695800782, 0.4790743103027344, 0.23154176330566406, 0.23141477966308593, 0.23161036682128905, 0.23167897033691406, 0.23150694274902345, 0.2312376251220703, 0.231583740234375, 0.2312970275878906, 0.2310840301513672, 0.23138304138183594, 0.2310635528564453, 0.23118336486816407, 0.23100006103515625, 0.23120793151855468, 0.23087615966796876, 0.2310133819580078, 0.230898681640625, 0.23100723266601564, 0.23108096313476562, 0.23136668395996093, 0.23115977478027344, 0.2310768585205078, 0.231046142578125, 0.23245619201660156, 0.23092428588867187, 0.2311414337158203, 0.2312303924560547, 0.2311260223388672, 0.2310102996826172, 0.2311925811767578, 0.23104920959472655, 0.23100933837890625, 0.2310194549560547, 0.23117926025390625, 0.2311075897216797, 0.23118540954589845, 0.23110348510742187, 0.23108096313476562, 0.23085977172851563, 0.23155711364746093, 0.23233740234375, 0.23105946350097656, 0.2312796173095703, 0.2310062713623047, 0.23090272521972657, 0.23148646545410156, 0.23091098022460937, 0.2309878387451172, 0.23087312316894532, 0.23152015686035157, 0.2321797180175781, 0.23130316162109374, 0.230898681640625, 0.23095295715332032, 0.23084236145019532, 0.2311014404296875, 0.2311331787109375, 0.23123052978515626, 0.2309252471923828, 0.23105740356445312, 0.23117619323730468, 0.23105740356445312, 0.48152984619140626, 0.2311157684326172, 0.23090380859375, 0.23155302429199218, 0.23127449035644532, 0.23103897094726564, 0.23097958374023436, 0.2311710662841797, 0.23115980529785157, 0.23104103088378905, 0.23108198547363282, 0.23097036743164062, 0.23084544372558594, 0.2313185272216797, 0.23110552978515625, 0.23093862915039062, 0.23098060607910156, 0.23100621032714844, 0.23143014526367187, 0.2317823944091797, 0.23226675415039064, 0.2317117462158203, 0.23127757263183593, 0.23153868103027345, 0.2311751708984375, 0.23130726623535156, 0.23114341735839844, 0.23096524047851563, 0.23087103271484374, 0.23130419921875, 0.2320271301269531, 0.23118643188476562, 0.23128370666503906, 0.2312806396484375, 0.23114854431152343, 0.2313799743652344, 0.23123968505859374, 0.23101542663574218, 
0.2314403839111328, 0.23084031677246095, 0.23116184997558595, 0.23074099731445313, 0.23112396240234376, 0.23095706176757813, 0.23086592102050782, 0.2307573699951172, 0.23168409729003905, 0.23131033325195313, 0.2311393280029297, 0.2310215606689453, 0.23085977172851563, 0.23086285400390624, 0.2309519348144531, 0.2308239288330078, 0.23091813659667967, 0.23077786254882812, 0.2309222412109375, 0.2310697021484375, 0.2309160919189453, 0.23086592102050782, 0.23115367126464845, 0.2308720703125, 0.23090789794921876, 0.48265728759765625, 0.23116595458984374, 0.23137178039550782, 0.2312478790283203, 0.23101849365234375, 0.23090687561035156, 0.23088333129882813, 0.23087309265136718, 0.23085466003417968, 0.23150079345703126, 0.231151611328125, 0.23109120178222656, 0.231077880859375, 0.23160421752929689, 0.23117926025390625, 0.23098675537109375, 0.23116493225097656, 0.2308157501220703, 0.231014404296875, 0.231046142578125, 0.2309580841064453, 0.23116697692871094, 0.23099903869628907, 0.23071437072753906, 0.23106150817871093, 0.23102566528320312, 0.23100422668457032, 0.23102662658691406, 0.23118028259277343, 0.23100518798828126, 0.23101951599121093, 0.2309969940185547, 0.23098880004882813, 0.23096115112304688, 0.23098162841796874, 0.23114035034179686, 0.23111065673828124, 0.23085157775878906, 0.23094886779785156, 0.23084031677246095, 0.23096832275390625, 0.2307747802734375, 0.23147724914550782, 0.23169024658203125, 0.23177731323242187, 0.23133180236816406, 0.23098675537109375, 0.2309550018310547, 0.23120077514648438, 0.23074815368652343, 0.23088742065429688, 0.23084031677246095, 0.23159091186523437, 0.23110963439941407, 0.2308495330810547, 0.23108096313476562, 0.23082188415527344, 0.23110041809082033, 0.23168614196777343, 0.23109939575195312, 0.23103077697753907, 0.2309160919189453, 0.23111378479003905, 0.481429443359375, 0.2311956481933594, 0.2310645751953125, 0.23094989013671874, 0.23127655029296876, 0.23099186706542968, 0.2309416961669922, 0.23101747131347655, 0.23098880004882813, 0.23092941284179688, 0.23106661987304689, 0.23098060607910156, 0.231046142578125, 0.23074508666992188, 0.23084442138671876, 0.2313000946044922, 0.23091404724121095, 0.23080551147460937, 0.23100416564941406, 0.23077171325683593, 0.23084031677246095, 0.23112908935546875, 0.23205580139160156, 0.231077880859375, 0.23106048583984376, 0.2309580841064453, 0.23238552856445313, 0.23106764221191406, 0.2311690216064453, 0.23111065673828124, 0.23110041809082033, 0.23188890075683594, 0.2314956817626953, 0.23113113403320312, 0.23112908935546875, 0.23114341735839844, 0.23109735107421875, 0.23145062255859375, 0.23099288940429688, 0.231088134765625, 0.2312130584716797, 0.2308526153564453, 0.2309345245361328, 0.23095706176757813, 0.23097138977050782, 0.23116082763671875, 0.23089765930175782, 0.23111167907714844, 0.23134310913085937, 0.23081062316894532, 0.23087411499023439, 0.2307010498046875, 0.2309171142578125, 0.23114547729492188, 0.23143423461914062, 0.2308792266845703, 0.23096934509277345, 0.23170559692382814, 0.2311894989013672, 0.2319667205810547, 0.23111680603027343, 0.23170559692382814, 0.23135232543945314]",tokens/s,4.257359816288809,,,,,,,, 
-4bit-awq-gemm-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,tiiuae/falcon-rw-1b,tiiuae/falcon-rw-1b,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - self.load_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 559, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3704, in from_pretrained - config = cls._autoset_attn_implementation( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1490, in _autoset_attn_implementation - config = cls._check_and_enable_sdpa( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1656, in _check_and_enable_sdpa - raise ValueError( -ValueError: FalconForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. 
Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemm-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,Qwen/Qwen-14B,Qwen/Qwen-14B,cuda,0,42,,,True,,,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run - report = scenario.run(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run - self.run_model_loading_tracking(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking - backend.load() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load - self.load_transformers_model() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model - self.load_transformers_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights - self.load_transformers_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained - self.pretrained_model = self.automodel_loader.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 551, in from_pretrained - model_class = get_class_from_dynamic_module( - File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 502, in get_class_from_dynamic_module - final_module = get_cached_module_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 327, in get_cached_module_file - modules_needed = check_imports(resolved_module_file) - File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 182, in check_imports - raise ImportError( -ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. 
Run `pip install transformers_stream_generator` - -",qwen,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemm-sdpa,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,stabilityai/stablelm-2-1_6b,stabilityai/stablelm-2-1_6b,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.1,,0.30.1,,,,1.19.2,,,,0.11.1,,,,,,,,,,,,,,,,,,,,,,,,,,,MB,1448.177664,2449.997824,0.0,1803.55072,1664.521216,s,10,1.3644380798339844,0.13644380798339845,0.0014011159264243525,0.13627196502685546,0.13786112670898437,0.13869882049560547,0.13936897552490235,"[0.13953651428222658, 0.13767497253417968, 0.1349912567138672, 0.13507321166992187, 0.13563778686523437, 0.13482015991210938, 0.13582060241699218, 0.13672332763671874, 0.13708172607421876, 0.13707852172851562]",tokens/s,1876.2302502664566,kWh,1.5975721627921314e-06,8.753909460972716e-07,6.686964809027082e-06,9.159927917916485e-06,tokens/kWh,27947818.180891287,MB,1448.472576,2449.997824,0.0,1803.55072,1763.593728,s,10,81.3460283203125,8.13460283203125,0.011946610722273996,8.1315908203125,8.150488769531249,8.15235205078125,8.153842675781249,"[8.13529541015625, 8.1396748046875, 8.11939111328125, 8.1203115234375, 8.14693603515625, 8.12648193359375, 8.12576123046875, 8.15007470703125, 8.15421533203125, 8.12788623046875]",tokens/s,7.744692801955592,kWh,9.601664840011627e-05,5.262418258723866e-05,0.0003946831618423715,0.0005433239928297265,tokens/kWh,115952.91360479954,,s,629,82.4390829467774,0.13106372487563966,0.01624326295714781,0.1289318389892578,0.12986593322753906,0.13038284606933595,0.2649074096679688,"[0.13380709838867189, 0.1322936248779297, 0.13054360961914063, 0.1302783966064453, 0.12868301391601564, 0.12876185607910157, 0.12865126037597657, 0.12903424072265626, 0.12871475219726564, 0.12893490600585938, 0.1289205780029297, 0.12972032165527345, 0.1293711395263672, 0.12937216186523437, 0.1286614990234375, 0.12999168395996094, 0.12912229919433593, 0.1288099822998047, 0.12900044250488282, 0.12889395141601562, 0.12870246887207032, 0.12895744323730468, 0.12921856689453126, 0.128795654296875, 0.12864614868164062, 0.12892466735839844, 0.1287782440185547, 0.1288970184326172, 0.12859596252441408, 0.13040333557128905, 0.1292042236328125, 0.12866867065429688, 0.12910386657714842, 0.12879667663574218, 0.12910797119140624, 0.12875161743164062, 0.12865228271484375, 0.12910182189941405, 0.12855398559570314, 0.12869017028808594, 0.1285877685546875, 0.12864717102050782, 0.12905984497070314, 0.12876800537109376, 0.1286492156982422, 0.128606201171875, 0.12849868774414064, 0.12877619934082032, 0.12863282775878906, 0.1288140869140625, 0.12869119262695314, 0.12871987915039063, 0.1286246337890625, 0.12838092041015625, 0.12851097106933593, 0.12944383239746093, 0.1286277160644531, 0.1287710723876953, 0.12888677978515625, 0.12879052734375, 0.12854885864257812, 0.1299640350341797, 0.2665809936523437, 0.12871066284179689, 0.12894105529785158, 0.1295984649658203, 0.12898304748535155, 0.12851507568359374, 0.12878950500488281, 0.12854783630371094, 0.12850688171386718, 
0.12838911437988282, 0.12960768127441405, 0.12967936706542968, 0.1298472900390625, 0.1296302032470703, 0.13025894165039062, 0.13039308166503907, 0.1289871368408203, 0.1294192657470703, 0.1304954833984375, 0.12884991455078126, 0.12983807373046874, 0.1289390106201172, 0.12923085021972655, 0.1289871368408203, 0.12885401916503905, 0.12969369506835937, 0.12940800476074218, 0.12860415649414061, 0.12912844848632812, 0.12885093688964844, 0.12888882446289063, 0.1289707489013672, 0.12897279357910157, 0.12949708557128906, 0.1289758758544922, 0.12905471801757812, 0.12891751098632812, 0.1290301513671875, 0.1288406982421875, 0.1292779541015625, 0.12873216247558594, 0.1292216339111328, 0.12959539794921876, 0.12929945373535157, 0.12899020385742188, 0.1289697265625, 0.13126144409179688, 0.12883660888671875, 0.12922367858886719, 0.12980940246582032, 0.12895846557617188, 0.12943463134765626, 0.12903321838378906, 0.1292216339111328, 0.1292656707763672, 0.12897279357910157, 0.1300193328857422, 0.12942233276367188, 0.12876185607910157, 0.12872601318359375, 0.1286103057861328, 0.12886732482910157, 0.128753662109375, 0.2649610290527344, 0.1289195556640625, 0.12843621826171875, 0.12883558654785157, 0.12871475219726564, 0.12869529724121093, 0.12869325256347655, 0.12878950500488281, 0.1287209014892578, 0.12903117370605469, 0.12896153259277343, 0.1297407989501953, 0.12884378051757814, 0.12859085083007812, 0.12871270751953126, 0.1286604766845703, 0.12877926635742187, 0.12865126037597657, 0.12881817626953124, 0.12926361083984375, 0.12863999938964843, 0.1286297607421875, 0.1285550079345703, 0.12893798828125, 0.128932861328125, 0.12907008361816405, 0.12877516174316406, 0.12867584228515624, 0.12991897583007814, 0.1287045135498047, 0.12865843200683594, 0.12990669250488282, 0.1296609344482422, 0.12844236755371094, 0.12855091857910156, 0.12866867065429688, 0.12861337280273438, 0.1286871032714844, 0.12876185607910157, 0.12873420715332032, 0.1288775634765625, 0.12874342346191406, 0.1287720947265625, 0.12879257202148436, 0.1287915496826172, 0.1290198974609375, 0.12967730712890624, 0.129797119140625, 0.12876390075683594, 0.128795654296875, 0.12886834716796874, 0.128932861328125, 0.12865437316894532, 0.12863279724121093, 0.1286420440673828, 0.12882432556152343, 0.12872703552246093, 0.12905267333984374, 0.12885299682617188, 0.1287772216796875, 0.1287188415527344, 0.12861439514160156, 0.1287772216796875, 0.26476953125, 0.12873829650878907, 0.12883148193359376, 0.12851199340820313, 0.12869631958007813, 0.12847923278808593, 0.12853042602539064, 0.12893798828125, 0.12854988098144532, 0.12875263977050783, 0.128505859375, 0.12852224731445314, 0.12876800537109376, 0.12872909545898437, 0.12887347412109376, 0.12867584228515624, 0.12863282775878906, 0.1286860809326172, 0.1286614990234375, 0.12861952209472657, 0.12875570678710938, 0.12855398559570314, 0.12889599609375, 0.12875263977050783, 0.12984013366699218, 0.1295626220703125, 0.12891545104980467, 0.1287720947265625, 0.12863282775878906, 0.12864717102050782, 0.12877311706542968, 0.12869529724121093, 0.12856422424316405, 0.1285201873779297, 0.1286625213623047, 0.1287720947265625, 0.12851199340820313, 0.1287403564453125, 0.1289318389892578, 0.12899635314941407, 0.12849664306640626, 0.12876902770996093, 0.13012684631347657, 0.12892160034179687, 0.1313116149902344, 0.13377740478515626, 0.12898611450195313, 0.12884889221191406, 0.12867481994628907, 0.1287475128173828, 0.1288089599609375, 0.12871168518066406, 0.12876390075683594, 0.12973362731933594, 0.12869223022460938, 0.12912229919433593, 
0.12858781433105468, 0.12859286499023437, 0.128395263671875, 0.12851199340820313, 0.12858982849121095, 0.1285191650390625, 0.12845773315429687, 0.26590206909179687, 0.13000090026855468, 0.1293588409423828, 0.1295380554199219, 0.12994560241699218, 0.12998042297363283, 0.12985958862304686, 0.13036749267578124, 0.12995071411132814, 0.1288765411376953, 0.12879974365234376, 0.12941722106933592, 0.12930047607421874, 0.12891136169433592, 0.12978790283203126, 0.12877619934082032, 0.12878746032714844, 0.12919500732421876, 0.12896255493164063, 0.1289134063720703, 0.1290373077392578, 0.12975308227539062, 0.12882841491699218, 0.1290198974609375, 0.12901478576660155, 0.12898918151855468, 0.12967628479003906, 0.12916940307617186, 0.12887449645996094, 0.12921139526367187, 0.12869631958007813, 0.12922880554199218, 0.12872909545898437, 0.12871168518066406, 0.12892979431152343, 0.129185791015625, 0.12892672729492188, 0.1294254150390625, 0.13041868591308595, 0.12922265625, 0.12881613159179686, 0.12874342346191406, 0.12914994812011718, 0.12993023681640625, 0.12914892578125, 0.12977766418457032, 0.1297592315673828, 0.12924826049804689, 0.12885197448730468, 0.12885606384277343, 0.12906803894042967, 0.12960665893554688, 0.12895333862304686, 0.1296312255859375, 0.12864512634277345, 0.13046885681152343, 0.12909056091308593, 0.13064703369140626, 0.12966400146484375, 0.13024972534179688, 0.12901683044433593, 0.12863385009765624, 0.1289738311767578, 0.2638612365722656, 0.1288478698730469, 0.1288151092529297, 0.12867584228515624, 0.1286604766845703, 0.12864102172851563, 0.12869223022460938, 0.12870553588867187, 0.1286871032714844, 0.12933427429199218, 0.12895846557617188, 0.1287362518310547, 0.12880487060546875, 0.12846080017089845, 0.12952064514160155, 0.12865740966796874, 0.12858982849121095, 0.1286840362548828, 0.128827392578125, 0.12865228271484375, 0.13033267211914062, 0.1298913269042969, 0.12930662536621093, 0.12879769897460938, 0.12924006652832032, 0.12911514282226563, 0.12893696594238283, 0.12896563720703125, 0.1286840362548828, 0.12859596252441408, 0.1286553649902344, 0.12849356079101562, 0.12909567260742189, 0.129185791015625, 0.12871168518066406, 0.12875161743164062, 0.12851199340820313, 0.12922265625, 0.1306798095703125, 0.12952780151367188, 0.129396728515625, 0.12962509155273438, 0.12861952209472657, 0.12871168518066406, 0.1286543426513672, 0.12885708618164063, 0.12871475219726564, 0.12898918151855468, 0.12887551879882814, 0.12875263977050783, 0.1291130828857422, 0.12952268981933593, 0.13002137756347656, 0.12975410461425782, 0.12933427429199218, 0.12950425720214845, 0.1291673583984375, 0.12911001586914062, 0.12872294616699217, 0.12854170227050782, 0.12845362854003906, 0.12869223022460938, 0.12868812561035156, 0.26576177978515625, 0.12924826049804689, 0.12867788696289062, 0.1287782440185547, 0.12968960571289062, 0.12889497375488282, 0.12866969299316405, 0.1288099822998047, 0.12962098693847657, 0.12901580810546875, 0.12879359436035157, 0.12876390075683594, 0.1289564208984375, 0.12874240112304688, 0.1286871032714844, 0.12908851623535156, 0.1289134063720703, 0.12863591003417968, 0.1290577850341797, 0.1289635772705078, 0.12908441162109374, 0.12881715393066406, 0.12893798828125, 0.1303019561767578, 0.12936192321777343, 0.12894105529785158, 0.1289318389892578, 0.129227783203125, 0.12891136169433592, 0.12872294616699217, 0.1287587890625, 0.1295083465576172, 0.1288826904296875, 0.1295636444091797, 0.1291714630126953, 0.12932505798339844, 0.1291663360595703, 0.12864614868164062, 0.12883148193359376, 
0.12890419006347656, 0.12887962341308593, 0.12895846557617188, 0.12895333862304686, 0.12924826049804689, 0.12880793762207032, 0.12882330322265625, 0.12863282775878906, 0.12880076599121093, 0.12866764831542968, 0.1288140869140625, 0.12918885803222657, 0.12923085021972655, 0.12873216247558594, 0.12884378051757814, 0.128822265625, 0.12880793762207032, 0.12889190673828124, 0.12862361145019532, 0.12879872131347656, 0.12901785278320313, 0.12870655822753907, 0.12871168518066406, 0.1288826904296875, 0.26630654907226564, 0.1309654998779297, 0.1296506805419922, 0.1291356201171875, 0.128932861328125, 0.12939263916015625, 0.12916326904296874, 0.1299814453125, 0.1290997772216797, 0.12907008361816405, 0.12940390014648437, 0.12927078247070312, 0.12884991455078126, 0.12983091735839844, 0.13016986083984375, 0.1290373077392578, 0.12927999877929688, 0.129438720703125, 0.13061734008789064, 0.12889190673828124, 0.12984831237792968, 0.13007769775390626, 0.13092965698242187, 0.12898611450195313, 0.1293158416748047, 0.12955955505371095, 0.12935475158691406, 0.1288478698730469, 0.1289697265625, 0.12932403564453124, 0.12959744262695314, 0.12883763122558595, 0.1288212432861328, 0.12967628479003906, 0.12876287841796874, 0.12947251892089845, 0.12944998168945313, 0.12905574035644532, 0.12925234985351564, 0.12954010009765626, 0.12926771545410157, 0.12993536376953124, 0.12876185607910157, 0.12890725708007814, 0.12937216186523437, 0.12992716979980468, 0.13034597778320312, 0.12930764770507813, 0.12888064575195313, 0.12883865356445312, 0.12890316772460939, 0.12986265563964844, 0.12991384887695312, 0.1290188751220703, 0.1290076141357422, 0.12887962341308593, 0.1289758758544922, 0.12898611450195313, 0.1288642578125, 0.12870758056640624, 0.12963226318359375, 0.1289932861328125, 0.12938035583496094, 0.26711550903320314, 0.12889395141601562, 0.12987904357910157, 0.12996607971191407, 0.12963941955566408, 0.12896461486816407, 0.12874444580078126, 0.12914175415039061, 0.129512451171875, 0.12889804077148437, 0.12933938598632813, 0.12918885803222657, 0.1291653137207031, 0.12967526245117186, 0.13058149719238282, 0.12971212768554688, 0.12983705139160157, 0.1312491455078125, 0.12968960571289062, 0.1296855010986328, 0.12951449584960936, 0.12930458068847656, 0.12980633544921874, 0.12941004943847656, 0.12944691467285158, 0.12926669311523437, 0.12979507446289062, 0.12961485290527344, 0.1289553985595703, 0.12896563720703125, 0.12977459716796874, 0.13095936584472656, 0.13045452880859376, 0.13097267150878905, 0.12978483581542968, 0.13107609558105468, 0.13026611328125, 0.1297100830078125, 0.12907212829589843, 0.1287209014892578, 0.12875468444824217, 0.12891647338867188, 0.1297838134765625, 0.1291980743408203, 0.1292410888671875, 0.12913253784179687, 0.12873420715332032, 0.12888986206054687, 0.12877516174316406, 0.12898611450195313, 0.1289144287109375, 0.12875059509277345, 0.12870040893554688, 0.12886015319824218, 0.12891647338867188, 0.12871270751953126, 0.12871372985839843, 0.12924620056152344, 0.12940083312988282, 0.12916120910644532, 0.129939453125, 0.12903526306152344, 0.128827392578125, 0.2668564453125, 0.1297244110107422, 0.129291259765625, 0.12891647338867188, 0.1289871368408203, 0.12860211181640624, 0.1287884826660156, 0.12873420715332032, 0.12900863647460936, 0.12901683044433593, 0.1291663360595703, 0.1291724853515625, 0.12918885803222657, 0.12881715393066406, 0.12899122619628905, 0.1286604766845703, 0.1290997772216797, 0.12963839721679687, 0.12860415649414061, 0.12885299682617188, 0.1286973419189453, 0.128901123046875, 
0.12870758056640624, 0.12876185607910157, 0.12874855041503908, 0.1287393341064453, 0.12990669250488282, 0.13078732299804688, 0.1296609344482422, 0.12924006652832032, 0.12881715393066406, 0.12901580810546875, 0.1292769317626953, 0.1289758758544922, 0.12890316772460939, 0.12894514465332033, 0.12875672912597655, 0.12894720458984374, 0.12874137878417968, 0.12878437805175783, 0.12906803894042967, 0.12887347412109376, 0.12873829650878907, 0.1290496063232422, 0.1295636444091797, 0.12900044250488282, 0.12882432556152343, 0.12890931701660155, 0.12891853332519532, 0.1297643585205078, 0.1288028106689453, 0.12875570678710938, 0.12874137878417968, 0.12874444580078126, 0.12869017028808594, 0.12901274108886718, 0.12943257141113282, 0.12871270751953126, 0.12867071533203125, 0.12885708618164063, 0.12893594360351562, 0.1285867462158203, 0.12889292907714844]",tokens/s,7.629876237294903,,,,,,,, -4bit-awq-gemm-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,EleutherAI/pythia-6.7b,EleutherAI/pythia-6.7b,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.42.3,,0.31.0,,,,1.20.0,,,,0.11.1,,,,,,,,,,,,,,,,,,,,,,,,,,,MB,1333.67808,6189.21984,0.0,5542.772736,5293.671424,s,10,5.586684448242187,0.5586684448242187,0.0019338580518860281,0.557967041015625,0.5590516418457031,0.5617419464111328,0.5638941900634765,"[0.5644322509765625, 0.5579137573242188, 0.5582323608398437, 0.5579798583984374, 0.5582894897460937, 0.5578035888671875, 0.557934814453125, 0.5584537963867188, 0.5579542236328126, 0.5576903076171875]",tokens/s,458.2324317253119,kWh,6.598755781665261e-06,3.6146258023682546e-06,3.063834241189155e-05,4.0851723995925066e-05,tokens/kWh,6266565.396983879,MB,1333.67808,6189.21984,0.0,5542.772736,5479.282176,s,10,326.859033203125,32.685903320312505,0.005888375643590895,32.6845341796875,32.69453828125,32.6956421875,32.6965253125,"[32.69429296875, 32.679224609375, 32.67916015625, 32.6882421875, 32.69674609375, 32.680171875, 32.68275, 32.683013671875, 32.689376953125, 32.6860546875]",tokens/s,1.9274364053095923,kWh,0.0003859057358256829,0.0002115092267264069,0.0017882986127365009,0.0023857135752885904,tokens/kWh,26407.19349236177,,s,629,331.3625651855466,0.5268085297067517,0.06627334687586009,0.518781982421875,0.519246826171875,0.5193795776367187,1.0766735449218752,"[0.5185361938476563, 0.5187901611328125, 0.518645751953125, 0.5193123779296875, 0.5184696044921875, 0.5191024780273438, 0.518635498046875, 0.5193840942382812, 0.5187215576171875, 0.5188474731445313, 0.518709228515625, 0.519257080078125, 0.5185044555664062, 0.5190707397460937, 0.5185781860351563, 0.5189417114257813, 0.5188659057617188, 0.5192376098632813, 0.5188485107421875, 0.5194188842773437, 0.519056396484375, 0.518424560546875, 0.518950927734375, 0.5194536743164062, 0.5195530395507812, 0.5194168090820312, 0.5195950317382813, 0.5193717651367188, 0.519319580078125, 0.5192949829101563, 0.5195980834960937, 0.5190328369140625, 0.5191741333007812, 0.5190369262695312, 0.5188321533203125, 0.5186027221679688, 0.5188935546875, 0.5191608276367188, 0.5192755126953125, 0.519130126953125, 0.5184635009765625, 
0.519088134765625, 0.5189284057617187, 0.5190697021484375, 0.5194506225585938, 0.5187174682617187, 0.5190379638671875, 0.5189365844726562, 0.5185945434570313, 0.519277587890625, 0.51884033203125, 0.518724609375, 0.5184901123046874, 0.5186467895507813, 0.5185494995117188, 0.5188372192382813, 0.5189785766601562, 0.5190338745117188, 0.5186253051757812, 0.5187225341796875, 0.5185259399414063, 0.5186826171875, 1.0767308349609375, 0.518603759765625, 0.5190625, 0.5183805541992188, 0.5184440307617187, 0.5186907958984375, 0.5187993774414063, 0.5184624633789062, 0.5187317504882812, 0.5186785278320313, 0.5184757690429688, 0.518877197265625, 0.518561767578125, 0.5186365356445313, 0.5187368774414063, 0.5185228881835937, 0.518455322265625, 0.5188290405273438, 0.5190287475585937, 0.5186846923828125, 0.5185863647460938, 0.5187174682617187, 0.5186549682617188, 0.5183989868164063, 0.5184993286132813, 0.5187389526367188, 0.5186990356445312, 0.5186856689453125, 0.5186365356445313, 0.518687744140625, 0.5187051391601563, 0.5185955810546875, 0.5186806030273438, 0.5186826171875, 0.518709228515625, 0.5183426513671875, 0.51865087890625, 0.5188116455078124, 0.5186478271484375, 0.5184890747070312, 0.5186887817382813, 0.5187451171875, 0.518697998046875, 0.5185700073242188, 0.5188802490234375, 0.518781982421875, 0.5189662475585938, 0.5188433837890625, 0.5189642333984374, 0.5188710327148438, 0.5188710327148438, 0.5186898193359375, 0.5191925659179687, 0.5188259887695312, 0.5187809448242188, 0.5187225341796875, 0.5192509155273437, 0.5187993774414063, 0.5188997192382813, 0.5185228881835937, 0.5187072143554687, 0.5188116455078124, 0.5190584106445313, 1.07681689453125, 0.518445068359375, 0.5185474853515625, 0.5183795166015625, 0.5183733520507813, 0.5183323974609375, 0.5183467407226563, 0.5184655151367188, 0.5187625122070313, 0.5183334350585938, 0.5183016967773437, 0.5186815795898437, 0.5184327392578125, 0.5188362426757812, 0.5188045043945313, 0.5185730590820312, 0.5185095825195313, 0.5186785278320313, 0.51854541015625, 0.5186232299804687, 0.5184225463867187, 0.5185321044921875, 0.5186078491210937, 0.51862939453125, 0.5188853759765625, 0.5187942504882812, 0.5186447143554688, 0.5191956176757813, 0.5188433837890625, 0.518813720703125, 0.5189417114257813, 0.5186007080078125, 0.5189970092773437, 0.5184686279296875, 0.5187051391601563, 0.5187737426757812, 0.5187123413085938, 0.5185546264648437, 0.518793212890625, 0.5184378662109375, 0.51846142578125, 0.518530029296875, 0.5188843383789062, 0.5189683227539063, 0.5190963134765625, 0.5188731079101563, 0.5191423950195313, 0.5188699951171875, 0.518888427734375, 0.5187901611328125, 0.5192191772460938, 0.5189222412109376, 0.519203857421875, 0.5190103149414063, 0.5189519653320313, 0.5185955810546875, 0.5187174682617187, 0.5186375732421875, 0.5187215576171875, 0.5186795654296875, 0.5185198364257813, 0.5191956176757813, 0.5194281005859375, 1.076896728515625, 0.5189867553710937, 0.5189898071289063, 0.5190768432617188, 0.5188546752929688, 0.5190164184570313, 0.5189293823242187, 0.5191588134765625, 0.5189990234375, 0.5190471801757812, 0.5190215454101562, 0.518962158203125, 0.5189324951171875, 0.51877783203125, 0.5185863647460938, 0.5187440795898437, 0.5187747802734375, 0.5186703491210938, 0.518961181640625, 0.5189631958007812, 0.5189263305664062, 0.5191065673828125, 0.5191270141601563, 0.5187266845703125, 0.5191536865234375, 0.519077880859375, 0.5192693481445313, 0.5188792114257812, 0.5185464477539062, 0.5187072143554687, 0.5185413208007813, 0.5187113037109375, 0.5186324462890625, 
0.518445068359375, 0.5185331420898438, 0.5186416625976562, 0.5187133178710938, 0.5184675903320313, 0.5186703491210938, 0.5187870483398438, 0.5187645263671875, 0.518656005859375, 0.5186129760742187, 0.5190205688476562, 0.5188474731445313, 0.5192632446289063, 0.5188905029296875, 0.5194403686523438, 0.518930419921875, 0.518666259765625, 0.518709228515625, 0.51875634765625, 0.5189447631835937, 0.5187727661132813, 0.5188259887695312, 0.518593505859375, 0.518830078125, 0.5188218994140625, 0.5188679809570312, 0.51879833984375, 0.5195130615234375, 0.5187696533203126, 0.5189273681640625, 1.07702783203125, 0.5185310668945312, 0.5188526000976562, 0.5185443725585938, 0.5186529541015625, 0.518382568359375, 0.519151611328125, 0.5186047973632812, 0.5189119873046875, 0.5184778442382812, 0.5184440307617187, 0.5187522583007812, 0.5190369262695312, 0.5186365356445313, 0.5191597900390625, 0.5186221923828125, 0.518540283203125, 0.518592529296875, 0.5189119873046875, 0.5189498901367188, 0.519520263671875, 0.5188690185546875, 0.51934619140625, 0.51949462890625, 0.5199288330078125, 0.5197547607421875, 0.5190215454101562, 0.519573486328125, 0.51902978515625, 0.5193554077148438, 0.5189078979492188, 0.519035888671875, 0.5191505737304688, 0.5188290405273438, 0.5192693481445313, 0.51888330078125, 0.5190390014648437, 0.519245849609375, 0.5186898193359375, 0.518709228515625, 0.5188157348632813, 0.519352294921875, 0.5192120361328125, 0.5192796020507813, 0.5196503295898437, 0.5190942993164063, 0.5189181518554687, 0.5189027709960937, 0.5193912353515625, 0.518920166015625, 0.5193021240234375, 0.5193011474609375, 0.5194506225585938, 0.519583740234375, 0.5188864135742187, 0.51835595703125, 0.5188598022460937, 0.51863037109375, 0.519320556640625, 0.5186631469726563, 0.51905126953125, 0.51867236328125, 0.5190215454101562, 1.07663671875, 0.5183795166015625, 0.5187102661132813, 0.5184112548828125, 0.51833447265625, 0.518308837890625, 0.5185689697265625, 0.5185751342773437, 0.5183692626953125, 0.5185904541015625, 0.5186160888671875, 0.51839794921875, 0.5187020874023438, 0.5187880859375, 0.5187522583007812, 0.5184368896484375, 0.5183836059570313, 0.5184163818359375, 0.5189284057617187, 0.5189427490234375, 0.51874609375, 0.5188782348632812, 0.5188167724609375, 0.5184901123046874, 0.518456298828125, 0.518635498046875, 0.5184102172851562, 0.5184757690429688, 0.5186948852539063, 0.5186232299804687, 0.5188280029296874, 0.5191874389648438, 0.5189837036132813, 0.5192386474609375, 0.5191004028320313, 0.5192028198242188, 0.5189232788085938, 0.5189160766601563, 0.5194721069335938, 0.5189151000976563, 0.5193482055664063, 0.5185126342773437, 0.5188956298828125, 0.5184440307617187, 0.5189990234375, 0.5186549682617188, 0.5188690185546875, 0.5187522583007812, 0.5186150512695312, 0.5184461059570312, 0.518830078125, 0.5186386108398438, 0.5191874389648438, 0.5188556518554688, 0.518888427734375, 0.51888330078125, 0.5187235717773437, 0.5187593994140625, 0.5187850341796875, 0.518666259765625, 0.518687744140625, 0.5185423583984375, 0.5187799072265625, 1.0766878662109376, 0.5186375732421875, 0.51905126953125, 0.5188710327148438, 0.5187266845703125, 0.5190973510742187, 0.518908935546875, 0.519141357421875, 0.518550537109375, 0.51863037109375, 0.5190062255859375, 0.5185525512695313, 0.51875634765625, 0.5187799072265625, 0.5186754760742187, 0.5188382568359375, 0.5187758178710937, 0.5184686279296875, 0.5186815795898437, 0.5187430419921875, 0.5186959228515625, 0.5186918334960937, 0.518940673828125, 0.5187666015625, 0.5187389526367188, 0.5188382568359375, 
0.5186969604492188, 0.519372802734375, 0.518697998046875, 0.5186570434570312, 0.518687744140625, 0.5187123413085938, 0.5190942993164063, 0.5186478271484375, 0.5185515747070313, 0.5185474853515625, 0.5187676391601562, 0.5185812377929687, 0.5186754760742187, 0.5185700073242188, 0.5187891235351563, 0.5189447631835937, 0.5187184448242188, 0.5186047973632812, 0.519151611328125, 0.5186170654296876, 0.519161865234375, 0.5193031616210938, 0.51903076171875, 0.5185638427734375, 0.518667236328125, 0.5188187866210937, 0.5187286987304688, 0.5185003662109375, 0.5187952880859374, 0.518740966796875, 0.5187061767578125, 0.51859765625, 0.5187737426757812, 0.518634521484375, 0.5188270263671875, 0.5187686157226562, 0.5187706909179688, 1.077518310546875, 0.519056396484375, 0.5192898559570313, 0.518782958984375, 0.5186785278320313, 0.5184204711914062, 0.518846435546875, 0.5187430419921875, 0.5183098754882812, 0.5186918334960937, 0.5189222412109376, 0.5187593994140625, 0.5184962768554687, 0.5186631469726563, 0.5185730590820312, 0.5183323974609375, 0.5187102661132813, 0.518709228515625, 0.5189539794921875, 0.5187809448242188, 0.5188690185546875, 0.51852392578125, 0.5184266357421875, 0.5183641357421875, 0.5185474853515625, 0.5188229370117188, 0.5187440795898437, 0.518898681640625, 0.5188075561523438, 0.51873486328125, 0.5186150512695312, 0.5191997680664062, 0.5190574340820312, 0.5186181030273438, 0.5190830078125, 0.5190287475585937, 0.5187225341796875, 0.5189335327148438, 0.5190482177734375, 0.5184522094726562, 0.5187962646484375, 0.51858740234375, 0.518666259765625, 0.5187286987304688, 0.518603759765625, 0.51892431640625, 0.51871435546875, 0.5185014038085938, 0.5187501831054687, 0.5184440307617187, 0.5187451171875, 0.5186990356445312, 0.5186201782226563, 0.5188607788085937, 0.518729736328125, 0.5189560546875, 0.51917822265625, 0.5189519653320313, 0.5190205688476562, 0.5188720703125, 0.51892431640625, 0.51957861328125, 0.5188966674804687, 1.076885498046875, 0.5183836059570313, 0.5186785278320313, 0.5183488159179688, 0.5187010498046875, 0.5182863159179687, 0.5183037719726562, 0.51867138671875, 0.518687744140625, 0.5185167236328125, 0.5187676391601562, 0.5192069091796875, 0.5190737915039062, 0.5186611328125, 0.5192294311523438, 0.51938818359375, 0.5191044921875, 0.5192960205078125, 0.5188782348632812, 0.519246826171875, 0.5190031127929687, 0.51922021484375, 0.5191762084960938, 0.5192765502929687, 0.519278564453125, 0.519098388671875, 0.5190123291015625, 0.5186641845703125, 0.518856689453125, 0.5190707397460937, 0.5187973022460938, 0.518856689453125, 0.5186119384765625, 0.5186283569335938, 0.5187225341796875, 0.5189908447265625, 0.518697998046875, 0.5184389038085937, 0.5185259399414063, 0.51919873046875, 0.5189703979492187, 0.519035888671875, 0.5190819702148437, 0.5187225341796875, 0.519246826171875, 0.5188905029296875, 0.518962158203125, 0.5187327880859375, 0.5190523071289063, 0.5186856689453125, 0.5186232299804687, 0.5190911865234376, 0.5190225830078125, 0.5187317504882812, 0.5193215942382813, 0.5191966552734375, 0.5187368774414063, 0.5185955810546875, 0.5188178100585937, 0.5188208618164063, 0.518950927734375, 0.5186027221679688, 0.5190328369140625, 1.076612060546875, 0.5185167236328125, 0.5186959228515625, 0.5188966674804687, 0.5188515625, 0.5189232788085938, 0.5192007446289062, 0.5189017333984375, 0.5191997680664062, 0.5190430908203125, 0.5188444213867187, 0.5187010498046875, 0.5188218994140625, 0.51884033203125, 0.5189693603515625, 0.5187215576171875, 0.5187696533203126, 0.5189519653320313, 0.5188106079101562, 
0.5188607788085937, 0.5188280029296874, 0.5186395874023437, 0.5188167724609375, 0.519056396484375, 0.5189109497070312, 0.5193502807617187, 0.5190021362304688, 0.5187338256835937, 0.5190707397460937, 0.5186467895507813, 0.5187655639648437, 0.51856689453125, 0.519203857421875, 0.5186898193359375, 0.5186150512695312, 0.51852392578125, 0.5187666015625, 0.5183876953125, 0.5183733520507813, 0.5182986450195313, 0.518972412109375, 0.5184624633789062, 0.5188782348632812, 0.5185863647460938, 0.5192232666015625, 0.51877685546875, 0.519309326171875, 0.5186734008789062, 0.5190625, 0.5192662963867187, 0.5189734497070313, 0.5185115966796875, 0.5190062255859375, 0.5187492065429687, 0.51869287109375, 0.5187256469726562, 0.518729736328125, 0.5186365356445313, 0.5189970092773437, 0.5185853271484375, 0.5187051391601563, 0.5185106201171875, 0.5193687133789062]",tokens/s,1.898222871517761,,,,,,,, -4bit-awq-gemm-sdpa,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,01-ai/Yi-34B,,cuda,0,42,,,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.0,217063f5c507ed7cc255df7e1f64c4333a0b4dfe,4.40.2,,0.30.1,,,,1.19.2,,,,0.10.0,,,,,,,,,,,,,,,,,,,,,,,,,,,MB,1721.061376,22129.672192,0.0,21483.225088,20799.036928,s,10,27.921223388671876,2.7921223388671876,0.002873024218980136,2.791891357421875,2.7956688476562497,2.7963155029296876,2.7968328271484375,"[2.793120361328125, 2.796962158203125, 2.790662353515625, 2.790180419921875, 2.7901220703125, 2.787002685546875, 2.79004345703125, 2.793321533203125, 2.795525146484375, 2.794283203125]",tokens/s,91.68652692484228,kWh,3.2928969429598913e-05,1.8046407099463975e-05,0.00015823184880759956,0.00020920722533666246,tokens/kWh,1223667.1060860218,MB,1726.52544,22129.672192,0.0,21483.225088,20902.142976,s,10,1662.0463125,166.20463124999998,0.013890741352160448,166.199171875,166.2239140625,166.22626953125,166.22815390625,"[166.211921875, 166.223390625, 166.191390625, 166.191359375, 166.191640625, 166.203515625, 166.194828125, 166.2176875, 166.191953125, 166.228625]",tokens/s,0.3790508093919315,kWh,0.0019620591764814327,0.001075380349219922,0.009375421333664404,0.012412860859365755,tokens/kWh,5075.381148131151,,s,629,1684.5351452636726,2.678116288177539,0.33164961442268687,2.637888427734375,2.63990244140625,2.640667578125,5.42840712890625,"[2.638138427734375, 2.638636962890625, 2.638865478515625, 2.6390283203125, 2.6389248046875, 2.63697509765625, 2.6374482421875, 2.637588623046875, 2.6383994140625, 2.637781982421875, 2.63811181640625, 2.639296630859375, 2.6380380859375, 2.637068359375, 2.638099365234375, 2.637255615234375, 2.637476806640625, 2.63743798828125, 2.637467529296875, 2.63781787109375, 2.637115478515625, 2.637315185546875, 2.63943896484375, 2.639993896484375, 2.6398291015625, 2.63933349609375, 2.63897802734375, 2.639859619140625, 2.63980126953125, 2.639171630859375, 2.637111328125, 2.63720556640625, 2.636434326171875, 2.63687158203125, 2.63707958984375, 2.639266845703125, 2.63845263671875, 2.637464599609375, 2.638060546875, 2.64020263671875, 2.638950439453125, 2.63784033203125, 2.63769091796875, 2.6381884765625, 2.639057861328125, 2.637621337890625, 
2.638141357421875, 2.63823876953125, 2.639244384765625, 2.63790283203125, 2.6387138671875, 2.6390537109375, 2.638361572265625, 2.637244384765625, 2.639206298828125, 2.638740478515625, 2.639088623046875, 2.638509033203125, 2.637013916015625, 2.637980712890625, 2.637846435546875, 2.638864501953125, 5.43535302734375, 2.63853662109375, 2.63802978515625, 2.63697607421875, 2.637854736328125, 2.638664794921875, 2.63739794921875, 2.637233154296875, 2.637539306640625, 2.637358154296875, 2.637158447265625, 2.637360107421875, 2.63921875, 2.63846923828125, 2.638044189453125, 2.639015869140625, 2.636918701171875, 2.637613037109375, 2.637201416015625, 2.63785888671875, 2.6367763671875, 2.637592529296875, 2.63693310546875, 2.6386318359375, 2.63771337890625, 2.638298095703125, 2.637001708984375, 2.6378701171875, 2.6421064453125, 2.6414111328125, 2.641197021484375, 2.64100146484375, 2.63889111328125, 2.6406533203125, 2.6406552734375, 2.637020263671875, 2.637322265625, 2.63722900390625, 2.63769091796875, 2.6369853515625, 2.63765185546875, 2.63745751953125, 2.638650390625, 2.638825439453125, 2.637306884765625, 2.64330859375, 2.641152099609375, 2.6409931640625, 2.63775244140625, 2.639099853515625, 2.638685302734375, 2.6388388671875, 2.637993896484375, 2.639129638671875, 2.638023681640625, 2.638602294921875, 2.63790283203125, 2.63815380859375, 2.637550537109375, 2.63849169921875, 2.639201171875, 2.6400400390625, 2.638790771484375, 5.4299453125, 2.63796826171875, 2.638594970703125, 2.637592529296875, 2.6385439453125, 2.637388916015625, 2.638464111328125, 2.637737060546875, 2.63819775390625, 2.63681005859375, 2.637154296875, 2.63805224609375, 2.638739501953125, 2.63828173828125, 2.638024658203125, 2.637736083984375, 2.637816650390625, 2.63781884765625, 2.63653466796875, 2.637263916015625, 2.637474853515625, 2.63747998046875, 2.636569580078125, 2.64140478515625, 2.637125732421875, 2.638499755859375, 2.6369833984375, 2.637737060546875, 2.636338134765625, 2.63744921875, 2.637201416015625, 2.638740478515625, 2.638299072265625, 2.63929150390625, 2.637087646484375, 2.637667236328125, 2.6377021484375, 2.636683349609375, 2.637559814453125, 2.637581298828125, 2.6383564453125, 2.636505126953125, 2.63706005859375, 2.636032958984375, 2.6379765625, 2.637094970703125, 2.63819775390625, 2.64007568359375, 2.6376611328125, 2.6375966796875, 2.6389228515625, 2.63778515625, 2.639754150390625, 2.6381884765625, 2.63853466796875, 2.63933642578125, 2.639509521484375, 2.639793212890625, 2.640154541015625, 2.63959033203125, 2.636541015625, 2.6371533203125, 2.63670068359375, 5.43085791015625, 2.64038818359375, 2.640405517578125, 2.63921875, 2.638604248046875, 2.638392333984375, 2.63703857421875, 2.638612548828125, 2.638138427734375, 2.637631591796875, 2.637518798828125, 2.636412841796875, 2.639814697265625, 2.63709912109375, 2.638194580078125, 2.636851318359375, 2.637906982421875, 2.636904541015625, 2.63757421875, 2.637557861328125, 2.637801513671875, 2.637253662109375, 2.638193603515625, 2.63731396484375, 2.63754345703125, 2.637656005859375, 2.638063720703125, 2.6375537109375, 2.63655517578125, 2.636990478515625, 2.6373427734375, 2.638109619140625, 2.637427734375, 2.637330322265625, 2.63708154296875, 2.63813232421875, 2.636632080078125, 2.638185546875, 2.636789794921875, 2.6382294921875, 2.636559326171875, 2.63756494140625, 2.6364814453125, 2.637990966796875, 2.64270947265625, 2.63874755859375, 2.640064453125, 2.638127197265625, 2.637509521484375, 2.637388916015625, 2.637737060546875, 2.63737353515625, 2.639585205078125, 
2.637530029296875, 2.638297119140625, 2.63809423828125, 2.637974609375, 2.63773583984375, 2.637834228515625, 2.63741552734375, 2.637762451171875, 2.637958251953125, 2.63785791015625, 5.428443359375, 2.63807275390625, 2.6370693359375, 2.638107666015625, 2.637665283203125, 2.63819677734375, 2.6379326171875, 2.63821923828125, 2.6370078125, 2.63802685546875, 2.638180419921875, 2.63954443359375, 2.6387373046875, 2.638023681640625, 2.636853271484375, 2.637129638671875, 2.637197265625, 2.636950439453125, 2.63758544921875, 2.638212158203125, 2.63828076171875, 2.6365849609375, 2.6370908203125, 2.638017578125, 2.63769384765625, 2.636833740234375, 2.638193603515625, 2.638341064453125, 2.640734130859375, 2.63990576171875, 2.639550537109375, 2.638340087890625, 2.63885107421875, 2.63923095703125, 2.6383955078125, 2.636854248046875, 2.636768310546875, 2.6369443359375, 2.637570068359375, 2.637836181640625, 2.63730078125, 2.63779541015625, 2.63750244140625, 2.637592529296875, 2.6366474609375, 2.638162841796875, 2.636866455078125, 2.6375146484375, 2.636231689453125, 2.6370693359375, 2.637305908203125, 2.63849169921875, 2.636894287109375, 2.636900390625, 2.637894775390625, 2.637490234375, 2.6377666015625, 2.64785107421875, 2.6376591796875, 2.637158447265625, 2.637832275390625, 2.63720849609375, 2.639467529296875, 5.425724609375, 2.637199462890625, 2.63809130859375, 2.638032958984375, 2.637854736328125, 2.638718017578125, 2.63857861328125, 2.637035400390625, 2.63773583984375, 2.638855224609375, 2.63739892578125, 2.638341064453125, 2.63885205078125, 2.639034423828125, 2.63807177734375, 2.637761474609375, 2.639326171875, 2.639801513671875, 2.639014892578125, 2.639296630859375, 2.637675537109375, 2.63769287109375, 2.637137939453125, 2.637119384765625, 2.6383955078125, 2.63736328125, 2.637094970703125, 2.637177734375, 2.6371953125, 2.637066162109375, 2.637592529296875, 2.639177734375, 2.637533203125, 2.6364169921875, 2.636937255859375, 2.6390283203125, 2.63927294921875, 2.63655126953125, 2.64357373046875, 2.637197265625, 2.637576171875, 2.638668701171875, 2.639214599609375, 2.639289306640625, 2.639532958984375, 2.638011474609375, 2.63690234375, 2.63817626953125, 2.636898193359375, 2.637287353515625, 2.637656982421875, 2.637981689453125, 2.636938232421875, 2.63751171875, 2.6373232421875, 2.63774609375, 2.637768798828125, 2.63883056640625, 2.639134765625, 2.641203125, 2.639278076171875, 2.638201904296875, 2.637488037109375, 5.42831396484375, 2.637304931640625, 2.638138427734375, 2.636865478515625, 2.639958984375, 2.63714404296875, 2.638241943359375, 2.63680810546875, 2.638630859375, 2.638310302734375, 2.640628662109375, 2.639406982421875, 2.63910302734375, 2.6390712890625, 2.640678955078125, 2.640310302734375, 2.63718701171875, 2.637646728515625, 2.637350830078125, 2.638884765625, 2.6375947265625, 2.637400146484375, 2.6368818359375, 2.642212890625, 2.637052978515625, 2.637252685546875, 2.636802001953125, 2.64029296875, 2.6377216796875, 2.6375556640625, 2.6366923828125, 2.63757421875, 2.638635009765625, 2.637696044921875, 2.637308837890625, 2.637477783203125, 2.637402099609375, 2.6365869140625, 2.637488037109375, 2.638253173828125, 2.6386328125, 2.637402099609375, 2.63834716796875, 2.6377451171875, 2.637392822265625, 2.636634033203125, 2.637593505859375, 2.637182861328125, 2.637476806640625, 2.6378896484375, 2.637464599609375, 2.639731689453125, 2.637717529296875, 2.63756591796875, 2.638674072265625, 2.63798974609375, 2.638200927734375, 2.63733251953125, 2.63752197265625, 2.63793359375, 2.636739501953125, 
2.63832470703125, 2.637581298828125, 5.43222705078125, 2.638201904296875, 2.63801025390625, 2.639353759765625, 2.63851513671875, 2.637308837890625, 2.637641845703125, 2.6370263671875, 2.636823486328125, 2.638310302734375, 2.637739990234375, 2.636812255859375, 2.637005859375, 2.63739794921875, 2.637341796875, 2.636630126953125, 2.64057861328125, 2.638487548828125, 2.6377880859375, 2.63784033203125, 2.637433837890625, 2.638781494140625, 2.638268310546875, 2.63773486328125, 2.63734375, 2.64085302734375, 2.637772705078125, 2.63678369140625, 2.636781494140625, 2.637518798828125, 2.637106201171875, 2.636708984375, 2.63695166015625, 2.63906103515625, 2.6390087890625, 2.639414306640625, 2.63884912109375, 2.638530517578125, 2.639602783203125, 2.639301513671875, 2.638671875, 2.6394306640625, 2.640407470703125, 2.63750244140625, 2.640359375, 2.640257080078125, 2.641056884765625, 2.639901611328125, 2.64020263671875, 2.640825439453125, 2.637892578125, 2.63866357421875, 2.63870166015625, 2.638103515625, 2.641052734375, 2.639697998046875, 2.639138916015625, 2.63690234375, 2.63733447265625, 2.637642822265625, 2.63709912109375, 2.6370068359375, 2.638327880859375, 5.4337607421875, 2.63835546875, 2.638138427734375, 2.637888427734375, 2.63785986328125, 2.63816796875, 2.637724609375, 2.63817431640625, 2.63734375, 2.637358154296875, 2.637413330078125, 2.63850390625, 2.63788134765625, 2.638455810546875, 2.63848046875, 2.637498291015625, 2.637077392578125, 2.640005126953125, 2.638171142578125, 2.63721875, 2.638088134765625, 2.637340576171875, 2.637073486328125, 2.637824951171875, 2.63834619140625, 2.63819580078125, 2.63836474609375, 2.63822021484375, 2.637216796875, 2.63757421875, 2.63648046875, 2.6365439453125, 2.63712255859375, 2.63768994140625, 2.636919921875, 2.637600830078125, 2.637790283203125, 2.63785986328125, 2.636936279296875, 2.637821044921875, 2.637937744140625, 2.638752685546875, 2.63861767578125, 2.637978515625, 2.63923291015625, 2.639403076171875, 2.63928515625, 2.63895751953125, 2.63758447265625, 2.637350830078125, 2.63724853515625, 2.6378095703125, 2.638350341796875, 2.637401123046875, 2.636908447265625, 2.63790478515625, 2.638477294921875, 2.638087158203125, 2.639475830078125, 2.639469482421875, 2.640037841796875, 2.637907958984375, 2.636857421875, 5.4322666015625, 2.640575439453125, 2.641816650390625, 2.640384033203125, 2.639365234375, 2.639097900390625, 2.6402724609375, 2.638215087890625, 2.638160888671875, 2.637603759765625, 2.637083740234375, 2.6377421875, 2.637223876953125, 2.6389013671875, 2.638928955078125, 2.637473876953125, 2.637214599609375, 2.636859375, 2.637595703125, 2.63667822265625, 2.6370498046875, 2.638342041015625, 2.63901904296875, 2.637927490234375, 2.63882958984375, 2.640554931640625, 2.64306884765625, 2.639297607421875, 2.6393896484375, 2.64045166015625, 2.63986376953125, 2.639677490234375, 2.64067578125, 2.64060009765625, 2.640331787109375, 2.640183349609375, 2.639626220703125, 2.640194580078125, 2.63760498046875, 2.639280029296875, 2.6397255859375, 2.641039306640625, 2.6397880859375, 2.6377646484375, 2.639326171875, 2.63863916015625, 2.636462158203125, 2.637134765625, 2.6374892578125, 2.637487060546875, 2.63678466796875, 2.63695361328125, 2.636927978515625, 2.636856201171875, 2.63660546875, 2.637761474609375, 2.637675537109375, 2.6367119140625, 2.637622314453125, 2.6384404296875, 2.637370361328125, 2.63602880859375, 2.636900390625]",tokens/s,0.37339678057090686,,,,,,main,False,False 
-4bit-awq-gemm-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,EleutherAI/gpt-neo-1.3B,EleutherAI/gpt-neo-1.3B,cuda,0,42,,,True,,,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run - report = scenario.run(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run - self.run_model_loading_tracking(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking - backend.load() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load - self.load_transformers_model() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model - self.load_transformers_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights - self.load_transformers_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained - self.pretrained_model = self.automodel_loader.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3826, in from_pretrained - config = cls._autoset_attn_implementation( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1565, in _autoset_attn_implementation - config = cls._check_and_enable_sdpa( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1731, in _check_and_enable_sdpa - raise ValueError( -ValueError: GPTNeoForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. 
Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` - -",gpt_neo,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemm-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,tiiuae/falcon-7b,tiiuae/falcon-7b,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - self.load_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 559, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3704, in from_pretrained - config = cls._autoset_attn_implementation( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1490, in _autoset_attn_implementation - config = cls._check_and_enable_sdpa( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1656, in _check_and_enable_sdpa - raise ValueError( -ValueError: FalconForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. 
Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemm-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,huggyllama/llama-65b,huggyllama/llama-65b,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - self.load_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3904, in from_pretrained - dispatch_model(model, **device_map_kwargs) - File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 489, in dispatch_model - model.to(device) - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 2796, in to - return super().to(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1173, in to - return self._apply(convert) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 779, in _apply - module._apply(fn) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 779, in _apply - module._apply(fn) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 779, in _apply - module._apply(fn) - [Previous line repeated 2 more times] - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 853, in _apply - self._buffers[key] = fn(buf) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1159, in convert - return t.to( -torch.cuda.OutOfMemoryError: CUDA out of memory. Tried to allocate 86.00 MiB. 
GPU - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemm-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,1,1,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 304, in hf_raise_for_status - response.raise_for_status() - File ""/usr/local/lib/python3.10/dist-packages/requests/models.py"", line 1024, in raise_for_status - raise HTTPError(http_error_msg, response=self) -requests.exceptions.HTTPError: 404 Client Error: Not Found for url: https://huggingface.co/1/resolve/main/config.json - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1221, in hf_hub_download - return _hf_hub_download_to_cache_dir( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1325, in _hf_hub_download_to_cache_dir - _raise_on_head_call_error(head_call_error, force_download, local_files_only) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1823, in _raise_on_head_call_error - raise head_call_error - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1722, in _get_metadata_or_catch_error - metadata = get_hf_file_metadata(url=url, proxies=proxies, timeout=etag_timeout, headers=headers) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1645, in get_hf_file_metadata - r = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 372, in _request_wrapper - response = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 396, in _request_wrapper - hf_raise_for_status(response) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status - raise RepositoryNotFoundError(message, response) 
from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-669495b3-2dbd359a484a8b0753338cd2;6ef125c4-ed05-4db1-8b9e-6083dc65104f) - -Repository Not Found for url: https://huggingface.co/1/resolve/main/config.json. -Please make sure you specified the correct `repo_id` and `repo_type`. -If you are trying to access a private or gated repo, make sure you are authenticated. - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 425, in cached_file - raise EnvironmentError( -OSError: 1 is not a local folder and is not a valid model identifier listed on 'https://huggingface.co/models' -If this is a private repository, make sure to pass a token having permission to this repo either by logging in with `huggingface-cli login` or by passing `token=<your_token>` - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemm-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,internlm/internlm-20b,internlm/internlm-20b,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File 
""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - self.load_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 559, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3710, in from_pretrained - model = cls(config, *model_args, **model_kwargs) - File ""/root/.cache/huggingface/modules/transformers_modules/internlm/internlm-20b/80729bcf52fbc4553d965926b27304ac5e156d98/modeling_internlm.py"", line 906, in __init__ - self.model = InternLMModel(config) - File ""/root/.cache/huggingface/modules/transformers_modules/internlm/internlm-20b/80729bcf52fbc4553d965926b27304ac5e156d98/modeling_internlm.py"", line 729, in __init__ - self.layers = nn.ModuleList([InternLMDecoderLayer(config) for _ in range(config.num_hidden_layers)]) - File ""/root/.cache/huggingface/modules/transformers_modules/internlm/internlm-20b/80729bcf52fbc4553d965926b27304ac5e156d98/modeling_internlm.py"", line 729, in - self.layers = nn.ModuleList([InternLMDecoderLayer(config) for _ in range(config.num_hidden_layers)]) - File ""/root/.cache/huggingface/modules/transformers_modules/internlm/internlm-20b/80729bcf52fbc4553d965926b27304ac5e156d98/modeling_internlm.py"", line 545, in __init__ - self.self_attn = INTERNLM_ATTENTION_CLASSES[config.attn_implementation](config=config) -KeyError: 'sdpa' - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemm-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,internlm/internlm2-20b,internlm/internlm2-20b,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.217-205.860.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.2,,0.31.0,,,,1.20.0,,,,0.11.1,,,,,,,,,,,,,,,,,,,,,,,,,,,MB,3088.633856,14093.385728,0.0,13446.938624,13175.804928,s,10,15.872446411132811,1.5872446411132812,0.001977784195727725,1.5869342651367186,1.5899507080078126,1.5901222045898438,1.5902594018554688,"[1.58719189453125, 1.584764892578125, 1.5848123779296874, 1.5860927734375, 1.5866766357421875, 1.5853348388671875, 1.590293701171875, 1.58790966796875, 1.58991259765625, 
1.58945703125]",tokens/s,161.28578630477753,kWh,1.8717543135086694e-05,1.0257253674026287e-05,9.112487845540085e-05,0.00012009967526451382,tokens/kWh,2131562.7992846123,MB,3091.492864,14093.385728,0.0,13446.938624,13300.586496,s,10,928.4395625000001,92.84395624999999,0.007570288473903254,92.84323828125,92.8531703125,92.8533390625,92.8534740625,"[92.8279765625, 92.8535078125, 92.8414140625, 92.8458828125, 92.8531328125, 92.853015625, 92.843109375, 92.8433671875, 92.8399140625, 92.8382421875]",tokens/s,0.6785579002079632,kWh,0.0010960241793427203,0.00060071625912522,0.005404193295573401,0.007100933734041341,tokens/kWh,8872.072654048685,,s,629,941.2515487060549,1.4964253556535052,0.1885841806933767,1.4736722412109375,1.4742849609375,1.4745136962890626,3.0601340332031253,"[1.4728734130859376, 1.473666015625, 1.473407958984375, 1.47373974609375, 1.4734244384765625, 1.47390771484375, 1.47325244140625, 1.4737960205078124, 1.4734847412109375, 1.4734468994140626, 1.473364013671875, 1.47363330078125, 1.4732943115234376, 1.47340087890625, 1.4733291015625, 1.47338134765625, 1.473206298828125, 1.473244140625, 1.4738125, 1.473511474609375, 1.4734613037109374, 1.4737889404296876, 1.473449951171875, 1.473580078125, 1.473765380859375, 1.473459228515625, 1.473560546875, 1.47323388671875, 1.474029541015625, 1.4734468994140626, 1.473263671875, 1.4734326171875, 1.47308642578125, 1.4729144287109375, 1.4731171875, 1.47327490234375, 1.473290283203125, 1.4730946044921875, 1.4730894775390626, 1.4733568115234374, 1.4730352783203124, 1.473344482421875, 1.4731263427734376, 1.4731683349609375, 1.4734151611328126, 1.4735421142578125, 1.473369140625, 1.4731141357421875, 1.4733404541015624, 1.47325439453125, 1.4731715087890624, 1.47344384765625, 1.4731990966796875, 1.473396728515625, 1.473218505859375, 1.4735196533203125, 1.4733302001953126, 1.47679638671875, 1.473838134765625, 1.4735543212890625, 1.4735584716796875, 1.473474609375, 3.058888671875, 1.4736353759765626, 1.4734478759765626, 1.47342333984375, 1.4732247314453124, 1.4741309814453125, 1.4735350341796876, 1.4742303466796876, 1.474051025390625, 1.4743828125, 1.4741749267578126, 1.4742200927734375, 1.4739588623046875, 1.4743316650390625, 1.4739302978515625, 1.4737408447265625, 1.474087890625, 1.47405517578125, 1.4743592529296874, 1.4743214111328125, 1.474566162109375, 1.473896484375, 1.47449853515625, 1.4739251708984376, 1.47448828125, 1.473300537109375, 1.4734755859375, 1.4742425537109376, 1.4745487060546876, 1.4744595947265624, 1.47428662109375, 1.4733526611328125, 1.4731519775390625, 1.47367529296875, 1.4740152587890625, 1.4736251220703125, 1.473764404296875, 1.4738944091796875, 1.4733157958984375, 1.473659912109375, 1.4736898193359376, 1.47346826171875, 1.4733157958984375, 1.4738472900390625, 1.4736722412109375, 1.4738759765625, 1.4739302978515625, 1.473797119140625, 1.4737530517578126, 1.47359130859375, 1.4737294921875, 1.4736865234375, 1.4733988037109376, 1.47375, 1.473734619140625, 1.4735994873046876, 1.4738953857421875, 1.47376123046875, 1.4736036376953126, 1.473574951171875, 1.474008056640625, 1.473680419921875, 1.473565673828125, 3.06031005859375, 1.4730966796875, 1.4736650390625, 1.4730096435546876, 1.473244140625, 1.4731519775390625, 1.4736844482421876, 1.4732667236328125, 1.4738687744140626, 1.4742548828125, 1.4740458984375, 1.473471435546875, 1.473302490234375, 1.4735523681640625, 1.473406982421875, 1.473101806640625, 1.47376953125, 1.4739149169921875, 1.4731151123046875, 1.4732298583984376, 1.4734039306640625, 1.4731314697265625, 1.473111083984375, 
1.4737376708984375, 1.4738216552734376, 1.4737899169921875, 1.4738800048828125, 1.4739691162109374, 1.4734581298828124, 1.47338134765625, 1.4734776611328124, 1.47355029296875, 1.473333251953125, 1.4734776611328124, 1.4733946533203126, 1.4732882080078125, 1.473912841796875, 1.473892333984375, 1.474216064453125, 1.4738175048828126, 1.47407666015625, 1.4737275390625, 1.473375244140625, 1.4743492431640626, 1.4744185791015625, 1.4742845458984375, 1.4742108154296876, 1.474523193359375, 1.4741842041015625, 1.47373876953125, 1.4740675048828125, 1.474164794921875, 1.474018310546875, 1.473081298828125, 1.474177001953125, 1.4735738525390625, 1.47327587890625, 1.4737294921875, 1.4737100830078125, 1.4734151611328126, 1.4735892333984375, 1.47363330078125, 1.473797119140625, 3.0603828125, 1.4735103759765624, 1.4731171875, 1.4736036376953126, 1.4737879638671876, 1.4736036376953126, 1.47382470703125, 1.4734530029296875, 1.473491943359375, 1.4732840576171875, 1.4735810546875, 1.4739404296875, 1.4733045654296875, 1.4736578369140625, 1.4735308837890626, 1.4735565185546875, 1.4735206298828125, 1.47350830078125, 1.4735667724609376, 1.473364013671875, 1.4735780029296874, 1.473996826171875, 1.4732401123046874, 1.4733731689453125, 1.4740458984375, 1.473659912109375, 1.4739732666015626, 1.4740029296875, 1.47403369140625, 1.4737161865234376, 1.474050048828125, 1.474302978515625, 1.47325439453125, 1.473807373046875, 1.474555908203125, 1.474566162109375, 1.4731663818359375, 1.473481689453125, 1.4736650390625, 1.473850341796875, 1.4740213623046876, 1.4742425537109376, 1.4736527099609376, 1.4736947021484375, 1.473903564453125, 1.4739005126953124, 1.473833984375, 1.4741944580078126, 1.473732666015625, 1.4733824462890626, 1.473692626953125, 1.474039794921875, 1.4735994873046876, 1.4731766357421876, 1.4735155029296876, 1.473933349609375, 1.4738759765625, 1.473734619140625, 1.4740357666015624, 1.47370703125, 1.4738463134765625, 1.4742486572265625, 1.4741298828125, 3.061118896484375, 1.473859619140625, 1.4736097412109375, 1.4739149169921875, 1.4740848388671874, 1.474361328125, 1.4743634033203126, 1.4743951416015626, 1.473876953125, 1.4742210693359374, 1.4737510986328124, 1.473217529296875, 1.4732984619140626, 1.473310791015625, 1.473673095703125, 1.4737142333984374, 1.4734090576171874, 1.4737091064453125, 1.474240478515625, 1.4736937255859375, 1.4738544921875, 1.4743326416015625, 1.4735289306640624, 1.4736065673828125, 1.4740234375, 1.4740274658203125, 1.4741329345703125, 1.473870849609375, 1.4740899658203126, 1.473397705078125, 1.473323974609375, 1.473269775390625, 1.4733096923828124, 1.4737264404296875, 1.473578125, 1.47388916015625, 1.473797119140625, 1.474377685546875, 1.47427734375, 1.4738125, 1.473481689453125, 1.473944580078125, 1.473754150390625, 1.4732686767578125, 1.473934326171875, 1.4739149169921875, 1.4737080078125, 1.47405615234375, 1.4740255126953126, 1.4739588623046875, 1.4738177490234374, 1.4743765869140626, 1.474249755859375, 1.473743896484375, 1.4736650390625, 1.4745087890625, 1.473791015625, 1.473939453125, 1.4738421630859375, 1.4741165771484375, 1.4739957275390625, 1.4737919921875, 1.4739312744140625, 3.059681396484375, 1.4742149658203125, 1.4734141845703126, 1.4730894775390626, 1.4733916015625, 1.473896484375, 1.473638427734375, 1.4734898681640625, 1.4739404296875, 1.47435107421875, 1.47458349609375, 1.474293701171875, 1.4747064208984375, 1.4745426025390624, 1.47485693359375, 1.4739322509765624, 1.4736671142578126, 1.473491943359375, 1.4745078125, 1.474566162109375, 1.47476171875, 1.47451806640625, 
1.4742261962890626, 1.4741903076171874, 1.47418115234375, 1.4742999267578125, 1.4743060302734374, 1.4741258544921876, 1.473733642578125, 1.4733138427734376, 1.472933837890625, 1.4730772705078126, 1.47359130859375, 1.473479736328125, 1.4740745849609376, 1.4737479248046874, 1.4733055419921874, 1.4737100830078125, 1.474017333984375, 1.4741851806640625, 1.47333935546875, 1.474234375, 1.4739200439453124, 1.473474609375, 1.47338037109375, 1.4740694580078124, 1.473732666015625, 1.4734735107421875, 1.4738309326171875, 1.4739005126953124, 1.473574951171875, 1.473660888671875, 1.47378076171875, 1.473206298828125, 1.4732236328125, 1.4738565673828126, 1.4739322509765624, 1.4736004638671876, 1.473474609375, 1.4738258056640625, 1.4735831298828126, 1.4737120361328124, 1.4735411376953125, 3.0642236328125, 1.4730126953125, 1.47343359375, 1.4729718017578124, 1.4735728759765625, 1.4736158447265626, 1.4739005126953124, 1.47361279296875, 1.473840087890625, 1.4736087646484375, 1.4734949951171874, 1.47426611328125, 1.47422314453125, 1.4739998779296875, 1.4739046630859376, 1.474639892578125, 1.474404296875, 1.474365478515625, 1.4745169677734375, 1.474093017578125, 1.4741513671875, 1.4736055908203125, 1.47390869140625, 1.47327490234375, 1.473606689453125, 1.473691650390625, 1.47382373046875, 1.473692626953125, 1.4730546875, 1.473733642578125, 1.4736373291015625, 1.47371923828125, 1.473250244140625, 1.473560546875, 1.4734674072265626, 1.473487060546875, 1.4741666259765625, 1.47329638671875, 1.473322021484375, 1.473585205078125, 1.4732359619140625, 1.473195068359375, 1.4736046142578125, 1.4733804931640626, 1.473373046875, 1.4742098388671876, 1.4742425537109376, 1.4739527587890624, 1.4736629638671874, 1.473869873046875, 1.47333935546875, 1.4739404296875, 1.4740797119140625, 1.4739158935546874, 1.47359130859375, 1.4735042724609375, 1.4738052978515626, 1.473397705078125, 1.4735718994140625, 1.473728515625, 1.4734254150390624, 1.473385498046875, 1.4733148193359376, 3.0616728515625, 1.4734571533203125, 1.4732861328125, 1.4731346435546875, 1.473143798828125, 1.4733568115234374, 1.4734940185546874, 1.4731695556640625, 1.4739505615234374, 1.473987548828125, 1.4735789794921874, 1.4733271484375, 1.4738790283203125, 1.4730577392578126, 1.473259521484375, 1.474188232421875, 1.474802734375, 1.4733424072265624, 1.473292236328125, 1.474060302734375, 1.4732933349609374, 1.4734765625, 1.47428662109375, 1.4740941162109376, 1.4739312744140625, 1.4739609375, 1.4747392578125, 1.4735155029296876, 1.47369775390625, 1.4734786376953124, 1.473407958984375, 1.473471435546875, 1.4732052001953124, 1.473833984375, 1.4732821044921875, 1.4737459716796875, 1.4735677490234376, 1.473176513671875, 1.4732420654296876, 1.473311767578125, 1.4738421630859375, 1.4735667724609376, 1.4740521240234374, 1.4739364013671874, 1.4734776611328124, 1.4733291015625, 1.4738514404296874, 1.473481689453125, 1.4737315673828124, 1.473935302734375, 1.4735574951171875, 1.473122314453125, 1.47314892578125, 1.473607666015625, 1.4732789306640626, 1.4734888916015625, 1.47401416015625, 1.47483544921875, 1.4746541748046875, 1.47443603515625, 1.4748702392578126, 1.4743603515625, 1.4745528564453125, 3.064785888671875, 1.4746173095703126, 1.47447607421875, 1.4746685791015626, 1.47447607421875, 1.4743643798828125, 1.4738125, 1.4731048583984374, 1.473474609375, 1.47338134765625, 1.473534912109375, 1.473302490234375, 1.473479736328125, 1.4740050048828126, 1.473471435546875, 1.47342333984375, 1.473850341796875, 1.4732401123046874, 1.4734808349609374, 1.4733658447265625, 
1.4733629150390626, 1.473560546875, 1.473197021484375, 1.47348681640625, 1.473091552734375, 1.4735728759765625, 1.47310693359375, 1.473244140625, 1.4730096435546876, 1.4732452392578126, 1.473584228515625, 1.473293212890625, 1.4733302001953126, 1.4733189697265625, 1.473532958984375, 1.473896484375, 1.47376953125, 1.474218994140625, 1.473491943359375, 1.4740469970703125, 1.4737294921875, 1.4742374267578124, 1.4740889892578124, 1.473924072265625, 1.473764404296875, 1.473859619140625, 1.4736947021484375, 1.4739609375, 1.47409716796875, 1.47407763671875, 1.4737705078125, 1.4731356201171875, 1.4733404541015624, 1.473121337890625, 1.4731192626953125, 1.4733465576171876, 1.473375244140625, 1.4740101318359375, 1.4735616455078124, 1.47340087890625, 1.4733568115234374, 1.473385498046875, 1.4738780517578125, 3.063844970703125, 1.4731295166015625, 1.4731304931640625, 1.4736036376953126, 1.473776611328125, 1.47394970703125, 1.4742486572265625, 1.4738533935546876, 1.4738216552734376, 1.4736895751953125, 1.4738575439453125, 1.473681396484375, 1.473460205078125, 1.47378076171875, 1.4738780517578125, 1.4737530517578126, 1.473596435546875, 1.4732513427734375, 1.4733271484375, 1.47356982421875, 1.4737847900390626, 1.47350732421875, 1.473412109375, 1.47346533203125, 1.4734541015625, 1.473197021484375, 1.4740101318359375, 1.47386669921875, 1.4737366943359376, 1.4738145751953124, 1.4737264404296875, 1.47363330078125, 1.4732882080078125, 1.473638427734375, 1.4737899169921875, 1.473912841796875, 1.473796142578125, 1.4733824462890626, 1.473281005859375, 1.473376220703125, 1.473712158203125, 1.47343359375, 1.473532958984375, 1.4734254150390624, 1.4737110595703125, 1.473586181640625, 1.4736097412109375, 1.4738094482421875, 1.47342236328125, 1.473691650390625, 1.4737294921875, 1.4736414794921875, 1.4735841064453126, 1.4735462646484374, 1.4737427978515625, 1.473418212890625, 1.473576904296875, 1.473511474609375, 1.47342333984375, 1.4733507080078125, 1.47335986328125, 1.474207763671875, 1.47342138671875]",tokens/s,0.6682591926299519,,,,,,,, -4bit-awq-gemm-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,facebook/opt-30b,facebook/opt-30b,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", 
line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - self.load_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3704, in from_pretrained - config = cls._autoset_attn_implementation( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1490, in _autoset_attn_implementation - config = cls._check_and_enable_sdpa( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1656, in _check_and_enable_sdpa - raise ValueError( -ValueError: OPTForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemm-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,Qwen/Qwen1.5-MoE-A2.7B,Qwen/Qwen1.5-MoE-A2.7B,cuda,0,42,,,True,,,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run - report = scenario.run(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run - self.run_model_loading_tracking(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking - backend.load() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load - self.load_transformers_model() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model - 
self.load_transformers_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights - self.load_transformers_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained - self.pretrained_model = self.automodel_loader.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3846, in from_pretrained - hf_quantizer.preprocess_model( - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/base.py"", line 182, in preprocess_model - return self._process_model_before_weight_loading(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/quantizer_awq.py"", line 85, in _process_model_before_weight_loading - model, has_been_replaced = replace_with_awq_linear( - File ""/usr/local/lib/python3.10/dist-packages/transformers/integrations/awq.py"", line 179, in replace_with_awq_linear - _, has_been_replaced = replace_with_awq_linear( - File ""/usr/local/lib/python3.10/dist-packages/transformers/integrations/awq.py"", line 179, in replace_with_awq_linear - _, has_been_replaced = replace_with_awq_linear( - File ""/usr/local/lib/python3.10/dist-packages/transformers/integrations/awq.py"", line 179, in replace_with_awq_linear - _, has_been_replaced = replace_with_awq_linear( - [Previous line repeated 1 more time] - File ""/usr/local/lib/python3.10/dist-packages/transformers/integrations/awq.py"", line 166, in replace_with_awq_linear - model._modules[name] = target_cls( - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/gemm.py"", line 103, in __init__ - assert out_features % (32 // self.w_bit) == 0 -AssertionError - -",qwen2_moe,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemm-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,mistralai/Mixtral-8x7B-v0.1,mistralai/Mixtral-8x7B-v0.1,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File 
""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - self.load_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3904, in from_pretrained - dispatch_model(model, **device_map_kwargs) - File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 489, in dispatch_model - model.to(device) - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 2796, in to - return super().to(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1173, in to - return self._apply(convert) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 779, in _apply - module._apply(fn) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 779, in _apply - module._apply(fn) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 779, in _apply - module._apply(fn) - [Previous line repeated 2 more times] - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 853, in _apply - self._buffers[key] = fn(buf) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1159, in convert - return t.to( -torch.cuda.OutOfMemoryError: CUDA out of memory. Tried to allocate 20.00 MiB. 
GPU - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemm-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,EleutherAI/pythia-410m,EleutherAI/pythia-410m,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.42.3,,0.31.0,,,,1.20.0,,,,0.11.1,,,,,,,,,,,,,,,,,,,,,,,,,,,MB,1357.860864,1315.438592,0.0,668.991488,598.701056,s,10,0.5735699539184571,0.0573569953918457,0.002323365932403844,0.05641374397277832,0.058685475921630856,0.06142336158752441,0.06361367012023926,"[0.06416124725341797, 0.05632092666625976, 0.05633155059814453, 0.05636393737792969, 0.056450401306152344, 0.056666881561279296, 0.058077056884765624, 0.05637046432495117, 0.05638243103027344, 0.056445056915283204]",tokens/s,4463.274239717147,kWh,6.797899288692694e-07,3.7245412829627415e-07,1.967943783804529e-06,3.0201878409700728e-06,tokens/kWh,84762939.75071888,MB,1357.860864,1315.438592,0.0,668.991488,634.11968,s,10,35.55266552734375,3.5552665527343748,0.02725806586268354,3.5483658447265625,3.5879743408203124,3.6048503295898437,3.618351120605469,"[3.621726318359375, 3.58422412109375, 3.529068603515625, 3.540139892578125, 3.558736572265625, 3.5575546875, 3.537781982421875, 3.52670166015625, 3.551919189453125, 3.5448125]",tokens/s,17.720190333280737,kWh,4.28260003378565e-05,2.3470953370349246e-05,0.00011588945605759889,0.00018218640976580466,tokens/kWh,345799.66793892405,,s,629,36.002755603790305,0.05723808522065227,0.006780702818673421,0.05590220642089844,0.05774848175048828,0.0580835319519043,0.11218710388183593,"[0.05981695938110351, 0.059014144897460936, 0.05988351821899414, 0.05948825454711914, 0.060007423400878904, 0.05807923126220703, 0.05880831909179687, 0.057845760345458984, 0.0577259521484375, 0.057599998474121096, 0.05738905715942383, 0.0576255989074707, 0.058726398468017575, 0.05784064102172851, 0.05770240020751953, 0.05808639907836914, 0.05745971298217773, 0.05741670227050781, 0.05756415939331055, 0.05732454299926758, 0.057987071990966796, 0.057485313415527345, 0.05737472152709961, 0.05715967941284179, 0.05847347259521484, 0.057591808319091796, 0.05679206466674805, 0.055755775451660154, 0.057815040588378906, 0.05788467025756836, 0.05797785568237305, 0.0564029426574707, 0.05548031997680664, 0.05780889511108398, 0.05739724731445312, 0.05749657440185547, 0.0573306884765625, 0.05792768096923828, 0.05750271987915039, 0.057316352844238284, 0.05745459365844727, 0.05743212890625, 0.057356224060058594, 0.05757440185546875, 0.05759692764282227, 0.05739110565185547, 0.05764198303222656, 0.057611263275146485, 0.05741363143920898, 0.057322494506835936, 0.05822566223144531, 0.05762047958374023, 0.057299968719482425, 0.056842239379882815, 0.05561244964599609, 0.05575369644165039, 0.05556838226318359, 0.05557759857177735, 0.05566566467285156, 0.055512065887451174, 0.055686145782470706, 0.05557145690917969, 0.11211571502685547, 0.055414783477783204, 0.05548953628540039, 0.05547315216064453, 0.05559296035766602, 0.057166847229003906, 0.057384960174560545, 0.05738905715942383, 0.057421886444091796, 0.05794604873657227, 
0.058033153533935546, 0.05751910400390625, 0.05744128036499024, 0.05772697448730469, 0.057409534454345705, 0.057847808837890625, 0.05756108856201172, 0.05777407836914063, 0.05831167984008789, 0.05800960159301758, 0.05741875076293945, 0.058001407623291014, 0.05816524887084961, 0.057565185546875, 0.05741875076293945, 0.057442302703857424, 0.057466880798339844, 0.05788671875, 0.05794508743286133, 0.0575283203125, 0.05696921539306641, 0.0558919677734375, 0.055634944915771485, 0.055400447845458986, 0.055548927307128904, 0.055624702453613284, 0.055608318328857424, 0.05552742385864258, 0.055711742401123046, 0.05558169555664062, 0.05681459045410156, 0.05732147216796875, 0.057388031005859375, 0.056796161651611325, 0.05560627365112305, 0.056886272430419924, 0.057524223327636716, 0.05738393783569336, 0.058218494415283206, 0.05722009658813477, 0.05792051315307617, 0.057490432739257816, 0.057455615997314455, 0.05738700866699219, 0.0573941764831543, 0.05738598251342773, 0.05712793731689453, 0.05570560073852539, 0.055634944915771485, 0.05566566467285156, 0.05554073715209961, 0.0556492805480957, 0.05571072006225586, 0.11216793823242187, 0.0556124153137207, 0.055382015228271485, 0.05562572860717773, 0.05556531143188476, 0.05569232177734375, 0.05575062561035156, 0.055859233856201174, 0.05676950454711914, 0.05582950210571289, 0.05629542541503906, 0.055761985778808594, 0.055734207153320316, 0.05566361618041992, 0.055640064239501956, 0.055725055694580077, 0.0556308479309082, 0.05583363342285156, 0.05553353500366211, 0.05562777709960937, 0.05568307113647461, 0.05573222351074219, 0.05753241729736328, 0.05677260971069336, 0.055640064239501956, 0.05561548614501953, 0.056256511688232425, 0.05657395172119141, 0.05638662338256836, 0.05741254425048828, 0.05741881561279297, 0.056707008361816406, 0.05665075302124024, 0.055992321014404295, 0.05728665542602539, 0.05752524948120117, 0.05733171081542969, 0.055621631622314455, 0.05572403335571289, 0.05564723205566406, 0.05570150375366211, 0.055398399353027344, 0.055771137237548826, 0.055638015747070314, 0.05562572860717773, 0.055556095123291016, 0.05568102264404297, 0.055702529907226565, 0.05575987243652344, 0.05575680160522461, 0.055605247497558595, 0.05553664016723633, 0.05570560073852539, 0.055798782348632815, 0.05559910583496094, 0.05567180633544922, 0.05573222351074219, 0.05629337692260742, 0.05749555206298828, 0.0559370231628418, 0.055578624725341794, 0.05670809555053711, 0.055962623596191405, 0.11224575805664062, 0.05556224060058594, 0.05563904190063477, 0.055656448364257816, 0.0557127685546875, 0.05569228744506836, 0.05559910583496094, 0.05550592041015625, 0.05555302429199219, 0.055739391326904295, 0.055537662506103515, 0.05554995346069336, 0.057543678283691405, 0.057662464141845705, 0.05759692764282227, 0.05869772720336914, 0.058270721435546874, 0.057540607452392575, 0.05582131195068359, 0.055779327392578126, 0.055809024810791016, 0.05575987243652344, 0.0556492805480957, 0.055632896423339843, 0.05597491073608398, 0.055795711517333986, 0.05575065612792969, 0.05567488098144531, 0.05572710418701172, 0.05697843170166016, 0.057621505737304686, 0.05626777648925781, 0.05568716812133789, 0.05771468734741211, 0.05757747268676758, 0.057265151977539064, 0.05681151962280274, 0.05754982376098633, 0.057534465789794924, 0.057024513244628906, 0.05558169555664062, 0.05570457458496094, 0.055833694458007815, 0.05585500717163086, 0.05577011108398437, 0.05586739349365234, 0.05599129486083984, 0.05690777587890625, 0.05552025604248047, 0.055673854827880856, 0.05572915267944336, 
0.05587865447998047, 0.05573529434204102, 0.05586841583251953, 0.05573017501831055, 0.055760894775390625, 0.055874561309814455, 0.05571891021728516, 0.05576499176025391, 0.05579264068603516, 0.05576192092895508, 0.05572198486328125, 0.055793663024902344, 0.11247821044921875, 0.055947265625, 0.055580673217773435, 0.05587353515625, 0.05770240020751953, 0.057093120574951174, 0.0557762565612793, 0.05583564758300781, 0.055890945434570315, 0.05565542221069336, 0.05675110244750976, 0.05625753784179687, 0.05588479995727539, 0.0562718734741211, 0.05585100936889648, 0.05575884628295898, 0.05576499176025391, 0.05688934326171875, 0.05688422393798828, 0.05730924987792969, 0.05688927841186524, 0.05672345733642578, 0.05574758529663086, 0.0558919677734375, 0.05698358535766602, 0.05571683120727539, 0.05599846267700195, 0.05756825637817383, 0.057455615997314455, 0.05779865646362305, 0.05597491073608398, 0.05708697509765625, 0.056840190887451174, 0.055809024810791016, 0.055806976318359375, 0.05576192092895508, 0.05717708969116211, 0.056443904876708986, 0.05544857788085938, 0.05571686553955078, 0.055669761657714846, 0.05571686553955078, 0.05590425491333008, 0.05589100646972656, 0.05568508911132813, 0.05578953552246094, 0.055803905487060546, 0.056334400177001955, 0.055878593444824216, 0.055670783996582034, 0.05734195327758789, 0.05783039855957031, 0.05769420623779297, 0.057328704833984376, 0.057484222412109376, 0.057388031005859375, 0.05765836715698242, 0.05777920150756836, 0.057540607452392575, 0.05716889572143555, 0.05852671813964844, 0.056840190887451174, 0.05597695922851562, 0.11283148956298829, 0.05591756820678711, 0.056292350769042966, 0.05755392074584961, 0.057414657592773435, 0.056551422119140625, 0.05571583938598633, 0.055984127044677735, 0.05593907165527344, 0.05541580963134766, 0.05567897415161133, 0.05571379089355469, 0.05580083084106445, 0.055672832489013675, 0.05564211273193359, 0.055809024810791016, 0.055736320495605465, 0.055839744567871094, 0.05565235137939453, 0.05571583938598633, 0.05600153732299805, 0.055823360443115234, 0.055818241119384764, 0.05570457458496094, 0.056213504791259764, 0.055795711517333986, 0.055673854827880856, 0.055744510650634765, 0.05580492782592773, 0.05590323257446289, 0.05576396942138672, 0.055449600219726565, 0.055818241119384764, 0.055979007720947264, 0.05598310470581055, 0.055825439453125, 0.05578236770629883, 0.05567795181274414, 0.056638465881347654, 0.05652070236206055, 0.0575098876953125, 0.057299968719482425, 0.0576808967590332, 0.05792563247680664, 0.057729022979736325, 0.05774848175048828, 0.058654720306396485, 0.058905601501464844, 0.05799935913085937, 0.0573941764831543, 0.05648793411254883, 0.056785919189453124, 0.05599027252197265, 0.057333759307861325, 0.05748121643066406, 0.057452545166015626, 0.057862144470214844, 0.056509441375732425, 0.055695358276367186, 0.05770240020751953, 0.056632320404052736, 0.057633857727050784, 0.05701318359375, 0.11698483276367187, 0.058790912628173826, 0.0565667839050293, 0.056400894165039066, 0.05603839874267578, 0.055823360443115234, 0.056051712036132816, 0.055597057342529295, 0.05556121444702149, 0.057240577697753904, 0.05742489624023438, 0.05738700866699219, 0.057365505218505856, 0.057353214263916014, 0.05783039855957031, 0.05734400177001953, 0.056095745086669924, 0.05565951919555664, 0.055653377532958986, 0.05596979141235352, 0.05557555389404297, 0.055583744049072265, 0.05551923370361328, 0.05565951919555664, 0.05549977493286133, 0.05557350540161133, 0.055498752593994144, 0.05578342437744141, 0.0557127685546875, 
0.05554380798339844, 0.055572479248046876, 0.05547212982177734, 0.05575884628295898, 0.055569408416748046, 0.05546700668334961, 0.055634944915771485, 0.05568415832519531, 0.0554956169128418, 0.05561139297485351, 0.05549465560913086, 0.0555417594909668, 0.0557916145324707, 0.055653377532958986, 0.05557452774047852, 0.05553561782836914, 0.05562265777587891, 0.05569843292236328, 0.05557145690917969, 0.0568268814086914, 0.05713510513305664, 0.0576993293762207, 0.057264129638671876, 0.05562268829345703, 0.058207199096679686, 0.057929729461669924, 0.05596672058105469, 0.05567692947387695, 0.0556124153137207, 0.05563187026977539, 0.055662593841552734, 0.055618560791015625, 0.05551411056518555, 0.05571993637084961, 0.11230207824707031, 0.05556531143188476, 0.055583744049072265, 0.05559603118896484, 0.055820289611816405, 0.05559603118896484, 0.05586127853393555, 0.055710689544677734, 0.05566054534912109, 0.05534515380859375, 0.055344127655029295, 0.05557555389404297, 0.055825408935546876, 0.05577830505371094, 0.055521278381347655, 0.0557916145324707, 0.05571379089355469, 0.055702529907226565, 0.05556838226318359, 0.05560115051269531, 0.05558272171020508, 0.05557555389404297, 0.05774848175048828, 0.057299968719482425, 0.05733785629272461, 0.05655865478515625, 0.055502784729003905, 0.05562265777587891, 0.05769420623779297, 0.05662412643432617, 0.05678694534301758, 0.05764505767822266, 0.05634355163574219, 0.05650841522216797, 0.05620121765136719, 0.055725055694580077, 0.055766014099121096, 0.055537662506103515, 0.05568819046020508, 0.05545062255859375, 0.05575065612792969, 0.055580673217773435, 0.05566873550415039, 0.05554073715209961, 0.05567795181274414, 0.05562060928344727, 0.05770444869995117, 0.056671230316162106, 0.055934974670410156, 0.05575065612792969, 0.055629825592041014, 0.05556224060058594, 0.05587046432495117, 0.05570867156982422, 0.0557209587097168, 0.05575065612792969, 0.05553561782836914, 0.05629030227661133, 0.05745356750488281, 0.05623295974731445, 0.05563596725463867, 0.055613441467285155, 0.05556633758544922, 0.11219455718994141, 0.05558272171020508, 0.055465023040771486, 0.055540672302246095, 0.05531955337524414, 0.05546393585205078, 0.055548927307128904, 0.05548441696166992, 0.055616512298583984, 0.056395774841308595, 0.05721395111083984, 0.05674905776977539, 0.05730508804321289, 0.05730201721191406, 0.05641113662719727, 0.0557209587097168, 0.055585792541503906, 0.05566566467285156, 0.05559091186523438, 0.05565951919555664, 0.05544963073730469, 0.05593494415283203, 0.05556224060058594, 0.05557145690917969, 0.056120319366455076, 0.057458686828613284, 0.05740236663818359, 0.05729894256591797, 0.057373695373535157, 0.057766910552978515, 0.05665075302124024, 0.05573324966430664, 0.05637017440795898, 0.05809971237182617, 0.05586943817138672, 0.05620019149780273, 0.05695590209960937, 0.0555643196105957, 0.056606689453125, 0.057219070434570314, 0.05642649459838867, 0.05731020736694336, 0.05765119934082031, 0.05748121643066406, 0.056667137145996097, 0.05717708969116211, 0.057823230743408206, 0.05779046249389649, 0.05644291305541992, 0.056858592987060544, 0.056856575012207033, 0.058813438415527344, 0.05755289459228516, 0.05608345413208008, 0.05555302429199219, 0.05563907241821289, 0.05571273422241211, 0.05554278564453125, 0.05552537536621094, 0.05555507278442383, 0.0555233268737793, 0.05571072006225586, 0.0556124153137207, 0.11335577392578125, 0.0570880012512207, 0.05586636734008789, 0.055672832489013675, 0.056136798858642575, 0.0563342399597168, 0.05568511962890625, 
0.056820735931396485, 0.05746278381347656, 0.05724262237548828, 0.05761228942871094, 0.05586431884765625, 0.05590220642089844, 0.05682995223999023, 0.057353214263916014, 0.05627699279785156, 0.055877632141113284, 0.05574655914306641, 0.057248767852783204, 0.05563391876220703, 0.05559500885009765, 0.05561859130859375, 0.055626720428466794, 0.055660575866699216, 0.055686111450195315, 0.056011775970458984, 0.05731737518310547, 0.05745459365844727, 0.057132064819335936, 0.05569123077392578, 0.056584190368652344, 0.056736766815185545, 0.05771366500854492, 0.05775974273681641, 0.05750579071044922, 0.05729075241088867, 0.05755187225341797, 0.05558476638793945, 0.05566054534912109, 0.05850931167602539, 0.05686476898193359, 0.05549363327026367, 0.05560422515869141, 0.055569408416748046, 0.05578649520874023, 0.05595852661132812, 0.05572403335571289, 0.05566054534912109, 0.055621631622314455, 0.055591934204101565, 0.05563699340820313, 0.05550899124145508, 0.05629132843017578, 0.05575065612792969, 0.05558476638793945, 0.05610086441040039, 0.05598003387451172, 0.05551308822631836, 0.05563904190063477, 0.05569945526123047, 0.05555302429199219, 0.05561446380615234, 0.05581107330322266]",tokens/s,17.470884921202547,,,,,,,, -4bit-awq-gemm-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,databricks/dbrx-base,databricks/dbrx-base,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 304, in hf_raise_for_status - response.raise_for_status() - File ""/usr/local/lib/python3.10/dist-packages/requests/models.py"", line 1024, in raise_for_status - raise HTTPError(http_error_msg, response=self) -requests.exceptions.HTTPError: 403 Client Error: Forbidden for url: https://huggingface.co/databricks/dbrx-base/resolve/main/config.json - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1722, in _get_metadata_or_catch_error - metadata = get_hf_file_metadata(url=url, proxies=proxies, timeout=etag_timeout, headers=headers) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1645, in get_hf_file_metadata - r = _request_wrapper( - File 
""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 372, in _request_wrapper - response = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 396, in _request_wrapper - hf_raise_for_status(response) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 367, in hf_raise_for_status - raise HfHubHTTPError(message, response=response) from e -huggingface_hub.utils._errors.HfHubHTTPError: (Request ID: Root=1-66947b36-46ef82bf569cfee26bd6c651;20ba95d5-dae9-4b7a-b2bb-e5c1b69baff7) - -403 Forbidden: Please enable access to public gated repositories in your fine-grained token settings to view this repository.. -Cannot access content at: https://huggingface.co/databricks/dbrx-base/resolve/main/config.json. -If you are trying to create or update content,make sure you have a token with the `write` role. - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1221, in hf_hub_download - return _hf_hub_download_to_cache_dir( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1325, in _hf_hub_download_to_cache_dir - _raise_on_head_call_error(head_call_error, force_download, local_files_only) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1826, in _raise_on_head_call_error - raise LocalEntryNotFoundError( -huggingface_hub.utils._errors.LocalEntryNotFoundError: An error happened while trying to locate the file on the Hub and we cannot find the requested files in the local cache. Please check your connection and try again or make sure your Internet connection is on. 
- -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 445, in cached_file - raise EnvironmentError( -OSError: We couldn't connect to 'https://huggingface.co' to load this file, couldn't find it in the cached files and it looks like databricks/dbrx-base is not the path to a directory containing a file named config.json. -Checkout your internet connection or see how to run the library in offline mode at 'https://huggingface.co/docs/transformers/installation#offline-mode'. 
- -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemm-sdpa,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,Qwen/Qwen1.5-7B,Qwen/Qwen1.5-7B,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.40.2,,0.30.1,,,,1.19.2,,,,0.11.1,,,,,,,,,,,,,,,,,,,,,,,,,,,MB,2805.69856,8389.132288,0.0,7742.685184,7007.0144,s,10,5.757144287109376,0.5757144287109376,0.0011485729711129637,0.5757064819335938,0.5769704406738282,0.5771491607666016,0.5772921368408204,"[0.5760953979492187, 0.5769307250976563, 0.574836669921875, 0.5745257568359375, 0.574697998046875, 0.5738778076171875, 0.5753175659179688, 0.5767816162109375, 0.577327880859375, 0.5767528686523438]",tokens/s,444.66490196050984,kWh,6.783310471125591e-06,3.7157116531489013e-06,3.30976653669996e-05,4.3596687491274095e-05,tokens/kWh,5872005.758493431,MB,2805.69856,8389.132288,0.0,7742.685184,7283.984384,s,10,336.2841484375,33.62841484375,0.0041773861216769514,33.62973046875,33.63291328125,33.63334921875,33.63369796875,"[33.62066796875, 33.6314140625, 33.6304453125, 33.62453125, 33.627359375, 33.629015625, 33.63378515625, 33.63111328125, 33.623, 33.63281640625]",tokens/s,1.873415690056198,kWh,0.0003969375698617947,0.00021755745967517217,0.0019112099178555991,0.0025257049473925665,tokens/kWh,24943.531137727943,,s,629,340.93043640136693,0.5420197717032864,0.06847421137969394,0.5337415771484375,0.53422822265625,0.5344366455078124,1.1091660595703123,"[0.5336361083984374, 0.5341531982421875, 0.533359619140625, 0.5337293090820312, 0.53304931640625, 0.5336708984375, 0.5331292114257813, 0.5338419189453125, 0.5331640625, 0.5335398559570312, 0.53309033203125, 0.5335951538085938, 0.5331834716796875, 0.5338798217773437, 0.5333298950195312, 0.5336842041015625, 0.5331845092773437, 0.5336678466796875, 0.533411865234375, 0.53406103515625, 0.5336094970703125, 0.5338388671875, 0.5332828369140625, 0.5337896728515625, 0.5332971801757812, 0.533770263671875, 0.5334599609375, 0.5337487182617188, 0.5331834716796875, 0.5339166870117188, 0.5336627197265625, 0.5344092407226563, 0.5335090942382813, 0.5339432983398438, 0.533738525390625, 0.5342853393554687, 0.5333934326171875, 0.5340252075195312, 0.53340771484375, 0.534096923828125, 0.5334374389648437, 0.5337191162109375, 0.5333012084960937, 0.5337835693359375, 0.5332633666992187, 0.53411328125, 0.533443603515625, 0.5338327026367188, 0.5333759765625, 0.5339893798828125, 0.5333831787109375, 0.5338306274414063, 0.5334395141601562, 0.5340466918945312, 0.5333514404296875, 0.5337937622070312, 0.5338665161132813, 0.5344153442382813, 0.5337047119140625, 0.5338112182617187, 0.5332122192382812, 0.5337702026367187, 1.1116871337890626, 0.5333892822265625, 0.53395556640625, 0.5335818481445312, 0.534329345703125, 0.53378662109375, 0.533939208984375, 0.533485595703125, 0.5340098266601563, 0.533265380859375, 0.5337507934570312, 0.5338316650390625, 0.53416552734375, 0.533770263671875, 0.5342669067382813, 0.5334896850585937, 0.5339904174804687, 0.5334537963867187, 0.5338736572265625, 0.5339002685546875, 0.5341122436523438, 0.5333739624023438, 
0.5337057495117188, 0.5331763305664062, 0.5336084594726562, 0.5334323120117187, 0.5338009643554688, 0.5331425170898437, 0.5337620239257812, 0.5331527709960937, 0.5337323608398438, 0.5339801635742187, 0.5345730590820312, 0.53325927734375, 0.5336207275390625, 0.53326953125, 0.5340211791992188, 0.5333349609375, 0.5338777465820312, 0.53340673828125, 0.5337753295898438, 0.533338134765625, 0.533880859375, 0.533570556640625, 0.5340579833984375, 0.5336616821289063, 0.5373572998046875, 0.5339514770507813, 0.53390234375, 0.5336719360351563, 0.5342648315429688, 0.5339279174804688, 0.5340426025390625, 0.5336504516601562, 0.5338839721679688, 0.53328173828125, 0.534540283203125, 0.5336729736328125, 0.5342125854492188, 0.5336329956054687, 0.53383984375, 0.5335726318359375, 0.5343897705078124, 1.10948046875, 0.5335797729492188, 0.5341194458007813, 0.5339064331054687, 0.5345751342773437, 0.5335787353515625, 0.533949462890625, 0.5334251708984376, 0.5337794799804687, 0.5332059936523438, 0.5337763671875, 0.5334661254882812, 0.533986328125, 0.5332305908203125, 0.5337221069335938, 0.5334609985351563, 0.53401904296875, 0.5335316772460937, 0.5339371337890625, 0.53347021484375, 0.5337108764648437, 0.5331456298828126, 0.5338245239257813, 0.5349140625, 0.5346262817382812, 0.5334579467773437, 0.5338726196289062, 0.533796875, 0.5340897216796875, 0.5336565551757813, 0.5343047485351563, 0.5332715454101562, 0.534076416015625, 0.533580810546875, 0.5337415771484375, 0.5334824829101562, 0.53382861328125, 0.53319677734375, 0.5336668090820312, 0.5332664184570313, 0.5337743530273438, 0.5335418701171875, 0.5340856323242188, 0.5335429077148437, 0.53378662109375, 0.5334508056640626, 0.5341675415039062, 0.5349437255859375, 0.5338644409179687, 0.5339566040039062, 0.5342269287109375, 0.5335900268554687, 0.5341214599609375, 0.53355419921875, 0.5342976684570313, 0.5334230346679687, 0.5340139770507812, 0.5335992431640625, 0.5341634521484375, 0.5334210815429687, 0.5340436401367188, 0.5337569580078125, 0.5342617797851562, 1.1091947021484374, 0.5332183227539062, 0.5337579345703125, 0.533496826171875, 0.5339699096679688, 0.53334326171875, 0.5339320068359374, 0.5333401489257813, 0.533749755859375, 0.533454833984375, 0.533738525390625, 0.5335623779296875, 0.5348157348632813, 0.5332367553710937, 0.5340784301757813, 0.5337159423828125, 0.5338306274414063, 0.5333944091796875, 0.5338828735351563, 0.533201904296875, 0.5338593139648438, 0.53323681640625, 0.5340261840820313, 0.5336555786132813, 0.5336821899414063, 0.5331660766601563, 0.5336535034179688, 0.5331763305664062, 0.5336637573242188, 0.5330974731445313, 0.5336381225585938, 0.5332664184570313, 0.5338818359375, 0.533327880859375, 0.53427099609375, 0.5336790771484375, 0.5338746948242188, 0.533159912109375, 0.5337006225585937, 0.5332623291015625, 0.5337293090820312, 0.5337589721679687, 0.533981201171875, 0.533370849609375, 0.5360977783203125, 0.5334261474609375, 0.5341010131835937, 0.5335623779296875, 0.5340078125, 0.5334896850585937, 0.5340006103515625, 0.5336053466796875, 0.5344645385742187, 0.53421875, 0.5342853393554687, 0.5334138793945312, 0.5337979125976563, 0.53326953125, 0.533792724609375, 0.5334814453125, 0.5338665161132813, 0.5334456176757812, 0.53382861328125, 1.1090924072265624, 0.5336094970703125, 0.5341419677734375, 0.5334814453125, 0.5340355224609376, 0.53357666015625, 0.53401904296875, 0.5336771240234375, 0.5337230834960938, 0.5334425048828125, 0.5336268920898437, 0.5331640625, 0.5335726318359375, 0.533123046875, 0.5340631103515625, 0.5331763305664062, 0.5340078125, 
0.5347676391601562, 0.5339586791992188, 0.5332183227539062, 0.5338541870117187, 0.5333883056640625, 0.53372314453125, 0.5331834716796875, 0.5336309814453125, 0.5334537963867187, 0.534108154296875, 0.5333411865234375, 0.5341143188476563, 0.5333524780273438, 0.5335510864257812, 0.5337293090820312, 0.5341480712890625, 0.5335869140625, 0.5337671508789062, 0.533243896484375, 0.5338193969726562, 0.5337169799804687, 0.5342074584960937, 0.533265380859375, 0.5335838623046875, 0.533375, 0.5341593627929687, 0.5334948120117188, 0.5341030883789063, 0.5336299438476563, 0.5339105224609375, 0.5335091552734375, 0.5336759643554687, 0.5331793823242188, 0.5338460083007812, 0.5332551879882812, 0.5341522216796875, 0.5335869140625, 0.53411328125, 0.5335480346679687, 0.53422802734375, 0.5365042724609375, 0.5340682373046876, 0.533644287109375, 0.5340825805664062, 0.533528564453125, 0.5341306762695313, 1.1086192626953124, 0.5332972412109375, 0.534067138671875, 0.5337210693359375, 0.5342228393554688, 0.53347021484375, 0.53422900390625, 0.5334702758789063, 0.5339790649414062, 0.5337579345703125, 0.5341173706054687, 0.5335818481445312, 0.534245361328125, 0.5336678466796875, 0.5341265869140625, 0.5333944091796875, 0.5336135864257813, 0.5332551879882812, 0.5341911010742187, 0.533359619140625, 0.5341552734375, 0.5331834716796875, 0.53369140625, 0.5332572021484375, 0.53432421875, 0.533712890625, 0.5336708984375, 0.533106689453125, 0.533802001953125, 0.5334364013671875, 0.53359716796875, 0.5331845092773437, 0.53380810546875, 0.5332346801757812, 0.5336627197265625, 0.5332254638671875, 0.5337774047851562, 0.5336309814453125, 0.537302001953125, 0.5335510864257812, 0.5340999755859375, 0.5334763793945313, 0.5337241821289063, 0.5331824340820313, 0.5339197387695312, 0.53344970703125, 0.533875732421875, 0.53338623046875, 0.5339381713867187, 0.5334395141601562, 0.5338777465820312, 0.5334159545898437, 0.534012939453125, 0.5338378295898437, 0.5343323974609375, 0.534024169921875, 0.534361083984375, 0.5335521240234375, 0.5342996826171875, 0.5335305786132812, 0.533734375, 0.5334640502929687, 0.5342750854492188, 1.1099852294921875, 0.533232666015625, 0.5340067749023437, 0.5332183227539062, 0.5340108642578125, 0.5337364501953125, 0.534192138671875, 0.533570556640625, 0.5341460571289063, 0.533876708984375, 0.53406005859375, 0.5334681396484375, 0.5340528564453125, 0.5333984985351562, 0.53374462890625, 0.5331937255859375, 0.5337579345703125, 0.533475341796875, 0.5339535522460938, 0.533396484375, 0.53382861328125, 0.5333401489257813, 0.5337302856445313, 0.535394287109375, 0.5337927856445313, 0.5334507446289063, 0.5339443359375, 0.5337825317382813, 0.5344010009765625, 0.533728271484375, 0.5337845458984375, 0.5336329956054687, 0.5338839111328125, 0.5333759765625, 0.5338009643554688, 0.533201904296875, 0.5340794677734375, 0.5336320190429688, 0.5339146118164062, 0.53344873046875, 0.5339638061523437, 0.5336893310546875, 0.5338931274414063, 0.5337426147460937, 0.5341430053710937, 0.5335347290039063, 0.5341624145507813, 0.5345700073242188, 0.5347133178710938, 0.5338163452148438, 0.53465087890625, 0.5338040161132812, 0.5341941528320312, 0.533674072265625, 0.5342606811523437, 0.5343539428710937, 0.5342125854492188, 0.5335050048828125, 0.5339566040039062, 0.533570556640625, 0.5341276245117188, 0.5338695678710937, 0.5341880493164063, 1.111352294921875, 0.533518310546875, 0.534761474609375, 0.5335090942382813, 0.5338511352539063, 0.5336331176757813, 0.5341572265625, 0.5336279296875, 0.5341102294921874, 0.5339033813476562, 0.53374462890625, 
0.5332838134765625, 0.5336801147460938, 0.5332142333984375, 0.5337333984375, 0.5331947631835937, 0.534908935546875, 0.533190673828125, 0.5336279296875, 0.5330892944335938, 0.5340794677734375, 0.5336063842773437, 0.5338890380859375, 0.5336463623046875, 0.5338880004882812, 0.5336104736328126, 0.533712890625, 0.5332838134765625, 0.5341624145507813, 0.5335418701171875, 0.5341460571289063, 0.53351220703125, 0.5338890380859375, 0.533243896484375, 0.5342177124023437, 0.5337088012695312, 0.5343447265625, 0.533712890625, 0.5340774536132813, 0.5339156494140626, 0.5344481201171875, 0.5336882934570313, 0.5341992797851562, 0.5336084594726562, 0.533676025390625, 0.5333197021484375, 0.5338849487304688, 0.533917724609375, 0.53441943359375, 0.5336135864257813, 0.5341010131835937, 0.5335132446289063, 0.5340897216796875, 0.5336616821289063, 0.5339934692382813, 0.5337794799804687, 0.5340200805664063, 0.5335142211914062, 0.5342648315429688, 0.533865478515625, 0.5339801635742187, 0.5338695678710937, 0.534255615234375, 1.112015869140625, 0.5335879516601563, 0.534097900390625, 0.5332183227539062, 0.5336187133789062, 0.5332059936523438, 0.5337405395507813, 0.5333309326171874, 0.5338091430664063, 0.533423095703125, 0.5338306274414063, 0.533191650390625, 0.5339586791992188, 0.5338961791992187, 0.5337569580078125, 0.5333165893554688, 0.534349853515625, 0.5343836059570313, 0.5336043701171875, 0.5334517822265625, 0.5338685913085938, 0.5332326049804688, 0.5336350708007812, 0.5330892944335938, 0.5336309814453125, 0.5331548461914063, 0.5336053466796875, 0.5331824340820313, 0.5337999267578125, 0.5332408447265625, 0.533622802734375, 0.5338716430664062, 0.5345494995117187, 0.53351220703125, 0.5343057861328125, 0.5334180297851563, 0.5338480224609375, 0.53349169921875, 0.5338357543945312, 0.533622802734375, 0.533875732421875, 0.5334579467773437, 0.5337610473632812, 0.5334579467773437, 0.5340170288085937, 0.5332745971679688, 0.534091796875, 0.5333995361328125, 0.5338532104492187, 0.5333872680664062, 0.5340620727539063, 0.533886962890625, 0.5340108642578125, 0.5335654296875, 0.533928955078125, 0.5333514404296875, 0.533950439453125, 0.5334404907226562, 0.5343201293945312, 0.5335582885742187, 0.5337221069335938, 0.5335408935546875, 0.5341378784179688, 1.111615478515625, 0.5335418701171875, 0.534171630859375, 0.5334824829101562, 0.533771240234375, 0.5340958862304688, 0.5341583251953125, 0.5335234375, 0.5342811889648438, 0.5338716430664062, 0.53395556640625, 0.533296142578125, 0.5340877075195313, 0.53382861328125, 0.5342125854492188, 0.5335961303710938, 0.5341245727539062, 0.5336575927734375, 0.5344921875, 0.5339248657226563, 0.5340242309570312, 0.533560302734375, 0.5341531982421875, 0.5337302856445313, 0.5339627685546875, 0.5335162963867187, 0.535699462890625, 0.5336187133789062, 0.5340529174804688, 0.5334834594726563, 0.5338634033203125, 0.5333811645507812, 0.5340415649414062, 0.5334630126953125, 0.53372314453125, 0.5333606567382813, 0.5338726196289062, 0.5338736572265625, 0.5340108642578125, 0.5332828369140625, 0.5338992919921876, 0.5332766723632812, 0.5340753784179687, 0.5338142700195313, 0.5339904174804687, 0.5335613403320313, 0.5342269287109375, 0.533306396484375, 0.5340200805664063, 0.5333984985351562, 0.5338890380859375, 0.5335675048828125, 0.5339054565429687, 0.5333565063476563, 0.5340200805664063, 0.5336309814453125, 0.5339617309570313, 0.5336258544921875, 0.5341859741210937, 0.5334886474609375, 0.5339893798828125, 0.53378662109375, 0.534213623046875]",tokens/s,1.8449511479212646,,,,,,,, 
-4bit-awq-gemm-sdpa,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,Qwen/Qwen2-beta-14B,Qwen/Qwen2-beta-14B,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.0,,0.30.1,,,,1.19.2,,,,0.11.1,,,,,,,,,,,,,,,,,,,,,,,,,,,MB,3934.998528,12732.33408,0.0,12085.886976,11337.370624,s,10,10.913714233398439,1.0913714233398437,0.0019187724556758634,1.0909278564453126,1.0936456420898437,1.0942043518066407,1.0946513195800782,"[1.0947630615234376, 1.093521484375, 1.0894732666015625, 1.0895545654296874, 1.089643798828125, 1.0893114013671874, 1.0899912109375, 1.091864501953125, 1.093216552734375, 1.0923743896484375]",tokens/s,234.56725595451456,kWh,1.2859468128946092e-05,7.046563530275307e-06,6.250710556117145e-05,8.241313722039284e-05,tokens/kWh,3106300.871854854,MB,3934.998528,12732.33408,0.0,12085.886976,11686.804992,s,10,636.61697265625,63.661697265625,0.007269593222884917,63.66015234375,63.672558203125,63.6732771484375,63.6738523046875,"[63.6561171875, 63.66687109375, 63.65788671875, 63.66241796875, 63.65402734375, 63.67399609375, 63.6723984375, 63.656875, 63.65174609375, 63.66463671875]",tokens/s,0.9896060379467405,kWh,0.0007518513685133722,0.0004120810364553653,0.003630345543163038,0.004794277948131776,tokens/kWh,13140.664909624129,,s,629,645.425664550782,1.0261139341029908,0.12959104210993952,1.0104422607421875,1.0109677490234374,1.0113603271484375,2.100010478515625,"[1.0103787231445311, 1.0104750366210937, 1.010914306640625, 1.0098585815429688, 1.0104279174804687, 1.010324462890625, 1.0105446166992187, 1.010070556640625, 1.0100776977539063, 1.0105692138671876, 1.0102251586914062, 1.0103941040039062, 1.0105692138671876, 1.0099476318359375, 1.010234375, 1.0106050415039063, 1.0103367919921875, 1.0100828247070313, 1.0105968627929687, 1.0105446166992187, 1.0105169677734376, 1.0106787719726562, 1.0105886840820313, 1.0103572387695312, 1.0107811889648437, 1.010609130859375, 1.010071533203125, 1.0102784423828124, 1.0099865112304687, 1.010282470703125, 1.0107012939453126, 1.0107606811523437, 1.0109655151367187, 1.0103797607421876, 1.0103644409179688, 1.0103593139648437, 1.0102118530273438, 1.0102118530273438, 1.0101124877929688, 1.0109286499023438, 1.0103910522460937, 1.0106552124023438, 1.011262451171875, 1.0107944946289062, 1.0104873046875, 1.0106204223632813, 1.0099425048828126, 1.0105272216796874, 1.00997119140625, 1.0104494018554688, 1.0101258544921874, 1.010229248046875, 1.0103654174804688, 1.0102947998046874, 1.0099885864257812, 1.0105845947265626, 1.0102702026367187, 1.0102896728515625, 1.0102702026367187, 1.0103797607421876, 1.0103336791992188, 1.0109767456054688, 2.1042431640625, 1.0108385009765626, 1.0106071166992188, 1.0101176147460937, 1.0099619750976563, 1.01039306640625, 1.0101083984375, 1.0104063720703125, 1.0112655639648438, 1.0104002685546876, 1.0102466430664063, 1.0105128784179687, 1.0102671508789063, 1.0101801147460938, 1.0101739501953124, 1.0103828735351563, 1.0103910522460937, 1.0109398803710938, 1.0110330810546875, 1.0103602905273437, 1.0109020385742187, 1.0112982788085938, 1.010745361328125, 1.0104309692382814, 1.0102783813476564, 
1.010271240234375, 1.0101309204101563, 1.0107698974609376, 1.0114027709960938, 1.01064501953125, 1.0102230834960937, 1.0105385131835938, 1.0106644287109374, 1.0104248046875, 1.0108897094726563, 1.010545654296875, 1.0105814819335937, 1.0107955322265625, 1.0110156860351562, 1.0106214599609376, 1.01065625, 1.01066650390625, 1.0106920776367188, 1.0103101196289062, 1.0108272705078125, 1.0104053955078125, 1.0105886840820313, 1.010440185546875, 1.0111314086914063, 1.010808837890625, 1.0105968627929687, 1.010450439453125, 1.0106644287109374, 1.010177001953125, 1.0106286010742187, 1.01064599609375, 1.01049853515625, 1.0107146606445312, 1.0105742797851562, 1.0107597045898438, 1.0104780883789062, 1.01089794921875, 1.0108590087890625, 2.10035302734375, 1.0105681762695313, 1.0103634033203126, 1.0108939819335938, 1.0105936279296874, 1.01033984375, 1.0102159423828125, 1.0105128784179687, 1.0100869140625, 1.0100756225585938, 1.0101299438476563, 1.0103162841796876, 1.0100142211914063, 1.010387939453125, 1.0102691650390625, 1.0100736083984374, 1.0103839111328126, 1.0102691650390625, 1.0101923828125, 1.0100633544921875, 1.0100695190429687, 1.0103623657226561, 1.0100408325195311, 1.0103367919921875, 1.0102200317382812, 1.0101944580078126, 1.0105272216796874, 1.0104760131835937, 1.010466796875, 1.0102435913085936, 1.010566162109375, 1.010461669921875, 1.0103326416015626, 1.0106859741210938, 1.0104954833984374, 1.0103705444335938, 1.0109921264648438, 1.0105057373046875, 1.0102046508789062, 1.01007666015625, 1.0101852416992188, 1.0097950439453125, 1.0098770141601563, 1.0107269287109375, 1.0103705444335938, 1.0104033203125, 1.0112020263671875, 1.0103848876953125, 1.0109419555664063, 1.0105897216796875, 1.0109766845703125, 1.010745361328125, 1.0108344116210937, 1.011294189453125, 1.0108703002929686, 1.0104647827148439, 1.0111211547851562, 1.010250732421875, 1.0104473876953124, 1.0103490600585938, 1.0106961669921875, 1.010408447265625, 1.0106736450195313, 2.09991064453125, 1.0102855834960938, 1.010808837890625, 1.0101883544921875, 1.0098963623046875, 1.0099046630859374, 1.0102015380859375, 1.0112860107421875, 1.0114365234375, 1.0114171142578126, 1.011398681640625, 1.0108375854492186, 1.011353515625, 1.0101831665039063, 1.0099578857421876, 1.0098401489257813, 1.0104351806640626, 1.0098911743164063, 1.01047705078125, 1.0109163818359375, 1.0101514282226562, 1.0102528076171875, 1.0105303344726562, 1.010460693359375, 1.0104515380859376, 1.0105640258789061, 1.0108231811523438, 1.0104279174804687, 1.0102528076171875, 1.0102149047851563, 1.0106552124023438, 1.0108037109375, 1.0104227905273437, 1.010044921875, 1.0103521118164063, 1.0104022827148438, 1.0106019897460938, 1.0105252075195312, 1.0107361450195314, 1.0110341186523437, 1.0109389038085939, 1.0102589721679687, 1.0107658081054687, 1.01032958984375, 1.0109112548828125, 1.0104422607421875, 1.0104935302734375, 1.0104278564453124, 1.0109645385742188, 1.0107493896484374, 1.01064501953125, 1.0110023803710937, 1.0106593017578125, 1.0102763671875, 1.01026611328125, 1.0103255004882812, 1.0103726196289062, 1.010171875, 1.0105169677734376, 1.0103091430664062, 1.0100910034179686, 1.0103613891601562, 1.010420654296875, 2.10003857421875, 1.0099517211914062, 1.010044921875, 1.0100490112304688, 1.010212890625, 1.0100838623046875, 1.0100510864257812, 1.0103336791992188, 1.0101473388671875, 1.0099937133789063, 1.0099507446289062, 1.0100213623046874, 1.0101913452148437, 1.010255859375, 1.0110208129882812, 1.0101217041015624, 1.0101422119140624, 1.0106214599609376, 1.0106583251953125, 
1.0105067749023438, 1.0107003173828124, 1.0106572875976563, 1.0101596069335939, 1.0103726196289062, 1.0107791137695312, 1.0102958374023439, 1.0105692138671876, 1.010567138671875, 1.0101862182617187, 1.0100357055664062, 1.0101422119140624, 1.0100562133789062, 1.0101636962890626, 1.0102886352539062, 1.0103009033203125, 1.0100858764648437, 1.0102108154296876, 1.0106234741210938, 1.0103255004882812, 1.0102271728515626, 1.0103224487304687, 1.0102650756835938, 1.0104852294921876, 1.0101156005859375, 1.0108948364257813, 1.0105333862304688, 1.0106009521484376, 1.010366455078125, 1.0103654174804688, 1.0105374755859375, 1.0105303344726562, 1.010418701171875, 1.0103674926757813, 1.0106214599609376, 1.0106808471679687, 1.0101104736328126, 1.0104002685546876, 1.010713623046875, 1.010361328125, 1.0107811889648437, 1.0112348022460937, 1.0109830322265625, 1.010874267578125, 2.099938232421875, 1.0099097900390626, 1.010356201171875, 1.0107750244140625, 1.0106531982421876, 1.0112010498046875, 1.0109235229492188, 1.0112593994140624, 1.011103759765625, 1.0114088745117187, 1.0117509155273436, 1.0115809326171874, 1.0110750732421876, 1.0116761474609375, 1.0117703857421876, 1.01070849609375, 1.0111211547851562, 1.0108140869140625, 1.010822021484375, 1.0101463012695313, 1.010524169921875, 1.010092041015625, 1.0108528442382811, 1.0102384643554687, 1.0105374755859375, 1.0103961791992186, 1.0102907104492187, 1.0105763549804687, 1.0101422119140624, 1.0106972045898437, 1.010524169921875, 1.0101381225585937, 1.01081396484375, 1.0101923828125, 1.010398193359375, 1.0102046508789062, 1.0100275268554688, 1.0102159423828125, 1.01076171875, 1.0107730102539063, 1.0104595947265624, 1.0105743408203125, 1.0107811889648437, 1.0105466918945312, 1.010503662109375, 1.0107811889648437, 1.0115594482421875, 1.0113648681640626, 1.0109235229492188, 1.0113966064453126, 1.0115973510742187, 1.0105558471679688, 1.0106337280273439, 1.0101053466796874, 1.0102333374023438, 1.0100643920898438, 1.0104063720703125, 1.0102118530273438, 1.0105886840820313, 1.0109163818359375, 1.01098291015625, 1.0103121948242189, 1.0104627075195312, 2.1005556640625, 1.0100828247070313, 1.0103214111328125, 1.0101217041015624, 1.0104524536132813, 1.0106480712890624, 1.009934326171875, 1.0109644775390625, 1.01163427734375, 1.011917724609375, 1.0118707275390626, 1.0117642211914062, 1.0120970458984375, 1.0110791625976563, 1.0113136596679688, 1.0106326904296874, 1.010629638671875, 1.0110320434570312, 1.0109531860351562, 1.0102200317382812, 1.0101647338867188, 1.01035107421875, 1.0101319580078125, 1.0104688720703126, 1.0105006103515626, 1.01138330078125, 1.010892822265625, 1.0108292846679687, 1.0109214477539064, 1.0098134765625, 1.0101145629882813, 1.0102262573242187, 1.0102721557617187, 1.010165771484375, 1.0108426513671875, 1.0108047485351563, 1.010640869140625, 1.0113280029296876, 1.0108528442382811, 1.010176025390625, 1.0101810913085938, 1.0100869140625, 1.0103460083007811, 1.010440185546875, 1.0107904052734376, 1.0101801147460938, 1.0104586181640625, 1.0103050537109375, 1.0111918334960937, 1.0107914428710938, 1.0108416137695313, 1.0108283081054688, 1.0106810302734375, 1.0106591186523437, 1.0115072021484375, 1.0104279174804687, 1.0108436279296875, 1.0108283081054688, 1.01055078125, 1.0102097778320311, 1.010460693359375, 1.0103121948242189, 1.0103951416015624, 2.102578125, 1.0104268798828124, 1.0102036743164062, 1.010355224609375, 1.0103173217773438, 1.0101637573242188, 1.01056201171875, 1.0103060302734375, 1.0101749877929687, 1.0104330444335938, 1.0104094848632812, 
1.0101801147460938, 1.0102282104492188, 1.0102640380859376, 1.0102354125976563, 1.01035107421875, 1.0101268310546876, 1.01020263671875, 1.0103245849609375, 1.010389892578125, 1.0105927734375, 1.0102783813476564, 1.0102159423828125, 1.0109481201171875, 1.0104248046875, 1.0102097778320311, 1.0102999267578125, 1.0106634521484374, 1.01051904296875, 1.0106911010742188, 1.0106603393554687, 1.0103428955078124, 1.010208740234375, 1.0103828735351563, 1.0103501586914063, 1.0101267700195313, 1.010428955078125, 1.0099415283203126, 1.0103070678710937, 1.010746337890625, 1.0105733032226563, 1.0100193481445312, 1.0105620727539062, 1.0108969116210937, 1.010830322265625, 1.010567138671875, 1.0105261840820312, 1.0104903564453125, 1.010502685546875, 1.010763916015625, 1.0106510009765626, 1.01055078125, 1.010597900390625, 1.010534423828125, 1.01037158203125, 1.0100613403320313, 1.0104227905273437, 1.0101196899414062, 1.0103214111328125, 1.0106286010742187, 1.0109337768554687, 1.0104739990234375, 1.0107742309570313, 2.104293212890625, 1.010323486328125, 1.0104320068359376, 1.0102138671875, 1.0099435424804688, 1.0105354614257813, 1.0101022338867187, 1.0104801025390624, 1.0105220947265625, 1.0102742919921874, 1.0100582275390626, 1.0102661743164063, 1.0102158813476563, 1.0104473876953124, 1.0102271728515626, 1.0102630615234376, 1.0100213623046874, 1.0100828247070313, 1.0104155883789063, 1.01005517578125, 1.010208740234375, 1.0101239013671874, 1.0102445068359376, 1.0100910034179686, 1.0104801025390624, 1.0102210693359375, 1.0102210693359375, 1.0105426025390625, 1.0104248046875, 1.0105814819335937, 1.0103746337890624, 1.0100991821289063, 1.0102630615234376, 1.0103203735351562, 1.0105231323242188, 1.0105108642578124, 1.0102191162109375, 1.0101605224609376, 1.0104053955078125, 1.0104586181640625, 1.0102518920898438, 1.0101390380859374, 1.0103285522460939, 1.0101493530273438, 1.0104166259765626, 1.0106941528320312, 1.0107689208984374, 1.0102313232421876, 1.0105620727539062, 1.01051708984375, 1.0104871826171875, 1.01003369140625, 1.0101371459960937, 1.0099342651367187, 1.0106521606445313, 1.0104391479492187, 1.0105620727539062, 1.010513916015625, 1.010798583984375, 1.0104074096679687, 1.0103634033203126, 1.0103224487304687, 1.010681884765625, 2.103435302734375, 1.010229248046875, 1.0106736450195313, 1.0107843017578124, 1.0107432861328125, 1.0110986328125, 1.0106265869140625, 1.010587646484375, 1.0101248168945312, 1.0100542602539062, 1.0107769775390625, 1.0108426513671875, 1.0104627075195312, 1.0106603393554687, 1.0108734741210939, 1.01037255859375, 1.0107083740234375, 1.0104166259765626, 1.0103275756835937, 1.0101319580078125, 1.0107750244140625, 1.0107258911132813, 1.0107606811523437, 1.0102907104492187, 1.0104279174804687, 1.0105446166992187, 1.0105364379882813, 1.0103009643554688, 1.010494384765625, 1.0100807495117188, 1.0106644287109374, 1.0108026733398439, 1.0108375244140626, 1.010492431640625, 1.0103746337890624, 1.0101227416992187, 1.0101484375, 1.0099373168945311, 1.0103756713867187, 1.0104944458007812, 1.0109050903320314, 1.0101473388671875, 1.0104903564453125, 1.0099251098632813, 1.0105374755859375, 1.010134033203125, 1.0101810913085938, 1.0101248168945312, 1.0104063720703125, 1.0102702026367187, 1.0105569458007813, 1.0107965698242187, 1.01083544921875, 1.0107893676757813, 1.01082421875, 1.0101309204101563, 1.0103572387695312, 1.010207763671875, 1.010361328125, 1.0107053833007813, 1.0118082275390625, 1.01166796875, 1.01172021484375]",tokens/s,0.9745506485828798,,,,,,,, 
-4bit-awq-gemm-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,a,a,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 304, in hf_raise_for_status - response.raise_for_status() - File ""/usr/local/lib/python3.10/dist-packages/requests/models.py"", line 1024, in raise_for_status - raise HTTPError(http_error_msg, response=self) -requests.exceptions.HTTPError: 404 Client Error: Not Found for url: https://huggingface.co/a/resolve/main/config.json - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1221, in hf_hub_download - return _hf_hub_download_to_cache_dir( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1325, in _hf_hub_download_to_cache_dir - _raise_on_head_call_error(head_call_error, force_download, local_files_only) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1823, in _raise_on_head_call_error - raise head_call_error - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1722, in _get_metadata_or_catch_error - metadata = get_hf_file_metadata(url=url, proxies=proxies, timeout=etag_timeout, headers=headers) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1645, in get_hf_file_metadata - r = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 372, in _request_wrapper - response = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 396, in _request_wrapper - hf_raise_for_status(response) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status - raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. 
(Request ID: Root=1-66949163-5d9eaf703842b2f37ec12ad1;d45172c5-9d7a-47f4-bd58-7ca59a85dbc5) - -Repository Not Found for url: https://huggingface.co/a/resolve/main/config.json. -Please make sure you specified the correct `repo_id` and `repo_type`. -If you are trying to access a private or gated repo, make sure you are authenticated. - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 425, in cached_file - raise EnvironmentError( -OSError: a is not a local folder and is not a valid model identifier listed on 'https://huggingface.co/models' -If this is a private repository, make sure to pass a token having permission to this repo either by logging in with `huggingface-cli login` or by passing `token=` - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemm-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,-,-,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = 
hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 106, in _inner_fn - validate_repo_id(arg_value) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 160, in validate_repo_id - raise HFValidationError( -huggingface_hub.errors.HFValidationError: Repo id must use alphanumeric chars or '-', '_', '.', '--' and '..' are forbidden, '-' and '.' cannot start or end the name, max length is 96: '-'. - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 466, in cached_file - raise EnvironmentError( -OSError: Incorrect path_or_model_id: '-'. Please provide either the path to a local folder or the repo_id of a model on the Hub. 
- -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemm-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,facebook/opt-350m,facebook/opt-350m,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - self.load_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3704, in from_pretrained - config = cls._autoset_attn_implementation( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1490, in _autoset_attn_implementation - config = cls._check_and_enable_sdpa( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1656, in _check_and_enable_sdpa - raise ValueError( -ValueError: OPTForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. 
Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemm-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,m,m,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 304, in hf_raise_for_status - response.raise_for_status() - File ""/usr/local/lib/python3.10/dist-packages/requests/models.py"", line 1024, in raise_for_status - raise HTTPError(http_error_msg, response=self) -requests.exceptions.HTTPError: 404 Client Error: Not Found for url: https://huggingface.co/m/resolve/main/config.json - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1221, in hf_hub_download - return _hf_hub_download_to_cache_dir( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1325, in _hf_hub_download_to_cache_dir - _raise_on_head_call_error(head_call_error, force_download, local_files_only) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1823, in _raise_on_head_call_error - raise head_call_error - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1722, in _get_metadata_or_catch_error - metadata = get_hf_file_metadata(url=url, proxies=proxies, timeout=etag_timeout, headers=headers) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1645, in get_hf_file_metadata - r = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 372, in _request_wrapper - response = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 396, in _request_wrapper - hf_raise_for_status(response) - File 
""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status - raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-66948c45-1493ab1b2b6a2b952f1b01fa;ec323376-0be7-4281-b02e-bb8025b2499f) - -Repository Not Found for url: https://huggingface.co/m/resolve/main/config.json. -Please make sure you specified the correct `repo_id` and `repo_type`. -If you are trying to access a private or gated repo, make sure you are authenticated. - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 425, in cached_file - raise EnvironmentError( -OSError: m is not a local folder and is not a valid model identifier listed on 'https://huggingface.co/models' -If this is a private repository, make sure to pass a token having permission to this repo either by logging in with `huggingface-cli login` or by passing `token=` - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemm-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,stabilityai/stablelm-base-alpha-7b,stabilityai/stablelm-base-alpha-7b,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.42.3,,0.31.0,,,,1.20.0,,,,0.11.1,,,,,,,,,,,,,,,,,,,,,,,,,,,MB,1607.868416,8198.291456,0.0,7551.844352,6941.631488,s,10,6.049583618164062,0.6049583618164063,0.00188642846372062,0.6043523254394532,0.6052415832519532,0.6079165069580078,0.6100564459228516,"[0.6105914306640625, 0.6046471557617188, 0.6042630615234375, 0.6042338256835937, 0.6039627075195313, 0.6043169555664063, 
0.604395263671875, 0.6042136840820312, 0.6045718383789063, 0.6043876953125]",tokens/s,423.16961985838503,kWh,7.137859198782179e-06,3.9112609274686095e-06,3.3793049910240254e-05,4.484217003649104e-05,tokens/kWh,5708911.94140863,MB,1607.868416,8198.291456,0.0,7551.844352,7068.358144,s,10,357.60694921875006,35.76069492187501,0.005106009215487831,35.758738281250004,35.7667015625,35.76887421875,35.77061234375,"[35.771046875, 35.76394140625, 35.7568125, 35.758265625, 35.75676171875, 35.76621875, 35.7592109375, 35.754734375, 35.75566015625, 35.764296875]",tokens/s,1.7617107312269418,kWh,0.0004221826620896658,0.00023139278631924143,0.0020006029942719558,0.002654178442680863,tokens/kWh,23736.158423608704,,s,629,362.47854425048865,0.5762774948338447,0.07177628984511826,0.5675980834960938,0.567922509765625,0.5680580688476562,1.17174888671875,"[0.5675130615234375, 0.567689208984375, 0.5675007934570313, 0.5676400756835938, 0.5674649658203125, 0.5675069580078125, 0.5678878784179687, 0.567362548828125, 0.5674598388671875, 0.5676318969726563, 0.5675222778320312, 0.5676452026367188, 0.5675714721679688, 0.5676103515625, 0.5676881713867188, 0.5674454956054688, 0.567530517578125, 0.567910400390625, 0.5679451904296875, 0.5677383422851563, 0.5676656494140625, 0.5675929565429687, 0.5674803466796875, 0.5676605224609375, 0.567752685546875, 0.5674424438476563, 0.5676011352539062, 0.5677957153320312, 0.567546875, 0.5675243530273437, 0.5677138061523438, 0.5675284423828125, 0.5678120727539062, 0.5674086303710938, 0.5677987670898438, 0.5677322387695313, 0.5678274536132812, 0.5677987670898438, 0.5678561401367187, 0.5680128173828125, 0.5680640258789063, 0.568237060546875, 0.5680230102539062, 0.56797900390625, 0.5682124633789063, 0.5679288330078125, 0.5680722045898438, 0.5680875244140625, 0.5679697875976563, 0.5680936889648438, 0.5682411499023438, 0.5679902954101562, 0.5680732421875, 0.5680537719726563, 0.5682831420898438, 0.56803125, 0.5680128173828125, 0.5679288330078125, 0.5679288330078125, 0.5681571655273437, 0.5682012329101562, 0.5679349975585938, 1.1725772705078126, 0.5678028564453125, 0.5673922729492188, 0.5673328857421875, 0.5676503295898437, 0.5674823608398437, 0.5677864990234375, 0.5676216430664063, 0.5676328735351562, 0.5674219360351562, 0.5675181884765625, 0.567531494140625, 0.5675028686523438, 0.5675673828125, 0.56765234375, 0.5675458374023438, 0.5677005004882812, 0.5678448486328125, 0.5676810302734375, 0.5675069580078125, 0.5675714721679688, 0.567625732421875, 0.5676769409179687, 0.56762060546875, 0.56759912109375, 0.5676226806640625, 0.567994384765625, 0.5675867919921875, 0.5675130615234375, 0.5675714721679688, 0.5675089721679687, 0.5679052734375, 0.5676492919921875, 0.5678059692382813, 0.56771484375, 0.5675673828125, 0.5678981323242187, 0.5678489379882813, 0.5676195678710938, 0.5676830444335937, 0.5675397338867187, 0.5676615600585937, 0.5679585571289063, 0.567816162109375, 0.5676226806640625, 0.5676912841796875, 0.56768408203125, 0.5677619018554687, 0.5679022216796875, 0.5678663940429688, 0.56771484375, 0.5676287841796875, 0.5677537231445312, 0.5677793579101562, 0.56771484375, 0.5677178955078125, 0.5676318969726563, 0.5678059692382813, 0.5677650146484375, 0.5679185791015625, 0.5677650146484375, 0.5678991088867188, 0.5676861572265625, 1.171936279296875, 0.5674823608398437, 0.56759912109375, 0.5674803466796875, 0.567468017578125, 0.5674844360351563, 0.5675469360351563, 0.5675325317382812, 0.5674557495117187, 0.5675243530273437, 0.56749462890625, 0.5673666381835938, 0.5673912353515626, 0.5676083374023437, 
0.5674833984375, 0.5677506713867188, 0.5675888671875, 0.5676615600585937, 0.5675294799804688, 0.5676707763671875, 0.567794677734375, 0.5677701416015625, 0.5675960083007813, 0.5676718139648438, 0.567488525390625, 0.56749365234375, 0.5673687133789063, 0.5673953247070312, 0.5675048828125, 0.5674158325195312, 0.5673799438476562, 0.567678955078125, 0.567593994140625, 0.5675335693359375, 0.5674383544921875, 0.5675346069335937, 0.5674301147460937, 0.5676072998046875, 0.5677158203125, 0.5674608764648438, 0.5674281005859375, 0.5676697387695312, 0.5675172729492187, 0.5675755004882812, 0.567488525390625, 0.5675181884765625, 0.5676513061523437, 0.5677721557617188, 0.5679093627929688, 0.5675867919921875, 0.5678960571289062, 0.5678704833984375, 0.5674854125976563, 0.56757861328125, 0.56753564453125, 0.5677537231445312, 0.567525390625, 0.5675007934570313, 0.567530517578125, 0.5677332763671875, 0.56753662109375, 0.56765234375, 0.5674793090820313, 1.171693603515625, 0.5673564453125, 0.5675079956054687, 0.5678284912109375, 0.56759912109375, 0.5673697509765625, 0.56745166015625, 0.5675796508789063, 0.567320556640625, 0.5678018798828125, 0.5676707763671875, 0.5673635864257812, 0.5676656494140625, 0.5675222778320312, 0.56749462890625, 0.5674741821289062, 0.567573486328125, 0.567699462890625, 0.5673584594726563, 0.5676636352539063, 0.56764208984375, 0.5677854614257812, 0.5675489501953125, 0.5675601806640626, 0.567446533203125, 0.5675335693359375, 0.5677322387695313, 0.567583740234375, 0.5675673828125, 0.5675980834960938, 0.5676871948242187, 0.5675335693359375, 0.567404541015625, 0.5675264282226562, 0.5674711303710938, 0.5676072998046875, 0.5674208984375, 0.5676165161132812, 0.5676124267578125, 0.5678079833984375, 0.5675714721679688, 0.5675264282226562, 0.5675376586914063, 0.5676820678710938, 0.5676615600585937, 0.5676175537109375, 0.5676226806640625, 0.5675980834960938, 0.5676953735351562, 0.567625732421875, 0.56749462890625, 0.5675007934570313, 0.5674485473632812, 0.567520263671875, 0.5676431274414062, 0.5678233642578125, 0.5678878784179687, 0.5677035522460937, 0.5675448608398438, 0.5679554443359375, 0.5675960083007813, 0.5678827514648438, 0.567709716796875, 1.1716024169921875, 0.5677035522460937, 0.56723046875, 0.5673492431640625, 0.5673277587890625, 0.567320556640625, 0.5673185424804688, 0.5673236694335938, 0.5675140991210937, 0.5675294799804688, 0.5676615600585937, 0.5673820190429687, 0.5674485473632812, 0.5675274047851563, 0.5675827026367187, 0.567720947265625, 0.5676400756835938, 0.5675509643554687, 0.5675499267578125, 0.5675950317382813, 0.5675909423828125, 0.5674383544921875, 0.5674721069335937, 0.5676441650390625, 0.5673717651367187, 0.5674035034179687, 0.5676103515625, 0.567657470703125, 0.5675407104492187, 0.5674403686523437, 0.5674895629882812, 0.567446533203125, 0.5674240112304687, 0.5677138061523438, 0.5675427856445312, 0.5676287841796875, 0.5675827026367187, 0.567952392578125, 0.567657470703125, 0.5674956665039063, 0.5674475708007812, 0.5674977416992187, 0.5673901977539062, 0.567546875, 0.5674669799804688, 0.5677168579101562, 0.5674926147460938, 0.56757861328125, 0.5675765991210937, 0.567751708984375, 0.5678325805664063, 0.5675448608398438, 0.5675632934570313, 0.567720947265625, 0.5677639770507813, 0.567773193359375, 0.5677352905273437, 0.56757861328125, 0.5675642700195312, 0.5677168579101562, 0.5677168579101562, 0.5680588989257812, 0.5677189331054687, 1.172095947265625, 0.5675909423828125, 0.5673646240234375, 0.5673492431640625, 0.5674547119140625, 0.5674444580078125, 0.5672734985351563, 
0.5673164672851563, 0.5674342651367188, 0.5674281005859375, 0.5676697387695312, 0.5677752075195313, 0.5677977294921875, 0.5678356323242187, 0.5677998046875, 0.5678991088867188, 0.5680814208984375, 0.5678212890625, 0.5678837890625, 0.5678131103515625, 0.5674895629882812, 0.567678955078125, 0.56818994140625, 0.567931884765625, 0.5678868408203125, 0.568015869140625, 0.5680803833007813, 0.5679933471679688, 0.567414794921875, 0.5675847778320312, 0.567510009765625, 0.5673533325195312, 0.5675867919921875, 0.5678397216796875, 0.5681592407226562, 0.5679216918945312, 0.5678868408203125, 0.5680036010742188, 0.5678960571289062, 0.567910400390625, 0.56768408203125, 0.5676759033203125, 0.56764111328125, 0.567841796875, 0.5676144409179688, 0.5677485961914063, 0.5675560913085937, 0.5678561401367187, 0.5677404174804688, 0.5678765869140625, 0.5678756103515625, 0.5678305053710937, 0.5676820678710938, 0.5676103515625, 0.567615478515625, 0.56755712890625, 0.5675775756835938, 0.5678540649414062, 0.5675448608398438, 0.5674608764648438, 0.5675858154296874, 0.5679667358398437, 0.5677393798828125, 1.172157470703125, 0.5676431274414062, 0.5675038452148438, 0.56789404296875, 0.56765234375, 0.567773193359375, 0.5675950317382813, 0.5673973388671875, 0.5673799438476562, 0.5674803466796875, 0.5675591430664062, 0.5674803466796875, 0.5673779296875, 0.56747314453125, 0.5676553955078125, 0.5675550537109375, 0.5675919189453125, 0.5674454956054688, 0.5675775756835938, 0.5674403686523437, 0.5674086303710938, 0.5675755615234375, 0.56759912109375, 0.5675294799804688, 0.56740966796875, 0.5674608764648438, 0.5674270629882813, 0.5675284423828125, 0.5676134643554688, 0.5675919189453125, 0.5675397338867187, 0.5674752197265625, 0.567573486328125, 0.5682390747070313, 0.5677588500976563, 0.5674352416992188, 0.5674403686523437, 0.5675755615234375, 0.5675909423828125, 0.5676226806640625, 0.56793701171875, 0.5677005004882812, 0.5675007934570313, 0.5674823608398437, 0.5673840942382813, 0.567699462890625, 0.5675847778320312, 0.5675458374023438, 0.5675089721679687, 0.5674793090820313, 0.5676072998046875, 0.5675909423828125, 0.5674526977539063, 0.5676298217773438, 0.567678955078125, 0.5677291259765626, 0.5681397705078125, 0.5680568237304687, 0.5680926513671875, 0.5679401245117187, 0.5678806762695312, 0.56757861328125, 0.567404541015625, 1.1717703857421875, 0.56732568359375, 0.567245849609375, 0.5675130615234375, 0.5674526977539063, 0.5675530395507813, 0.5674905395507812, 0.5674536743164063, 0.5673287963867187, 0.5674977416992187, 0.5676021728515624, 0.5677404174804688, 0.5675274047851563, 0.567419921875, 0.5674434814453125, 0.5673963623046875, 0.5674526977539063, 0.56757861328125, 0.5674188842773438, 0.5672969970703124, 0.5674066162109375, 0.5676973876953125, 0.567436279296875, 0.56740966796875, 0.5673236694335938, 0.56744140625, 0.5674004516601563, 0.5676953735351562, 0.5674639282226562, 0.5678069458007813, 0.5674035034179687, 0.5674823608398437, 0.5674240112304687, 0.56744140625, 0.567636962890625, 0.567636962890625, 0.5675264282226562, 0.5676195678710938, 0.5674833984375, 0.5677875366210937, 0.5674434814453125, 0.5675632934570313, 0.5675233154296875, 0.5676011352539062, 0.5673973999023437, 0.5679646606445312, 0.5675233154296875, 0.5678765869140625, 0.56774755859375, 0.5677557983398438, 0.5675827026367187, 0.5676103515625, 0.5677701416015625, 0.5676697387695312, 0.5676113891601563, 0.5676482543945313, 0.5674711303710938, 0.5676072998046875, 0.5674915771484375, 0.5675120849609375, 0.5675172119140625, 0.5675079956054687, 0.5674905395507812, 
1.172304931640625, 0.5675038452148438, 0.5672386474609376, 0.5673328857421875, 0.5673697509765625, 0.5676912841796875, 0.567583740234375, 0.5673380126953125, 0.5673441162109375, 0.56738818359375, 0.56747314453125, 0.5674075927734376, 0.567319580078125, 0.5673380126953125, 0.5675089721679687, 0.5675806884765625, 0.567647216796875, 0.5674915771484375, 0.567647216796875, 0.5674557495117187, 0.56743115234375, 0.5675878295898438, 0.5675867919921875, 0.5676287841796875, 0.5674761962890625, 0.567446533203125, 0.5674403686523437, 0.5674004516601563, 0.5676267700195312, 0.5675714721679688, 0.567510009765625, 0.5675950317382813, 0.5674393310546875, 0.5676165161132812, 0.5676932983398437, 0.5677056274414063, 0.5676697387695312, 0.5675755615234375, 0.5675489501953125, 0.5675140991210937, 0.5675038452148438, 0.5677691040039062, 0.5677168579101562, 0.56785302734375, 0.5675059204101562, 0.5676021728515624, 0.5676103515625, 0.5676124267578125, 0.5676318969726563, 0.5675110473632813, 0.5675213012695313, 0.5676707763671875, 0.5675929565429687, 0.5677793579101562, 0.5675653076171875, 0.5676707763671875, 0.5675325317382812, 0.5675663452148437, 0.5677567749023438, 0.56762060546875, 0.567562255859375, 0.5676032104492188, 0.567562255859375, 1.1719853515625, 0.5674874877929688, 0.5673359375, 0.5679820556640625, 0.5675775756835938, 0.567525390625, 0.5675980834960938, 0.5674485473632812, 0.5673912353515626, 0.5674332275390624, 0.567457763671875, 0.567488525390625, 0.5680855102539063, 0.5679380493164062, 0.56792578125, 0.5678489379882813, 0.5679390869140625, 0.5678981323242187, 0.568069091796875, 0.5680824584960937, 0.56776806640625, 0.5675407104492187, 0.5676697387695312, 0.5676544189453125, 0.5677803344726563, 0.56749462890625, 0.5674619140625, 0.567635986328125, 0.5676564331054688, 0.5678848266601563, 0.56764208984375, 0.5677240600585938, 0.5675878295898438, 0.5675458374023438, 0.5677567749023438, 0.5675765991210937, 0.5677138061523438, 0.5677014770507812, 0.5678551025390625, 0.5678653564453126, 0.5678233642578125, 0.5675663452148437, 0.56768408203125, 0.5676195678710938, 0.5673472290039062, 0.567673828125, 0.5675069580078125, 0.5678008422851563, 0.5677434692382812, 0.5677178955078125, 0.5676564331054688, 0.5677291259765626, 0.5676759033203125, 0.5676492919921875, 0.5674803466796875, 0.5675130615234375, 0.567530517578125, 0.5675028686523438, 0.567562255859375, 0.5679052734375, 0.5681500244140625, 0.5680568237304687, 0.5677977294921875]",tokens/s,1.7352751217333675,,,,,,,, -4bit-awq-gemm-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,M,M,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise 
ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 304, in hf_raise_for_status - response.raise_for_status() - File ""/usr/local/lib/python3.10/dist-packages/requests/models.py"", line 1024, in raise_for_status - raise HTTPError(http_error_msg, response=self) -requests.exceptions.HTTPError: 404 Client Error: Not Found for url: https://huggingface.co/M/resolve/main/config.json - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1221, in hf_hub_download - return _hf_hub_download_to_cache_dir( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1325, in _hf_hub_download_to_cache_dir - _raise_on_head_call_error(head_call_error, force_download, local_files_only) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1823, in _raise_on_head_call_error - raise head_call_error - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1722, in _get_metadata_or_catch_error - metadata = get_hf_file_metadata(url=url, proxies=proxies, timeout=etag_timeout, headers=headers) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1645, in get_hf_file_metadata - r = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 372, in _request_wrapper - response = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 396, in _request_wrapper - hf_raise_for_status(response) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status - raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-66948fbc-5279434b0e6d0b71590daf18;8565f670-b3dc-4f33-a208-f0c718e5b7f6) - -Repository Not Found for url: https://huggingface.co/M/resolve/main/config.json. -Please make sure you specified the correct `repo_id` and `repo_type`. -If you are trying to access a private or gated repo, make sure you are authenticated. 
- -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 425, in cached_file - raise EnvironmentError( -OSError: M is not a local folder and is not a valid model identifier listed on 'https://huggingface.co/models' -If this is a private repository, make sure to pass a token having permission to this repo either by logging in with `huggingface-cli login` or by passing `token=` - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemm-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,8,8,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 304, in hf_raise_for_status - response.raise_for_status() - File ""/usr/local/lib/python3.10/dist-packages/requests/models.py"", line 1024, in raise_for_status - raise HTTPError(http_error_msg, response=self) -requests.exceptions.HTTPError: 404 Client Error: Not Found for url: https://huggingface.co/8/resolve/main/config.json - -The above exception was the direct cause of 
the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1221, in hf_hub_download - return _hf_hub_download_to_cache_dir( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1325, in _hf_hub_download_to_cache_dir - _raise_on_head_call_error(head_call_error, force_download, local_files_only) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1823, in _raise_on_head_call_error - raise head_call_error - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1722, in _get_metadata_or_catch_error - metadata = get_hf_file_metadata(url=url, proxies=proxies, timeout=etag_timeout, headers=headers) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1645, in get_hf_file_metadata - r = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 372, in _request_wrapper - response = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 396, in _request_wrapper - hf_raise_for_status(response) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status - raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-6694926b-61c0edd86fea58b017c2a7b0;691613a5-611a-4574-b8a7-5493f400792d) - -Repository Not Found for url: https://huggingface.co/8/resolve/main/config.json. -Please make sure you specified the correct `repo_id` and `repo_type`. -If you are trying to access a private or gated repo, make sure you are authenticated. 
- -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 425, in cached_file - raise EnvironmentError( -OSError: 8 is not a local folder and is not a valid model identifier listed on 'https://huggingface.co/models' -If this is a private repository, make sure to pass a token having permission to this repo either by logging in with `huggingface-cli login` or by passing `token=` - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemm-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,EleutherAI/gpt-neox-20b,EleutherAI/gpt-neox-20b,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.42.3,,0.31.0,,,,1.20.0,,,,0.11.1,,,,,,,,,,,,,,,,,,,,,,,,,,,MB,1950.920704,15045.492736,0.0,14399.045632,13900.420096,s,10,16.790244384765625,1.6790244384765625,0.0010486142582281257,1.6790367431640625,1.6804459716796873,1.6805005737304688,1.6805442553710939,"[1.679192626953125, 1.6789603271484375, 1.6780438232421875, 1.678517333984375, 1.680433837890625, 1.67788916015625, 1.6791131591796875, 1.680181640625, 1.6773572998046875, 1.68055517578125]",tokens/s,152.46949010002365,kWh,1.9813106258710223e-05,1.0857679242523961e-05,9.179171232219785e-05,0.00012246249782343202,tokens/kWh,2090435.8848625156,MB,1950.920704,15045.492736,0.0,14399.045632,14292.420096,s,10,981.8734921875001,98.18734921875,0.011000398968485372,98.1860859375,98.20038515625,98.204403515625,98.207618203125,"[98.184484375, 98.208421875, 98.1873671875, 98.1855078125, 98.1994921875, 98.1972734375, 98.1793359375, 98.1732421875, 98.171703125, 
98.1866640625]",tokens/s,0.6416305206452139,kWh,0.0011591991291774646,0.0006353437306229187,0.005368699933845403,0.007163242793645786,tokens/kWh,8794.899435195004,,s,629,995.4237821044923,1.5825497330755043,0.19938333773778583,1.55846142578125,1.5592523681640624,1.559604833984375,3.2361203808593753,"[1.5580538330078124, 1.5583099365234374, 1.558223876953125, 1.55838671875, 1.55867138671875, 1.558048828125, 1.5581358642578125, 1.55835693359375, 1.55815625, 1.5579002685546874, 1.5581112060546876, 1.5579954833984375, 1.5578941650390625, 1.5584471435546876, 1.5588095703125, 1.559251953125, 1.5590963134765625, 1.558498291015625, 1.5594239501953124, 1.559110595703125, 1.558477783203125, 1.5582054443359374, 1.5585330810546876, 1.5583743896484374, 1.558171630859375, 1.5580078125, 1.5581695556640625, 1.5581890869140624, 1.5580272216796875, 1.5581531982421875, 1.5582095947265624, 1.5581317138671875, 1.558971435546875, 1.558867919921875, 1.5581644287109375, 1.5584481201171876, 1.558408203125, 1.558391845703125, 1.558349853515625, 1.5588013916015624, 1.558470703125, 1.558509521484375, 1.5585679931640626, 1.55841943359375, 1.558540283203125, 1.5586170654296876, 1.5587318115234374, 1.558656982421875, 1.558823974609375, 1.5588424072265625, 1.5585596923828124, 1.5589283447265625, 1.5587532958984376, 1.55822900390625, 1.5586396484375, 1.5583641357421876, 1.55864990234375, 1.5585484619140626, 1.5585382080078125, 1.5590430908203126, 1.5587666015625, 1.5589632568359375, 3.237376953125, 1.5580640869140625, 1.558181884765625, 1.557844970703125, 1.5580006103515625, 1.5579954833984375, 1.5581123046875, 1.5579893798828126, 1.558107177734375, 1.5582781982421876, 1.5594127197265626, 1.559099365234375, 1.559131103515625, 1.5585341796875, 1.5591177978515625, 1.55886279296875, 1.5582791748046876, 1.558634521484375, 1.55941064453125, 1.559609375, 1.5590020751953124, 1.5593431396484374, 1.559488525390625, 1.5587420654296875, 1.5596728515625, 1.5594976806640626, 1.5597445068359375, 1.5597117919921875, 1.55930322265625, 1.5584010009765625, 1.5583519287109375, 1.558445068359375, 1.55962060546875, 1.5594803466796876, 1.5595396728515625, 1.5590604248046875, 1.55877783203125, 1.55829150390625, 1.5592386474609374, 1.559795654296875, 1.5599677734375, 1.559572509765625, 1.5594322509765626, 1.5586314697265624, 1.560300537109375, 1.5584686279296875, 1.55837646484375, 1.5587890625, 1.558540283203125, 1.558470703125, 1.5584583740234375, 1.558497314453125, 1.5582698974609375, 1.558455322265625, 1.5591485595703125, 1.5591710205078124, 1.5590338134765624, 1.5596605224609374, 1.5584798583984374, 1.5588690185546874, 1.5584910888671875, 1.5581890869140624, 1.55858740234375, 3.2361298828125, 1.558045654296875, 1.5582381591796874, 1.558180908203125, 1.5579576416015626, 1.5579566650390626, 1.55797705078125, 1.5577784423828125, 1.5582259521484374, 1.5582371826171875, 1.5580057373046876, 1.55856591796875, 1.5581695556640625, 1.5578818359375, 1.55877783203125, 1.5584163818359376, 1.558245361328125, 1.5582525634765625, 1.5581624755859376, 1.5581583251953126, 1.5581051025390624, 1.558556640625, 1.5584256591796875, 1.558476806640625, 1.558287353515625, 1.5582464599609376, 1.558298583984375, 1.5586898193359375, 1.558340576171875, 1.55793505859375, 1.5584154052734376, 1.5581326904296875, 1.55810205078125, 1.558065185546875, 1.5584481201171876, 1.5580743408203126, 1.5586007080078126, 1.558350830078125, 1.558640625, 1.558423583984375, 1.5587113037109375, 1.5584420166015625, 1.55839794921875, 1.5586263427734375, 1.5582689208984375, 1.5584296875, 
1.5582177734375, 1.55880859375, 1.558539306640625, 1.558929443359375, 1.558582275390625, 1.558930419921875, 1.558613037109375, 1.55983251953125, 1.56031689453125, 1.5597752685546875, 1.5597598876953125, 1.5601285400390625, 1.55953564453125, 1.5596953125, 1.5594854736328125, 1.55881884765625, 1.5586212158203125, 3.237253173828125, 1.5578316650390625, 1.5583251953125, 1.55785009765625, 1.558055908203125, 1.5579197998046874, 1.5578603515625, 1.55808154296875, 1.558330322265625, 1.55824951171875, 1.5579053955078126, 1.557854248046875, 1.557908447265625, 1.5580303955078125, 1.558086669921875, 1.5586058349609375, 1.55808154296875, 1.5584583740234375, 1.5582320556640625, 1.5581337890625, 1.5581746826171874, 1.5581737060546874, 1.5594495849609376, 1.5593502197265625, 1.55987353515625, 1.559400390625, 1.558656982421875, 1.5581593017578126, 1.558519775390625, 1.5582698974609375, 1.558197265625, 1.558408203125, 1.5585330810546876, 1.5579146728515625, 1.558423583984375, 1.5582259521484374, 1.55835595703125, 1.55822802734375, 1.5586058349609375, 1.5585423583984375, 1.5584420166015625, 1.558414306640625, 1.558656982421875, 1.5590482177734375, 1.55839892578125, 1.5587000732421874, 1.558583251953125, 1.5587010498046876, 1.558455322265625, 1.558435791015625, 1.55865087890625, 1.5584051513671875, 1.5583887939453125, 1.5587010498046876, 1.558929443359375, 1.558476806640625, 1.5586846923828126, 1.5584317626953126, 1.558667236328125, 1.5589652099609375, 1.559405517578125, 1.559698486328125, 1.56028515625, 3.24001904296875, 1.559047119140625, 1.5582115478515626, 1.559482421875, 1.55951416015625, 1.557960693359375, 1.558873046875, 1.5592540283203125, 1.5592652587890625, 1.558792236328125, 1.5587071533203125, 1.5585771484375, 1.557887939453125, 1.5581942138671876, 1.5579146728515625, 1.5587696533203126, 1.5585545654296875, 1.5591290283203125, 1.558961181640625, 1.5583519287109375, 1.558830078125, 1.5593133544921876, 1.5585177001953125, 1.5584798583984374, 1.5584490966796876, 1.558161376953125, 1.5583846435546875, 1.5586396484375, 1.5587840576171874, 1.55858740234375, 1.5587706298828126, 1.5585311279296874, 1.5585853271484376, 1.558718505859375, 1.55877783203125, 1.5583519287109375, 1.5585361328125, 1.5583436279296874, 1.5583160400390625, 1.5586314697265624, 1.55827197265625, 1.5586263427734375, 1.558256591796875, 1.5586048583984375, 1.56061083984375, 1.55869189453125, 1.5582628173828126, 1.55932470703125, 1.559943115234375, 1.55940966796875, 1.55860986328125, 1.558519775390625, 1.5587215576171876, 1.558623291015625, 1.5588004150390624, 1.5584306640625, 1.5585311279296874, 1.559140380859375, 1.5590625, 1.5583037109375, 1.558941650390625, 1.5583519287109375, 1.5587593994140625, 3.236095947265625, 1.55803955078125, 1.5580579833984376, 1.5585648193359376, 1.55922119140625, 1.5585577392578125, 1.5581224365234374, 1.5580743408203126, 1.557939208984375, 1.558950927734375, 1.5590543212890624, 1.5589171142578124, 1.55820751953125, 1.5580906982421876, 1.558582275390625, 1.5586478271484374, 1.5581624755859376, 1.5583251953125, 1.558139892578125, 1.558404052734375, 1.558762451171875, 1.558740966796875, 1.55846044921875, 1.55859765625, 1.5587103271484375, 1.55846044921875, 1.55847265625, 1.55835595703125, 1.5592816162109375, 1.558283203125, 1.5583631591796876, 1.558256591796875, 1.5584962158203124, 1.5584859619140625, 1.5586282958984374, 1.5587747802734375, 1.5586324462890624, 1.558761474609375, 1.558920166015625, 1.5590062255859376, 1.559319580078125, 1.5589990234375, 1.5587880859375, 1.5590902099609374, 1.55905126953125, 
1.559146484375, 1.5588126220703125, 1.5589180908203124, 1.5586734619140625, 1.5591004638671875, 1.5590809326171875, 1.5590645751953125, 1.55970458984375, 1.5590850830078125, 1.5590697021484374, 1.558794189453125, 1.558703125, 1.5587318115234374, 1.5590830078125, 1.5591229248046874, 1.5588321533203124, 1.5586334228515626, 1.5587808837890624, 3.23734619140625, 1.558002685546875, 1.5577149658203124, 1.558254638671875, 1.5578726806640626, 1.55822998046875, 1.558177734375, 1.5585648193359376, 1.558667236328125, 1.5586529541015626, 1.5583426513671874, 1.55926123046875, 1.5578367919921876, 1.55795458984375, 1.558508544921875, 1.5586611328125, 1.5580989990234375, 1.5586651611328124, 1.5579781494140625, 1.5578511962890624, 1.558193115234375, 1.558066162109375, 1.5584921875, 1.5580631103515625, 1.558329345703125, 1.5580068359375, 1.5581522216796875, 1.5584962158203124, 1.5587593994140625, 1.558128662109375, 1.558140869140625, 1.558129638671875, 1.5582259521484374, 1.5581961669921875, 1.55856591796875, 1.5580845947265625, 1.558054931640625, 1.5582054443359374, 1.558171630859375, 1.5583519287109375, 1.55859765625, 1.5583262939453124, 1.5581573486328124, 1.558824951171875, 1.558509521484375, 1.5589549560546876, 1.558813720703125, 1.5588546142578126, 1.55884130859375, 1.558962158203125, 1.5589754638671875, 1.5583538818359375, 1.5587542724609376, 1.5585946044921875, 1.5591341552734375, 1.5585167236328126, 1.55884130859375, 1.5582105712890626, 1.5585382080078125, 1.558287353515625, 1.559066650390625, 1.558328369140625, 1.5585638427734374, 3.238383544921875, 1.558488037109375, 1.557611572265625, 1.5580323486328125, 1.5576668701171874, 1.5577364501953126, 1.5578460693359375, 1.557897216796875, 1.5580999755859375, 1.558709228515625, 1.557866455078125, 1.5580303955078125, 1.5580068359375, 1.5578204345703126, 1.5581685791015625, 1.5583262939453124, 1.5580938720703126, 1.558171630859375, 1.55803857421875, 1.5583016357421875, 1.558519775390625, 1.557992431640625, 1.5580426025390626, 1.5578306884765625, 1.5581573486328124, 1.5579320068359375, 1.558286376953125, 1.5582371826171875, 1.5583016357421875, 1.5584952392578124, 1.5585279541015624, 1.558201416015625, 1.558108154296875, 1.558107177734375, 1.558592529296875, 1.558244384765625, 1.558054931640625, 1.5581644287109375, 1.55880859375, 1.5584910888671875, 1.558899658203125, 1.5585997314453126, 1.55806103515625, 1.5583375244140625, 1.5583109130859376, 1.55829345703125, 1.5588638916015625, 1.5589119873046875, 1.5587071533203125, 1.5580262451171876, 1.558403076171875, 1.5584798583984374, 1.558330322265625, 1.55858642578125, 1.558856689453125, 1.558193115234375, 1.558345703125, 1.558108154296875, 1.5583251953125, 1.5581982421875, 1.5610501708984375, 1.5583887939453125, 1.5587952880859375, 3.235346435546875, 1.5581163330078125, 1.55756640625, 1.5580068359375, 1.55763916015625, 1.55790234375, 1.5576688232421876, 1.557738525390625, 1.5576319580078124, 1.558244384765625, 1.557791748046875, 1.5579586181640626, 1.5577528076171876, 1.557613525390625, 1.5579515380859374, 1.5582166748046875, 1.5581358642578125, 1.55793505859375, 1.55818603515625, 1.5580927734375, 1.55803857421875, 1.558433837890625, 1.55839697265625, 1.5587225341796875, 1.5587174072265626, 1.5583846435546875, 1.5585064697265625, 1.5582259521484374, 1.558518798828125, 1.5581593017578126, 1.5583385009765625, 1.5581470947265625, 1.5582484130859375, 1.5581358642578125, 1.5583436279296874, 1.558077392578125, 1.5579207763671874, 1.55822900390625, 1.55804052734375, 1.5581890869140624, 1.558372314453125, 
1.5585648193359376, 1.558033447265625, 1.5585955810546874, 1.55846142578125, 1.5585228271484375, 1.558578125, 1.5584296875, 1.55850244140625, 1.558202392578125, 1.55873486328125, 1.5583662109375, 1.55852490234375, 1.558560791015625, 1.5590738525390626, 1.55888330078125, 1.5588905029296876, 1.55881982421875, 1.558825927734375, 1.5586263427734375, 1.5591884765625, 1.558414306640625, 1.5586529541015626, 3.238762451171875, 1.557859375, 1.557813232421875, 1.55837744140625, 1.558393798828125, 1.55814404296875, 1.55791259765625, 1.55789208984375, 1.5579832763671875, 1.5583538818359375, 1.558372314453125, 1.5582945556640626, 1.557918701171875, 1.5579627685546875, 1.5581337890625, 1.558581298828125, 1.5580999755859375, 1.557992431640625, 1.5592130126953125, 1.5587860107421876, 1.5587532958984376, 1.559041015625, 1.5587696533203126, 1.559415771484375, 1.5595980224609376, 1.559413818359375, 1.5599595947265625, 1.559562255859375, 1.5584573974609375, 1.55867236328125, 1.5585638427734374, 1.558118408203125, 1.5581470947265625, 1.5581961669921875, 1.558635498046875, 1.558296630859375, 1.558118408203125, 1.558240234375, 1.5587225341796875, 1.558730712890625, 1.5587952880859375, 1.558772705078125, 1.55820849609375, 1.558361083984375, 1.558244384765625, 1.5583170166015625, 1.558265869140625, 1.55871337890625, 1.5584215087890625, 1.558140869140625, 1.5588372802734376, 1.5583487548828125, 1.5584635009765626, 1.5586375732421875, 1.5585433349609374, 1.558372314453125, 1.5584256591796875, 1.5581634521484375, 1.5582484130859375, 1.558240234375, 1.5586221923828125, 1.5586898193359375, 1.5584942626953124]",tokens/s,0.6318916739865195,,,,,,,, -4bit-awq-gemm-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,s,s,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 304, in hf_raise_for_status - response.raise_for_status() - File ""/usr/local/lib/python3.10/dist-packages/requests/models.py"", line 1024, in raise_for_status - raise HTTPError(http_error_msg, response=self) -requests.exceptions.HTTPError: 404 Client Error: Not Found for url: https://huggingface.co/s/resolve/main/config.json - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", 
line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1221, in hf_hub_download - return _hf_hub_download_to_cache_dir( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1325, in _hf_hub_download_to_cache_dir - _raise_on_head_call_error(head_call_error, force_download, local_files_only) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1823, in _raise_on_head_call_error - raise head_call_error - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1722, in _get_metadata_or_catch_error - metadata = get_hf_file_metadata(url=url, proxies=proxies, timeout=etag_timeout, headers=headers) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1645, in get_hf_file_metadata - r = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 372, in _request_wrapper - response = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 396, in _request_wrapper - hf_raise_for_status(response) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status - raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-66948ce7-7540ffbc4c32cf5c23cc57ca;da49da9d-686c-4114-897b-61c7820ec048) - -Repository Not Found for url: https://huggingface.co/s/resolve/main/config.json. -Please make sure you specified the correct `repo_id` and `repo_type`. -If you are trying to access a private or gated repo, make sure you are authenticated. 
- -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 425, in cached_file - raise EnvironmentError( -OSError: s is not a local folder and is not a valid model identifier listed on 'https://huggingface.co/models' -If this is a private repository, make sure to pass a token having permission to this repo either by logging in with `huggingface-cli login` or by passing `token=` - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemm-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,EleutherAI/pythia-12b,EleutherAI/pythia-12b,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.42.3,,0.31.0,,,,1.20.0,,,,0.11.1,,,,,,,,,,,,,,,,,,,,,,,,,,,MB,1542.070272,9574.023168,0.0,8927.576064,8585.053184,s,10,9.482604919433594,0.9482604919433595,0.0008260811719636861,0.9479547424316406,0.94941376953125,0.9494473327636719,0.9494741833496094,"[0.9490130004882813, 0.94773095703125, 0.947888427734375, 0.9473400268554687, 0.9480210571289063, 0.9470042114257813, 0.94784326171875, 0.9488767700195313, 0.9494063110351563, 0.9494808959960938]",tokens/s,269.9680121391065,kWh,1.1186615084156847e-05,6.129816613698998e-06,5.138228858054715e-05,6.8698720278403e-05,tokens/kWh,3726415.848250952,MB,1542.070272,9574.023168,0.0,8927.576064,8848.595456,s,10,563.8236953125,56.382369531250006,0.006341033853176846,56.382623046875,56.388686328125,56.3898587890625,56.390796757812495,"[56.388078125, 56.37173828125, 56.37409375, 56.38842578125, 56.3804140625, 56.38051171875, 56.384734375, 56.39103125, 56.376703125, 
56.38796484375]",tokens/s,1.1173705632410884,kWh,0.0006656233578544073,0.00036482027414217567,0.0030422669085590557,0.0040727105405556395,tokens/kWh,15468.813551233847,,s,629,571.4626268310548,0.9085256388411044,0.1125877667908229,0.8949237670898438,0.8955600952148438,0.8957106079101562,1.8423289111328125,"[0.8947117919921875, 0.8944906005859375, 0.894487548828125, 0.8946493530273437, 0.8944937133789063, 0.8946043701171875, 0.8949923095703125, 0.8949176025390625, 0.89470361328125, 0.8947046508789063, 0.8949002075195313, 0.8946964721679688, 0.894624755859375, 0.8946841430664062, 0.8946831665039062, 0.8945592041015625, 0.8946094360351563, 0.8955238647460938, 0.8949595947265625, 0.8950548706054687, 0.8953436279296875, 0.8954111938476562, 0.8952739868164062, 0.8949595947265625, 0.89497802734375, 0.895678466796875, 0.89542041015625, 0.8951244506835937, 0.8955187377929688, 0.8954183959960937, 0.8956139526367187, 0.895541259765625, 0.8952381591796875, 0.8951981811523437, 0.894613525390625, 0.8951449584960938, 0.89522998046875, 0.8949349975585937, 0.8950794067382812, 0.8955852661132813, 0.8955136108398437, 0.8957040405273438, 0.89653759765625, 0.895425537109375, 0.8957429809570312, 0.8955657958984375, 0.8946759643554687, 0.8947702026367188, 0.8948060302734375, 0.8948029174804687, 0.8947312622070313, 0.8949166259765625, 0.894961669921875, 0.894698486328125, 0.8946677856445312, 0.8949760131835938, 0.8948101196289062, 0.8951265258789063, 0.8949155883789063, 0.8949125366210937, 0.8952319946289062, 0.8952125244140625, 1.84270849609375, 0.8942510375976562, 0.8942520141601562, 0.8943861694335937, 0.8943790283203125, 0.89421826171875, 0.8942530517578124, 0.8943472900390625, 0.8943073120117188, 0.8942039184570313, 0.8943523559570312, 0.8947466430664063, 0.8944496459960938, 0.8944578857421875, 0.8946176147460938, 0.8945325927734376, 0.8944998168945313, 0.8944578857421875, 0.8943411254882813, 0.8944066772460938, 0.8947333374023437, 0.8947742919921875, 0.8952371215820313, 0.8951920776367187, 0.8950742797851563, 0.8953036499023438, 0.8958607177734375, 0.8951480102539062, 0.894561279296875, 0.8945745849609374, 0.89464013671875, 0.89449267578125, 0.8947291870117188, 0.8946995239257812, 0.8953272094726562, 0.8949248046875, 0.8945797119140625, 0.894581787109375, 0.8946227416992187, 0.89453466796875, 0.8946360473632813, 0.8944865112304687, 0.894972900390625, 0.8946022338867188, 0.8948131713867188, 0.89630615234375, 0.8950855712890625, 0.8945735473632812, 0.89509375, 0.894877685546875, 0.8948623657226562, 0.8948592529296875, 0.8950701904296875, 0.8948326416015625, 0.8948951416015625, 0.8951039428710937, 0.8956979370117187, 0.8953681640625, 0.8951326904296875, 0.895341552734375, 0.8950026245117187, 0.89509375, 0.89504052734375, 1.842423828125, 0.8941260986328124, 0.8945520629882813, 0.8947220458984375, 0.894656494140625, 0.8945305786132812, 0.89499853515625, 0.8945592041015625, 0.8945950927734375, 0.8945172729492188, 0.8946370849609375, 0.8945059814453125, 0.8944046630859375, 0.8946636352539062, 0.8949012451171875, 0.8946749267578125, 0.8944148559570313, 0.8946463012695313, 0.8946493530273437, 0.8948449096679687, 0.8946841430664062, 0.89457666015625, 0.8946165771484375, 0.8945653686523437, 0.8947998657226562, 0.8951336669921875, 0.89472509765625, 0.8945254516601563, 0.8946647338867187, 0.8947496948242187, 0.8945458984375, 0.8946483154296875, 0.8947752685546875, 0.8946513671875, 0.89472412109375, 0.8949534912109375, 0.89493505859375, 0.8949319458007813, 0.8946903076171875, 0.8946851806640626, 0.8947425537109375, 
0.8946165771484375, 0.8947568359375, 0.8950374145507812, 0.8954019775390625, 0.8953005981445312, 0.8949186401367187, 0.895477783203125, 0.8955760498046875, 0.8952893676757813, 0.8947711791992188, 0.8951603393554688, 0.8951644287109375, 0.8949258422851563, 0.8947702026367188, 0.895045654296875, 0.894966796875, 0.8951286010742188, 0.8954326782226563, 0.8955955200195312, 0.8950845336914063, 0.89493505859375, 0.8953036499023438, 1.8420848388671875, 0.8943739013671875, 0.8944742431640625, 0.8944588623046875, 0.8943021850585937, 0.8945551147460937, 0.8949002075195313, 0.8946370849609375, 0.894671875, 0.8950026245117187, 0.8946104125976563, 0.8944783325195312, 0.8946043090820313, 0.894730224609375, 0.8949268188476562, 0.8950210571289062, 0.8947999267578125, 0.8947742309570312, 0.894740478515625, 0.8947445678710938, 0.8951439208984375, 0.8945285034179687, 0.894718994140625, 0.8944946899414062, 0.8946565551757812, 0.8951868896484375, 0.895405029296875, 0.8954634399414062, 0.8955699462890625, 0.89556591796875, 0.8955718994140625, 0.8953794555664063, 0.8954337158203125, 0.8954337158203125, 0.8955197143554687, 0.895415283203125, 0.8947374267578125, 0.8948316040039063, 0.8948848876953125, 0.89493603515625, 0.89607373046875, 0.8953927612304687, 0.8954030151367187, 0.8950763549804688, 0.8948746337890625, 0.8950906982421875, 0.8951173706054687, 0.8950875244140625, 0.8950517578125, 0.895193115234375, 0.8957860107421876, 0.8957142944335937, 0.89510400390625, 0.8949442749023437, 0.8955504760742188, 0.8954183959960937, 0.8954265747070312, 0.8953282470703126, 0.8952658081054687, 0.8953344116210937, 0.8951910400390625, 0.8952115478515625, 0.895372314453125, 1.842798583984375, 0.894867431640625, 0.8944486694335938, 0.8947363891601563, 0.8945643310546875, 0.894624755859375, 0.8943779907226562, 0.8944629516601562, 0.894593017578125, 0.8946360473632813, 0.8946360473632813, 0.8945499877929688, 0.8949258422851563, 0.8945899658203125, 0.8943206176757813, 0.8943810424804688, 0.8943441772460937, 0.8944210205078125, 0.8943964233398437, 0.8946442260742188, 0.8948121337890625, 0.8950394897460937, 0.894508056640625, 0.8946288452148438, 0.894761962890625, 0.894445556640625, 0.894666748046875, 0.8947322998046875, 0.8947803955078125, 0.8947783813476563, 0.8950272216796875, 0.8951142578125, 0.8951838989257812, 0.895194091796875, 0.8952688598632812, 0.8949923706054688, 0.894941162109375, 0.8948592529296875, 0.895120361328125, 0.89495654296875, 0.894834716796875, 0.8950067138671876, 0.8949268188476562, 0.8954276123046875, 0.895098876953125, 0.8948489990234375, 0.89508251953125, 0.8947149047851563, 0.8947844848632812, 0.8949176025390625, 0.8951572265625, 0.8954265747070312, 0.8949329833984375, 0.8952186889648438, 0.89586279296875, 0.8956856079101563, 0.8956375732421875, 0.8958504028320312, 0.8958607177734375, 0.8957890625, 0.896090087890625, 0.8952207641601563, 0.8950046997070312, 1.8418104248046876, 0.894635009765625, 0.8949862670898437, 0.8942847900390625, 0.8947527465820313, 0.89474560546875, 0.8949237670898438, 0.895009765625, 0.8947005615234375, 0.894540771484375, 0.8944691162109375, 0.8947548217773438, 0.8950394897460937, 0.8950814819335937, 0.8949442749023437, 0.8952811279296875, 0.8954982299804688, 0.8953876342773438, 0.8947916870117187, 0.894793701171875, 0.894424072265625, 0.8944189453125, 0.8946421508789062, 0.8948480224609375, 0.8949166259765625, 0.8948336791992187, 0.8950661010742188, 0.894693359375, 0.8947220458984375, 0.8945428466796875, 0.8945264892578125, 0.8946483154296875, 0.8945428466796875, 0.8949534912109375, 
0.8950947875976563, 0.8949801025390625, 0.894793701171875, 0.8944804077148437, 0.8948326416015625, 0.8947886352539063, 0.8946463012695313, 0.8950374145507812, 0.8950548706054687, 0.8947046508789063, 0.8948521118164062, 0.8947947387695312, 0.8950343627929688, 0.896016357421875, 0.89537841796875, 0.8949534912109375, 0.8948264770507812, 0.8947291870117188, 0.8948469848632813, 0.8950374145507812, 0.8955863037109375, 0.895625244140625, 0.8950077514648438, 0.8963041381835938, 0.8954111938476562, 0.8953671875, 0.8954623413085937, 0.894798828125, 0.894887939453125, 1.8426265869140626, 0.8947097778320312, 0.8943308715820313, 0.8946094360351563, 0.89487255859375, 0.8948531494140625, 0.8943810424804688, 0.8948029174804687, 0.8945305786132812, 0.894403564453125, 0.8944967651367187, 0.8945111083984375, 0.894256103515625, 0.8942643432617188, 0.8944465942382812, 0.8944373779296875, 0.8948449096679687, 0.8945623168945313, 0.8946790161132813, 0.8952760620117187, 0.8952442626953125, 0.8954480590820313, 0.895604736328125, 0.8951316528320312, 0.895373291015625, 0.8952965087890625, 0.8952135620117188, 0.8954982299804688, 0.89461962890625, 0.894635009765625, 0.89516748046875, 0.8950435791015625, 0.895288330078125, 0.8951705322265625, 0.8953231201171875, 0.8952504272460937, 0.8954859619140625, 0.8955873413085937, 0.8957020874023438, 0.8951807250976562, 0.8950128784179687, 0.8955391845703125, 0.8951060180664062, 0.8948285522460937, 0.8950609741210938, 0.8948336791992187, 0.895288330078125, 0.8960173950195313, 0.8961146850585937, 0.8949033203125, 0.8948756713867188, 0.8947322998046875, 0.8947599487304687, 0.8946739501953125, 0.8947794189453125, 0.89483056640625, 0.8950394897460937, 0.8949503784179688, 0.8951633911132812, 0.895013916015625, 0.8956754150390625, 0.89516748046875, 0.8950999145507812, 1.8436168212890625, 0.8943912963867188, 0.8948367309570312, 0.8948541259765626, 0.8944517211914063, 0.8943073120117188, 0.8948643798828125, 0.8947589111328125, 0.8948408203125, 0.8943790283203125, 0.8945469360351562, 0.8947906494140625, 0.8946472778320312, 0.89457763671875, 0.8946227416992187, 0.8944599609375, 0.8943308715820313, 0.8947947387695312, 0.89451416015625, 0.8946759643554687, 0.8953764038085937, 0.8954818725585938, 0.8954224853515625, 0.8949595947265625, 0.894798828125, 0.89508251953125, 0.89493603515625, 0.8948899536132813, 0.8954071044921875, 0.8956600341796875, 0.8955228271484375, 0.8945531005859375, 0.895091796875, 0.894426025390625, 0.8945940551757813, 0.8949483642578125, 0.8954695434570312, 0.8954337158203125, 0.8955617065429687, 0.895705078125, 0.8956774291992188, 0.8955668334960938, 0.8957470703125, 0.895783935546875, 0.8958381958007813, 0.8955484008789063, 0.8957245483398437, 0.8957757568359375, 0.8956160278320312, 0.8955596923828125, 0.8956641235351562, 0.894929931640625, 0.8956354370117188, 0.8957890625, 0.895720458984375, 0.8952330322265625, 0.8950875854492187, 0.8949013061523438, 0.8949912719726563, 0.8950056762695312, 0.8950292358398437, 0.8950947875976563, 0.895425537109375, 1.844294677734375, 0.8944813842773438, 0.8946903076171875, 0.8944793701171875, 0.8943154907226563, 0.8944281616210937, 0.8947394409179688, 0.8945684204101563, 0.89459814453125, 0.8946411743164062, 0.8946442260742188, 0.8948531494140625, 0.8943698120117187, 0.89447216796875, 0.8945315551757812, 0.894613525390625, 0.8944015502929688, 0.894519287109375, 0.8946494140625, 0.8943318481445313, 0.894856201171875, 0.8947066650390625, 0.8946043090820313, 0.8954736938476563, 0.894951416015625, 0.8945725708007812, 0.8944384155273437, 
0.8945336303710938, 0.894424072265625, 0.89456640625, 0.8948255615234375, 0.8949267578125, 0.8951398315429687, 0.8949749755859375, 0.8952821655273437, 0.8951060180664062, 0.8951029663085938, 0.89509375, 0.8948869018554687, 0.89470361328125, 0.8949053344726563, 0.8949002075195313, 0.8952289428710938, 0.8951848754882813, 0.8950517578125, 0.8951705322265625, 0.89493505859375, 0.895046630859375, 0.8949483642578125, 0.895025146484375, 0.8951019287109375, 0.895046630859375, 0.8949944458007812, 0.8952371215820313, 0.8952401733398437, 0.8950508422851563, 0.8953936767578125, 0.8952227783203125, 0.8954398803710938, 0.8953220825195313, 0.8954224853515625, 0.8953753662109375, 0.8951367797851563, 1.844548583984375, 0.8944896240234375, 0.8949319458007813, 0.89477734375, 0.8948880004882812, 0.8951592407226563, 0.8949964599609375, 0.8950630493164062, 0.8949033203125, 0.8947374267578125, 0.8947835083007812, 0.8947149047851563, 0.8950487060546874, 0.8947804565429688, 0.8947496337890625, 0.8945151977539062, 0.89474560546875, 0.894519287109375, 0.894666748046875, 0.8947998657226562, 0.89482958984375, 0.8946575317382812, 0.89470361328125, 0.89495654296875, 0.8948951416015625, 0.8947322387695312, 0.8947548217773438, 0.89498828125, 0.8947803955078125, 0.894624755859375, 0.8946534423828125, 0.8946217041015625, 0.8947435302734374, 0.8945428466796875, 0.8952791137695313, 0.8954193725585937, 0.8954695434570312, 0.8955166625976563, 0.89563134765625, 0.8954306640625, 0.8954142456054688, 0.89560986328125, 0.8953190307617187, 0.8954153442382813, 0.8953906860351563, 0.8954869995117187, 0.8954685668945312, 0.8954306640625, 0.8956671752929688, 0.8955023193359375, 0.8956805419921875, 0.8953036499023438, 0.895129638671875, 0.8948295288085938, 0.8949370727539062, 0.8949995727539063, 0.8952125244140625, 0.8953599853515625, 0.8954337158203125, 0.8954173583984375, 0.8950763549804688, 0.895267822265625, 0.8950804443359375]",tokens/s,1.1006844025619116,,,,,,,, -4bit-awq-gemm-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,2,2,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 304, in hf_raise_for_status - response.raise_for_status() - File ""/usr/local/lib/python3.10/dist-packages/requests/models.py"", line 1024, in raise_for_status - raise HTTPError(http_error_msg, response=self) -requests.exceptions.HTTPError: 404 Client Error: Not Found for url: https://huggingface.co/2/resolve/main/config.json - -The above exception was 
the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1221, in hf_hub_download - return _hf_hub_download_to_cache_dir( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1325, in _hf_hub_download_to_cache_dir - _raise_on_head_call_error(head_call_error, force_download, local_files_only) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1823, in _raise_on_head_call_error - raise head_call_error - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1722, in _get_metadata_or_catch_error - metadata = get_hf_file_metadata(url=url, proxies=proxies, timeout=etag_timeout, headers=headers) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1645, in get_hf_file_metadata - r = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 372, in _request_wrapper - response = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 396, in _request_wrapper - hf_raise_for_status(response) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status - raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-6694936a-288a55e40de5c44b1da06344;1cede1ef-bb3c-4e10-9a2c-3a5a69e45696) - -Repository Not Found for url: https://huggingface.co/2/resolve/main/config.json. -Please make sure you specified the correct `repo_id` and `repo_type`. -If you are trying to access a private or gated repo, make sure you are authenticated. 
- -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 425, in cached_file - raise EnvironmentError( -OSError: 2 is not a local folder and is not a valid model identifier listed on 'https://huggingface.co/models' -If this is a private repository, make sure to pass a token having permission to this repo either by logging in with `huggingface-cli login` or by passing `token=` - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemm-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,tiiuae/falcon-40b,tiiuae/falcon-40b,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File 
""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - self.load_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 559, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3704, in from_pretrained - config = cls._autoset_attn_implementation( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1490, in _autoset_attn_implementation - config = cls._check_and_enable_sdpa( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1656, in _check_and_enable_sdpa - raise ValueError( -ValueError: FalconForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemm-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,EleutherAI/pythia-160m,EleutherAI/pythia-160m,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.42.3,,0.31.0,,,,1.20.0,,,,0.11.1,,,,,,,,,,,,,,,,,,,,,,,,,,,MB,1298.558976,1030.22592,0.0,383.778816,312.459776,s,10,0.28084687995910645,0.028084687995910646,0.000728285740183105,0.028046159744262694,0.02834249210357666,0.029145854473114012,0.029788544368743897,"[0.029949216842651366, 0.026798112869262695, 0.028019840240478516, 0.028072479248046876, 0.02816396713256836, 0.02801126480102539, 0.027853151321411133, 0.028088735580444335, 0.02812339210510254, 0.027766719818115234]",tokens/s,9115.28730663754,kWh,3.2420572125929855e-07,1.7764934109754204e-07,7.379792814026657e-07,1.2398343437595062e-06,tokens/kWh,206479197.23189807,MB,1298.886656,1030.22592,0.0,383.778816,321.513984,s,10,17.335703369140624,1.7335703369140625,0.018391524502804384,1.7369953002929686,1.742456408691406,1.7499957214355468,1.7560271716308595,"[1.7575350341796876, 1.681819580078125, 1.740781005859375, 1.7369793701171874, 1.7387105712890625, 1.7344283447265625, 1.73701123046875, 1.7378514404296874, 1.7365494384765625, 1.734037353515625]",tokens/s,36.3411848129258,kWh,1.964119326918637e-05,1.0763519540993226e-05,4.278783331700238e-05,7.319254612718198e-05,tokens/kWh,860743.386225818,,s,629,17.55748351097106,0.0279133283163292,0.003379444288478559,0.027531295776367187,0.027742361068725588,0.028291072082519533,0.05581565963745117,"[0.028317695617675782, 0.02860339164733887, 
0.02758246421813965, 0.02776268768310547, 0.02735206413269043, 0.027414560317993164, 0.029547487258911133, 0.029131776809692384, 0.028694528579711914, 0.02916659164428711, 0.028453887939453124, 0.029784063339233398, 0.028712959289550782, 0.028251136779785156, 0.028046335220336914, 0.02874675178527832, 0.028438528060913085, 0.02809343910217285, 0.027811840057373048, 0.02775142478942871, 0.027683839797973633, 0.027563007354736328, 0.02790399932861328, 0.028857343673706053, 0.027925567626953127, 0.027547584533691407, 0.027509759902954102, 0.027442176818847655, 0.027610111236572265, 0.027423744201660157, 0.027511808395385744, 0.027613183975219727, 0.02750873565673828, 0.02756710433959961, 0.027622400283813478, 0.027445247650146484, 0.027481088638305663, 0.027496448516845705, 0.0275230712890625, 0.027481088638305663, 0.027518976211547853, 0.027886592864990234, 0.027596799850463868, 0.02754150390625, 0.027484159469604492, 0.02752102470397949, 0.028536832809448243, 0.02763065528869629, 0.027487167358398436, 0.027603967666625977, 0.027572223663330078, 0.027682815551757813, 0.0275281925201416, 0.02760601615905762, 0.02755788803100586, 0.02758143997192383, 0.027496448516845705, 0.02754969596862793, 0.027570175170898437, 0.02755788803100586, 0.027470848083496095, 0.02771455955505371, 0.053613601684570314, 0.026470367431640624, 0.02636595153808594, 0.026263551712036134, 0.02627174377441406, 0.026299455642700195, 0.026310592651367187, 0.026295295715332033, 0.026380287170410157, 0.02631679916381836, 0.0263055362701416, 0.026335231781005858, 0.02631884765625, 0.026300416946411134, 0.026315776824951172, 0.02655232048034668, 0.0263874568939209, 0.02620217514038086, 0.026357696533203124, 0.026232831954956053, 0.02629631996154785, 0.026489856719970704, 0.02635468864440918, 0.026232831954956053, 0.026274816513061523, 0.02629734420776367, 0.026294271469116212, 0.026457088470458984, 0.02629734420776367, 0.026300479888916015, 0.026314687728881837, 0.026302463531494142, 0.02631270408630371, 0.02627276802062988, 0.026286079406738282, 0.026284032821655274, 0.026343423843383788, 0.02628505516052246, 0.026261503219604493, 0.026417152404785156, 0.02631782341003418, 0.02631270408630371, 0.026234880447387695, 0.026270719528198243, 0.026549247741699217, 0.026566656112670898, 0.02839756774902344, 0.027612159729003907, 0.027510784149169923, 0.027592704772949218, 0.027596799850463868, 0.027694080352783205, 0.027495424270629884, 0.02771865653991699, 0.027663360595703124, 0.02756915283203125, 0.027561983108520507, 0.027578367233276366, 0.02747395133972168, 0.027514848709106445, 0.02759884834289551, 0.027471872329711915, 0.028064767837524415, 0.056030208587646485, 0.02760704040527344, 0.027583488464355467, 0.027578367233276366, 0.02775654411315918, 0.028622848510742187, 0.027812864303588865, 0.027684864044189454, 0.027510784149169923, 0.027461631774902344, 0.02760601615905762, 0.027608064651489257, 0.02754867172241211, 0.02752921676635742, 0.02752204895019531, 0.027511808395385744, 0.02756608009338379, 0.027468799591064453, 0.02755788803100586, 0.0274913272857666, 0.02753638458251953, 0.02757734489440918, 0.02771046447753906, 0.027622400283813478, 0.02751283264160156, 0.02773401641845703, 0.027615232467651366, 0.02750873565673828, 0.027470848083496095, 0.027609088897705077, 0.027501567840576172, 0.02777907180786133, 0.028403711318969727, 0.02777292823791504, 0.027587583541870117, 0.02750668716430664, 0.027579456329345702, 0.027569087982177734, 0.02763270378112793, 0.027585472106933594, 0.027682815551757813, 
0.027551744461059572, 0.02751590347290039, 0.027671552658081053, 0.02746678352355957, 0.027465696334838866, 0.027436031341552734, 0.028864511489868162, 0.028021760940551758, 0.027624448776245116, 0.027682815551757813, 0.027538431167602538, 0.02750054359436035, 0.02753023910522461, 0.027807743072509765, 0.02757734489440918, 0.027533376693725586, 0.02744108772277832, 0.02753126335144043, 0.02757529640197754, 0.02715443229675293, 0.027251712799072264, 0.02756403160095215, 0.05607014465332031, 0.02755583953857422, 0.02754969596862793, 0.02751283264160156, 0.02752511978149414, 0.027572223663330078, 0.0277708797454834, 0.027670528411865233, 0.027527168273925783, 0.02756096076965332, 0.02754867172241211, 0.027609088897705077, 0.0275732479095459, 0.027600896835327147, 0.02759065628051758, 0.027572223663330078, 0.02736128044128418, 0.027046911239624022, 0.02754047966003418, 0.027603967666625977, 0.027709440231323244, 0.02751283264160156, 0.027527168273925783, 0.02753331184387207, 0.02752409553527832, 0.027594751358032226, 0.027517951965332032, 0.02754867172241211, 0.027425792694091795, 0.027488256454467775, 0.027443199157714843, 0.02752102470397949, 0.027471872329711915, 0.027583488464355467, 0.02768076705932617, 0.02755891227722168, 0.027622400283813478, 0.02753023910522461, 0.02752511978149414, 0.027724800109863282, 0.027668479919433595, 0.027583488464355467, 0.02753433609008789, 0.02749849510192871, 0.027623424530029295, 0.027578367233276366, 0.027611135482788086, 0.02755583953857422, 0.027643903732299805, 0.027620351791381836, 0.027622400283813478, 0.02753331184387207, 0.027634687423706054, 0.027686912536621092, 0.02753331184387207, 0.027626495361328125, 0.027732992172241212, 0.027653120040893556, 0.027572223663330078, 0.02752409553527832, 0.02753331184387207, 0.02752409553527832, 0.027592704772949218, 0.05624319839477539, 0.02752204895019531, 0.02755583953857422, 0.02752511978149414, 0.02756608009338379, 0.027511808395385744, 0.027465728759765624, 0.027692031860351563, 0.027672576904296874, 0.02756403160095215, 0.027501567840576172, 0.027494400024414063, 0.027615232467651366, 0.027682815551757813, 0.02757529640197754, 0.027893760681152343, 0.027642879486083984, 0.027618303298950195, 0.027651071548461914, 0.027568128585815428, 0.0275599365234375, 0.02747494316101074, 0.0274913272857666, 0.027655168533325194, 0.027490304946899413, 0.027633663177490234, 0.027588607788085938, 0.027483200073242186, 0.027740095138549806, 0.027473920822143554, 0.027470848083496095, 0.02750054359436035, 0.027489280700683592, 0.027600896835327147, 0.02753433609008789, 0.02755788803100586, 0.02756915283203125, 0.02751692771911621, 0.027446271896362305, 0.027482112884521483, 0.027601919174194335, 0.027625471115112304, 0.028243967056274414, 0.027845632553100585, 0.027794431686401368, 0.02756710433959961, 0.02755788803100586, 0.02752204895019531, 0.02834022331237793, 0.02766748809814453, 0.027527135848999025, 0.027579391479492187, 0.027465728759765624, 0.027493375778198242, 0.027579391479492187, 0.02758143997192383, 0.027435007095336913, 0.02759884834289551, 0.027570175170898437, 0.027476991653442383, 0.027546623229980468, 0.027464767456054688, 0.027498432159423828, 0.056005630493164066, 0.02768076705932617, 0.02750771141052246, 0.0275732479095459, 0.02750464057922363, 0.027509759902954102, 0.027625471115112304, 0.027531295776367187, 0.02744828796386719, 0.02758246421813965, 0.027446271896362305, 0.027441152572631834, 0.027445247650146484, 0.027586559295654296, 0.02752409553527832, 0.02778726387023926, 
0.027593727111816405, 0.027457536697387694, 0.027485183715820313, 0.02754867172241211, 0.027587583541870117, 0.027494400024414063, 0.027488256454467775, 0.027732992172241212, 0.02750873565673828, 0.027494400024414063, 0.027485183715820313, 0.02755891227722168, 0.02750054359436035, 0.027440128326416017, 0.027476991653442383, 0.027836416244506838, 0.02771865653991699, 0.027458560943603515, 0.02756505584716797, 0.027437055587768554, 0.027521087646484376, 0.027642816543579102, 0.027497472763061522, 0.027472896575927733, 0.02746675109863281, 0.027428863525390625, 0.02735820770263672, 0.027463680267333986, 0.02752409553527832, 0.027406335830688477, 0.027373567581176757, 0.027426816940307616, 0.027428863525390625, 0.027460607528686523, 0.027580415725708008, 0.02755276870727539, 0.027471872329711915, 0.027494400024414063, 0.02757529640197754, 0.027485183715820313, 0.027614208221435548, 0.0275599365234375, 0.027497472763061522, 0.027461631774902344, 0.027511808395385744, 0.02752409553527832, 0.02759782409667969, 0.055839744567871094, 0.0275281925201416, 0.02750873565673828, 0.027660287857055665, 0.028719104766845704, 0.02778316879272461, 0.0275599365234375, 0.027455488204956056, 0.02752511978149414, 0.02771865653991699, 0.027613183975219727, 0.02755583953857422, 0.027482112884521483, 0.027511808395385744, 0.02753433609008789, 0.027601919174194335, 0.02753331184387207, 0.027445247650146484, 0.027546623229980468, 0.02753023910522461, 0.027432960510253908, 0.027496448516845705, 0.02749849510192871, 0.027693056106567384, 0.027557952880859375, 0.027495359420776366, 0.02747494316101074, 0.027480064392089845, 0.028631040573120117, 0.02754560089111328, 0.027595775604248047, 0.02737664031982422, 0.02792448043823242, 0.027652095794677735, 0.02755686378479004, 0.027662336349487306, 0.02750873565673828, 0.027588607788085938, 0.027502592086791993, 0.027546623229980468, 0.027509759902954102, 0.027452415466308593, 0.02751590347290039, 0.027535360336303712, 0.027488256454467775, 0.0276112003326416, 0.027622400283813478, 0.027561920166015625, 0.027012096405029298, 0.027158527374267577, 0.027480064392089845, 0.027421695709228516, 0.02754764747619629, 0.027494400024414063, 0.02750771141052246, 0.027603967666625977, 0.027517951965332032, 0.027517951965332032, 0.027443199157714843, 0.027438079833984375, 0.02754252815246582, 0.02751590347290039, 0.02754047966003418, 0.05600460815429688, 0.027443199157714843, 0.027509759902954102, 0.027493375778198242, 0.027511808395385744, 0.027463680267333986, 0.027494400024414063, 0.027637760162353517, 0.027645952224731447, 0.027543552398681642, 0.027459583282470702, 0.027561983108520507, 0.02754457664489746, 0.027441152572631834, 0.027503616333007814, 0.027595775604248047, 0.027561983108520507, 0.02747398376464844, 0.02769811248779297, 0.027451391220092772, 0.027464767456054688, 0.02740838432312012, 0.027423679351806642, 0.027561983108520507, 0.02754150390625, 0.027533376693725586, 0.027514816284179688, 0.027436031341552734, 0.027654144287109376, 0.027490304946899413, 0.027462656021118165, 0.027414527893066407, 0.027724800109863282, 0.027621376037597657, 0.02755583953857422, 0.027533344268798828, 0.027511775970458983, 0.027583488464355467, 0.027473920822143554, 0.027888639450073242, 0.028013568878173828, 0.02753228759765625, 0.027543552398681642, 0.02751590347290039, 0.02752511978149414, 0.02753740882873535, 0.028087295532226563, 0.02811392021179199, 0.02836070442199707, 0.02752102470397949, 0.027493375778198242, 0.027467775344848632, 0.027691007614135742, 0.02755686378479004, 
0.027661312103271486, 0.027489280700683592, 0.02771455955505371, 0.027546655654907225, 0.027526111602783204, 0.02749951934814453, 0.027551744461059572, 0.027627519607543945, 0.02756710433959961, 0.05608652877807617, 0.02756713676452637, 0.027494367599487306, 0.027629568099975587, 0.028110847473144532, 0.027906047821044923, 0.02756403160095215, 0.02756608009338379, 0.027489280700683592, 0.027599872589111327, 0.027708415985107423, 0.027561983108520507, 0.02712166404724121, 0.027146240234375, 0.027496448516845705, 0.02754457664489746, 0.027468799591064453, 0.027538431167602538, 0.027464704513549806, 0.027390975952148438, 0.02750668716430664, 0.027625471115112304, 0.027494400024414063, 0.027482112884521483, 0.027444223403930663, 0.027484159469604492, 0.028370943069458008, 0.028468223571777345, 0.02759065628051758, 0.027627519607543945, 0.027520000457763674, 0.02752614402770996, 0.02773708724975586, 0.027485183715820313, 0.02752204895019531, 0.027460607528686523, 0.027480064392089845, 0.027452415466308593, 0.027447296142578126, 0.027441152572631834, 0.027420671463012695, 0.027578367233276366, 0.027652095794677735, 0.02753228759765625, 0.027483200073242186, 0.027545536041259765, 0.027586559295654296, 0.02748313522338867, 0.027475008010864256, 0.027719615936279297, 0.02758143997192383, 0.027448320388793947, 0.027636735916137696, 0.027439104080200196, 0.027476991653442383, 0.02750873565673828, 0.027513856887817382, 0.02750464057922363, 0.0275230712890625, 0.027460607528686523, 0.027479040145874024, 0.027406335830688477, 0.027627519607543945, 0.05575372695922851, 0.02751692771911621, 0.027454463958740235, 0.02752102470397949, 0.027453439712524414, 0.02749545669555664, 0.027481056213378905, 0.027452415466308593, 0.0274913272857666, 0.0275732479095459, 0.02750054359436035, 0.02749951934814453, 0.02753023910522461, 0.02750668716430664, 0.02751487922668457, 0.027486207962036133, 0.027458560943603515, 0.02746063995361328, 0.027542495727539064, 0.027510784149169923, 0.027472896575927733, 0.02773504066467285, 0.027495424270629884, 0.02754867172241211, 0.027433984756469725, 0.027487232208251954, 0.02759884834289551, 0.027445247650146484, 0.027481088638305663, 0.027494400024414063, 0.02754560089111328, 0.027456512451171877, 0.027470848083496095, 0.027430912017822266, 0.02740121650695801, 0.027614240646362306, 0.027687904357910156, 0.02750054359436035, 0.0275230712890625, 0.02754047966003418, 0.027437055587768554, 0.027554815292358398, 0.027576320648193358, 0.027617279052734374, 0.027453439712524414, 0.02751283264160156, 0.027495424270629884, 0.02752204895019531, 0.02755891227722168, 0.02756096076965332, 0.027717632293701173, 0.027488256454467775, 0.02752614402770996, 0.027511808395385744, 0.02754047966003418, 0.02753945541381836, 0.027455488204956056, 0.027632640838623046, 0.027572223663330078, 0.027554815292358398, 0.027497472763061522, 0.027433984756469725, 0.02750873565673828]",tokens/s,35.82517959404373,,,,,,,, -4bit-awq-gemm-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,meta-llama/Meta-Llama-3-70B,meta-llama/Meta-Llama-3-70B,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 
7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - self.load_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3904, in from_pretrained - dispatch_model(model, **device_map_kwargs) - File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 489, in dispatch_model - model.to(device) - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 2796, in to - return super().to(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1173, in to - return self._apply(convert) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 779, in _apply - module._apply(fn) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 779, in _apply - module._apply(fn) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 779, in _apply - module._apply(fn) - [Previous line repeated 2 more times] - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 853, in _apply - self._buffers[key] = fn(buf) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1159, in convert - return t.to( -torch.cuda.OutOfMemoryError: CUDA out of memory. Tried to allocate 20.00 MiB. 
GPU - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemm-sdpa,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,Qwen/Qwen1.5-14B,Qwen/Qwen1.5-14B,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.40.2,,0.30.1,,,,1.19.2,,,,0.11.1,,,,,,,,,,,,,,,,,,,,,,,,,,,MB,3984.965632,12732.33408,0.0,12085.886976,11337.364992,s,10,10.92237109375,1.092237109375,0.0021117743883837157,1.0918812255859374,1.0945805541992188,1.0956890197753906,1.0965757922363282,"[1.0967974853515625, 1.094334228515625, 1.0893026123046874, 1.090765625, 1.0907347412109376, 1.09039990234375, 1.0912408447265625, 1.09275048828125, 1.0935235595703126, 1.0925216064453125]",tokens/s,234.38134247836382,kWh,1.2875690162181854e-05,7.054504655134223e-06,6.32269116926043e-05,8.315710650992037e-05,tokens/kWh,3078510.1928655975,MB,3984.965632,12732.33408,0.0,12085.886976,11686.79936,s,10,637.5050507812499,63.75050507812499,0.0049395574759148195,63.749460937500004,63.757564453125,63.759594726562504,63.7612189453125,"[63.74835546875, 63.7478046875, 63.75041796875, 63.7437265625, 63.75711328125, 63.74664453125, 63.761625, 63.75044140625, 63.74853515625, 63.75038671875]",tokens/s,0.9882274645949038,kWh,0.0007525312550531495,0.00041245463154615205,0.0037184816692274027,0.004883467555826704,tokens/kWh,12900.669305119396,,s,629,646.3186791381836,1.027533671125888,0.12973576665422035,1.0118389892578126,1.012325793457031,1.0126516357421875,2.103177431640625,"[1.0117550048828126, 1.0119987182617187, 1.0120816650390625, 1.0120765991210938, 1.0125403442382812, 1.011704833984375, 1.0115768432617187, 1.0117868041992188, 1.01175390625, 1.0114559936523437, 1.0114959106445311, 1.0115798950195312, 1.0117611694335937, 1.01186865234375, 1.0123243408203124, 1.0116085815429687, 1.01121435546875, 1.0115245971679687, 1.0116761474609375, 1.011673095703125, 1.0115952758789062, 1.0115348510742188, 1.011820556640625, 1.0118010864257811, 1.0121104125976563, 1.0117160034179689, 1.0116536254882813, 1.0114908447265625, 1.0122445068359376, 1.0119515991210937, 1.0117140502929687, 1.011957763671875, 1.0116392822265625, 1.0119588012695313, 1.0129981689453125, 1.011984375, 1.0124021606445313, 1.01186865234375, 1.0122096557617188, 1.0120274047851563, 1.0117069091796875, 1.0121103515625, 1.0121615600585938, 1.0125332641601563, 1.0121226196289062, 1.01194140625, 1.0114242553710937, 1.0118287353515625, 1.011631103515625, 1.0119935913085938, 1.0117672729492186, 1.0120878295898437, 1.0116167602539063, 1.0122476196289063, 1.0120057983398438, 1.0121021728515625, 1.011689453125, 1.0124891967773437, 1.0117969970703125, 1.0116424560546875, 1.0114559326171875, 1.0116290283203124, 2.106623046875, 1.0121912841796874, 1.012216796875, 1.0120878295898437, 1.01224755859375, 1.0123724365234374, 1.0121103515625, 1.011904541015625, 1.0120908813476563, 1.0119536743164061, 1.0122240600585937, 1.0117017211914063, 1.0118287353515625, 1.0117017822265626, 1.0116792602539062, 1.0114119873046874, 1.0114088745117187, 1.01127783203125, 1.011852294921875, 1.01174169921875, 1.011968994140625, 
1.0117713623046876, 1.0116188354492188, 1.0120038452148437, 1.011915771484375, 1.0112901000976562, 1.0112747802734374, 1.0113668823242188, 1.01144677734375, 1.0113177490234375, 1.0119198608398436, 1.0120233154296876, 1.0116013793945313, 1.0115082397460937, 1.0116761474609375, 1.0115245971679687, 1.0114263305664062, 1.0112634887695313, 1.011557373046875, 1.0114109497070312, 1.0121984252929688, 1.0121513061523437, 1.01176318359375, 1.0114232177734375, 1.011726318359375, 1.011304443359375, 1.0149703979492188, 1.0115143432617189, 1.012031494140625, 1.0119803466796875, 1.0133125, 1.0118717651367188, 1.0115481567382814, 1.0116792602539062, 1.0120653076171875, 1.0115502319335938, 1.0118072509765625, 1.0117089233398437, 1.0117550048828126, 1.0120355834960937, 1.0122034912109374, 1.011757080078125, 1.0119905395507813, 2.102846435546875, 1.0113546142578125, 1.0115645141601564, 1.011766357421875, 1.0118368530273438, 1.011746826171875, 1.0118656005859374, 1.0115020751953125, 1.0114826049804688, 1.0119618530273438, 1.01157373046875, 1.0117109985351562, 1.0118194580078126, 1.0120345458984374, 1.0119188232421874, 1.0118768920898438, 1.0121174926757812, 1.0112481079101563, 1.011708984375, 1.011843017578125, 1.011999755859375, 1.0120202026367187, 1.0120222778320314, 1.0137774047851562, 1.011525634765625, 1.0120479125976563, 1.011984375, 1.0118092651367188, 1.0117805786132812, 1.0120990600585937, 1.0119854125976562, 1.01212158203125, 1.0122670288085938, 1.0120601806640626, 1.0118174438476562, 1.0118410034179688, 1.0115552978515625, 1.011240966796875, 1.011661865234375, 1.0114876708984375, 1.011441650390625, 1.01127783203125, 1.0116198120117188, 1.0113597412109374, 1.0116414184570313, 1.0120242309570313, 1.0122291259765626, 1.013486572265625, 1.0116536254882813, 1.0121564331054687, 1.0123980712890626, 1.0119321899414062, 1.012010009765625, 1.0122987670898438, 1.0121973876953125, 1.0126295166015624, 1.0126663818359376, 1.0119352416992187, 1.0116690063476563, 1.0120150756835937, 1.0120540771484374, 1.01182666015625, 1.0116351928710938, 2.10330615234375, 1.0118348999023437, 1.0117386474609376, 1.0117161254882812, 1.01167822265625, 1.0121646118164063, 1.0119137573242187, 1.01201611328125, 1.0119761962890625, 1.0122608032226563, 1.0121830444335937, 1.0119669799804687, 1.0134896850585937, 1.0120386352539064, 1.0123622436523438, 1.0113034057617187, 1.0113535766601562, 1.011219482421875, 1.0112798461914063, 1.0111006469726562, 1.0113710327148437, 1.0122311401367188, 1.0115225830078125, 1.0115430297851562, 1.0117457885742187, 1.0114498291015626, 1.0119761962890625, 1.0120970458984375, 1.0119505615234374, 1.0119619140625, 1.0121400146484374, 1.0117672729492186, 1.0114600830078124, 1.0114866943359375, 1.011557373046875, 1.011900390625, 1.0118594360351563, 1.01203662109375, 1.0116956176757812, 1.0115460815429687, 1.0117755126953125, 1.0116608276367187, 1.01161474609375, 1.0111918334960937, 1.0146948852539062, 1.01151953125, 1.011521484375, 1.0112010498046875, 1.01165771484375, 1.0114376220703125, 1.01186865234375, 1.0118800048828125, 1.0115624389648437, 1.0114273071289062, 1.011641357421875, 1.0113966064453126, 1.0116741333007813, 1.011451904296875, 1.0115655517578126, 1.011557373046875, 1.012046875, 1.0120038452148437, 1.011968017578125, 2.10362060546875, 1.0117232666015625, 1.0121318359375, 1.0118696899414064, 1.0120601806640626, 1.0119393310546876, 1.012031494140625, 1.0123212890625, 1.0119556884765626, 1.01214208984375, 1.0124891967773437, 1.0119710693359374, 1.0119178466796874, 1.0113935546875, 1.0116761474609375, 
1.0115359497070313, 1.011611572265625, 1.01361767578125, 1.0116761474609375, 1.0114703369140625, 1.0116137084960937, 1.011589111328125, 1.0115932006835937, 1.0114774780273437, 1.0115348510742188, 1.0114713745117188, 1.0118184814453124, 1.0116055297851563, 1.01148876953125, 1.0116792602539062, 1.0118379516601563, 1.0119649047851562, 1.0117376708984376, 1.011568603515625, 1.0120294189453125, 1.01218505859375, 1.0119823608398437, 1.0122332153320313, 1.011926025390625, 1.0118748168945313, 1.0124400634765625, 1.01216357421875, 1.011631103515625, 1.0115266723632812, 1.0125834350585938, 1.0121605224609376, 1.0119229736328126, 1.0119270629882813, 1.01148876953125, 1.0118287353515625, 1.011999755859375, 1.011862548828125, 1.0116915283203125, 1.0118143920898437, 1.011768310546875, 1.0122393798828124, 1.0125045776367188, 1.0162032470703124, 1.0122127075195313, 1.0123765869140624, 1.0122915649414062, 1.012389892578125, 1.0124400634765625, 2.102681640625, 1.0123212280273437, 1.0120990600585937, 1.0122465209960938, 1.0117007446289064, 1.012094970703125, 1.0112553100585937, 1.0112625122070313, 1.0114201049804687, 1.0113854370117188, 1.01134228515625, 1.0119854125976562, 1.0117590942382813, 1.0113126220703126, 1.0112696533203125, 1.011483642578125, 1.0116823120117187, 1.0114539794921875, 1.0115082397460937, 1.0114959106445311, 1.0115481567382814, 1.0120242919921876, 1.0115635375976562, 1.0115850219726563, 1.0117254028320313, 1.0121840209960937, 1.0118276977539062, 1.0116751098632812, 1.0118184814453124, 1.0114385986328125, 1.0118215942382813, 1.0118092651367188, 1.0117406616210938, 1.011473388671875, 1.0117386474609376, 1.0118164672851562, 1.01148876953125, 1.0114867553710938, 1.0149037475585938, 1.0118359375, 1.0122781982421876, 1.0117642211914062, 1.0119249877929688, 1.011294189453125, 1.0117027587890626, 1.0116690063476563, 1.0123939819335936, 1.0120908813476563, 1.0122567749023437, 1.0120653076171875, 1.0122485961914063, 1.012369384765625, 1.012421630859375, 1.0119987182617187, 1.0126663818359376, 1.0115481567382814, 1.0117089233398437, 1.0119823608398437, 1.0117867431640626, 1.0115614624023437, 1.0122546997070312, 1.0117652587890624, 1.012073486328125, 2.104281005859375, 1.011789794921875, 1.0118133544921875, 1.0122199096679687, 1.01174169921875, 1.0117130126953124, 1.0126079711914062, 1.0127093505859375, 1.0125383911132813, 1.0122393798828124, 1.01216357421875, 1.0123182373046875, 1.0122199096679687, 1.0121246948242189, 1.0119669799804687, 1.0121113891601563, 1.0128097534179688, 1.01222705078125, 1.012335693359375, 1.0115757446289062, 1.0118441162109375, 1.0118328247070312, 1.011631103515625, 1.0136719360351563, 1.0121174926757812, 1.01250048828125, 1.012552734375, 1.0127390747070313, 1.0122342529296875, 1.0118225708007813, 1.0121768798828126, 1.0121298217773438, 1.0121860961914062, 1.0118389892578126, 1.012316162109375, 1.0120714111328124, 1.0119198608398436, 1.0119669799804687, 1.0115543823242188, 1.0116760864257812, 1.0120601806640626, 1.0119342041015624, 1.0120653076171875, 1.0115552978515625, 1.0121380004882812, 1.0123212890625, 1.0120806274414063, 1.0117672729492186, 1.0117805786132812, 1.0118287353515625, 1.0118615112304687, 1.0117703857421876, 1.0114365234375, 1.0116065063476563, 1.0118512573242187, 1.0121676635742187, 1.0124697875976563, 1.0122608642578126, 1.0120376586914062, 1.0119403686523438, 1.0120181884765624, 1.0120386352539064, 1.01167822265625, 2.1052109375, 1.0120621948242188, 1.01188916015625, 1.0118225708007813, 1.01174169921875, 1.012010009765625, 1.0117171020507814, 
1.0117847290039061, 1.0114641723632813, 1.01161474609375, 1.0114928588867187, 1.0118482055664062, 1.0115112915039062, 1.0117078857421875, 1.0117007446289064, 1.011862548828125, 1.0131630249023438, 1.0117345581054686, 1.0117489013671874, 1.0116443481445312, 1.0115972900390624, 1.0121307983398438, 1.01167822265625, 1.0120202026367187, 1.0114754638671875, 1.012262939453125, 1.0119618530273438, 1.0120448608398438, 1.01155224609375, 1.0120181884765624, 1.0120068969726563, 1.0124298095703126, 1.0118533325195314, 1.0121185302734375, 1.0118779296875, 1.0119566650390626, 1.0118379516601563, 1.0119434204101563, 1.0118062133789063, 1.0120775756835938, 1.0119024658203124, 1.0125721435546875, 1.0118563842773438, 1.0118348999023437, 1.0117273559570312, 1.01222705078125, 1.011905517578125, 1.0115379028320313, 1.0121062622070311, 1.0116116333007812, 1.011979248046875, 1.012168701171875, 1.0120171508789062, 1.01224658203125, 1.0122731323242187, 1.0115819091796876, 1.0118994140625, 1.0117642211914062, 1.0116351928710938, 1.011962890625, 1.0119782104492188, 1.01182568359375, 1.012173828125, 2.105998291015625, 1.0114345092773438, 1.0115174560546876, 1.0114590454101562, 1.0117990112304687, 1.011937255859375, 1.0119331665039062, 1.011800048828125, 1.0117058715820313, 1.0113648681640626, 1.0117181396484376, 1.0116608276367187, 1.0114611206054687, 1.0114600830078124, 1.0114508666992188, 1.0114754638671875, 1.0120274047851563, 1.0133237915039062, 1.0123274536132814, 1.0115994262695311, 1.0119392700195313, 1.011694580078125, 1.0119700317382812, 1.0124237060546875, 1.0117488403320312, 1.0116751098632812, 1.0119721069335939, 1.011651611328125, 1.01169970703125, 1.0113812255859376, 1.0121625366210938, 1.0115175170898438, 1.0117620849609374, 1.011493896484375, 1.0118062133789063, 1.0119086303710938, 1.01222607421875, 1.0115870971679688, 1.0119854125976562, 1.0115471801757812, 1.0122720336914062, 1.0117857055664063, 1.0124462280273439, 1.0115880737304688, 1.0122782592773438, 1.0121298217773438, 1.0129663696289062, 1.011736572265625, 1.0120570678710938, 1.0118963623046875, 1.0122628784179688, 1.0121093139648438, 1.0120888061523436, 1.011789794921875, 1.0123253784179687, 1.0119198608398436, 1.0131046142578124, 1.0120990600585937, 1.0116629028320312, 1.0112337646484375, 1.0118236083984375, 1.0113085327148437, 1.0116392822265625, 2.104349609375, 1.0117294311523437, 1.0120775756835938, 1.0116546630859375, 1.01159423828125, 1.0118696899414064, 1.0115491943359376, 1.0113505249023438, 1.0112051391601562, 1.0120068969726563, 1.0118911743164063, 1.011857421875, 1.01180419921875, 1.0114385375976562, 1.011646484375, 1.011788818359375, 1.0118615112304687, 1.0118656005859374, 1.0119014282226562, 1.0118348999023437, 1.011794921875, 1.011900390625, 1.0119556884765626, 1.0122608642578126, 1.0124257202148437, 1.0121984252929688, 1.0144102172851563, 1.011820556640625, 1.0118195190429689, 1.0117386474609376, 1.0117069091796875, 1.0122393798828124, 1.011646484375, 1.0119147338867187, 1.011462158203125, 1.012073486328125, 1.0115911865234375, 1.011684326171875, 1.0119321899414062, 1.0118062133789063, 1.0121994018554688, 1.0123673706054688, 1.0117744750976563, 1.011873779296875, 1.0116044921875, 1.011989501953125, 1.0118911743164063, 1.01174169921875, 1.011662841796875, 1.0115194702148438, 1.0117089233398437, 1.0126878662109375, 1.0129653930664062, 1.0122413940429686, 1.0118389892578126, 1.0119505615234374, 1.0117611694335937, 1.01174169921875, 1.0118338623046874, 1.0117755126953125, 1.0121144409179688, 1.0123131103515626, 
1.0117294311523437]",tokens/s,0.973204117879934,,,,,,,, -4bit-awq-gemm-sdpa,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,mistralai/Mistral-7B-v0.1,mistralai/Mistral-7B-v0.1,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.0,,0.30.1,,,,1.19.2,,,,0.11.1,,,,,,,,,,,,,,,,,,,,,,,,,,,MB,2055.856128,6201.802752,0.0,5555.355648,5273.986048,s,10,6.166099914550782,0.6166099914550782,0.001805187295356428,0.6164970397949219,0.6192070678710938,0.619502490234375,0.619738828125,"[0.6174358520507812, 0.6152646484375, 0.6157444458007812, 0.6149035034179687, 0.615364013671875, 0.6138878173828125, 0.6173106689453125, 0.6197979125976563, 0.6172496337890625, 0.6191414184570313]",tokens/s,415.1732919472978,kWh,7.25516759687e-06,3.97553441218727e-06,3.4973328632232774e-05,4.6204030641290036e-05,tokens/kWh,5540642.157120091,MB,2055.856128,6201.802752,0.0,5555.355648,5324.908032,s,10,365.7995859375,36.57995859374999,0.035939708288129694,36.562037109375,36.625498828125,36.6277962890625,36.6296342578125,"[36.623765625, 36.5703828125, 36.55220703125, 36.541484375, 36.54166015625, 36.54937890625, 36.62498828125, 36.55369140625, 36.61193359375, 36.63009375]",tokens/s,1.7222545465309549,kWh,0.0004313527007732127,0.00023641881037992466,0.002030578407141164,0.002698349918294301,tokens/kWh,23347.602018875292,,s,629,370.76827941894544,0.5894567240364791,0.07322585165475656,0.5803335571289062,0.5820514404296875,0.5823793212890626,1.195406015625,"[0.5803048706054688, 0.5797140502929687, 0.5794703369140625, 0.5797222290039062, 0.57939453125, 0.5800867919921875, 0.5797529296875, 0.5800857543945312, 0.5802291259765625, 0.5800048828125, 0.5795665893554688, 0.5795399780273438, 0.580042724609375, 0.5822300415039062, 0.5817169799804688, 0.5822146606445312, 0.5819473876953125, 0.5820886840820313, 0.5811210327148437, 0.582023193359375, 0.5814353637695312, 0.581949462890625, 0.580401123046875, 0.5816944580078125, 0.5808414916992187, 0.5820006103515625, 0.5810022583007812, 0.5814302978515625, 0.581444580078125, 0.5812797241210937, 0.5823364868164063, 0.5824470825195313, 0.5824716796875, 0.5824655151367187, 0.581918701171875, 0.5819730224609375, 0.58178662109375, 0.5807022094726563, 0.5815429077148437, 0.5823539428710938, 0.5825064697265625, 0.5821757202148438, 0.5818603515625, 0.5820498046875, 0.582350830078125, 0.5819617919921874, 0.5816124877929687, 0.5811609497070312, 0.5808312377929687, 0.582739990234375, 0.5819484252929688, 0.5809541015625, 0.5825567016601563, 0.5805885620117187, 0.5814404907226562, 0.581485595703125, 0.581022705078125, 0.5828966674804688, 0.5815776977539062, 0.5818429565429688, 0.5819647827148438, 0.5811875610351562, 1.1957647705078125, 0.5814425659179687, 0.5816586303710938, 0.5816115112304687, 0.5822146606445312, 0.5820579833984375, 0.5818347778320313, 0.5802680053710938, 0.5797498779296875, 0.5799751586914063, 0.5797396240234375, 0.5802352905273438, 0.5802066040039062, 0.580453369140625, 0.5803786010742188, 0.5806100463867188, 0.5803131103515625, 0.5797007446289062, 0.5798512573242187, 0.5799127197265626, 0.5799597778320312, 0.5807144775390625, 
0.5798113403320313, 0.579999755859375, 0.5794037475585937, 0.5798748168945312, 0.5804503173828125, 0.5799270629882812, 0.5803120727539063, 0.5800960083007812, 0.5797396240234375, 0.5799884643554688, 0.5807646484375, 0.5804298095703125, 0.5802833862304687, 0.5804861450195312, 0.5813104858398438, 0.5805506591796875, 0.581159912109375, 0.5806080932617188, 0.5819862670898438, 0.5803499755859375, 0.5799393310546875, 0.5800376586914062, 0.5834240112304687, 0.5802772216796875, 0.5810247802734375, 0.5808568115234375, 0.58094287109375, 0.5806510009765625, 0.580552734375, 0.580220947265625, 0.5798553466796875, 0.5802045288085937, 0.5803714599609375, 0.5809602661132812, 0.5803591918945312, 0.5802659912109375, 0.5799219360351563, 0.5800376586914062, 0.57982568359375, 0.5798696899414062, 0.579905517578125, 1.19632080078125, 0.5800499267578125, 0.5804564208984375, 0.5798563842773438, 0.5799393310546875, 0.57995263671875, 0.5803939819335937, 0.5799925537109375, 0.5806735229492187, 0.5805301513671876, 0.5799966430664063, 0.5798082275390625, 0.5798707275390625, 0.5796608276367188, 0.579547119140625, 0.5798369140625, 0.5801513061523438, 0.57986767578125, 0.5802199096679688, 0.5796167602539063, 0.5794580688476563, 0.5797939453125, 0.5794334716796875, 0.5804932861328125, 0.5801594848632813, 0.5801451416015625, 0.5808414916992187, 0.5797929077148437, 0.5807810668945312, 0.5800017700195312, 0.5802393798828125, 0.5800509643554688, 0.5801564331054687, 0.5809776611328125, 0.579979248046875, 0.580262939453125, 0.5801871337890625, 0.58008984375, 0.58040625, 0.5803786010742188, 0.5804656372070313, 0.5801768798828125, 0.5805813598632813, 0.5812029418945313, 0.5800724487304687, 0.5806858520507813, 0.5798379516601563, 0.580527099609375, 0.5803837280273437, 0.5800488891601563, 0.5802803344726563, 0.5804451904296875, 0.5802803344726563, 0.58022607421875, 0.580389892578125, 0.5802311401367187, 0.580168701171875, 0.5799096069335937, 0.580010986328125, 0.5800120239257812, 0.5806653442382812, 0.5806591796875, 0.5803489379882812, 1.1951103515625, 0.5797929077148437, 0.579947509765625, 0.579768310546875, 0.5811712036132812, 0.5812357177734375, 0.58012060546875, 0.5796597900390625, 0.5797007446289062, 0.5796484985351562, 0.5794017333984375, 0.580021240234375, 0.5799884643554688, 0.5799659423828125, 0.5805444946289062, 0.5804830932617188, 0.5801144409179687, 0.5799618530273437, 0.5795880737304687, 0.579757080078125, 0.579483642578125, 0.5798082275390625, 0.5797939453125, 0.5796690063476563, 0.5802239990234375, 0.57957275390625, 0.5796597900390625, 0.5793659057617188, 0.5799301147460938, 0.579904541015625, 0.5796874389648438, 0.580485107421875, 0.5801973876953125, 0.5806879272460937, 0.5800631713867187, 0.579857421875, 0.5803335571289062, 0.5799208984375, 0.5803970336914063, 0.5798604736328125, 0.5801195678710938, 0.5799127197265626, 0.5803151245117187, 0.5800714111328125, 0.5800929565429688, 0.5801134033203125, 0.579852294921875, 0.580485107421875, 0.5806182250976563, 0.5799198608398437, 0.5808701171875, 0.5802598266601563, 0.5800806274414062, 0.5799147338867188, 0.5800150756835938, 0.5798461303710938, 0.5798594360351562, 0.5798338623046875, 0.5796372680664063, 0.5799874267578125, 0.5799168090820312, 0.57957373046875, 0.580232177734375, 1.19552099609375, 0.5814528198242187, 0.5798369140625, 0.5804021606445312, 0.579589111328125, 0.5795419921875, 0.5797027587890625, 0.5801830444335937, 0.5802567749023437, 0.5800621948242187, 0.57980517578125, 0.5795338134765625, 0.5802721557617188, 0.5796618041992188, 0.5800499267578125, 
0.57969970703125, 0.5798021240234374, 0.58012158203125, 0.581080078125, 0.5802280883789063, 0.5805322265625, 0.5801748657226562, 0.57978369140625, 0.5796332397460937, 0.57942626953125, 0.5793402709960938, 0.5803540649414063, 0.5799547119140624, 0.579684326171875, 0.5799802856445313, 0.5798615112304687, 0.5798123779296875, 0.579684326171875, 0.5798983764648438, 0.5798276977539063, 0.579751953125, 0.58046875, 0.5798799438476563, 0.5800233154296875, 0.5798615112304687, 0.5794273071289062, 0.5798963012695313, 0.5797560424804687, 0.5802567749023437, 0.5799588012695313, 0.5798819580078125, 0.5800070190429687, 0.5797190551757813, 0.5804554443359375, 0.5799905395507813, 0.58028955078125, 0.5803919067382812, 0.5802905883789062, 0.5805660400390625, 0.579968994140625, 0.5803079833984375, 0.5797980346679688, 0.580832275390625, 0.580274169921875, 0.5802926025390625, 0.58039501953125, 0.57959423828125, 0.5799966430664063, 1.19406689453125, 0.579810302734375, 0.5797396240234375, 0.5796720581054687, 0.5796259765625, 0.5796741333007812, 0.5809397583007813, 0.5795031127929687, 0.5802731323242187, 0.5795594482421875, 0.5797140502929687, 0.579177490234375, 0.5797734375, 0.5801287841796875, 0.5797498779296875, 0.5798656005859375, 0.5796587524414063, 0.5798328247070312, 0.5797969970703125, 0.5797437744140626, 0.5796495361328124, 0.5797191772460938, 0.5803560791015625, 0.5800233154296875, 0.5815418701171875, 0.5802926025390625, 0.5799188232421875, 0.5799495849609375, 0.5795277099609375, 0.5796403198242187, 0.5796351928710938, 0.5801963500976562, 0.5797037963867188, 0.5795000610351563, 0.5798338623046875, 0.5798881225585938, 0.5797590942382812, 0.5800130615234375, 0.58073193359375, 0.5800908813476563, 0.5806469116210937, 0.5811783447265625, 0.5802403564453125, 0.5803519897460937, 0.58052197265625, 0.5798973388671875, 0.5800233154296875, 0.5798533325195312, 0.5805916137695313, 0.5798184814453125, 0.5794979858398438, 0.579715087890625, 0.5797007446289062, 0.5794171142578125, 0.5801093139648438, 0.58170166015625, 0.5814415283203125, 0.5815838623046875, 0.5820006103515625, 0.57984716796875, 0.5823866577148438, 0.5812367553710938, 0.5812715454101562, 1.19872412109375, 0.5820743408203125, 0.5816565551757813, 0.582371337890625, 0.5821552734375, 0.5816156005859375, 0.5810933837890625, 0.5808988037109375, 0.5822463989257812, 0.5805066528320313, 0.5815418701171875, 0.5819443359375, 0.5820221557617188, 0.5824573364257812, 0.58057421875, 0.5815746459960938, 0.5819218139648438, 0.581970947265625, 0.5814149169921875, 0.58176513671875, 0.5813401489257812, 0.5796444091796875, 0.5797304077148437, 0.57986767578125, 0.57969970703125, 0.5803519897460937, 0.581917724609375, 0.5818121948242188, 0.5813934326171875, 0.5819647827148438, 0.5817763671875, 0.5821880493164062, 0.5812572021484375, 0.5817692260742188, 0.58166064453125, 0.5815797729492187, 0.5820784912109375, 0.5817958374023438, 0.5818296508789063, 0.581411865234375, 0.58090087890625, 0.5812305908203125, 0.5817753295898438, 0.581781494140625, 0.5817088012695313, 0.5801287841796875, 0.5828178100585938, 0.5812131958007812, 0.5801339111328125, 0.5819945068359375, 0.5805414428710938, 0.5801564331054687, 0.58094384765625, 0.5817108764648438, 0.5819852905273437, 0.5817589721679688, 0.5808824462890625, 0.580969482421875, 0.5814876098632813, 0.5799014282226562, 0.5799772338867187, 0.5808035888671875, 0.5819320068359375, 1.200752685546875, 0.5804697875976562, 0.5798113403320313, 0.5795338134765625, 0.57959423828125, 0.5793955688476562, 0.5800294189453125, 0.579673095703125, 
0.5795379028320312, 0.5792747802734375, 0.57942529296875, 0.5792655639648437, 0.579324951171875, 0.5791590576171874, 0.579515380859375, 0.5797427368164062, 0.5802495727539062, 0.5797744750976562, 0.58040625, 0.5798430786132812, 0.5793853149414062, 0.5796864013671875, 0.5795184936523438, 0.5801082763671875, 0.5802659912109375, 0.5800826416015625, 0.5807779541015625, 0.5820303344726563, 0.5817589721679688, 0.5796484985351562, 0.5796925659179688, 0.580094970703125, 0.5802587890625, 0.5804124145507813, 0.5822156982421876, 0.5799669799804688, 0.5799188232421875, 0.5803817138671875, 0.5797427368164062, 0.5796126708984375, 0.5807124633789063, 0.5810831298828125, 0.5797539672851563, 0.579541015625, 0.5795164184570313, 0.5796055297851562, 0.5795983276367187, 0.5792973022460938, 0.5793689575195312, 0.5794713745117187, 0.5795768432617188, 0.5799382934570313, 0.5819227905273437, 0.5818388671875, 0.581307373046875, 0.5798348999023437, 0.5799434204101562, 0.5802772216796875, 0.5826939086914062, 0.5821061401367188, 0.582097900390625, 0.5818736572265625, 0.5818245239257812, 1.197048828125, 0.5797693481445313, 0.5795419921875, 0.579435546875, 0.5806448364257812, 0.580769775390625, 0.5797642211914062, 0.5793648681640625, 0.5799239501953125, 0.579820556640625, 0.5798553466796875, 0.5799976806640625, 0.5800867919921875, 0.5793955688476562, 0.579800048828125, 0.5796505737304688, 0.5797386474609375, 0.5797744750976562, 0.5809653930664063, 0.58010009765625, 0.5795747680664063, 0.5806735229492187, 0.5816740112304688, 0.5815050048828125, 0.5816995849609375, 0.581855224609375, 0.5821460571289062, 0.5816473388671874, 0.5822300415039062, 0.5816422119140625, 0.5811865844726563, 0.58155517578125, 0.5818736572265625, 0.5809817504882813, 0.5813842163085937, 0.5811926879882813, 0.5820927734375, 0.5812623291015625, 0.5827706909179687, 0.582930419921875, 0.5829867553710938, 0.58191259765625, 0.5821531982421875, 0.5811548461914062, 0.5818746948242187, 0.5817907104492187, 0.5817804565429687, 0.5819412231445312, 0.5816668090820313, 0.5811885986328125, 0.5814149169921875, 0.5814466552734375, 0.5809336547851562, 0.581085205078125, 0.5813053588867187, 0.581718017578125, 0.5829119873046875, 0.582118408203125, 0.5821614379882812, 0.5819883422851563, 0.5823518676757813, 0.5817088012695313, 0.5822545776367187, 1.201244140625, 0.580126708984375, 0.5799127197265626, 0.5811712036132812, 0.5819514770507812, 0.5811456298828125, 0.5812008666992188, 0.5814640502929688, 0.5819463500976563, 0.5820886840820313, 0.5823846435546876, 0.580305908203125, 0.5809714965820313, 0.5801236572265625, 0.5817006225585938, 0.5822177124023438, 0.5816535034179687, 0.5801830444335937, 0.58051171875, 0.5805444946289062, 0.5798604736328125, 0.5808660278320312, 0.5804462280273438, 0.5811814575195312, 0.5804677124023437, 0.581433349609375, 0.5835038452148438, 0.5813534545898438, 0.5819402465820313, 0.5816535034179687, 0.58176513671875, 0.5820324096679688, 0.5815357666015625, 0.5827686157226563, 0.582540283203125, 0.5820057373046875, 0.5823068237304687, 0.58144970703125, 0.5820303344726563, 0.5807462158203125, 0.5822003173828125, 0.5823150024414062, 0.5814906616210938, 0.5829560546875, 0.5828761596679688, 0.5815275268554687, 0.5806663818359376, 0.5816873168945312, 0.5818255615234375, 0.58170166015625, 0.5825853271484375, 0.5802014770507813, 0.5799505615234375, 0.5799178466796875, 0.581676025390625, 0.5818982543945312, 0.5817825317382812, 0.5805660400390625, 0.581728271484375, 0.581496826171875, 0.5820068359375, 0.5809755859375, 
0.5805465087890626]",tokens/s,1.6964773820072907,,,,,,,, -4bit-awq-gemm-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,google/gemma-7b,google/gemma-7b,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 304, in hf_raise_for_status - response.raise_for_status() - File ""/usr/local/lib/python3.10/dist-packages/requests/models.py"", line 1024, in raise_for_status - raise HTTPError(http_error_msg, response=self) -requests.exceptions.HTTPError: 403 Client Error: Forbidden for url: https://huggingface.co/google/gemma-7b/resolve/main/config.json - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1722, in _get_metadata_or_catch_error - metadata = get_hf_file_metadata(url=url, proxies=proxies, timeout=etag_timeout, headers=headers) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1645, in get_hf_file_metadata - r = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 372, in _request_wrapper - response = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 396, in _request_wrapper - hf_raise_for_status(response) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 367, in hf_raise_for_status - raise HfHubHTTPError(message, response=response) from e -huggingface_hub.utils._errors.HfHubHTTPError: (Request ID: Root=1-66948164-47fd127e05a4356c2ec574b8;c3061e5b-3558-4fd8-8cfe-a7ee51ad94f2) - -403 Forbidden: Please enable access to public gated repositories in your fine-grained token settings to view this repository.. -Cannot access content at: https://huggingface.co/google/gemma-7b/resolve/main/config.json. -If you are trying to create or update content,make sure you have a token with the `write` role. 
- -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1221, in hf_hub_download - return _hf_hub_download_to_cache_dir( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1325, in _hf_hub_download_to_cache_dir - _raise_on_head_call_error(head_call_error, force_download, local_files_only) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1826, in _raise_on_head_call_error - raise LocalEntryNotFoundError( -huggingface_hub.utils._errors.LocalEntryNotFoundError: An error happened while trying to locate the file on the Hub and we cannot find the requested files in the local cache. Please check your connection and try again or make sure your Internet connection is on. - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 445, in cached_file - raise EnvironmentError( -OSError: We couldn't connect to 'https://huggingface.co' to load this file, couldn't find it in the cached files and it looks like google/gemma-7b is not the path to a directory containing a file named config.json. -Checkout your internet connection or see how to run the library in offline mode at 'https://huggingface.co/docs/transformers/installation#offline-mode'. 
- -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemm-sdpa,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,Qwen/Qwen1.5-1.8B,Qwen/Qwen1.5-1.8B,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.40.2,,0.30.1,,,,1.19.2,,,,0.11.1,,,,,,,,,,,,,,,,,,,,,,,,,,,MB,1869.656064,3326.60736,0.0,2682.257408,2578.238464,s,10,1.3870304412841796,0.13870304412841797,0.00244373759901306,0.13811017608642578,0.14131588592529296,0.14323313064575197,0.14476692642211916,"[0.14515037536621095, 0.13752809143066405, 0.1378865966796875, 0.13688485717773438, 0.13833375549316407, 0.13642874145507813, 0.13711347961425782, 0.13835682678222655, 0.1384578857421875, 0.14088983154296875]",tokens/s,1845.6696578554026,kWh,1.6179650182774805e-06,8.864855210497944e-07,6.8487079142743225e-06,9.353158453601597e-06,tokens/kWh,27370433.342912387,MB,1869.656064,3328.704512,0.0,2682.257408,2667.0976,s,10,82.21512695312498,8.2215126953125,0.02726168309176718,8.22408544921875,8.254994140625,8.26155078125,8.26679609375,"[8.218158203125, 8.2300126953125, 8.231037109375, 8.253537109375, 8.2426494140625, 8.18561865234375, 8.1915498046875, 8.18607568359375, 8.268107421875, 8.208380859375]",tokens/s,7.662823416415751,kWh,9.660354430590225e-05,5.294595236465346e-05,0.0003949548913061273,0.000544504387976683,tokens/kWh,115701.54693169857,,s,629,83.32182740783702,0.1324671341936994,0.01648122008836376,0.13012069702148438,0.13181768188476561,0.13225840454101562,0.2677710559082031,"[0.1353492431640625, 0.13449932861328126, 0.13354396057128906, 0.13218812561035156, 0.1321062469482422, 0.132279296875, 0.1320765380859375, 0.1297838134765625, 0.12942950439453124, 0.13008486938476563, 0.1297357482910156, 0.12950111389160157, 0.12954214477539064, 0.12996812438964844, 0.12972236633300782, 0.129586181640625, 0.12967730712890624, 0.12944589233398437, 0.12974386596679688, 0.12966400146484375, 0.12964044189453125, 0.12967730712890624, 0.13006745910644532, 0.13016166687011718, 0.12972647094726564, 0.13020672607421874, 0.1301012420654297, 0.13006541442871095, 0.1300264892578125, 0.13026918029785156, 0.1298155517578125, 0.12965580749511718, 0.12980120849609375, 0.13100137329101563, 0.12973667907714845, 0.12997222900390626, 0.12998655700683595, 0.13009100341796875, 0.13086618041992187, 0.13041253662109376, 0.13003570556640626, 0.13003366088867188, 0.12998655700683595, 0.13012069702148438, 0.12974899291992187, 0.1296711730957031, 0.13009510803222657, 0.13012582397460937, 0.13009408569335937, 0.129723388671875, 0.12967532348632813, 0.13028140258789062, 0.1303726043701172, 0.13008793640136718, 0.13204173278808592, 0.13000090026855468, 0.1302650909423828, 0.12970188903808594, 0.13084979248046874, 0.12998963928222657, 0.12973773193359375, 0.13070130920410156, 0.26767672729492187, 0.13097874450683594, 0.12999679565429687, 0.1298524169921875, 0.12990463256835938, 0.12967730712890624, 0.130735107421875, 0.12993740844726562, 0.12993843078613282, 0.12966195678710937, 0.13075456237792968, 0.1301739501953125, 0.1298462677001953, 0.12979507446289062, 
0.12965171813964843, 0.12960255432128906, 0.12983296203613282, 0.1297029113769531, 0.12987289428710938, 0.13024870300292968, 0.12961074829101563, 0.12961587524414062, 0.1304627227783203, 0.13114883422851562, 0.13138124084472655, 0.13123989868164063, 0.1312419891357422, 0.1315799102783203, 0.13145703125, 0.1315246124267578, 0.13152359008789063, 0.13154917907714844, 0.13147648620605468, 0.13150822448730468, 0.1314273223876953, 0.1317058563232422, 0.1314283447265625, 0.13160345458984374, 0.13148159790039063, 0.13152767944335939, 0.13164851379394532, 0.13085594177246093, 0.13025791931152345, 0.12963226318359375, 0.13061222839355469, 0.12971110534667968, 0.1318850860595703, 0.13038383483886717, 0.12956982421875, 0.13069512939453126, 0.12964761352539061, 0.1297664337158203, 0.1301851806640625, 0.13052313232421875, 0.1300756530761719, 0.131198974609375, 0.1302650909423828, 0.13156045532226562, 0.13132595825195312, 0.13137612915039062, 0.13114163208007812, 0.13150822448730468, 0.13148774719238282, 0.26988442993164063, 0.1297592315673828, 0.1303961639404297, 0.13102079772949218, 0.13180621337890625, 0.1311805419921875, 0.131198974609375, 0.1320120391845703, 0.13153382873535155, 0.13055078125, 0.13116621398925782, 0.13224858093261718, 0.1315379180908203, 0.13149183654785157, 0.13085594177246093, 0.1307740173339844, 0.13012069702148438, 0.13034701538085938, 0.13029273986816406, 0.12967219543457031, 0.12965785217285156, 0.13052621459960936, 0.1309951934814453, 0.13132908630371093, 0.12980528259277344, 0.12976431274414063, 0.130735107421875, 0.1307904052734375, 0.13091941833496093, 0.13070028686523438, 0.13097164916992188, 0.13049139404296875, 0.13055282592773437, 0.13094093322753905, 0.1304883270263672, 0.13196185302734376, 0.13015142822265624, 0.13110272216796875, 0.1304780731201172, 0.13022618103027345, 0.13066342163085937, 0.1300070343017578, 0.1298851776123047, 0.12999679565429687, 0.13136691284179688, 0.13021286010742186, 0.1300305938720703, 0.13127577209472657, 0.12978688049316406, 0.12988313293457032, 0.12990975952148437, 0.13098086547851562, 0.12965785217285156, 0.13045555114746094, 0.1315246124267578, 0.1313638458251953, 0.12958309936523438, 0.12952677917480468, 0.12959642028808593, 0.13055795288085936, 0.13012889099121094, 0.13161677551269532, 0.13049856567382812, 0.2679295959472656, 0.13007772827148437, 0.1300623016357422, 0.1296732177734375, 0.1302405090332031, 0.13101568603515626, 0.13053543090820313, 0.12958309936523438, 0.12989543151855468, 0.13040640258789063, 0.12972032165527345, 0.12959744262695314, 0.12965682983398438, 0.12967628479003906, 0.1296046142578125, 0.12968960571289062, 0.1309102020263672, 0.13147955322265625, 0.13136282348632813, 0.13167820739746094, 0.1325506591796875, 0.1314693145751953, 0.1313454132080078, 0.1313280029296875, 0.13255577087402343, 0.1314273223876953, 0.13156965637207033, 0.13132698059082032, 0.1305374755859375, 0.1306746826171875, 0.1309071350097656, 0.13065113830566405, 0.13108326721191407, 0.1307310333251953, 0.13129315185546875, 0.13122866821289061, 0.13088768005371093, 0.13070541381835937, 0.1310146484375, 0.13133619689941406, 0.1298104248046875, 0.13102490234375, 0.13102694702148437, 0.13097779846191407, 0.13345689392089843, 0.13114060974121095, 0.13085081481933594, 0.13208985900878906, 0.13153689575195313, 0.13162701416015626, 0.13186355590820312, 0.13157069396972657, 0.13132492065429688, 0.13146829223632814, 0.13144781494140625, 0.13143244934082032, 0.13133209228515624, 0.13199154663085938, 0.13162086486816407, 0.13136589050292968, 
0.13134745788574217, 0.13149900817871094, 0.13125018310546874, 0.27049984741210936, 0.13173965454101563, 0.13157376098632811, 0.13129933166503907, 0.13149183654785157, 0.13152255249023437, 0.13144268798828124, 0.13151744079589844, 0.13144781494140625, 0.1312665557861328, 0.1313638458251953, 0.1312542724609375, 0.13116621398925782, 0.1316812744140625, 0.1304279022216797, 0.13107609558105468, 0.13173452758789062, 0.13157273864746094, 0.13156761169433595, 0.131114013671875, 0.13155325317382813, 0.13144883728027343, 0.13135667419433594, 0.1312972869873047, 0.13135154724121093, 0.13139456176757813, 0.13143653869628907, 0.13130137634277345, 0.13196800231933595, 0.1318707275390625, 0.131631103515625, 0.1325455322265625, 0.13174578857421876, 0.13033779907226561, 0.12979814147949217, 0.129902587890625, 0.13098188781738282, 0.13004287719726562, 0.13019442749023438, 0.12989439392089844, 0.13051187133789063, 0.12981350708007813, 0.12995890808105467, 0.13021900939941405, 0.12963839721679687, 0.12967628479003906, 0.13002546691894531, 0.13009510803222657, 0.1296711730957031, 0.13236019897460938, 0.12973362731933594, 0.13032858276367187, 0.13070643615722657, 0.1302425537109375, 0.1299220428466797, 0.12974490356445312, 0.12966812133789063, 0.12980630493164064, 0.129623046875, 0.13014938354492187, 0.13049958801269532, 0.12993023681640625, 0.1298841552734375, 0.26678680419921874, 0.1296680908203125, 0.12964556884765624, 0.12968447875976563, 0.12973260498046876, 0.129765380859375, 0.1300142059326172, 0.1296855010986328, 0.12954725646972656, 0.1295247344970703, 0.12960870361328125, 0.1295667266845703, 0.12963941955566408, 0.12973158264160156, 0.13008895874023438, 0.13011558532714843, 0.12971827697753907, 0.12998963928222657, 0.12966706848144532, 0.12973568725585938, 0.12970803833007813, 0.1298350067138672, 0.1297090606689453, 0.12965274047851563, 0.12967730712890624, 0.13019648742675782, 0.12975820922851564, 0.12972854614257812, 0.12970799255371093, 0.1302794189453125, 0.12972647094726564, 0.12966400146484375, 0.12970086669921874, 0.12959333801269532, 0.1299220428466797, 0.1300633544921875, 0.1296537628173828, 0.12968038940429688, 0.1319403839111328, 0.12973974609375, 0.13016064453125, 0.13014527893066405, 0.1306234893798828, 0.1300377655029297, 0.13088461303710938, 0.13168946838378906, 0.1299251251220703, 0.12965580749511718, 0.12978688049316406, 0.13135565185546874, 0.13036749267578124, 0.12981248474121093, 0.1297510986328125, 0.1297816925048828, 0.12961383056640624, 0.1298667449951172, 0.12978175354003907, 0.12962611389160156, 0.13000294494628906, 0.12986572265625, 0.1298462677001953, 0.12970803833007813, 0.12991897583007814, 0.2678077392578125, 0.13065216064453125, 0.12982272338867187, 0.13018418884277344, 0.13069625854492187, 0.12963424682617186, 0.12971827697753907, 0.12956063842773438, 0.13137094116210937, 0.13036647033691406, 0.12986265563964844, 0.12970803833007813, 0.1298462677001953, 0.12968447875976563, 0.1297029113769531, 0.12973670959472655, 0.12944793701171875, 0.12968345642089843, 0.1307125701904297, 0.12972134399414062, 0.12976742553710938, 0.12971929931640624, 0.12958309936523438, 0.13087026977539062, 0.12964659118652344, 0.1298032684326172, 0.12998757934570312, 0.13012275695800782, 0.12993536376953124, 0.1304698944091797, 0.1299988555908203, 0.12983807373046874, 0.13000192260742188, 0.13018623352050782, 0.1299199981689453, 0.13019648742675782, 0.13000090026855468, 0.13050265502929687, 0.12989645385742188, 0.12952677917480468, 0.12986778259277343, 0.13002546691894531, 0.13098597717285157, 
0.129828857421875, 0.12967730712890624, 0.13003981018066407, 0.1295636444091797, 0.12984115600585938, 0.12994764709472656, 0.13031321716308594, 0.12971315002441405, 0.12961485290527344, 0.12971417236328125, 0.12996202087402345, 0.13037257385253906, 0.12986061096191406, 0.12995584106445313, 0.13010226440429687, 0.1315707550048828, 0.1297571258544922, 0.12957183837890626, 0.12970188903808594, 0.13077810668945314, 0.2692198486328125, 0.13007154846191407, 0.12988313293457032, 0.12971212768554688, 0.12965481567382814, 0.12962608337402343, 0.12967219543457031, 0.12959231567382812, 0.12979200744628908, 0.1296680908203125, 0.13002957153320313, 0.12997938537597656, 0.12960153198242189, 0.13012786865234374, 0.13016986083984375, 0.12967219543457031, 0.1310576629638672, 0.12998348999023437, 0.12976332092285156, 0.1300869140625, 0.1296824951171875, 0.13126751708984374, 0.12993536376953124, 0.13007359313964845, 0.12969984436035156, 0.12999679565429687, 0.1298698272705078, 0.1300858917236328, 0.1298913269042969, 0.1299404754638672, 0.13012275695800782, 0.13052313232421875, 0.1302425537109375, 0.13017805480957031, 0.12996812438964844, 0.12995071411132814, 0.13004083251953125, 0.130302978515625, 0.1300623321533203, 0.13056716918945313, 0.1300695037841797, 0.1299261474609375, 0.12973260498046876, 0.13010330200195314, 0.1296855010986328, 0.13009613037109374, 0.12976742553710938, 0.12981350708007813, 0.12980429077148437, 0.12988723754882814, 0.1295564727783203, 0.12995277404785155, 0.12949913024902343, 0.12956877136230469, 0.129870849609375, 0.13016371154785156, 0.12968754577636718, 0.1295667266845703, 0.12944383239746093, 0.1296282196044922, 0.1295543670654297, 0.12969778442382812, 0.12969062805175782, 0.26870681762695314, 0.1297827911376953, 0.1297592315673828, 0.131557373046875, 0.12989439392089844, 0.13009408569335937, 0.1305999298095703, 0.1311446990966797, 0.13035110473632813, 0.13046885681152343, 0.1303521270751953, 0.13139045715332032, 0.1301749725341797, 0.1304627227783203, 0.13010841369628906, 0.1311856689453125, 0.1305753936767578, 0.1308395233154297, 0.12992515563964843, 0.13041970825195312, 0.13009100341796875, 0.1309265594482422, 0.130155517578125, 0.13011354064941405, 0.12959437561035156, 0.1299148864746094, 0.13058047485351562, 0.13014527893066405, 0.13060198974609374, 0.12966400146484375, 0.12963533020019533, 0.1295380554199219, 0.12953395080566407, 0.1297786865234375, 0.13530213928222656, 0.13242477416992188, 0.13271443176269532, 0.1324779510498047, 0.13233970642089843, 0.13220352172851563, 0.1323294677734375, 0.13220556640625, 0.1319833526611328, 0.1321994171142578, 0.13245542907714844, 0.132347900390625, 0.13226495361328125, 0.13233561706542968, 0.13197415161132814, 0.13208883666992188, 0.13216461181640626, 0.1321881561279297, 0.13220249938964843, 0.13227622985839843, 0.1321246795654297, 0.13223423767089842, 0.13205606079101562, 0.13206629943847656, 0.13218611145019532, 0.13227622985839843, 0.13237759399414062, 0.13250457763671875, 0.13218917846679687, 0.27291647338867187, 0.13057228088378905, 0.12985139465332032, 0.12977766418457032, 0.12981146240234376, 0.1296363525390625, 0.12980120849609375, 0.12959642028808593, 0.12995993041992188, 0.129691650390625, 0.12974592590332032, 0.12970086669921874, 0.12972647094726564, 0.1295984649658203, 0.12972032165527345, 0.13100953674316407, 0.13002957153320313, 0.13050367736816407, 0.13074021911621095, 0.13157478332519532, 0.12986880493164063, 0.12991693115234376, 0.12960050964355468, 0.12965682983398438, 0.13195468139648436, 0.13054464721679687, 
0.1317058563232422, 0.13119078063964842, 0.13114982604980469, 0.1297049560546875, 0.12969573974609375, 0.12971315002441405, 0.12958412170410155, 0.1308958740234375, 0.1309696044921875, 0.12978994750976564, 0.12954112243652344, 0.12962713623046876, 0.12967832946777344, 0.12949913024902343, 0.12971827697753907, 0.12982989501953124, 0.1297622985839844, 0.12973464965820314, 0.12974490356445312, 0.1297244110107422, 0.13159628295898437, 0.13082418823242187, 0.1297407989501953, 0.12985548400878907, 0.1299814453125, 0.13154815673828124, 0.13089791870117187, 0.12983602905273436, 0.1315010528564453, 0.13150003051757814, 0.132068359375, 0.13167718505859374, 0.13150309753417969, 0.13090509033203124, 0.12976332092285156, 0.1298053741455078, 0.1295963592529297]",tokens/s,7.549042304619914,,,,,,,, -4bit-awq-gemm-sdpa,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,meta-llama/Meta-Llama-3-8B,meta-llama/Meta-Llama-3-8B,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.0,,0.30.1,,,,1.19.2,,,,0.11.1,,,,,,,,,,,,,,,,,,,,,,,,,,,MB,1618.886656,7600.603136,0.0,6954.156032,6314.17344,s,10,6.225003845214843,0.6225003845214843,0.0013149278084200315,0.6224861450195311,0.623799609375,0.6241176086425781,0.6243720080566406,"[0.6244356079101563, 0.620072998046875, 0.6223452758789062, 0.620931640625, 0.6226270141601562, 0.6217324829101563, 0.6218421630859375, 0.6236920776367187, 0.6237289428710937, 0.6235956420898437]",tokens/s,411.2447258916749,kWh,7.3330029845237736e-06,4.018192916897961e-06,3.395321343706048e-05,4.530440933848221e-05,tokens/kWh,5650664.112787582,MB,1618.886656,7600.603136,0.0,6954.156032,6464.046592,s,10,368.56716015625,36.856716015625,0.026041797027344372,36.850578125,36.898916796875,36.9004232421875,36.9016283984375,"[36.86497265625, 36.8430390625, 36.9019296875, 36.8581171875, 36.89858203125, 36.87110546875, 36.828984375, 36.82765625, 36.830046875, 36.8427265625]",tokens/s,1.7093221211920195,kWh,0.0004354530184467634,0.00023866611181760483,0.001976968700854742,0.00265108783111911,tokens/kWh,23763.829798655013,,s,629,373.5824957885741,0.5939308359118828,0.07397669449852429,0.58480126953125,0.5863296997070312,0.586693212890625,1.20664294921875,"[0.5845360717773438, 0.585296875, 0.5849774169921875, 0.5846087646484375, 0.5850286254882813, 0.584748046875, 0.584322021484375, 0.5847828369140625, 0.584279052734375, 0.58402099609375, 0.584068115234375, 0.5846630249023438, 0.584384521484375, 0.5846179809570312, 0.584816650390625, 0.584005615234375, 0.58421142578125, 0.5841326293945313, 0.584958984375, 0.5861488647460937, 0.584427490234375, 0.5843035888671875, 0.5846282348632813, 0.5846927490234375, 0.5846661376953125, 0.5845155639648437, 0.5843814697265625, 0.5843046264648437, 0.5848248291015625, 0.5849682006835938, 0.5866096801757813, 0.5847296142578124, 0.5858385620117188, 0.5855538940429688, 0.58547607421875, 0.5869332275390625, 0.5866168212890625, 0.584468505859375, 0.5869219970703125, 0.5864007568359375, 0.5853429565429688, 0.5859860229492188, 0.5848975219726562, 0.5843538208007812, 0.584753173828125, 0.5861519165039063, 0.585596923828125, 
0.5861099243164063, 0.5845759887695312, 0.5846599731445312, 0.5848914184570313, 0.584426513671875, 0.5851033325195313, 0.5860843505859376, 0.5848597412109375, 0.5849896240234375, 0.5867396850585938, 0.586756103515625, 0.5865123901367187, 0.586208251953125, 0.5867110595703126, 0.585744384765625, 1.206224853515625, 0.5858980102539062, 0.5855364990234375, 0.5844613037109375, 0.584605712890625, 0.5844961547851563, 0.5850460205078125, 0.58532861328125, 0.5840045776367188, 0.5837639770507812, 0.5838192749023438, 0.5844100952148438, 0.585660400390625, 0.5845892944335938, 0.5847992553710938, 0.5843865356445312, 0.585439208984375, 0.5858754272460938, 0.5853839111328125, 0.5851627807617188, 0.5849927978515626, 0.5848330078125, 0.58598193359375, 0.58480126953125, 0.5861355590820313, 0.5848627319335937, 0.5850623779296875, 0.5845616455078125, 0.584658935546875, 0.5841868896484375, 0.5848627319335937, 0.5841438598632812, 0.584394775390625, 0.5841991577148438, 0.5862697143554687, 0.5853613891601562, 0.584890380859375, 0.584838134765625, 0.5842606201171875, 0.5844306030273437, 0.5846517944335937, 0.5859328002929688, 0.5848330078125, 0.5846046752929688, 0.58459033203125, 0.5843906860351562, 0.5847449340820312, 0.5849129028320312, 0.58431591796875, 0.5842001953125, 0.58471728515625, 0.5849159545898438, 0.5853143310546876, 0.584321044921875, 0.5845770263671874, 0.58439990234375, 0.584537109375, 0.5842237548828125, 0.5845657348632812, 0.5844357299804688, 0.5841551513671875, 0.5848032836914062, 0.584511474609375, 1.20827490234375, 0.5846036376953125, 0.5843558349609375, 0.5847777099609375, 0.585406494140625, 0.5854955444335938, 0.5848872680664062, 0.58439990234375, 0.5842575073242188, 0.5842974853515625, 0.5841448974609375, 0.5848606567382812, 0.5855733642578125, 0.5853153076171875, 0.5855313720703125, 0.5857454223632812, 0.58644482421875, 0.586587158203125, 0.5863075561523438, 0.5864365844726562, 0.586102783203125, 0.5866045532226563, 0.5849364624023438, 0.587236328125, 0.586629150390625, 0.5864550170898437, 0.5869383544921875, 0.5851351318359375, 0.5868165283203125, 0.5862113037109375, 0.586013671875, 0.5854505004882813, 0.5856593627929687, 0.5856215209960938, 0.5870980834960937, 0.5859666137695313, 0.5857300415039063, 0.585511962890625, 0.5861007080078126, 0.586576904296875, 0.5868328857421875, 0.5858846435546875, 0.5864171752929688, 0.585228271484375, 0.58509521484375, 0.5863771362304687, 0.5851309814453125, 0.5850357666015625, 0.5855375366210938, 0.5851135864257813, 0.585407470703125, 0.5852764282226562, 0.5863362426757812, 0.5863567504882813, 0.5862123413085938, 0.5860802612304687, 0.5851658325195312, 0.5870366821289063, 0.5865308227539062, 0.5864335327148438, 0.5844910278320312, 0.58482275390625, 0.5869608764648437, 1.205981201171875, 0.58442138671875, 0.5845964965820313, 0.58412646484375, 0.5846712036132813, 0.584859619140625, 0.584500244140625, 0.5844193115234375, 0.5846005859375, 0.584226806640625, 0.5847767333984375, 0.5847193603515625, 0.5840773315429687, 0.5841602783203125, 0.5838458862304687, 0.5849825439453125, 0.5842872314453125, 0.5852507934570312, 0.58406298828125, 0.5844735717773437, 0.5843538208007812, 0.5846784057617187, 0.5847019653320312, 0.5851156616210937, 0.5848043823242187, 0.5851064453125, 0.5846159057617187, 0.5852262573242187, 0.5848002319335938, 0.5843343505859375, 0.5843394775390625, 0.5842994995117188, 0.584975341796875, 0.5844992065429687, 0.58461083984375, 0.5840097045898438, 0.5850368041992188, 0.5848576049804688, 0.5844039916992188, 0.5848340454101563, 0.5848616943359375, 
0.584585205078125, 0.585101318359375, 0.5855078125, 0.5868196411132812, 0.5852261962890625, 0.5863843994140625, 0.586692626953125, 0.5863259887695312, 0.5871114501953125, 0.5859533081054688, 0.5858959350585937, 0.585849853515625, 0.5871841430664062, 0.5864273681640625, 0.5860311279296875, 0.586377197265625, 0.5844725952148437, 0.5856563720703125, 0.5858579711914063, 0.5854105834960938, 0.5857003784179687, 0.5850337524414062, 1.2078602294921874, 0.5852139282226563, 0.585723876953125, 0.586250244140625, 0.586271728515625, 0.5863413696289063, 0.586808349609375, 0.5861898193359375, 0.5845196533203125, 0.5865604858398438, 0.5842206420898437, 0.58428515625, 0.5850715942382813, 0.5856091918945312, 0.5868994750976563, 0.5851893920898438, 0.585296875, 0.5864417114257813, 0.5843292236328125, 0.5858539428710937, 0.5862738037109375, 0.5860853881835938, 0.5861775512695313, 0.5855631103515625, 0.5856204833984375, 0.5868216552734375, 0.5872056274414063, 0.584875, 0.5847664794921875, 0.5854320678710937, 0.5869844360351563, 0.5866485595703125, 0.5866680297851562, 0.5851105346679687, 0.5844152221679687, 0.5853849487304688, 0.5857136840820313, 0.5857628173828126, 0.5850736694335937, 0.5865482177734375, 0.5847275390625, 0.5847992553710938, 0.5843517456054688, 0.586081298828125, 0.5863280639648437, 0.5844951171875, 0.5858427124023438, 0.5852047119140625, 0.585734130859375, 0.586134521484375, 0.5850726318359375, 0.585660400390625, 0.584953857421875, 0.5860597534179688, 0.586693603515625, 0.5868861694335937, 0.5861458129882813, 0.5863157958984375, 0.5857095947265625, 0.5846128540039063, 0.5858980102539062, 0.5863372802734375, 0.5851473999023438, 1.208574951171875, 0.5858948974609375, 0.5860034790039063, 0.5858488159179688, 0.5846507568359375, 0.5853051147460937, 0.5856901245117188, 0.585691162109375, 0.584543212890625, 0.5852897338867188, 0.5849733276367187, 0.5857167358398437, 0.5865441284179688, 0.585660400390625, 0.5851954956054688, 0.586119140625, 0.58567578125, 0.5863833618164063, 0.5861325073242187, 0.5857044677734375, 0.585565185546875, 0.5863946533203125, 0.585654296875, 0.5863167724609375, 0.5854157104492187, 0.5853399047851563, 0.5847070922851563, 0.5861939086914062, 0.58540234375, 0.5899376831054688, 0.5855396118164062, 0.5845524291992188, 0.5847060546875, 0.584268798828125, 0.5851054077148438, 0.5849722900390625, 0.5850357666015625, 0.5842964477539062, 0.5849876708984375, 0.5849682006835938, 0.5848822021484374, 0.585654296875, 0.584900634765625, 0.5849497680664062, 0.5853214721679687, 0.5848350830078125, 0.5849169921875, 0.5852866821289062, 0.58450537109375, 0.58450537109375, 0.5844418334960938, 0.5846466674804688, 0.584453125, 0.5847265014648437, 0.5847725830078125, 0.5843486938476562, 0.5844705200195313, 0.5843916625976563, 0.5847930908203125, 0.5845043334960938, 0.5840936889648437, 0.5843517456054688, 0.58418994140625, 1.2068055419921875, 0.5848176879882813, 0.5846128540039063, 0.5843087158203125, 0.5846333618164062, 0.5844971313476562, 0.5846210327148438, 0.58439990234375, 0.5845780639648438, 0.585080810546875, 0.5844469604492187, 0.58540234375, 0.5848923950195313, 0.5850890502929688, 0.5851924438476562, 0.5849088134765625, 0.5848033447265625, 0.5851453247070313, 0.5851412353515625, 0.5849180297851563, 0.5844786987304688, 0.5851146240234375, 0.5842401123046875, 0.5852006225585937, 0.5848944702148438, 0.5846077270507812, 0.5847623901367187, 0.5846937866210937, 0.5855672607421875, 0.5844490356445312, 0.584658935546875, 0.584933349609375, 0.5843896484375, 0.5846292724609375, 0.5843240966796875, 
0.5845176391601562, 0.5844623413085938, 0.5843394775390625, 0.584595458984375, 0.58416845703125, 0.5841622924804688, 0.5841520385742187, 0.5843251342773438, 0.58414697265625, 0.5844735717773437, 0.5843404541015625, 0.5842247924804688, 0.5839093627929688, 0.5843087158203125, 0.5839298706054687, 0.5844920043945312, 0.5843517456054688, 0.5847521362304687, 0.5843599243164063, 0.5843937377929688, 0.584721435546875, 0.58444287109375, 0.5844132080078125, 0.5843599243164063, 0.58385205078125, 0.5841694946289062, 0.5840465698242188, 0.5851791381835938, 1.2095078125, 0.5847941284179687, 0.5849180297851563, 0.5843486938476562, 0.5844295654296875, 0.5849845581054688, 0.5841305541992188, 0.5846712036132813, 0.58494873046875, 0.584680419921875, 0.585617431640625, 0.5844520874023438, 0.5843486938476562, 0.5840803833007813, 0.5843619995117187, 0.5847122192382812, 0.5845555419921875, 0.5846507568359375, 0.5844510498046875, 0.584573974609375, 0.5840926513671875, 0.5843978271484375, 0.584416259765625, 0.584521728515625, 0.5843097534179688, 0.5844971313476562, 0.5846302490234375, 0.5850009765625, 0.5847193603515625, 0.58450537109375, 0.5842810668945313, 0.5842012329101562, 0.5845913696289062, 0.5843363647460937, 0.5843660888671875, 0.584268798828125, 0.5849375, 0.5842759399414063, 0.5846517944335937, 0.5850183715820313, 0.58439990234375, 0.5842298583984376, 0.5847930908203125, 0.5849313354492187, 0.5852579956054688, 0.58459033203125, 0.5851351318359375, 0.5841787109375, 0.5843189697265625, 0.5841643676757813, 0.5844838256835938, 0.5843446044921875, 0.5842411499023438, 0.5844346923828125, 0.584469482421875, 0.5841571655273438, 0.5850153198242187, 0.5845196533203125, 0.5843230590820313, 0.5843446044921875, 0.5848402099609376, 0.584669189453125, 0.584326171875, 1.2087510986328125, 0.5850153198242187, 0.5850787963867188, 0.5847817993164063, 0.5849047241210937, 0.5845442504882813, 0.5847285766601562, 0.5845186767578125, 0.5842227172851563, 0.5838796997070312, 0.5849476928710937, 0.584469482421875, 0.584501220703125, 0.5847080688476562, 0.5847900390625, 0.5847060546875, 0.5841131591796875, 0.5844295654296875, 0.5843087158203125, 0.5849825439453125, 0.584447998046875, 0.5852078247070313, 0.5841234130859375, 0.5847500610351563, 0.5847091064453125, 0.584690673828125, 0.58425244140625, 0.5844613037109375, 0.584374267578125, 0.5843394775390625, 0.5849129028320312, 0.584479736328125, 0.5842227172851563, 0.5842257690429687, 0.5848576049804688, 0.5845330200195312, 0.5844653930664062, 0.58503271484375, 0.584975341796875, 0.5848627319335937, 0.5849794311523437, 0.5842933959960938, 0.58421044921875, 0.5843302612304687, 0.584958984375, 0.5844111328125, 0.58435888671875, 0.5848289184570312, 0.584627197265625, 0.5843753051757813, 0.5846619873046875, 0.584248291015625, 0.5846046752929688, 0.5841663818359375, 0.5857310791015625, 0.58428515625, 0.5845514526367187, 0.584437744140625, 0.5845524291992188, 0.5843087158203125, 0.584700927734375, 0.5854146728515625, 0.5844777221679688, 1.2091043701171875, 0.5852241821289063, 0.5847367553710937, 0.5846507568359375, 0.5843937377929688, 0.5844100952148438, 0.584079345703125, 0.5851975708007813, 0.5844152221679687, 0.5847306518554688, 0.5847654418945313, 0.5844561767578125, 0.584437744140625, 0.5841787109375, 0.5849343872070313, 0.5845084228515625, 0.5842759399414063, 0.58461181640625, 0.5846661376953125, 0.5853173828125, 0.5845933837890624, 0.5847725830078125, 0.5848923950195313, 0.5846435546875, 0.5854658813476562, 0.5842360229492187, 0.5851566162109375, 0.5853972778320312, 
0.5851463623046875, 0.5845339965820312, 0.5844100952148438, 0.5846067504882813, 0.584648681640625, 0.5843169555664063, 0.5846702270507812, 0.5859276733398437, 0.5848790893554687, 0.5843026123046875, 0.584543212890625, 0.5850736694335937, 0.5842780151367187, 0.5850654907226562, 0.5847388305664063, 0.5847439575195312, 0.5851463623046875, 0.5845985107421875, 0.5845514526367187, 0.584332275390625, 0.58463232421875, 0.5850685424804688, 0.5847633666992188, 0.5853829345703125, 0.5849159545898438, 0.585486328125, 0.5849415893554688, 0.584775634765625, 0.5854443359375, 0.5850316772460937, 0.5853870239257812, 0.5852672119140625, 0.5847562255859375, 0.5845729370117188, 0.5851094970703125]",tokens/s,1.6836977296601632,,,,,,,, -4bit-awq-gemm-sdpa,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,microsoft/phi-1_5,microsoft/phi-1_5,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.0,,0.30.1,,,,1.19.2,,,,0.11.1,,,,,,,,,,,,,,,,,,,,,,,,,,,MB,1485.733888,2103.967744,0.0,1457.52064,1272.750592,s,10,1.3342720336914062,0.13342720336914063,0.0017667378558565091,0.1333674545288086,0.13440001983642577,0.13625353012084962,0.13773633834838866,"[0.13810704040527344, 0.13359715270996095, 0.13159478759765625, 0.13398812866210938, 0.13163987731933594, 0.132114013671875, 0.13270646667480468, 0.1334988098144531, 0.13378965759277345, 0.13323609924316407]",tokens/s,1918.6492224658912,kWh,1.5465521064155546e-06,8.474274160367454e-07,6.439467633532405e-06,8.833447155984705e-06,tokens/kWh,28980758.641495775,MB,1485.733888,2103.967744,0.0,1457.52064,1369.423872,s,10,77.59171484375001,7.759171484375001,0.021550685509445954,7.750958740234375,7.78698193359375,7.79351220703125,7.798736425781249,"[7.7519140625, 7.7428955078125, 7.78553076171875, 7.781552734375, 7.80004248046875, 7.76520556640625, 7.75000341796875, 7.7375029296875, 7.73547216796875, 7.74159521484375]",tokens/s,8.119423591406116,kWh,9.154879443379708e-05,5.017541917467626e-05,0.00037131786234986455,0.0005130420759583379,tokens/kWh,122796.94581057319,,s,629,78.66502353668211,0.12506363042397795,0.0158250884711649,0.12294553375244141,0.1240623077392578,0.1244600341796875,0.2556754669189453,"[0.12330188751220703, 0.12316671752929688, 0.12365106964111328, 0.12313600158691407, 0.12294246673583985, 0.12248063659667968, 0.12371660614013671, 0.12260147094726563, 0.12286566162109375, 0.12282879638671874, 0.12315750122070312, 0.12320972442626953, 0.12280012512207031, 0.12268236541748047, 0.122640380859375, 0.12257997131347656, 0.12299161529541015, 0.1229834213256836, 0.12259737396240235, 0.12290866851806641, 0.12273561859130859, 0.12333164978027343, 0.12384864044189453, 0.12324454498291015, 0.12292301177978515, 0.12274278259277344, 0.1227489242553711, 0.12272640228271485, 0.12298239898681641, 0.12477235412597656, 0.12357427215576172, 0.12362035369873046, 0.12306233978271484, 0.12299974060058594, 0.12285030364990235, 0.12300287628173828, 0.12316060638427734, 0.1231595230102539, 0.12301516723632812, 0.12314726257324218, 0.12401152038574219, 0.12320358276367188, 0.12276121520996094, 0.12288307189941407, 
0.12321382141113281, 0.12264653015136719, 0.12301721954345703, 0.12259737396240235, 0.12273664093017578, 0.122893310546875, 0.12342066955566407, 0.1232015380859375, 0.12298239898681641, 0.12279090881347657, 0.1230448989868164, 0.12328444671630859, 0.12287181091308594, 0.12288511657714844, 0.12308684539794922, 0.1227540512084961, 0.12283084869384765, 0.12267212677001953, 0.25649664306640624, 0.1227540512084961, 0.12280220794677735, 0.12263113403320312, 0.12264345550537109, 0.12267417907714843, 0.1228400650024414, 0.12283596801757812, 0.12282470703125, 0.12279603576660156, 0.12281446075439453, 0.12269875335693359, 0.12270694732666015, 0.12278681945800782, 0.12276121520996094, 0.12260044860839844, 0.12260659027099609, 0.12257791900634765, 0.12294041442871094, 0.1228779525756836, 0.12287999725341797, 0.12280831909179687, 0.12294451141357422, 0.12248985290527344, 0.12257484436035156, 0.1226055679321289, 0.12298137664794923, 0.12271308898925781, 0.12264447784423828, 0.12263935852050781, 0.12252365112304688, 0.12261990356445313, 0.12269977569580078, 0.12264345550537109, 0.12255129241943359, 0.12263833618164062, 0.12267212677001953, 0.12288102722167969, 0.12292915344238281, 0.12263935852050781, 0.12389888000488282, 0.12297216033935547, 0.12294553375244141, 0.12293427276611328, 0.12263526153564454, 0.12274585723876953, 0.12553011322021485, 0.12401561737060547, 0.12355072021484376, 0.12335718536376954, 0.12291584014892579, 0.12288819122314452, 0.12369510650634766, 0.12324147033691406, 0.12273667144775391, 0.12291478729248047, 0.12294758605957032, 0.12287385559082031, 0.12286669158935547, 0.12300697326660157, 0.12317391967773438, 0.12299874877929687, 0.1229271011352539, 0.25462066650390625, 0.12265164947509766, 0.1226270751953125, 0.12303667449951172, 0.12276227569580078, 0.12365821075439454, 0.12293939208984375, 0.12266496276855468, 0.12368895721435547, 0.12677529907226562, 0.12441395568847656, 0.12445696258544922, 0.12463513946533203, 0.12443033599853516, 0.12438224029541016, 0.12425520324707032, 0.12467916870117188, 0.12446310424804688, 0.12442931365966797, 0.12455731201171875, 0.12458188629150391, 0.12372480010986328, 0.12267417907714843, 0.12398899078369141, 0.12292813110351562, 0.12310527801513672, 0.12295891571044922, 0.12267308807373047, 0.12258099365234375, 0.12275609588623047, 0.12290764617919922, 0.12266086578369141, 0.12255948638916016, 0.12306329345703125, 0.12263116455078125, 0.12326604461669922, 0.12354764556884766, 0.12335718536376954, 0.12372691345214844, 0.12280210876464843, 0.12289638519287109, 0.12269055938720703, 0.12372787475585938, 0.124010498046875, 0.12365824127197265, 0.12331314849853516, 0.12313497924804688, 0.12537753295898438, 0.12376166534423828, 0.12374527740478515, 0.12372582244873047, 0.1231278076171875, 0.12385382080078125, 0.12404633331298828, 0.12378214263916015, 0.12360908508300782, 0.12295782470703125, 0.12494950103759765, 0.12373811340332032, 0.12298035430908204, 0.12348416137695313, 0.12417740631103516, 0.12380774688720703, 0.2588282775878906, 0.12419481658935547, 0.12356505584716797, 0.12310323333740235, 0.12305203247070312, 0.12337971496582031, 0.12367977905273438, 0.12359062194824219, 0.12260147094726563, 0.12353740692138672, 0.12354354858398438, 0.1232701416015625, 0.12446720123291016, 0.12432588958740234, 0.1239582748413086, 0.12362137603759765, 0.12360499572753907, 0.12407295989990234, 0.12389170837402344, 0.12347392272949219, 0.1235968017578125, 0.12413645172119141, 0.12480512237548828, 0.12312678527832031, 0.12333670043945312, 0.12412928009033203, 
0.12416000366210937, 0.12297113800048828, 0.12327529907226563, 0.12358857727050782, 0.12392755126953126, 0.1234176025390625, 0.12362649536132812, 0.12375859069824219, 0.12381183624267578, 0.12392758178710937, 0.12367868804931641, 0.12318924713134766, 0.12382310485839844, 0.12388658905029297, 0.12348006439208985, 0.12391423797607422, 0.12419583892822265, 0.12329676818847657, 0.12479590606689453, 0.12373197174072266, 0.12389478302001954, 0.12393472290039062, 0.12317702484130859, 0.12292499542236328, 0.12298649597167968, 0.12279808044433593, 0.12290457916259766, 0.1228584976196289, 0.12289842987060547, 0.1226424331665039, 0.12311244964599609, 0.12252780914306641, 0.12262598419189454, 0.12282470703125, 0.12261888122558594, 0.12280217742919922, 0.12272025299072266, 0.25568666076660157, 0.1239900131225586, 0.12271616363525391, 0.12272128295898438, 0.1237401580810547, 0.12348108673095703, 0.12270182037353515, 0.12281241607666016, 0.12259839630126954, 0.12345139312744141, 0.12484710693359374, 0.123040771484375, 0.12316671752929688, 0.12355583953857421, 0.12351078033447266, 0.12384358215332031, 0.12355174255371094, 0.12602265930175782, 0.12446208190917969, 0.12428800201416015, 0.12392345428466797, 0.12333055877685548, 0.12432588958740234, 0.12359986877441406, 0.12368793487548828, 0.12390502166748046, 0.12386921691894531, 0.1236797103881836, 0.12345037078857422, 0.12340735626220703, 0.12414361572265625, 0.12356301116943359, 0.12358767700195313, 0.12459613037109375, 0.12439347076416016, 0.12301107025146485, 0.12367359924316407, 0.12390092468261718, 0.12395724487304688, 0.12516966247558595, 0.12354662322998047, 0.12346470642089843, 0.123863037109375, 0.12409139251708984, 0.12322509002685547, 0.12475084686279297, 0.12426445007324219, 0.12396953582763671, 0.12330086517333984, 0.12360908508300782, 0.12444057464599609, 0.12403609466552734, 0.12382208251953125, 0.1240606689453125, 0.12347494506835938, 0.12344012451171875, 0.12362751770019531, 0.1256468505859375, 0.12401663970947266, 0.12414873504638672, 0.12401868438720703, 0.12380364990234374, 0.12371561431884766, 0.25564668273925784, 0.12369203186035156, 0.12362137603759765, 0.12392345428466797, 0.12388044738769531, 0.12370022583007813, 0.12369715118408203, 0.12311347198486328, 0.12364492797851563, 0.123863037109375, 0.12393574523925781, 0.12324249267578125, 0.12262403106689453, 0.12281238555908203, 0.12295372772216796, 0.12290764617919922, 0.12269363403320313, 0.12262297821044922, 0.1231677474975586, 0.12281446075439453, 0.12263526153564454, 0.122714111328125, 0.12280524444580078, 0.12276838684082031, 0.1227110366821289, 0.12276735687255859, 0.12391014099121093, 0.12351795196533204, 0.12277555084228516, 0.12253388977050782, 0.12272025299072266, 0.12271622467041016, 0.12318611145019531, 0.12313190460205078, 0.12436070251464844, 0.12357734680175782, 0.12378726196289062, 0.12353024291992187, 0.12572364807128905, 0.12379545593261719, 0.12325888061523438, 0.12332441711425782, 0.12372991943359375, 0.12286566162109375, 0.1225134048461914, 0.12262400054931641, 0.12326297760009766, 0.12308582305908203, 0.12292403411865234, 0.1229148178100586, 0.12318924713134766, 0.12364492797851563, 0.12364291381835937, 0.12278781127929687, 0.12316159820556641, 0.12317593383789062, 0.12396851348876953, 0.12279193878173827, 0.12260761260986328, 0.12462694549560546, 0.12346470642089843, 0.12266598510742187, 0.1229148178100586, 0.2559385528564453, 0.12347289276123047, 0.12349951934814453, 0.12279500579833984, 0.12253695678710938, 0.1227325439453125, 0.12260454559326171, 
0.12274380493164062, 0.12233113861083984, 0.12244992065429687, 0.12274790191650391, 0.12282572937011718, 0.12250214385986329, 0.12254208374023437, 0.12273458862304687, 0.12241919708251953, 0.12272844696044923, 0.1226250228881836, 0.12303974151611329, 0.1227663345336914, 0.12259839630126954, 0.12243353271484375, 0.12274380493164062, 0.12401561737060547, 0.12277452850341797, 0.1226844482421875, 0.12271612548828124, 0.12248268890380859, 0.12263935852050781, 0.12302540588378906, 0.12259327697753906, 0.1225697250366211, 0.12243456268310547, 0.12261273956298828, 0.12260352325439453, 0.12275507354736329, 0.12261068725585937, 0.12276428985595703, 0.12263942718505859, 0.12258604431152344, 0.12272230529785157, 0.12245708465576172, 0.12246527862548828, 0.12284928131103516, 0.12406886291503906, 0.12364492797851563, 0.12372889709472656, 0.12372480010986328, 0.1236131820678711, 0.12349132537841796, 0.12373094177246094, 0.12433715057373047, 0.12420198059082031, 0.12350566101074219, 0.12349030303955078, 0.12351795196533204, 0.1237022705078125, 0.12365004730224609, 0.12362035369873046, 0.12419891357421875, 0.12375347137451172, 0.12319641876220704, 0.12275917053222657, 0.2567250061035156, 0.1229854736328125, 0.12295680236816406, 0.1226270751953125, 0.12288614654541016, 0.12259225463867188, 0.12339711761474609, 0.1230387191772461, 0.12344627380371094, 0.12426547241210938, 0.12386918640136718, 0.12261785888671875, 0.12307660675048829, 0.1229639663696289, 0.12252467346191406, 0.12267314910888671, 0.12372172546386719, 0.12260659027099609, 0.12311961364746093, 0.1224427490234375, 0.12306841278076172, 0.12283699035644531, 0.12256665802001954, 0.12251955413818359, 0.12265580749511719, 0.12248262023925781, 0.12276019287109376, 0.12253388977050782, 0.12335411071777344, 0.12312371063232422, 0.12290866851806641, 0.12254515075683593, 0.12248780822753906, 0.122534912109375, 0.12261682891845703, 0.12288716888427734, 0.12256460571289063, 0.12276838684082031, 0.12297830200195313, 0.1227694091796875, 0.12250418853759766, 0.12257279968261718, 0.1226977310180664, 0.12254617309570312, 0.12250009918212891, 0.122787841796875, 0.122461181640625, 0.12268748474121094, 0.12251545715332031, 0.12266802978515624, 0.1225871353149414, 0.1227623062133789, 0.12271507263183594, 0.12253900909423829, 0.12249292755126953, 0.12263219451904298, 0.12244684600830077, 0.12256870269775391, 0.12278272247314453, 0.12261170959472656, 0.1223895034790039, 0.12428390502929687, 0.12278988647460938, 0.2565724182128906, 0.12282470703125, 0.12249190521240234, 0.12308480072021484, 0.12364595031738282, 0.1230540771484375, 0.12276640319824218, 0.12278163146972657, 0.12248473358154296, 0.12251136016845703, 0.12247654724121093, 0.12248678588867187, 0.12250009918212891, 0.12251955413818359, 0.12259839630126954, 0.12254617309570312, 0.12253593444824219, 0.1236684799194336, 0.12317388916015624, 0.12310221099853516, 0.12281139373779297, 0.12291379547119141, 0.12274483489990234, 0.12282572937011718, 0.12260765075683594, 0.12258812713623046, 0.12272946929931641, 0.12461772918701172, 0.12312989044189453, 0.12266185760498047, 0.12315033721923828, 0.12284620666503906, 0.1230540771484375, 0.122640380859375, 0.12250624084472657, 0.12265676879882813, 0.12255538940429687, 0.12278272247314453, 0.12277555084228516, 0.12254003143310546, 0.12262400054931641, 0.12318208312988281, 0.12256050872802735, 0.12256153869628907, 0.1224816665649414, 0.12287999725341797, 0.12276326751708984, 0.12258406066894531, 0.12270182037353515, 0.12269465637207032, 0.12290150451660156, 
0.12344525146484375, 0.12252265930175782, 0.12313801574707031, 0.12258406066894531, 0.12265471649169922, 0.12261888122558594, 0.12246323394775391, 0.12254924774169922, 0.12281753540039063, 0.12254617309570312, 0.12263526153564454, 0.12243353271484375, 0.2562088928222656, 0.12256050872802735, 0.12245913696289062, 0.12246221160888672, 0.122461181640625, 0.12328652954101563, 0.12252774047851563, 0.12238028717041016, 0.12253183746337891, 0.12281037139892578, 0.12281651306152344, 0.12248678588867187, 0.12256358337402344, 0.1226792984008789, 0.12369407653808594, 0.12281958770751954, 0.12281549072265625, 0.12272742462158204, 0.12273356628417968, 0.12324864196777344, 0.12265676879882813, 0.12252365112304688, 0.12428800201416015, 0.12306227111816406, 0.12254208374023437, 0.12252979278564453, 0.12400844573974609, 0.12299775695800781, 0.12261376190185547, 0.12311654663085937, 0.12299980926513672, 0.12294143676757813, 0.12260044860839844, 0.12261682891845703, 0.12266700744628906, 0.1231677474975586, 0.12294656372070313, 0.12281651306152344, 0.12273766326904297, 0.12263731384277343, 0.12283084869384765, 0.12253900909423829, 0.12256768035888672, 0.12280937957763671, 0.12265058898925782, 0.12437811279296875, 0.12318822479248047, 0.12397875213623047, 0.12310733032226563, 0.12303462219238281, 0.12267314910888671, 0.12365312194824218, 0.12290662384033203, 0.12257689666748046, 0.12280012512207031, 0.12264447784423828, 0.12267110443115234, 0.1226455078125, 0.12263731384277343, 0.12309503936767578, 0.12316057586669922, 0.12300800323486329, 0.12256265258789062]",tokens/s,7.995929724812099,,,,,,,, -4bit-awq-gemm-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,togethercomputer/RedPajama-INCITE-Base-3B-v1,togethercomputer/RedPajama-INCITE-Base-3B-v1,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.42.3,,0.31.0,,,,1.20.0,,,,0.11.1,,,,,,,,,,,,,,,,,,,,,,,,,,,MB,2296.311808,3364.356096,0.0,2717.908992,2483.907584,s,10,2.330847946166992,0.23308479461669923,0.0010429138193785155,0.2329587631225586,0.23404432983398438,0.23490374908447265,0.2355912844848633,"[0.23576316833496094, 0.23205392456054688, 0.23257113647460936, 0.2332529296875, 0.2331095733642578, 0.23302418518066406, 0.2328933410644531, 0.23385334777832031, 0.23206883239746093, 0.23225750732421874]",tokens/s,1098.3127424548827,kWh,2.741206621732375e-06,1.5020620704274751e-06,1.2330117187320244e-05,1.6573385879480095e-05,tokens/kWh,15446451.428911682,MB,2296.311808,3364.356096,0.0,2717.908992,2606.770688,s,10,135.49141015625,13.549141015625,0.005470597609210756,13.5478662109375,13.555178515625,13.558332910156249,13.56085642578125,"[13.5614873046875, 13.552251953125, 13.5473681640625, 13.545138671875, 13.5483642578125, 13.5429267578125, 13.5468681640625, 13.5424833984375, 13.5544775390625, 
13.5500439453125]",tokens/s,4.649741258678155,kWh,0.00015991536231278774,8.764646752289069e-05,0.0007088255708478708,0.0009563874006835494,tokens/kWh,65872.8878642406,,s,629,137.37170133972162,0.21839698146219663,0.027667917542091323,0.21500723266601562,0.2154588165283203,0.21557268676757813,0.4473833142089844,"[0.21505740356445313, 0.21482188415527342, 0.21486285400390626, 0.2151321563720703, 0.21520384216308594, 0.21484442138671875, 0.21472972106933594, 0.21478707885742188, 0.21523762512207031, 0.21543218994140625, 0.21555404663085936, 0.21550694274902343, 0.21547418212890626, 0.21545574951171875, 0.2153861083984375, 0.21541888427734374, 0.21541786193847656, 0.21550694274902343, 0.21546803283691407, 0.2156584930419922, 0.2156134338378906, 0.21551411437988283, 0.215841796875, 0.2154475555419922, 0.21554278564453125, 0.2155478973388672, 0.21656781005859374, 0.21537075805664063, 0.21570559692382812, 0.2151690216064453, 0.21536972045898437, 0.21519155883789062, 0.21519155883789062, 0.21512191772460937, 0.2151700439453125, 0.21511680603027344, 0.21506355285644532, 0.2149959716796875, 0.21488128662109374, 0.21483724975585938, 0.21492735290527343, 0.21484646606445312, 0.21488946533203124, 0.21494989013671875, 0.21493145751953124, 0.21494989013671875, 0.21496934509277343, 0.21540966796875, 0.21525914001464844, 0.21563699340820314, 0.21598924255371094, 0.21502156066894532, 0.214908935546875, 0.2154967041015625, 0.2151526336669922, 0.21541375732421875, 0.21517312622070311, 0.21497549438476563, 0.2161397705078125, 0.21507891845703125, 0.215046142578125, 0.21498880004882812, 0.44725146484375, 0.2148833312988281, 0.21473587036132813, 0.2147010498046875, 0.21467135620117186, 0.21518130493164062, 0.21532159423828126, 0.2151383056640625, 0.21513420104980469, 0.21499699401855468, 0.21491506958007814, 0.21502259826660156, 0.21509529113769532, 0.21496524047851562, 0.21494886779785155, 0.21502464294433593, 0.21508709716796875, 0.21516697692871095, 0.21509426879882812, 0.21518949890136718, 0.21508403015136718, 0.2151690216064453, 0.21495706176757812, 0.21509529113769532, 0.21498162841796875, 0.21510450744628906, 0.21498675537109374, 0.215088134765625, 0.21520999145507813, 0.21500518798828125, 0.2154455108642578, 0.21509222412109374, 0.21493760681152344, 0.21534208679199218, 0.21548851013183593, 0.21525811767578126, 0.2152263641357422, 0.21541171264648437, 0.21565644836425782, 0.21525401306152345, 0.21511885070800782, 0.21525709533691406, 0.2151137237548828, 0.2153912353515625, 0.215267333984375, 0.21513011169433593, 0.21617152404785156, 0.21516697692871095, 0.21517721557617187, 0.21505740356445313, 0.21518643188476563, 0.21504818725585936, 0.21551309204101562, 0.21518336486816406, 0.21493145751953124, 0.21510861206054688, 0.21479629516601562, 0.21501235961914061, 0.2149160919189453, 0.21480447387695312, 0.21480242919921874, 0.214935546875, 0.2149222412109375, 0.4477112426757813, 0.21476658630371093, 0.21471026611328126, 0.21476658630371093, 0.21468569946289062, 0.21475942993164063, 0.2149099578857422, 0.21478604125976564, 0.2147553253173828, 0.21471641540527345, 0.2147993621826172, 0.21482290649414063, 0.21487615966796875, 0.21483724975585938, 0.2148311004638672, 0.2148546600341797, 0.2154239959716797, 0.2151372833251953, 0.21490278625488282, 0.214866943359375, 0.21509426879882812, 0.21517318725585938, 0.21492115783691407, 0.21483314514160157, 0.21508607482910155, 0.2149591064453125, 0.2149160919189453, 0.21490278625488282, 0.21483212280273437, 0.21489254760742188, 0.21492428588867188, 0.2148863983154297, 
0.21489971923828124, 0.21498162841796875, 0.21520895385742186, 0.21496524047851562, 0.21491302490234376, 0.21487615966796875, 0.2147921905517578, 0.2149601287841797, 0.2154270782470703, 0.21498573303222657, 0.21499699401855468, 0.2154915771484375, 0.21557760620117186, 0.21522329711914062, 0.21523968505859375, 0.2152806396484375, 0.2152929229736328, 0.21521612548828126, 0.2151874542236328, 0.21568409729003907, 0.21566566467285156, 0.21509120178222657, 0.21514035034179688, 0.21538508605957032, 0.21533900451660157, 0.21524581909179688, 0.21518130493164062, 0.21530111694335938, 0.2149529571533203, 0.21484646606445312, 0.215267333984375, 0.44860928344726564, 0.21496835327148436, 0.21504917907714843, 0.21519564819335937, 0.2147788848876953, 0.21473075866699218, 0.214687744140625, 0.21470924377441405, 0.21483622741699218, 0.21488230895996094, 0.21480551147460938, 0.21480447387695312, 0.21488230895996094, 0.2148167724609375, 0.2148423614501953, 0.21475840759277343, 0.21480755615234376, 0.21527244567871093, 0.21494374084472656, 0.2148546600341797, 0.21478604125976564, 0.21492735290527343, 0.21485977172851561, 0.21493452453613282, 0.2148853759765625, 0.21484442138671875, 0.2148116455078125, 0.21499699401855468, 0.21483827209472656, 0.21479731750488282, 0.21485772705078124, 0.21503590393066407, 0.21506048583984375, 0.2151700439453125, 0.21510552978515626, 0.2148853759765625, 0.21489459228515626, 0.21496115112304687, 0.21505229187011718, 0.214935546875, 0.21529087829589844, 0.21523558044433594, 0.21490687561035157, 0.21502873229980468, 0.2155325469970703, 0.21607321166992188, 0.21525914001464844, 0.21513420104980469, 0.21531852722167968, 0.21525914001464844, 0.21559706115722657, 0.21499699401855468, 0.21491302490234376, 0.21523968505859375, 0.21506661987304687, 0.21505946350097657, 0.21485772705078124, 0.21504205322265624, 0.21495603942871094, 0.21500927734375, 0.21486898803710938, 0.21491917419433593, 0.21499699401855468, 0.44823040771484374, 0.2147502136230469, 0.2149713897705078, 0.21477682495117187, 0.21464883422851563, 0.21478604125976564, 0.2148853759765625, 0.21474610900878907, 0.21475430297851564, 0.21482905578613282, 0.21484544372558595, 0.21471641540527345, 0.21488742065429686, 0.21481266784667968, 0.21496524047851562, 0.21491200256347656, 0.21484339904785157, 0.21479014587402342, 0.21488844299316406, 0.2148730926513672, 0.21482598876953124, 0.21536563110351561, 0.2149959716796875, 0.2149160919189453, 0.21479525756835938, 0.214793212890625, 0.21489561462402343, 0.215225341796875, 0.215546875, 0.214830078125, 0.214908935546875, 0.21495808410644532, 0.21555917358398438, 0.21512602233886718, 0.21494682312011718, 0.21519564819335937, 0.2150645751953125, 0.21502975463867188, 0.215077880859375, 0.2150707244873047, 0.21511167907714843, 0.21507994079589843, 0.21511065673828125, 0.2151004180908203, 0.21502566528320313, 0.215046142578125, 0.2153052215576172, 0.2151751708984375, 0.2151628875732422, 0.21534002685546874, 0.21526220703125, 0.21523968505859375, 0.21514239501953125, 0.2150768585205078, 0.21670297241210937, 0.21517414855957032, 0.215046142578125, 0.21518028259277344, 0.21528883361816406, 0.21511270141601563, 0.21514854431152344, 0.21511576843261718, 0.21554585266113283, 0.4488406982421875, 0.21551922607421875, 0.2154485778808594, 0.21546290588378905, 0.21549363708496094, 0.2155397186279297, 0.21553868103027343, 0.2150266876220703, 0.21493760681152344, 0.21507379150390624, 0.21545779418945313, 0.2150645751953125, 0.21477171325683594, 0.21473587036132813, 0.214866943359375, 
0.21483827209472656, 0.21489152526855468, 0.2148239288330078, 0.21499903869628906, 0.21492019653320313, 0.2147758026123047, 0.2147368927001953, 0.21461094665527344, 0.21467546081542968, 0.2146693115234375, 0.21468467712402345, 0.21476966857910157, 0.21510552978515626, 0.21500210571289063, 0.21482803344726562, 0.2148116455078125, 0.2148341827392578, 0.21464678955078126, 0.21472154235839844, 0.21472767639160156, 0.21471334838867187, 0.21464781188964843, 0.21483929443359376, 0.21563084411621095, 0.21475634765625, 0.21471026611328126, 0.2149529571533203, 0.21496934509277343, 0.21485261535644531, 0.21489356994628905, 0.21480447387695312, 0.21493145751953124, 0.2149160919189453, 0.21494682312011718, 0.21486285400390626, 0.21494374084472656, 0.21482496643066407, 0.21480140686035157, 0.21474919128417969, 0.21488844299316406, 0.21515058898925782, 0.21508505249023438, 0.21536972045898437, 0.2150338592529297, 0.21493145751953124, 0.21476966857910157, 0.2151946258544922, 0.21496627807617188, 0.44795391845703125, 0.21470413208007813, 0.21475942993164063, 0.21475123596191406, 0.21471334838867187, 0.21477171325683594, 0.21476864624023437, 0.21473893737792968, 0.21490074157714845, 0.21492019653320313, 0.2151065673828125, 0.2153482208251953, 0.21520179748535156, 0.21504920959472656, 0.21515367126464843, 0.21496421813964844, 0.21530726623535157, 0.21506866455078125, 0.21519973754882812, 0.2150830078125, 0.2149283905029297, 0.2150697021484375, 0.2151321563720703, 0.21533798217773437, 0.21507891845703125, 0.21498367309570313, 0.2149591064453125, 0.21478707885742188, 0.2149222412109375, 0.2149959716796875, 0.21502156066894532, 0.21526835632324218, 0.21528985595703126, 0.2153482208251953, 0.21506253051757812, 0.2151014404296875, 0.21500927734375, 0.21537075805664063, 0.21537689208984376, 0.21548851013183593, 0.21524172973632813, 0.2149713897705078, 0.2150440979003906, 0.21492633056640625, 0.21498675537109374, 0.2149396514892578, 0.2149591064453125, 0.21508607482910155, 0.21496730041503906, 0.2149048309326172, 0.21501235961914061, 0.21492326354980468, 0.21479833984375, 0.21476966857910157, 0.2149181365966797, 0.2150502471923828, 0.21507994079589843, 0.2152632293701172, 0.21494682312011718, 0.21491506958007814, 0.21495808410644532, 0.2151751708984375, 0.21494886779785155, 0.4489482116699219, 0.2149918670654297, 0.2148720703125, 0.21471128845214843, 0.21480242919921874, 0.21478092956542968, 0.214908935546875, 0.21469798278808594, 0.21487513732910157, 0.21486285400390626, 0.21475942993164063, 0.2147799072265625, 0.21480140686035157, 0.214972412109375, 0.21482701110839844, 0.21487718200683595, 0.21489254760742188, 0.21487411499023437, 0.21485874938964844, 0.2148536376953125, 0.21483622741699218, 0.21512498474121095, 0.21538201904296875, 0.21494578552246094, 0.21469081115722657, 0.21478297424316406, 0.215546875, 0.21490789794921875, 0.21481369018554688, 0.21466828918457032, 0.21494169616699219, 0.21496217346191407, 0.21491200256347656, 0.21501132202148437, 0.21527757263183595, 0.21506150817871095, 0.2150697021484375, 0.21487001037597656, 0.21500210571289063, 0.2153052215576172, 0.21515980529785156, 0.21500723266601562, 0.21494989013671875, 0.215119873046875, 0.21500006103515626, 0.2150963134765625, 0.21494169616699219, 0.2148720703125, 0.21486079406738282, 0.2149365692138672, 0.21491200256347656, 0.21484646606445312, 0.2148802490234375, 0.21486898803710938, 0.214835205078125, 0.21502156066894532, 0.21651455688476562, 0.214908935546875, 0.21484544372558595, 0.21495706176757812, 0.214898681640625, 0.21494784545898438, 
0.21494886779785155, 0.4472668151855469, 0.21480345153808594, 0.21471437072753907, 0.21508403015136718, 0.21481575012207033, 0.2151884765625, 0.21496730041503906, 0.21489663696289063, 0.21552230834960937, 0.2148239288330078, 0.2148863983154297, 0.21482188415527342, 0.215014404296875, 0.21478501892089844, 0.21531852722167968, 0.21555097961425781, 0.21537689208984376, 0.21539430236816406, 0.21550592041015626, 0.2155653076171875, 0.21540045166015626, 0.21561036682128906, 0.21547929382324219, 0.21502156066894532, 0.21487922668457032, 0.21480960083007813, 0.21484133911132813, 0.21497549438476563, 0.21494682312011718, 0.21494989013671875, 0.21501849365234374, 0.2149949493408203, 0.21500825500488283, 0.21494374084472656, 0.21490278625488282, 0.21512294006347657, 0.21492941284179687, 0.21512294006347657, 0.21499699401855468, 0.214972412109375, 0.21519258117675782, 0.21501849365234374, 0.21505126953125, 0.21520384216308594, 0.21509324645996095, 0.21525811767578126, 0.21514035034179688, 0.21502259826660156, 0.2150819854736328, 0.2148536376953125, 0.21539942932128905, 0.21545164489746094, 0.21515980529785156, 0.215193603515625, 0.21507994079589843, 0.2156083221435547, 0.21530726623535157, 0.21668658447265626, 0.2153543701171875, 0.21537791442871093, 0.21531954956054689, 0.21511270141601563, 0.21529702758789063, 0.44742861938476564, 0.21477786254882814, 0.21499903869628906, 0.21493145751953124, 0.21511782836914062, 0.21525299072265625, 0.2149396514892578, 0.21481881713867187, 0.2147573699951172, 0.2147440643310547, 0.21501132202148437, 0.2149396514892578, 0.21487411499023437, 0.21493145751953124, 0.21482598876953124, 0.21519667053222657, 0.21500927734375, 0.21553765869140626, 0.21521714782714843, 0.21505126953125, 0.2152499237060547, 0.21514035034179688, 0.2150440979003906, 0.21515776062011718, 0.21496421813964844, 0.21498880004882812, 0.2155284423828125, 0.21523558044433594, 0.2150697021484375, 0.21503488159179687, 0.21502464294433593, 0.215119873046875, 0.21567385864257813, 0.21532569885253905, 0.2156134338378906, 0.21530624389648437, 0.215046142578125, 0.215510009765625, 0.21509324645996095, 0.21478912353515625, 0.2147440643310547, 0.21493350219726562, 0.21496524047851562, 0.21494374084472656, 0.21493043518066407, 0.21487820434570312, 0.21496319580078124, 0.215119873046875, 0.21509324645996095, 0.21507994079589843, 0.21511474609375, 0.21518028259277344, 0.2149365692138672, 0.2149918670654297, 0.21517721557617187, 0.21499801635742188, 0.2149918670654297, 0.21517721557617187, 0.2150645751953125, 0.21535334777832033, 0.21510552978515626, 0.21535130310058595, 0.21503794860839845]",tokens/s,4.578817863254647,,,,,,,, -4bit-awq-gemm-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,Qwen/Qwen2-beta-72B,Qwen/Qwen2-beta-72B,cuda,0,42,,,True,,,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", 
line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run - report = scenario.run(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run - self.run_model_loading_tracking(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking - backend.load() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load - self.load_transformers_model() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model - self.load_transformers_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights - self.load_transformers_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained - self.pretrained_model = self.automodel_loader.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4034, in from_pretrained - dispatch_model(model, **device_map_kwargs) - File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 494, in dispatch_model - model.to(device) - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 2905, in to - return super().to(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1174, in to - return self._apply(convert) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply - module._apply(fn) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply - module._apply(fn) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply - module._apply(fn) - [Previous line repeated 2 more times] - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 854, in _apply - self._buffers[key] = fn(buf) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1160, in convert - return t.to( -torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 96.00 MiB. GPU 0 has a total capacity of 22.18 GiB of which 68.50 MiB is free. Process 124949 has 22.11 GiB memory in use. Of the allocated memory 21.86 GiB is allocated by PyTorch, and 10.74 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) - -",qwen2,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemm-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,meta-llama/Llama-2-70b-hf,meta-llama/Llama-2-70b-hf,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - self.load_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3904, in from_pretrained - dispatch_model(model, **device_map_kwargs) - File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 489, in dispatch_model - model.to(device) - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 2796, in to - return super().to(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1173, in to - return self._apply(convert) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 779, in _apply - module._apply(fn) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 779, in _apply - module._apply(fn) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 779, in _apply - module._apply(fn) - [Previous line repeated 2 more times] - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 853, in _apply - self._buffers[key] = fn(buf) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1159, in convert - return t.to( -torch.cuda.OutOfMemoryError: CUDA out of memory. Tried to allocate 112.00 MiB. 
GPU - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemm-sdpa,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,meta-llama/Llama-2-7b-hf,meta-llama/Llama-2-7b-hf,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.0,,0.30.1,,,,1.19.2,,,,0.11.1,,,,,,,,,,,,,,,,,,,,,,,,,,,MB,1984.512,5480.382464,0.0,4833.93536,4503.282688,s,10,5.704772033691406,0.5704772033691407,0.0012601125399919193,0.5702384033203125,0.5718997436523438,0.5722093933105469,0.5724571130371093,"[0.5703561401367188, 0.57251904296875, 0.5679871215820312, 0.569779052734375, 0.569513427734375, 0.5698793334960938, 0.5701206665039062, 0.571189208984375, 0.5718309326171875, 0.5715971069335938]",tokens/s,448.7471164283303,kWh,6.715062223834757e-06,3.6795454622733808e-06,3.136956213266472e-05,4.1764169818772856e-05,tokens/kWh,6129656.140918401,MB,1986.158592,5480.382464,0.0,4833.93536,4688.699392,s,10,334.56283984375,33.456283984375,0.0052644785154184115,33.45499609375,33.46062109375,33.46519140625,33.46884765625,"[33.4560859375, 33.4513203125, 33.45960546875, 33.45390625, 33.45316015625, 33.46976171875, 33.4577109375, 33.45030078125, 33.45712109375, 33.4538671875]",tokens/s,1.883054317371969,kWh,0.0003949493847824173,0.00021646648750867824,0.0018252696453997421,0.0024366855176908373,tokens/kWh,25854.793137073728,,s,629,339.16590026855516,0.5392144678355401,0.06782032837407061,0.5309541625976563,0.5315696655273437,0.5319477294921875,1.101242978515625,"[0.5305211181640626, 0.5310648193359375, 0.5307863159179688, 0.53155224609375, 0.5311129760742187, 0.5313126220703125, 0.5308641357421875, 0.5310208129882813, 0.530787353515625, 0.5312348022460938, 0.5306480712890626, 0.5307863159179688, 0.5306419067382813, 0.5310637817382813, 0.530830322265625, 0.5309419555664062, 0.5309429931640625, 0.531325927734375, 0.5308753662109374, 0.5309122314453125, 0.5310187377929687, 0.5310218505859375, 0.5317243041992188, 0.5311610717773437, 0.5310167236328125, 0.5312348022460938, 0.530935791015625, 0.5312266235351563, 0.53100439453125, 0.5311580200195313, 0.5308682250976563, 0.5309685668945312, 0.5309716186523438, 0.5314590454101562, 0.5310986328125, 0.53151025390625, 0.5308231811523437, 0.5309214477539063, 0.5307422485351563, 0.5312634887695312, 0.5309501342773437, 0.5314273071289063, 0.530819091796875, 0.53110986328125, 0.5307781372070313, 0.53089892578125, 0.5306757202148438, 0.53097265625, 0.5308671875, 0.530881591796875, 0.5306766967773437, 0.53083544921875, 0.5309061279296875, 0.5309368286132813, 0.531083251953125, 0.5310986328125, 0.531441650390625, 0.531240966796875, 0.5315379028320313, 0.5314641723632813, 0.5310187377929687, 0.5314037475585938, 1.103942626953125, 0.5305426025390625, 0.5309706420898438, 0.530461669921875, 0.5309747314453125, 0.530756591796875, 0.5311068115234375, 0.5308692626953125, 0.5308150024414062, 0.5308845825195313, 0.5310484619140625, 0.5306859741210938, 0.5307606811523438, 0.5305077514648437, 0.5314058227539062, 0.53094091796875, 0.5307955322265625, 0.5311867065429687, 0.53100341796875, 0.5308580322265625, 
0.5310903930664063, 0.5308118896484375, 0.5307975463867187, 0.5306583251953125, 0.5308969116210938, 0.530861083984375, 0.5308999633789062, 0.5306531982421875, 0.5309398803710937, 0.5309183959960937, 0.5308845825195313, 0.530808837890625, 0.5309910888671875, 0.5313074951171874, 0.5310607299804687, 0.530740234375, 0.5311702880859375, 0.53077197265625, 0.5312318115234375, 0.5309869384765625, 0.5307576293945313, 0.5307412719726563, 0.5308344116210938, 0.5307822265625, 0.5309531860351563, 0.5305784301757812, 0.5323253784179688, 0.5312071533203125, 0.5312880859375, 0.5308795166015625, 0.5310740356445313, 0.5312593994140625, 0.5309235229492187, 0.531114990234375, 0.5311170654296875, 0.5312634887695312, 0.5309481201171875, 0.5309020385742188, 0.5311262817382812, 0.5310023803710937, 0.5309122924804688, 0.5308917236328125, 0.5311027221679687, 1.100294189453125, 0.5306101684570312, 0.530904052734375, 0.5310392456054688, 0.5307822265625, 0.530640869140625, 0.5311344604492187, 0.5307207641601562, 0.531220458984375, 0.5305763549804687, 0.530935791015625, 0.5307678833007813, 0.5314918212890625, 0.5320335083007812, 0.5323519897460938, 0.5317181396484375, 0.5310802001953125, 0.5309747314453125, 0.5312337646484375, 0.5312235717773437, 0.5314109497070313, 0.5310474243164063, 0.5315625, 0.5313208618164063, 0.5311416015625, 0.531430419921875, 0.5316690063476562, 0.5314365234375, 0.531483642578125, 0.530713623046875, 0.5309296875, 0.5309389038085938, 0.5307412719726563, 0.5305579223632813, 0.530681884765625, 0.5306705932617187, 0.5307053833007812, 0.5305855712890625, 0.5307422485351563, 0.53066650390625, 0.5308958740234375, 0.532068359375, 0.532295654296875, 0.5322301635742187, 0.5322598266601563, 0.5318870849609375, 0.5309050903320313, 0.5312890625, 0.5309685668945312, 0.5306531982421875, 0.5310576782226563, 0.530798583984375, 0.5307260131835938, 0.5306234130859375, 0.5307207641601562, 0.5306265869140625, 0.5307678833007813, 0.530714599609375, 0.5308375244140625, 0.53097265625, 0.5309849853515625, 0.5310422973632812, 0.5310628051757813, 1.1018045654296875, 0.5307658081054687, 0.5312337646484375, 0.5306675415039063, 0.5308784790039063, 0.5308600463867188, 0.5310167236328125, 0.531009521484375, 0.5312532348632812, 0.5308733520507812, 0.5311190795898437, 0.5311856689453125, 0.5314058227539062, 0.5310842895507812, 0.5315277099609375, 0.5310382080078125, 0.5311743774414063, 0.5315983276367188, 0.5321942749023437, 0.5316853637695312, 0.5308467407226563, 0.5309767456054687, 0.530850830078125, 0.5307125854492187, 0.5308641357421875, 0.5307555541992187, 0.5308436279296875, 0.5307218017578125, 0.5308006591796876, 0.5305630493164063, 0.5307371215820312, 0.530661376953125, 0.5308436889648438, 0.5309583129882812, 0.5307883911132812, 0.5307330322265625, 0.5312798461914062, 0.5310320434570313, 0.5316075439453125, 0.5309869995117188, 0.5313341674804688, 0.5305620727539062, 0.5311702880859375, 0.5308067626953125, 0.5318553466796875, 0.5307586669921875, 0.5308836059570312, 0.5307012939453125, 0.5307760620117188, 0.5307177124023438, 0.5308969116210938, 0.530735107421875, 0.5311006469726562, 0.5306951904296875, 0.5307473754882812, 0.5309122314453125, 0.5312890625, 0.5314263305664062, 0.530976806640625, 0.530935791015625, 0.530862060546875, 0.530808837890625, 0.5308170166015626, 1.100686279296875, 0.5305743408203125, 0.5307484130859375, 0.53056103515625, 0.5306112060546875, 0.5306911010742188, 0.5307637939453125, 0.5306419067382813, 0.5308395385742187, 0.530756591796875, 0.5308426513671874, 0.5308016357421875, 0.5311057739257813, 
0.5309327392578125, 0.5313269653320313, 0.5308795166015625, 0.531177490234375, 0.5310422973632812, 0.53136279296875, 0.5311201171875, 0.5312051391601562, 0.53100439453125, 0.5311467895507812, 0.5310556030273438, 0.531061767578125, 0.5310576782226563, 0.5312481079101562, 0.5311692504882812, 0.5314559936523438, 0.5308375244140625, 0.5311375122070312, 0.5306767578125, 0.530946044921875, 0.53065625, 0.5307238159179688, 0.5309081420898437, 0.5308159790039062, 0.5306972045898437, 0.53074951171875, 0.5306746215820313, 0.5307698974609375, 0.5306572875976563, 0.530976806640625, 0.5309541625976563, 0.5310791625976562, 0.531009521484375, 0.5310802001953125, 0.5311498413085938, 0.5309020385742188, 0.5308917846679687, 0.5309102172851563, 0.5309030151367188, 0.5314529418945313, 0.5309439697265625, 0.5307105102539063, 0.5309010009765625, 0.5310361328125, 0.5321871337890625, 0.5309573364257812, 0.5309470825195313, 0.5312696533203125, 0.5320478515625, 0.5322874755859375, 1.10145947265625, 0.5313740844726562, 0.5319639282226563, 0.53125732421875, 0.5318215942382812, 0.5319761962890625, 0.5323817138671875, 0.5318543090820312, 0.5324503173828125, 0.53193115234375, 0.53076171875, 0.5305753784179688, 0.5308323974609375, 0.530567138671875, 0.5308436279296875, 0.5306286010742187, 0.5307053833007812, 0.5305753784179688, 0.53100341796875, 0.5306358032226562, 0.5310361328125, 0.5309552612304688, 0.5309389038085938, 0.53064501953125, 0.53108837890625, 0.5308590087890624, 0.5309389038085938, 0.5310259399414062, 0.5312952270507812, 0.5310709838867187, 0.5311764526367188, 0.5312491455078125, 0.5314150390625, 0.5312532348632812, 0.531252197265625, 0.5313546142578125, 0.531367919921875, 0.5319331665039062, 0.5335838623046875, 0.5311641845703124, 0.531441650390625, 0.5313925170898437, 0.5322434692382813, 0.5322864379882812, 0.5321441040039062, 0.532115478515625, 0.531388427734375, 0.5306961669921875, 0.53096240234375, 0.5312880859375, 0.53085693359375, 0.5306890869140625, 0.5307586059570313, 0.53071875, 0.5307207641601562, 0.53096240234375, 0.5315604248046875, 0.5309337768554687, 0.5310587158203125, 0.5309173583984375, 0.53144677734375, 0.530819091796875, 0.5310504760742187, 1.1015126953125, 0.5308446655273438, 0.53172021484375, 0.5307381591796875, 0.5307678833007813, 0.5306972045898437, 0.5308292846679687, 0.5307166748046875, 0.530808837890625, 0.5306388549804687, 0.5306736450195313, 0.5309235229492187, 0.5309235229492187, 0.53070849609375, 0.5309276123046875, 0.5306429443359375, 0.5307852783203125, 0.5307914428710937, 0.5309736938476562, 0.5309429931640625, 0.5308538818359375, 0.530862060546875, 0.530893798828125, 0.5308037109375, 0.530777099609375, 0.5308395385742187, 0.5308600463867188, 0.5307258911132813, 0.5308436279296875, 0.530629638671875, 0.531146728515625, 0.5308661499023437, 0.531294189453125, 0.5314826049804687, 0.5312481079101562, 0.53166796875, 0.5312839965820313, 0.53075048828125, 0.530819091796875, 0.5311170654296875, 0.5316137084960938, 0.5317027587890625, 0.5319608154296875, 0.5318799438476562, 0.5317857055664063, 0.531937255859375, 0.5316761474609375, 0.5316976928710937, 0.5317744750976563, 0.5308323974609375, 0.531051513671875, 0.5311907958984375, 0.5314478149414062, 0.531346435546875, 0.5310218505859375, 0.5310238647460938, 0.5312337646484375, 0.5313167114257813, 0.5308651733398437, 0.530703369140625, 0.5307924194335938, 0.5308026733398438, 0.5309132690429688, 1.1025264892578126, 0.53065625, 0.5309061279296875, 0.5318810424804687, 0.5307862548828125, 0.530639892578125, 0.53081396484375, 
0.5309368286132813, 0.53110888671875, 0.5308538818359375, 0.5307095336914063, 0.5305200805664062, 0.5307893676757812, 0.5306808471679687, 0.530820068359375, 0.5304883422851563, 0.5308170166015626, 0.5304985961914063, 0.5307473754882812, 0.5305302734375, 0.5306531982421875, 0.530513916015625, 0.5310648193359375, 0.5307095336914063, 0.5307955322265625, 0.5309153442382812, 0.5314549560546875, 0.5309439697265625, 0.5312911376953126, 0.5316034545898437, 0.5309531860351563, 0.5309869995117188, 0.5310361328125, 0.5308016357421875, 0.5307443237304688, 0.5306705932617187, 0.5307801513671875, 0.530608154296875, 0.530639892578125, 0.5306634521484375, 0.5308221435546875, 0.5308283081054688, 0.5309696044921876, 0.5313065185546875, 0.5313300170898437, 0.5313269653320313, 0.5309522094726562, 0.531056640625, 0.5314488525390625, 0.5310975952148438, 0.5314590454101562, 0.53098291015625, 0.5310863647460937, 0.5309696044921876, 0.53103515625, 0.5311734008789063, 0.530808837890625, 0.530746337890625, 0.53139453125, 0.5307208251953125, 0.5316126708984374, 0.5313197631835938, 0.5314866943359375, 1.1037603759765624, 0.531114013671875, 0.5313157348632812, 0.5316669311523438, 0.5312061157226563, 0.5314129638671875, 0.5316454467773437, 0.531177490234375, 0.5316536865234375, 0.530905029296875, 0.5309481201171875, 0.5308477172851562, 0.5310320434570313, 0.5309378662109375, 0.5309757690429687, 0.5313228759765625, 0.5311948852539062, 0.5310637817382813, 0.5309552612304688, 0.5314559936523438, 0.5309481201171875, 0.5310003051757812, 0.5309910888671875, 0.5306224365234375, 0.5306593017578125, 0.5306255493164063, 0.5308753662109374, 0.5307627563476562, 0.5307576293945313, 0.530713623046875, 0.53089892578125, 0.530850830078125, 0.5307238159179688, 0.5306982421875, 0.5311590576171875, 0.530724853515625, 0.5312921752929688, 0.5312542724609375, 0.5312317504882812, 0.5316403198242188, 0.5309900512695312, 0.5310330810546875, 0.5311273193359375, 0.5309204711914063, 0.53094091796875, 0.5306009521484375, 0.530967529296875, 0.5310392456054688, 0.5308815307617187, 0.5307689208984375, 0.5308272705078125, 0.5306911010742188, 0.5308804931640625, 0.5311528930664062, 0.5310679321289062, 0.5312716674804687, 0.531794921875, 0.531114013671875, 0.5313935546875, 0.5312348022460938, 0.5312020263671875, 0.5310812377929688, 0.5309931640625, 1.102993408203125, 0.5305927124023437, 0.53087744140625, 0.530703369140625, 0.5310504760742187, 0.5310187377929687, 0.5312553100585937, 0.5311918334960938, 0.5312839965820313, 0.5307781372070313, 0.5309859619140626, 0.5311580200195313, 0.5308999633789062, 0.5308969116210938, 0.5311795043945312, 0.5306071166992188, 0.5307647705078125, 0.5309890747070313, 0.5307586669921875, 0.5306500854492188, 0.530893798828125, 0.530766845703125, 0.5308999633789062, 0.5307760620117188, 0.5306542358398437, 0.530555908203125, 0.5319547119140625, 0.53079345703125, 0.5315061645507813, 0.5311846313476563, 0.5316218872070313, 0.5312348022460938, 0.5321912231445313, 0.5312000122070313, 0.531431396484375, 0.5311488037109375, 0.5308743896484375, 0.530819091796875, 0.5310812377929688, 0.5307105102539063, 0.53081396484375, 0.5309736938476562, 0.53100439453125, 0.5308211059570312, 0.5308876953125, 0.5309081420898437, 0.5309849853515625, 0.5307340698242188, 0.53097265625, 0.5311795043945312, 0.5309010009765625, 0.5311529541015625, 0.5311456909179687, 0.5306522216796875, 0.5312962036132812, 0.5308641357421875, 0.5309163818359375, 0.5308590087890624, 0.5311928100585938, 0.5311190795898437, 0.5309706420898438, 0.530935791015625, 
0.531294189453125]",tokens/s,1.8545496451794004,,,,,,,, -4bit-awq-gemm-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,r,r,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 304, in hf_raise_for_status - response.raise_for_status() - File ""/usr/local/lib/python3.10/dist-packages/requests/models.py"", line 1024, in raise_for_status - raise HTTPError(http_error_msg, response=self) -requests.exceptions.HTTPError: 404 Client Error: Not Found for url: https://huggingface.co/r/resolve/main/config.json - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1221, in hf_hub_download - return _hf_hub_download_to_cache_dir( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1325, in _hf_hub_download_to_cache_dir - _raise_on_head_call_error(head_call_error, force_download, local_files_only) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1823, in _raise_on_head_call_error - raise head_call_error - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1722, in _get_metadata_or_catch_error - metadata = get_hf_file_metadata(url=url, proxies=proxies, timeout=etag_timeout, headers=headers) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1645, in get_hf_file_metadata - r = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 372, in _request_wrapper - response = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 396, in _request_wrapper - hf_raise_for_status(response) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status - raise RepositoryNotFoundError(message, response) from e 
-huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-66949111-525794a369858c23485e6ee4;e7a63d89-e585-4e10-9bbe-707be6547645) - -Repository Not Found for url: https://huggingface.co/r/resolve/main/config.json. -Please make sure you specified the correct `repo_id` and `repo_type`. -If you are trying to access a private or gated repo, make sure you are authenticated. - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 425, in cached_file - raise EnvironmentError( -OSError: r is not a local folder and is not a valid model identifier listed on 'https://huggingface.co/models' -If this is a private repository, make sure to pass a token having permission to this repo either by logging in with `huggingface-cli login` or by passing `token=<your_token>` - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemm-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,google/recurrentgemma-7b,google/recurrentgemma-7b,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File 
""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 304, in hf_raise_for_status - response.raise_for_status() - File ""/usr/local/lib/python3.10/dist-packages/requests/models.py"", line 1024, in raise_for_status - raise HTTPError(http_error_msg, response=self) -requests.exceptions.HTTPError: 404 Client Error: Not Found for url: https://huggingface.co/google/recurrentgemma-7b/resolve/main/config.json - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1221, in hf_hub_download - return _hf_hub_download_to_cache_dir( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1325, in _hf_hub_download_to_cache_dir - _raise_on_head_call_error(head_call_error, force_download, local_files_only) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1823, in _raise_on_head_call_error - raise head_call_error - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1722, in _get_metadata_or_catch_error - metadata = get_hf_file_metadata(url=url, proxies=proxies, timeout=etag_timeout, headers=headers) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1645, in get_hf_file_metadata - r = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 372, in _request_wrapper - response = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 396, in _request_wrapper - hf_raise_for_status(response) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status - raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-6694823c-4a01c9f6502a3b8f674a2972;1e80b430-396a-45fa-abde-c723cebba1ac) - -Repository Not Found for url: https://huggingface.co/google/recurrentgemma-7b/resolve/main/config.json. -Please make sure you specified the correct `repo_id` and `repo_type`. -If you are trying to access a private or gated repo, make sure you are authenticated. 
- -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 425, in cached_file - raise EnvironmentError( -OSError: google/recurrentgemma-7b is not a local folder and is not a valid model identifier listed on 'https://huggingface.co/models' -If this is a private repository, make sure to pass a token having permission to this repo either by logging in with `huggingface-cli login` or by passing `token=<your_token>` - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemm-sdpa,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,huggyllama/llama-13b,huggyllama/llama-13b,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.0,,0.30.1,,,,1.19.2,,,,0.11.1,,,,,,,,,,,,,,,,,,,,,,,,,,,MB,2981.855232,9259.450368,0.0,8613.003264,8211.364864,s,10,10.951241455078126,1.0951241455078125,0.002069112877122291,1.0949886474609376,1.0979957763671875,1.0981948486328124,1.0983541064453124,"[1.0979515380859375, 1.0983939208984375, 1.0932901611328125, 1.0924078369140624, 1.0937889404296874, 1.092544677734375, 1.0941727294921875, 1.0958045654296875, 1.096221435546875, 1.0966656494140625]",tokens/s,233.76345143161097,kWh,1.2900002946456276e-05,7.068370924207557e-06,5.9710658879597037e-05,7.967903275026086e-05,tokens/kWh,3212890.4074724964,MB,2986.262528,9330.753536,0.0,8684.306432,8503.627264,s,10,640.8562343749999,64.0856234375,0.007507663361876726,64.08716796875,64.093946875,64.09397148437499,64.09399117187499,"[64.08627734375, 64.0873125, 64.09369921875, 64.09394140625, 64.0870234375, 64.09399609375, 64.08822265625, 64.078796875, 64.0717890625, 
64.07517578125]",tokens/s,0.9830597975135754,kWh,0.000756652800159322,0.0004147102533278666,0.0034761336142379974,0.004647496667725186,tokens/kWh,13555.684813613145,,s,629,649.691616149902,1.0328960511127223,0.130104123840355,1.0171883544921876,1.0177021728515625,1.01789716796875,2.1117173046874997,"[1.0174935302734376, 1.0171514892578124, 1.0171883544921876, 1.017997314453125, 1.0175242309570312, 1.0175477905273438, 1.0175784912109376, 1.017881591796875, 1.0172057495117188, 1.0168115234375, 1.0167367553710938, 1.0173132934570313, 1.0168719482421875, 1.0171760864257813, 1.0166824951171876, 1.0168237915039062, 1.0169927978515625, 1.0175252685546874, 1.0170921020507813, 1.0169978637695312, 1.0169354248046876, 1.0170029907226563, 1.0173900756835939, 1.017280517578125, 1.016859619140625, 1.0172426147460938, 1.016791015625, 1.0168565673828125, 1.0172825317382812, 1.01675927734375, 1.0171392211914063, 1.0170572509765625, 1.0173419799804688, 1.0176777954101563, 1.0177576904296874, 1.0170183715820313, 1.0177105712890624, 1.0174996337890625, 1.0173941650390625, 1.0173890380859374, 1.017080810546875, 1.0169651489257812, 1.0173880615234374, 1.0172467041015625, 1.0171791381835937, 1.016754150390625, 1.0172498168945312, 1.0174290161132813, 1.0169159545898439, 1.0170194091796876, 1.016964111328125, 1.0168411865234375, 1.0170531616210938, 1.0171678466796874, 1.0177003784179688, 1.0177116088867189, 1.0175713500976562, 1.0176112670898438, 1.01741259765625, 1.01737060546875, 1.0172507934570312, 1.0171494140625, 2.116391845703125, 1.01690673828125, 1.0169548950195313, 1.0181068725585938, 1.0178170776367188, 1.0172692260742187, 1.0174985961914063, 1.017291748046875, 1.0175293579101563, 1.0175150146484375, 1.016748046875, 1.0170767211914062, 1.0172333984375, 1.0175641479492188, 1.0175477905273438, 1.0169292602539062, 1.016869873046875, 1.0169712524414063, 1.0170634155273437, 1.0173245239257813, 1.0168094482421874, 1.016943603515625, 1.0171064453125, 1.0171781005859375, 1.017275390625, 1.0166548461914062, 1.017407470703125, 1.0169978637695312, 1.016574951171875, 1.0173306884765625, 1.0168370971679688, 1.016875, 1.0170009765625, 1.0174505004882812, 1.0167992553710938, 1.0171494140625, 1.0171607055664063, 1.0168944702148437, 1.0170101928710937, 1.017059326171875, 1.017080810546875, 1.0172037353515626, 1.0174771118164063, 1.0174095458984376, 1.0170245361328125, 1.019852783203125, 1.0168125610351562, 1.0170029907226563, 1.0175538940429687, 1.0178017578125, 1.0176777954101563, 1.016995849609375, 1.0182564086914063, 1.0176224975585937, 1.0173235473632813, 1.01770751953125, 1.0170552368164063, 1.01711669921875, 1.0171351318359374, 1.0170449829101562, 1.016859619140625, 1.0170890502929688, 1.0175088500976563, 2.11154345703125, 1.0170613403320312, 1.0173931274414063, 1.0170582885742188, 1.01701123046875, 1.01686474609375, 1.0172272338867188, 1.0170921020507813, 1.0172078247070313, 1.0167490844726563, 1.0166835327148438, 1.0169088134765625, 1.0174351196289062, 1.0175641479492188, 1.017354248046875, 1.0168699340820313, 1.0168974609375, 1.0171945190429688, 1.0176737670898437, 1.0177105102539064, 1.017945068359375, 1.0175324096679688, 1.0171945190429688, 1.0176880493164062, 1.0172406005859376, 1.0171146240234374, 1.0182410278320313, 1.0179573974609375, 1.0179983520507812, 1.0176327514648438, 1.0174893798828124, 1.0176010131835938, 1.0182543334960938, 1.0177720336914062, 1.0173716430664062, 1.017248779296875, 1.016859619140625, 1.0170859375, 1.0173767700195313, 1.0172733154296876, 1.0168115234375, 1.0170787963867187, 
1.0168923950195312, 1.0172713012695314, 1.0172866821289062, 1.0175477905273438, 1.0167623901367187, 1.0171729736328126, 1.0170921020507813, 1.0174443359375, 1.0174791870117188, 1.017380859375, 1.0176481323242188, 1.0177054443359375, 1.0170101928710937, 1.01709619140625, 1.0171812133789062, 1.0176399536132812, 1.0173388671875, 1.017680908203125, 1.0173245239257813, 1.0175344848632812, 1.016958984375, 2.111784912109375, 1.0173778076171875, 1.0168678588867188, 1.0177362060546875, 1.01760205078125, 1.0175693359375, 1.0170152587890624, 1.0167971801757814, 1.0172047119140626, 1.0175958862304688, 1.016896484375, 1.0167449340820311, 1.017417724609375, 1.0172200927734374, 1.016796142578125, 1.0171340942382812, 1.016943603515625, 1.01682177734375, 1.0168862915039063, 1.0173562622070313, 1.017080810546875, 1.0174678955078125, 1.0173972778320313, 1.0175897827148437, 1.017354248046875, 1.0172293090820312, 1.0175057983398437, 1.0172456665039062, 1.0170859375, 1.0177197875976562, 1.017154541015625, 1.0173880615234374, 1.0175590209960939, 1.01743408203125, 1.016933349609375, 1.0172835693359374, 1.017112548828125, 1.0172262573242188, 1.0170572509765625, 1.017354248046875, 1.01725390625, 1.0173184204101562, 1.018461181640625, 1.0179942626953125, 1.0194544677734374, 1.0179215087890625, 1.0178590698242187, 1.0177402954101562, 1.017133056640625, 1.0170347290039063, 1.0172620849609375, 1.0173992919921875, 1.0176665649414063, 1.01707470703125, 1.0167613525390624, 1.0173092041015626, 1.0173604125976563, 1.0174525146484374, 1.017565185546875, 1.0170890502929688, 1.01747509765625, 1.017607177734375, 1.0175682373046875, 2.112203857421875, 1.017459716796875, 1.0166384887695312, 1.0169661865234374, 1.0174985961914063, 1.0174044189453124, 1.016933349609375, 1.0176819458007813, 1.0176041259765625, 1.0174843139648437, 1.017217041015625, 1.0171586303710938, 1.01719140625, 1.017375732421875, 1.0171525268554686, 1.0169210815429688, 1.017154541015625, 1.0176266479492186, 1.0178191528320313, 1.0170921020507813, 1.0168934326171875, 1.0171627807617187, 1.0176942138671874, 1.017333740234375, 1.0175324096679688, 1.0170572509765625, 1.0169077758789062, 1.017111572265625, 1.0173767700195313, 1.0171566162109376, 1.0170460205078125, 1.0172252197265625, 1.0173870239257812, 1.0172252197265625, 1.0171300048828125, 1.0168084716796875, 1.017064453125, 1.0177720336914062, 1.016975341796875, 1.0169774169921875, 1.0169395141601563, 1.0170787963867187, 1.0171217651367188, 1.0177638549804688, 1.0169712524414063, 1.016958984375, 1.01680126953125, 1.0175764770507814, 1.0173480834960937, 1.0168197021484375, 1.017017333984375, 1.0169978637695312, 1.0169896850585938, 1.0174003295898437, 1.0170214233398438, 1.01707568359375, 1.0175559692382812, 1.01893017578125, 1.0172241821289063, 1.0170316772460937, 1.01722216796875, 1.0172119140625, 1.0173163452148437, 2.1100595703125, 1.0167705688476563, 1.0177894287109375, 1.0171873168945313, 1.0167449340820311, 1.0167142333984376, 1.016875, 1.0168862915039063, 1.0167982177734376, 1.0173235473632813, 1.0169313354492187, 1.0169343872070313, 1.0175621337890626, 1.017955322265625, 1.0172938232421875, 1.0173009643554687, 1.017554931640625, 1.0171617431640625, 1.0173388671875, 1.0177402954101562, 1.0175774536132813, 1.0175682373046875, 1.0177136840820313, 1.0181427001953125, 1.0177576904296874, 1.0176788330078126, 1.017692138671875, 1.017617431640625, 1.0178262939453124, 1.0180515747070313, 1.0169354248046876, 1.0168514404296876, 1.0176296997070313, 1.0172395629882813, 1.0167859497070313, 1.0172784423828125, 
1.0170859375, 1.0170224609375, 1.0186465454101563, 1.0175170288085937, 1.0169405517578125, 1.0179154052734376, 1.0179379272460938, 1.01726318359375, 1.0171544799804688, 1.0171340942382812, 1.0169609985351562, 1.0169047241210938, 1.017776123046875, 1.017396240234375, 1.0173921508789063, 1.0171002807617187, 1.0185277709960938, 1.017439208984375, 1.017312255859375, 1.017007080078125, 1.0172160034179687, 1.0172160034179687, 1.017218017578125, 1.0172958984375, 1.0172764282226563, 1.0170203857421876, 1.017691162109375, 2.111909912109375, 1.0176123046875, 1.0175139770507813, 1.0174228515625, 1.0174617309570313, 1.0175221557617187, 1.0169579467773437, 1.0177576904296874, 1.0171986083984375, 1.0168289184570312, 1.0170685424804689, 1.0173532104492187, 1.0175938720703126, 1.017185302734375, 1.01743408203125, 1.0171443481445313, 1.016764404296875, 1.0172661743164062, 1.0177013549804688, 1.0171986083984375, 1.017049072265625, 1.0170828857421874, 1.0174166870117187, 1.0170664672851562, 1.0174453735351563, 1.0168790893554687, 1.0178897705078125, 1.0175170288085937, 1.0171791381835937, 1.0170726928710938, 1.0169456176757812, 1.0167869262695313, 1.0170419311523438, 1.0173788452148438, 1.017333740234375, 1.0175242309570312, 1.018076171875, 1.01758056640625, 1.0170736694335938, 1.0170521850585938, 1.0168186645507813, 1.0173572998046876, 1.0176532592773437, 1.01709619140625, 1.0169476928710937, 1.0168115844726562, 1.0169046630859375, 1.0173613891601563, 1.0177177734375, 1.0168975219726561, 1.016826904296875, 1.0167296142578126, 1.017049072265625, 1.0175293579101563, 1.0174402465820314, 1.0174054565429687, 1.0173552856445311, 1.017469970703125, 1.0172620849609375, 1.0167603149414062, 1.0176635131835938, 1.0175570068359374, 1.0175396118164062, 2.113271728515625, 1.0164520874023437, 1.0175570068359374, 1.0175221557617187, 1.0169467163085937, 1.01684326171875, 1.01673779296875, 1.0166527709960937, 1.016406005859375, 1.01699072265625, 1.0166343383789063, 1.0166343383789063, 1.016573974609375, 1.0167418823242187, 1.0167675170898438, 1.0167920532226562, 1.0167357177734375, 1.0169302978515624, 1.0171954956054687, 1.0174054565429687, 1.0170368041992188, 1.0171791381835937, 1.0176737060546874, 1.0177310791015626, 1.0174033813476562, 1.0174044189453124, 1.017459716796875, 1.0175098876953126, 1.0173675537109375, 1.0176378784179687, 1.017259033203125, 1.0173460693359375, 1.0176511840820313, 1.0174822387695313, 1.0173767700195313, 1.0175150146484375, 1.0174218139648437, 1.0178508911132813, 1.0178406372070312, 1.0176204833984375, 1.0173767700195313, 1.0178017578125, 1.0179000244140626, 1.0172119140625, 1.0165678100585938, 1.0165330200195313, 1.0163292236328125, 1.0167285766601561, 1.0169968872070312, 1.0166988525390626, 1.0169210815429688, 1.0170480346679687, 1.0171238403320313, 1.0167817993164063, 1.0167633666992189, 1.0170337524414061, 1.0168043823242188, 1.0171392211914063, 1.017153564453125, 1.0167838745117188, 1.0169251708984375, 1.0174423217773438, 1.01701220703125, 2.113585205078125, 1.0164449462890626, 1.016585205078125, 1.0164551391601562, 1.01684326171875, 1.016680419921875, 1.016753173828125, 1.0173470458984375, 1.0173030395507812, 1.0169528198242188, 1.0168330078125, 1.0170194091796876, 1.01732763671875, 1.0167736206054687, 1.0168514404296876, 1.0169835815429686, 1.0166835327148438, 1.01719140625, 1.0171043701171876, 1.0166845703125, 1.017260009765625, 1.0176849975585938, 1.0174935302734376, 1.017529296875, 1.0172733154296876, 1.0169231567382813, 1.0173982543945312, 1.0170020141601563, 1.016616943359375, 
1.0167767333984374, 1.0169467163085937, 1.0172088623046875, 1.01715966796875, 1.016932373046875, 1.0170357666015626, 1.0171320190429687, 1.01718017578125, 1.0174566650390624, 1.0167255249023437, 1.0167971801757814, 1.0167633666992189, 1.0170337524414061, 1.0170050659179688, 1.0170439453125, 1.01673779296875, 1.0168330078125, 1.0170828857421874, 1.017059326171875, 1.0168739624023437, 1.0171300048828125, 1.0171238403320313, 1.016974365234375, 1.0169026489257813, 1.0171504516601562, 1.0168207397460938, 1.0168319702148438, 1.0172692260742187, 1.0173624877929688, 1.016796142578125, 1.0168391723632813, 1.0171893920898438, 1.017365478515625, 1.016943603515625, 2.1139599609375, 1.0168893432617188, 1.0171945190429688, 1.016859619140625, 1.0165811157226563, 1.0167500610351563, 1.01697021484375, 1.016826904296875, 1.0168453369140624, 1.0170132446289062, 1.0168053588867187, 1.0173511962890625, 1.01734912109375, 1.0167859497070313, 1.0169036865234375, 1.0171238403320313, 1.0169866333007813, 1.0171996459960937, 1.0168934326171875, 1.0169763793945312, 1.0167183227539063, 1.0170234985351563, 1.0178928833007812, 1.0167654418945313, 1.0169620361328124, 1.0170040283203126, 1.0167449340820311, 1.0173562622070313, 1.016680419921875, 1.0172979125976562, 1.0165924072265624, 1.017101318359375, 1.0172999877929687, 1.0170224609375, 1.0172262573242188, 1.0168402099609375, 1.0170101928710937, 1.0167879638671875, 1.01705419921875, 1.0175529174804687, 1.0173265991210938, 1.01718017578125, 1.0174719848632812, 1.0170245361328125, 1.0174248657226563, 1.016958984375, 1.0166087646484374, 1.0166466674804688, 1.0171975708007812, 1.01707470703125, 1.0169210815429688, 1.0175795288085938, 1.0173245239257813, 1.0168084716796875, 1.0169722900390625, 1.0170470581054687, 1.0169948120117187, 1.0169896850585938, 1.0172006225585937, 1.0173716430664062, 1.0176337890625, 1.0176676025390625, 1.0172323608398437]",tokens/s,0.9681516343515072,,,,,,,, -4bit-awq-gemm-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,Deci/DeciCoder-1b,Deci/DeciCoder-1b,cuda,0,42,,,True,,,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run - report = scenario.run(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run - self.run_model_loading_tracking(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 
182, in run_model_loading_tracking - backend.load() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load - self.load_transformers_model() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model - self.load_transformers_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights - self.load_transformers_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained - self.pretrained_model = self.automodel_loader.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 559, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained - model = cls(config, *model_args, **model_kwargs) - File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 248, in __init__ - self.model = DeciCoderModel(config) - File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 215, in __init__ - self.layers = nn.ModuleList([DeciCoderDecoderLayer(config) for _ in range(config.num_hidden_layers)]) - File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 215, in <listcomp> - self.layers = nn.ModuleList([DeciCoderDecoderLayer(config) for _ in range(config.num_hidden_layers)]) - File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 181, in __init__ - self.self_attn = DeciCoderAttention(config=config) - File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 54, in __init__ - self._init_rope() - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1729, in __getattr__ - raise AttributeError(f""'{type(self).__name__}' object has no attribute '{name}'"") -AttributeError: 'DeciCoderAttention' object has no attribute '_init_rope' - -",llama,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemm-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,google/recurrentgemma-2b,google/recurrentgemma-2b,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File 
""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 304, in hf_raise_for_status - response.raise_for_status() - File ""/usr/local/lib/python3.10/dist-packages/requests/models.py"", line 1024, in raise_for_status - raise HTTPError(http_error_msg, response=self) -requests.exceptions.HTTPError: 403 Client Error: Forbidden for url: https://huggingface.co/google/recurrentgemma-2b/resolve/main/config.json - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1722, in _get_metadata_or_catch_error - metadata = get_hf_file_metadata(url=url, proxies=proxies, timeout=etag_timeout, headers=headers) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1645, in get_hf_file_metadata - r = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 372, in _request_wrapper - response = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 396, in _request_wrapper - hf_raise_for_status(response) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 367, in hf_raise_for_status - raise HfHubHTTPError(message, response=response) from e -huggingface_hub.utils._errors.HfHubHTTPError: (Request ID: Root=1-669481ce-544eb32470307c7c2118975a;f8b13f50-216d-4c20-873f-b5e2fce96257) - -403 Forbidden: Please enable access to public gated repositories in your fine-grained token settings to view this repository.. -Cannot access content at: https://huggingface.co/google/recurrentgemma-2b/resolve/main/config.json. -If you are trying to create or update content,make sure you have a token with the `write` role. - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1221, in hf_hub_download - return _hf_hub_download_to_cache_dir( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1325, in _hf_hub_download_to_cache_dir - _raise_on_head_call_error(head_call_error, force_download, local_files_only) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1826, in _raise_on_head_call_error - raise LocalEntryNotFoundError( -huggingface_hub.utils._errors.LocalEntryNotFoundError: An error happened while trying to locate the file on the Hub and we cannot find the requested files in the local cache. Please check your connection and try again or make sure your Internet connection is on. 
- -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 445, in cached_file - raise EnvironmentError( -OSError: We couldn't connect to 'https://huggingface.co' to load this file, couldn't find it in the cached files and it looks like google/recurrentgemma-2b is not the path to a directory containing a file named config.json. -Checkout your internet connection or see how to run the library in offline mode at 'https://huggingface.co/docs/transformers/installation#offline-mode'. 
- -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemm-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,v,v,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 304, in hf_raise_for_status - response.raise_for_status() - File ""/usr/local/lib/python3.10/dist-packages/requests/models.py"", line 1024, in raise_for_status - raise HTTPError(http_error_msg, response=self) -requests.exceptions.HTTPError: 404 Client Error: Not Found for url: https://huggingface.co/v/resolve/main/config.json - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1221, in hf_hub_download - return _hf_hub_download_to_cache_dir( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1325, in _hf_hub_download_to_cache_dir - _raise_on_head_call_error(head_call_error, force_download, local_files_only) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1823, in _raise_on_head_call_error - raise head_call_error - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1722, in _get_metadata_or_catch_error - metadata = get_hf_file_metadata(url=url, proxies=proxies, timeout=etag_timeout, headers=headers) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1645, in get_hf_file_metadata - r = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 372, in _request_wrapper - response = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 396, in _request_wrapper - hf_raise_for_status(response) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status - raise RepositoryNotFoundError(message, response) from e 
-huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-66949482-3719cd0645f9a8c14f888ff0;b01adc61-02a6-4d1d-8afa-ae53d5ffe983) - -Repository Not Found for url: https://huggingface.co/v/resolve/main/config.json. -Please make sure you specified the correct `repo_id` and `repo_type`. -If you are trying to access a private or gated repo, make sure you are authenticated. - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 425, in cached_file - raise EnvironmentError( -OSError: v is not a local folder and is not a valid model identifier listed on 'https://huggingface.co/models' -If this is a private repository, make sure to pass a token having permission to this repo either by logging in with `huggingface-cli login` or by passing `token=` - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemm-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,EleutherAI/pythia-2.7b,EleutherAI/pythia-2.7b,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.42.3,,0.31.0,,,,1.20.0,,,,0.11.1,,,,,,,,,,,,,,,,,,,,,,,,,,,MB,2265.694208,3330.801664,0.0,2684.35456,2448.256,s,10,2.319208984375,0.2319208984375,0.0009696989948218854,0.2316784973144531,0.23303465728759767,0.23331750717163086,0.23354378707885742,"[0.23360035705566407, 0.23106300354003906, 0.23155523681640625, 0.23113821411132812, 0.23119439697265626, 0.23046585083007812, 0.2318017578125, 0.2329718017578125, 0.23292591857910155, 
0.23249244689941406]",tokens/s,1103.824630400822,kWh,2.7226702056147833e-06,1.491899283576025e-06,1.2428040245455892e-05,1.66426097346467e-05,tokens/kWh,15382202.916592907,MB,2265.694208,3330.801664,0.0,2684.35456,2572.686848,s,10,135.717251953125,13.571725195312501,0.01175719247352948,13.5685810546875,13.5886708984375,13.58988818359375,13.59086201171875,"[13.588400390625, 13.583669921875, 13.59110546875, 13.5705322265625, 13.5677392578125, 13.5600986328125, 13.5679013671875, 13.5521083984375, 13.5692607421875, 13.566435546875]",tokens/s,4.642003805216995,kWh,0.000160095536988793,8.774532296239601e-05,0.0007202301292139377,0.0009680709891651268,tokens/kWh,65077.872082843605,,s,629,137.58601713562004,0.21873770609796522,0.027557412902918375,0.21539942932128905,0.2159298553466797,0.2160373779296875,0.4467827087402344,"[0.21633331298828126, 0.21582643127441406, 0.21582028198242187, 0.21581414794921874, 0.21571174621582032, 0.21592576599121094, 0.21572607421875, 0.2158233642578125, 0.2159646759033203, 0.21584690856933594, 0.21583155822753905, 0.2158192596435547, 0.21583155822753905, 0.2157424621582031, 0.21579161071777345, 0.2158305206298828, 0.2159831085205078, 0.2154168395996094, 0.2150440979003906, 0.21582949829101564, 0.2148853759765625, 0.21520895385742186, 0.21547520446777343, 0.21552435302734374, 0.2149591064453125, 0.21545164489746094, 0.21503077697753906, 0.21568716430664062, 0.21583769226074218, 0.21579776000976564, 0.21587968444824218, 0.21599845886230468, 0.21593702697753905, 0.21590733337402343, 0.215088134765625, 0.21520077514648436, 0.21582847595214844, 0.21543833923339845, 0.21507891845703125, 0.21532672119140625, 0.2152755126953125, 0.21532365417480467, 0.21506355285644532, 0.2150564422607422, 0.21531129455566406, 0.21545164489746094, 0.21539430236816406, 0.21608857727050781, 0.21539021301269531, 0.21621043395996092, 0.21590322875976561, 0.21593087768554686, 0.21835877990722657, 0.21600668334960937, 0.21558575439453126, 0.2158970947265625, 0.21553152465820313, 0.21525401306152345, 0.21598208618164064, 0.21550387573242188, 0.2158039093017578, 0.21546188354492188, 0.446887939453125, 0.21535130310058595, 0.21620838928222658, 0.21549568176269532, 0.21547007751464844, 0.21541375732421875, 0.21529087829589844, 0.2152929229736328, 0.2151884765625, 0.2157445068359375, 0.21559091186523438, 0.2155878448486328, 0.21526937866210938, 0.21540966796875, 0.21536972045898437, 0.2159861755371094, 0.21592576599121094, 0.21588890075683595, 0.21573939514160156, 0.21593702697753905, 0.215689208984375, 0.21498162841796875, 0.21494374084472656, 0.21587353515625, 0.21586944580078124, 0.21577932739257813, 0.2158233642578125, 0.21595135498046875, 0.21583973693847655, 0.2160025634765625, 0.2159482879638672, 0.2154659881591797, 0.2158223419189453, 0.2156021728515625, 0.21595545959472656, 0.21571891784667968, 0.215193603515625, 0.21557760620117186, 0.21547520446777343, 0.21582643127441406, 0.21502873229980468, 0.21543014526367188, 0.21531135559082032, 0.2156943359375, 0.21508607482910155, 0.21500416564941408, 0.21636607360839843, 0.21551615905761717, 0.21563497924804687, 0.21572502136230468, 0.21526629638671874, 0.21593600463867188, 0.21595852661132814, 0.21549465942382812, 0.21517926025390624, 0.21620530700683593, 0.21590428161621095, 0.21612130737304688, 0.2151557159423828, 0.21511474609375, 0.21573631286621095, 0.21598719787597656, 0.21551206970214845, 0.44768972778320315, 0.2158203125, 0.21590115356445314, 0.21540249633789063, 0.21497549438476563, 0.21565235900878907, 0.21590118408203124, 
0.21540045166015626, 0.21506661987304687, 0.21498982238769532, 0.21571481323242186, 0.21585101318359376, 0.21577728271484375, 0.21612850952148438, 0.2159298553466797, 0.21592268371582032, 0.21620121765136718, 0.2161960906982422, 0.2159800262451172, 0.21565951538085937, 0.21559500122070313, 0.2160394287109375, 0.21583769226074218, 0.21571174621582032, 0.21562162780761718, 0.21567897033691405, 0.2152356414794922, 0.21511981201171876, 0.21545062255859376, 0.21512191772460937, 0.21599845886230468, 0.2161397705078125, 0.21597698974609375, 0.2159718017578125, 0.21596365356445313, 0.2159298553466797, 0.21534310913085938, 0.2151700439453125, 0.21528985595703126, 0.21561856079101563, 0.2161786804199219, 0.2158970947265625, 0.21545062255859376, 0.21527655029296874, 0.21559706115722657, 0.21529702758789063, 0.21561138916015626, 0.21627903747558594, 0.21585714721679689, 0.21586842346191407, 0.21587251281738282, 0.21545574951171875, 0.2160875549316406, 0.2159861755371094, 0.21609368896484374, 0.21604556274414063, 0.21556019592285156, 0.21616026306152344, 0.21599948120117188, 0.21599436950683593, 0.2158745574951172, 0.21579263305664062, 0.21546290588378905, 0.44716851806640623, 0.21555917358398438, 0.2155397186279297, 0.2155018310546875, 0.2152489013671875, 0.21502362060546876, 0.2149601287841797, 0.21482188415527342, 0.2149959716796875, 0.2149283905029297, 0.2150338592529297, 0.2149365692138672, 0.21536358642578124, 0.21536460876464844, 0.21503488159179687, 0.21504512023925781, 0.21507481384277344, 0.21493145751953124, 0.21513523864746092, 0.21498880004882812, 0.21514239501953125, 0.2149949493408203, 0.21579986572265625, 0.2155232696533203, 0.21519258117675782, 0.2149427185058594, 0.21537689208984376, 0.21551615905761717, 0.21563288879394532, 0.21554585266113283, 0.2156093444824219, 0.21598719787597656, 0.2151751708984375, 0.2150102996826172, 0.2150768585205078, 0.21530624389648437, 0.2157660217285156, 0.2154649658203125, 0.21526016235351564, 0.2152314910888672, 0.21540045166015626, 0.21582028198242187, 0.21547520446777343, 0.21540045166015626, 0.2159267883300781, 0.2155018310546875, 0.21591448974609376, 0.2159093780517578, 0.21571484375, 0.21552432250976564, 0.21575372314453126, 0.21584077453613282, 0.21572096252441406, 0.21551615905761717, 0.21536665344238282, 0.2152294464111328, 0.21520384216308594, 0.21572402954101563, 0.21532261657714843, 0.2156615753173828, 0.2162319641113281, 0.21567280578613282, 0.21569638061523438, 0.4465121154785156, 0.21505946350097657, 0.21545677185058593, 0.21541990661621094, 0.2153912353515625, 0.21536665344238282, 0.21509735107421876, 0.2148976593017578, 0.2151024627685547, 0.21509120178222657, 0.215077880859375, 0.2149539794921875, 0.21517312622070311, 0.21521408081054688, 0.21557862854003906, 0.21550079345703124, 0.21530419921875, 0.21543629455566407, 0.21542095947265624, 0.21531336975097656, 0.21564927673339843, 0.21517926025390624, 0.21539840698242188, 0.21542501831054686, 0.2159093780517578, 0.21566566467285156, 0.21548236083984376, 0.21582131958007814, 0.21588275146484376, 0.21536563110351561, 0.21543014526367188, 0.21510450744628906, 0.21565338134765624, 0.21574758911132813, 0.21577317810058594, 0.21563186645507812, 0.21567079162597655, 0.21509017944335937, 0.21550079345703124, 0.2151557159423828, 0.2153717803955078, 0.2156748809814453, 0.21599845886230468, 0.21545677185058593, 0.21524684143066405, 0.21534104919433594, 0.21523968505859375, 0.2151628875732422, 0.21509837341308594, 0.21517208862304688, 0.21517208862304688, 0.21549465942382812, 0.21532159423828126, 
0.2152069091796875, 0.215046142578125, 0.21507276916503906, 0.21522125244140625, 0.21588172912597656, 0.21516390991210937, 0.2150471649169922, 0.21543632507324217, 0.2151259765625, 0.2151065673828125, 0.44579736328125, 0.21483929443359376, 0.2148730926513672, 0.2152079315185547, 0.21503077697753906, 0.21551309204101562, 0.2150502471923828, 0.2148833312988281, 0.21498265075683592, 0.21487103271484376, 0.21547007751464844, 0.2154659881591797, 0.21506661987304687, 0.21501849365234374, 0.2155335693359375, 0.2150645751953125, 0.21493760681152344, 0.2147799072265625, 0.21493247985839845, 0.2154977264404297, 0.21500723266601562, 0.21548748779296875, 0.21595852661132814, 0.21516493225097658, 0.21515469360351563, 0.21548133850097656, 0.21518438720703126, 0.21511885070800782, 0.21515058898925782, 0.21548236083984376, 0.21548851013183593, 0.2151690216064453, 0.21543218994140625, 0.2160343017578125, 0.2168115234375, 0.2152611846923828, 0.21511576843261718, 0.21507891845703125, 0.21598104858398437, 0.21516595458984375, 0.21504512023925781, 0.21502464294433593, 0.21505126953125, 0.21498573303222657, 0.2150328369140625, 0.21487411499023437, 0.21494476318359376, 0.21496730041503906, 0.2148341827392578, 0.2150154266357422, 0.21492735290527343, 0.2149713897705078, 0.21527346801757813, 0.21523046875, 0.21507994079589843, 0.2151321563720703, 0.2151208953857422, 0.2154649658203125, 0.2155888671875, 0.21542912292480468, 0.21538406372070312, 0.21596774291992188, 0.21570252990722658, 0.44732620239257814, 0.21537075805664063, 0.21517721557617187, 0.21511065673828125, 0.215103515625, 0.21524374389648437, 0.21536972045898437, 0.21551615905761717, 0.21560012817382812, 0.21542912292480468, 0.21540658569335938, 0.21546394348144532, 0.21541990661621094, 0.21530213928222655, 0.21526629638671874, 0.21543218994140625, 0.21605477905273437, 0.21546086120605468, 0.2154659881591797, 0.21551206970214845, 0.21547212219238282, 0.215625732421875, 0.2154967041015625, 0.216195068359375, 0.21548646545410155, 0.21552333068847657, 0.2154967041015625, 0.2153861083984375, 0.21542604064941406, 0.21543321228027343, 0.21539532470703124, 0.21550592041015626, 0.21550694274902343, 0.21522431945800782, 0.21558067321777344, 0.2154598388671875, 0.21512406921386718, 0.215048095703125, 0.21526220703125, 0.21552024841308592, 0.2155284423828125, 0.21525299072265625, 0.21515058898925782, 0.21556224060058593, 0.21533798217773437, 0.21522023010253907, 0.21514137268066405, 0.21528985595703126, 0.2151065673828125, 0.21532774353027342, 0.21517312622070311, 0.2152191925048828, 0.21511885070800782, 0.21542604064941406, 0.2152048645019531, 0.21541171264648437, 0.21514649963378907, 0.21522738647460937, 0.2151372833251953, 0.2150758361816406, 0.21504103088378906, 0.21516390991210937, 0.21526527404785156, 0.44842291259765626, 0.21500210571289063, 0.21490789794921875, 0.21482496643066407, 0.21479629516601562, 0.21486489868164063, 0.21486489868164063, 0.2146826171875, 0.21485977172851561, 0.2147430419921875, 0.21485055541992187, 0.21492941284179687, 0.21493043518066407, 0.21505229187011718, 0.2149775390625, 0.21499699401855468, 0.21503897094726562, 0.21547929382324219, 0.2150185546875, 0.21497132873535157, 0.21505545043945312, 0.21503683471679688, 0.21546394348144532, 0.2150584259033203, 0.21507891845703125, 0.215046142578125, 0.21530015563964844, 0.2150174102783203, 0.2149171142578125, 0.21496319580078124, 0.21530009460449218, 0.21537689208984376, 0.215151611328125, 0.21528370666503907, 0.21522329711914062, 0.21498367309570313, 0.215446533203125, 
0.2151331787109375, 0.21521817016601563, 0.21534413146972656, 0.21516493225097658, 0.21511167907714843, 0.21506048583984375, 0.21544038391113282, 0.215046142578125, 0.21523353576660156, 0.21546394348144532, 0.21502975463867188, 0.21504920959472656, 0.21524479675292968, 0.2151137237548828, 0.2151761932373047, 0.21516082763671876, 0.21502053833007811, 0.21518130493164062, 0.2151751708984375, 0.21505740356445313, 0.21551309204101562, 0.21542501831054686, 0.21525196838378907, 0.21533183288574217, 0.21515776062011718, 0.21511270141601563, 0.4488437805175781, 0.2157373504638672, 0.2151874542236328, 0.21487820434570312, 0.21577113342285156, 0.21489152526855468, 0.21497445678710939, 0.21525605773925782, 0.21551820373535155, 0.2153973693847656, 0.21515776062011718, 0.21496421813964844, 0.21516390991210937, 0.21532159423828126, 0.21534002685546874, 0.21517312622070311, 0.21535845947265625, 0.21573324584960937, 0.21572813415527345, 0.2155530242919922, 0.21528985595703126, 0.21548133850097656, 0.21529087829589844, 0.2149427185058594, 0.2152611846923828, 0.21510552978515626, 0.21537791442871093, 0.21573017883300782, 0.21560525512695314, 0.21562879943847657, 0.21569024658203126, 0.21543014526367188, 0.21560421752929687, 0.21551820373535155, 0.2155847625732422, 0.21509120178222657, 0.21532261657714843, 0.21509939575195314, 0.21506866455078125, 0.2148659210205078, 0.21519667053222657, 0.21552639770507812, 0.21548442077636717, 0.21555097961425781, 0.21562982177734374, 0.21557656860351562, 0.21573529052734375, 0.21503488159179687, 0.21537901306152343, 0.21511468505859374, 0.21543122863769532, 0.21568914794921876, 0.2155816955566406, 0.2152079315185547, 0.21553152465820313, 0.21579571533203126, 0.21550387573242188, 0.21552537536621094, 0.21557554626464845, 0.21545779418945313, 0.21514854431152344, 0.215267333984375, 0.21534104919433594, 0.4483246154785156, 0.21480960083007813, 0.21498265075683592, 0.21514035034179688, 0.2153871307373047, 0.21542604064941406, 0.21523558044433594, 0.21539942932128905, 0.21512602233886718, 0.21509529113769532, 0.21506866455078125, 0.21571994018554688, 0.2154219512939453, 0.21556941223144532, 0.2156195831298828, 0.21523968505859375, 0.21513523864746092, 0.21531033325195312, 0.21538099670410157, 0.21558988952636718, 0.21519667053222657, 0.21521408081054688, 0.21537382507324218, 0.2153605194091797, 0.21499699401855468, 0.21518438720703126, 0.21542912292480468, 0.21530316162109375, 0.21570661926269533, 0.21574143981933594, 0.21519052124023438, 0.21519769287109375, 0.21536058044433593, 0.21544338989257814, 0.21525503540039062, 0.2153164825439453, 0.21523762512207031, 0.2154691162109375, 0.2154884490966797, 0.21557862854003906, 0.215119873046875, 0.21551615905761717, 0.21561447143554688, 0.21519769287109375, 0.21527142333984375, 0.2151526336669922, 0.21513420104980469, 0.21525196838378907, 0.21509120178222657, 0.21508607482910155, 0.21538201904296875, 0.21560012817382812, 0.2152427520751953, 0.21532467651367188, 0.2155294647216797, 0.21564006042480469, 0.21551309204101562, 0.21564723205566405, 0.21547314453125, 0.21527244567871093, 0.21549568176269532, 0.21515673828125, 0.21521612548828126]",tokens/s,4.571685503331254,,,,,,,, 
-4bit-awq-gemm-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,Qwen/Qwen-7B,Qwen/Qwen-7B,cuda,0,42,,,True,,,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run - report = scenario.run(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run - self.run_model_loading_tracking(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking - backend.load() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load - self.load_transformers_model() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model - self.load_transformers_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights - self.load_transformers_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained - self.pretrained_model = self.automodel_loader.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 551, in from_pretrained - model_class = get_class_from_dynamic_module( - File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 502, in get_class_from_dynamic_module - final_module = get_cached_module_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 327, in get_cached_module_file - modules_needed = check_imports(resolved_module_file) - File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 182, in check_imports - raise ImportError( -ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. 
Run `pip install transformers_stream_generator` - -",qwen,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemm-sdpa,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,microsoft/rho-math-1b-v0.1,microsoft/rho-math-1b-v0.1,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.0,,0.30.1,,,,1.19.2,,,,0.11.1,,,,,,,,,,,,,,,,,,,,,,,,,,,MB,1416.183808,1705.508864,0.0,1059.06176,901.251072,s,10,1.2556827774047852,0.12556827774047852,0.0020066947239277956,0.12461985778808593,0.12691755523681642,0.12903935165405273,0.13073678878784178,"[0.13116114807128906, 0.1245798110961914, 0.12455474853515625, 0.12628569793701172, 0.126446044921875, 0.12463142395019532, 0.12460829162597656, 0.12491244506835937, 0.12422150421142578, 0.12428166198730468]",tokens/s,2038.731474274853,kWh,1.4767323517137103e-06,8.091788095589435e-07,5.457882838524886e-06,7.74379399979754e-06,tokens/kWh,33058730.64375074,MB,1416.47872,1705.508864,0.0,1059.06176,931.976704,s,10,75.24034423828124,7.524034423828125,0.018117587645973928,7.51759228515625,7.546301123046875,7.555124682617188,7.562183530273438,"[7.5639482421875, 7.54434033203125, 7.51252392578125, 7.5294287109375, 7.5317822265625, 7.49836865234375, 7.51694482421875, 7.50782275390625, 7.51704736328125, 7.51813720703125]",tokens/s,8.373167432685198,kWh,8.920738770729968e-05,4.889187051034242e-05,0.00032243887947867866,0.0004605381376963208,tokens/kWh,136796.48837582752,,s,629,76.24045568847656,0.12120899155560659,0.014857766034498806,0.119236572265625,0.12046213073730469,0.121122216796875,0.24360714599609376,"[0.12457164764404297, 0.12292813110351562, 0.12142694091796875, 0.12034457397460938, 0.12029849243164062, 0.12040806579589844, 0.12176793670654297, 0.12074700927734375, 0.11929190063476562, 0.11991244506835938, 0.11920486450195313, 0.12124877166748046, 0.12120063781738281, 0.120416259765625, 0.12205363464355469, 0.1201418228149414, 0.11985100555419922, 0.1191731185913086, 0.12010905456542968, 0.12073577880859375, 0.11977008056640626, 0.1192069091796875, 0.11915058898925782, 0.11936870574951172, 0.11920281219482422, 0.11884953308105468, 0.11897650909423828, 0.11890380859375, 0.11936460876464844, 0.11948953247070312, 0.11988787078857421, 0.11970662689208984, 0.11927040100097656, 0.11931136322021485, 0.11918540954589844, 0.11902054595947266, 0.11925606536865234, 0.11928268432617188, 0.1189775390625, 0.12040608215332031, 0.12145555114746094, 0.12119859313964844, 0.11934515380859376, 0.11921715545654298, 0.11957453155517578, 0.11955404663085938, 0.12047564697265625, 0.1195704345703125, 0.11987558746337891, 0.11963187408447265, 0.12172492980957031, 0.12081664276123047, 0.1199636459350586, 0.11969638061523437, 0.119478271484375, 0.11919974517822265, 0.11901952362060547, 0.11969843292236328, 0.11932466888427734, 0.11939430236816406, 0.11932160186767578, 0.11914035034179687, 0.2437795867919922, 0.11910451507568359, 0.11929497528076172, 0.11924992370605468, 0.11924582672119141, 0.1191229476928711, 0.1206702117919922, 0.12058009338378907, 0.11906559753417968, 
0.11979878234863281, 0.1204142074584961, 0.120774658203125, 0.12038349151611329, 0.12099174499511718, 0.12017356872558593, 0.12051148986816407, 0.11986022186279296, 0.11982848358154297, 0.12144435119628906, 0.1197875213623047, 0.12066099548339844, 0.11986022186279296, 0.11963085174560546, 0.12066918182373047, 0.12106034851074218, 0.11989606475830078, 0.11947724914550781, 0.12219084930419923, 0.12116684722900391, 0.1208616943359375, 0.12017459106445312, 0.1197844467163086, 0.11975885009765624, 0.11948544311523437, 0.1193359375, 0.11977318572998047, 0.11921920013427735, 0.11890995025634765, 0.11913420867919922, 0.11896729278564454, 0.11922943878173828, 0.11928575897216796, 0.11888127899169922, 0.11916902160644531, 0.11918438720703126, 0.11927756500244141, 0.11924889373779297, 0.11915980529785156, 0.11925708770751953, 0.1192959976196289, 0.11954688262939453, 0.11891097259521484, 0.11927654266357422, 0.11913215637207031, 0.11925504302978515, 0.1196943359375, 0.11949980926513672, 0.11920687866210937, 0.11947622680664062, 0.11929190063476562, 0.11921920013427735, 0.12117094421386719, 0.11926937866210938, 0.24365055847167968, 0.1189969940185547, 0.11894374084472656, 0.11909529876708984, 0.11894271850585937, 0.11864473724365235, 0.11887718200683593, 0.11882701110839844, 0.11909324645996094, 0.11965235137939453, 0.11938406372070312, 0.11919974517822265, 0.11909120178222657, 0.11923046112060547, 0.11929708862304687, 0.11920992279052735, 0.11922946929931641, 0.11920687866210937, 0.11895603179931641, 0.11921510314941407, 0.11924070739746094, 0.11940147399902344, 0.11908505249023438, 0.11927961730957032, 0.11908505249023438, 0.11922227478027343, 0.11948748779296875, 0.11879219055175781, 0.11928473663330078, 0.11918438720703126, 0.11904819488525391, 0.1194260482788086, 0.11940147399902344, 0.11873382568359375, 0.11902361297607422, 0.12077056121826171, 0.11962060546875, 0.11954380798339843, 0.11931648254394531, 0.11892121887207031, 0.11890585327148437, 0.11919769287109375, 0.11923865509033203, 0.11895500946044922, 0.11905228424072266, 0.11905433654785157, 0.11923967742919922, 0.12057087707519532, 0.11927552032470704, 0.11903180694580077, 0.11901952362060547, 0.11904819488525391, 0.11917005157470703, 0.11913728332519531, 0.119225341796875, 0.12148429107666016, 0.11932569885253906, 0.11935846710205078, 0.1191720962524414, 0.1192273941040039, 0.11932876586914062, 0.11924992370605468, 0.11925606536865234, 0.24569036865234375, 0.11900313568115234, 0.11910758209228516, 0.1191229476928711, 0.11929503631591797, 0.11934611511230468, 0.11907071685791015, 0.11920281219482422, 0.11885465240478515, 0.11900006103515624, 0.11911885070800782, 0.1191546859741211, 0.11912806701660156, 0.11937484741210938, 0.1194434585571289, 0.11925606536865234, 0.12019712066650391, 0.12117810821533204, 0.12110336303710938, 0.11963801574707031, 0.11959193420410157, 0.11896627044677735, 0.11922329711914062, 0.11913420867919922, 0.11982438659667968, 0.11933491516113282, 0.11925094604492187, 0.1192959976196289, 0.11907071685791015, 0.1207193603515625, 0.12125491333007812, 0.11938508605957031, 0.1194076156616211, 0.1193338851928711, 0.11934719848632812, 0.11946189117431641, 0.1193482208251953, 0.11920492553710937, 0.11926624298095703, 0.11927142333984375, 0.11934207916259766, 0.11924992370605468, 0.11892121887207031, 0.11940863800048829, 0.119046142578125, 0.12049715423583984, 0.11979878234863281, 0.11931954956054687, 0.11911270141601563, 0.11936768341064453, 0.11907891082763672, 0.11921715545654298, 0.11925094604492187, 0.11922124481201171, 
0.11953971099853515, 0.12125491333007812, 0.12110749053955078, 0.1192232666015625, 0.11931136322021485, 0.11899903869628906, 0.12022374725341797, 0.11995238494873046, 0.12026982116699218, 0.247546875, 0.11950592041015624, 0.12091497802734374, 0.12060975646972656, 0.12113203430175781, 0.12091596984863281, 0.12082688140869141, 0.12083404541015624, 0.1219061737060547, 0.12209458923339844, 0.12265574645996094, 0.12046131134033203, 0.11928371429443359, 0.11902365112304687, 0.11975062561035156, 0.11916390228271484, 0.11981926727294921, 0.11897856140136719, 0.11971686553955078, 0.11893145751953126, 0.11913318634033203, 0.11891302490234375, 0.11906358337402344, 0.11885667419433593, 0.11948134613037109, 0.11907788848876953, 0.11915366363525391, 0.1191884765625, 0.11917619323730469, 0.11935641479492187, 0.11914035034179687, 0.11885260772705078, 0.11923353576660156, 0.11925504302978515, 0.11878912353515625, 0.11872665405273437, 0.11999334716796875, 0.11964211273193359, 0.11909737396240234, 0.11932262420654297, 0.119236572265625, 0.11907481384277344, 0.1190799331665039, 0.1189908447265625, 0.11900211334228515, 0.1191956787109375, 0.1193440933227539, 0.1192652816772461, 0.11916185760498046, 0.1206671371459961, 0.11945881652832031, 0.11904307556152344, 0.11939942169189453, 0.11916287994384765, 0.119119873046875, 0.11886386871337891, 0.11886386871337891, 0.11877785491943359, 0.11952845001220704, 0.11905228424072266, 0.11897344207763672, 0.11872563171386719, 0.11880754852294922, 0.24371711730957032, 0.11878604888916015, 0.11898675537109375, 0.11895603179931641, 0.1188629150390625, 0.11867436981201172, 0.11919667053222656, 0.11875635528564453, 0.11883110046386719, 0.11889871978759765, 0.1191341781616211, 0.11897241973876953, 0.11877581024169923, 0.11888025665283203, 0.11885772705078125, 0.11877273559570313, 0.11894477081298828, 0.11868057250976563, 0.11871231842041016, 0.11883929443359376, 0.11886080169677735, 0.11869900512695312, 0.12047666931152344, 0.11934515380859376, 0.11869798278808594, 0.11880044555664063, 0.11878803253173828, 0.1189969940185547, 0.11894886779785156, 0.11878399658203125, 0.12051967620849609, 0.11952333068847656, 0.11878399658203125, 0.11913420867919922, 0.11862118530273437, 0.118830078125, 0.11917721557617188, 0.11974451446533203, 0.11991756439208984, 0.119119873046875, 0.118866943359375, 0.11871743774414062, 0.11889663696289063, 0.11874201965332032, 0.11896115112304688, 0.11909734344482421, 0.11934003448486329, 0.11873280334472656, 0.11927859497070313, 0.1189017562866211, 0.1192652816772461, 0.11888742065429687, 0.11881472015380859, 0.11897138977050781, 0.11900927734375, 0.11879116821289062, 0.11915366363525391, 0.11920281219482422, 0.11915058898925782, 0.1191178207397461, 0.11896422576904297, 0.1189959716796875, 0.1191546859741211, 0.24360858154296874, 0.1190860824584961, 0.11949056243896485, 0.11932466888427734, 0.11897856140136719, 0.12082278442382813, 0.11958783721923828, 0.11911065673828125, 0.11961862182617188, 0.11913311767578125, 0.11897548675537109, 0.11908403015136719, 0.11890585327148437, 0.11893145751953126, 0.11908403015136719, 0.11909324645996094, 0.1192959976196289, 0.119299072265625, 0.11905023956298828, 0.11916390228271484, 0.1194229736328125, 0.11900006103515624, 0.11926016235351562, 0.11916902160644531, 0.1189027862548828, 0.11916185760498046, 0.11893452453613282, 0.11966259002685548, 0.11899494171142579, 0.1189570541381836, 0.11962777709960938, 0.1197875213623047, 0.11925196838378906, 0.11938201904296875, 0.1193338851928711, 0.11967692565917969, 
0.11944652557373046, 0.11901542663574219, 0.11911577606201172, 0.11922022247314454, 0.11908710479736329, 0.1187583999633789, 0.12030668640136719, 0.11964620971679688, 0.11896729278564454, 0.1190860824584961, 0.11880140686035157, 0.1189969940185547, 0.11937484741210938, 0.11919155120849609, 0.12108595275878906, 0.12014591979980468, 0.11927244567871094, 0.11913113403320312, 0.11914857482910156, 0.11933999633789062, 0.11990835571289063, 0.11933081817626953, 0.12017356872558593, 0.11968000030517578, 0.11910553741455078, 0.11916902160644531, 0.11892530822753906, 0.24451583862304688, 0.11914854431152344, 0.1193175048828125, 0.11939225769042969, 0.11916492462158203, 0.11957759857177734, 0.1194229736328125, 0.11930931091308594, 0.11903488159179687, 0.11910451507568359, 0.11907584381103516, 0.1191751708984375, 0.12042444610595703, 0.11948851013183594, 0.11933491516113282, 0.11932364654541015, 0.118940673828125, 0.11926220703125, 0.11912397003173827, 0.11893862152099609, 0.1188116455078125, 0.11887615966796874, 0.119299072265625, 0.1189591064453125, 0.11873792266845704, 0.12033638763427734, 0.11966361236572266, 0.11939635467529297, 0.11916799926757812, 0.11972914886474609, 0.11923046112060547, 0.11913420867919922, 0.11927347564697266, 0.11905843353271485, 0.11928371429443359, 0.11918950653076171, 0.11906867218017578, 0.11889049530029297, 0.11908403015136719, 0.11913011169433593, 0.11903385925292968, 0.11906150054931641, 0.11886386871337891, 0.11895807647705078, 0.11882189178466797, 0.11912908935546875, 0.11897856140136719, 0.11894477081298828, 0.11876557159423828, 0.11893965148925781, 0.11924172973632813, 0.11886386871337891, 0.11928883361816406, 0.11904307556152344, 0.11900723266601562, 0.11878195190429687, 0.11875635528564453, 0.11862322998046874, 0.11873996734619141, 0.11917005157470703, 0.1189570541381836, 0.11894579315185547, 0.12027187347412109, 0.24360345458984375, 0.11901952362060547, 0.1191014404296875, 0.11927244567871094, 0.11907481384277344, 0.11987353515625, 0.12033126068115234, 0.11920384216308594, 0.1190287322998047, 0.11961650848388672, 0.11970867156982422, 0.11927961730957032, 0.11992371368408203, 0.1196933135986328, 0.11933695983886719, 0.1192959976196289, 0.11914444732666016, 0.11929804992675781, 0.11933286285400391, 0.11891404724121094, 0.11937894439697265, 0.11968409729003907, 0.11939020538330078, 0.11924992370605468, 0.11931136322021485, 0.11917005157470703, 0.11934207916259766, 0.11891814422607422, 0.11906559753417968, 0.11898982238769532, 0.1191352310180664, 0.1193543701171875, 0.12004045104980468, 0.11920496368408204, 0.11923343658447266, 0.11935132598876953, 0.11902460479736328, 0.1189222412109375, 0.11926732635498047, 0.11971788787841797, 0.11941683197021484, 0.11968409729003907, 0.1194424285888672, 0.11935743713378906, 0.11975987243652343, 0.120447998046875, 0.11971788787841797, 0.11959603118896485, 0.12046540832519531, 0.11941375732421874, 0.11895500946044922, 0.11876659393310547, 0.11868262481689452, 0.1189222412109375, 0.11881267547607421, 0.11860889434814453, 0.1187430419921875, 0.11915058898925782, 0.11882086181640625, 0.11895603179931641, 0.11968102264404297, 0.11917721557617188, 0.11907686614990234, 0.24344166564941405, 0.11888742065429687, 0.1188853759765625, 0.11914854431152344, 0.11891506958007812, 0.11891404724121094, 0.11905023956298828, 0.1190113296508789, 0.11885772705078125, 0.11911065673828125, 0.11948134613037109, 0.11934617614746093, 0.11914956665039063, 0.11941273498535156, 0.11947315216064452, 0.1198571548461914, 0.11957350158691406, 
0.12054937744140624, 0.12015001678466797, 0.11967180633544922, 0.1200865249633789, 0.1194434585571289, 0.11917005157470703, 0.11993395233154297, 0.11950182342529297, 0.12140646362304687, 0.1204111328125, 0.11936262512207031, 0.11944236755371093, 0.11988172912597657, 0.11939839935302735, 0.1193912353515625, 0.11940249633789063, 0.1190297622680664, 0.11965542602539063, 0.11903897857666015, 0.11946701049804688, 0.11888540649414063, 0.1192386245727539, 0.1189591064453125, 0.11945471954345703, 0.11889868927001954, 0.1190287322998047, 0.11890380859375, 0.11901644897460938, 0.11902259063720703, 0.11903794860839843, 0.11907481384277344, 0.11954585266113281, 0.11895097351074219, 0.11885465240478515, 0.1198826904296875, 0.11971379089355469, 0.11895398712158203, 0.11973734283447265, 0.11900313568115234, 0.11912397003173827, 0.11940966033935548, 0.11896524810791016, 0.11901952362060547, 0.11930623626708985, 0.118687744140625, 0.11891506958007812]",tokens/s,8.250213017746573,,,,,,,, -4bit-awq-gemm-sdpa,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,01-ai/Yi-6B,,cuda,0,42,,,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.0,217063f5c507ed7cc255df7e1f64c4333a0b4dfe,4.40.2,,0.30.1,,,,1.19.2,,,,0.10.0,,,,,,,,,,,,,,,,,,,,,,,,,,,MB,1571.749888,5448.925184,0.0,4802.47808,4489.12128,s,10,5.0903373718261715,0.5090337371826171,0.0024963690031113454,0.5090882415771485,0.5114007537841796,0.5133009628295898,0.514821130065918,"[0.515201171875, 0.5058653564453125, 0.5075380249023438, 0.5109784851074218, 0.5074734191894531, 0.5066469421386719, 0.5090344543457032, 0.5092752685546875, 0.5091822204589844, 0.5091420288085937]",tokens/s,502.91362104386286,kWh,5.98198832737075e-06,3.277856253707796e-06,2.697877158300277e-05,3.623861616408132e-05,tokens/kWh,7064287.412104325,MB,1571.749888,5448.925184,0.0,4802.47808,4557.793792,s,10,299.8803828125,29.98803828125,0.03808075694133287,29.9707392578125,30.046216015625,30.058939062500002,30.0691175,"[29.972552734375, 30.043388671875, 30.071662109375, 30.010330078125, 29.96, 29.95865625, 29.96925, 29.972228515625, 29.963451171875, 29.95886328125]",tokens/s,2.1008376543053404,kWh,0.00035374844759702685,0.00019388459414373753,0.001556149911585597,0.0021037829533263617,tokens/kWh,29946.05498651303,,s,629,303.97862612915014,0.48327285553124066,0.06045359810307915,0.47556915283203127,0.47775867309570313,0.47801220703124997,0.9833881201171876,"[0.4750715026855469, 0.4762265625, 0.4773417053222656, 0.47625729370117187, 0.47571148681640624, 0.47557223510742186, 0.47629312133789065, 0.47471615600585937, 0.475789306640625, 0.47461376953125, 0.4746967163085937, 0.474913818359375, 0.47592657470703126, 0.47551071166992187, 0.47595623779296875, 0.47574118041992186, 0.4750817260742187, 0.47586611938476564, 0.4771154479980469, 0.47594082641601565, 0.4757462768554688, 0.47569818115234375, 0.4764293212890625, 0.475978759765625, 0.47632794189453126, 0.47545343017578123, 0.47498138427734377, 0.47522305297851564, 0.47550360107421874, 0.47533575439453124, 0.4750652770996094, 0.4751790161132812, 0.47525888061523436, 0.4754544677734375, 0.4752803955078125, 
0.4756971435546875, 0.4763504638671875, 0.4758845520019531, 0.47809332275390626, 0.475462646484375, 0.47741644287109375, 0.47680307006835937, 0.47630642700195314, 0.47653070068359377, 0.47590911865234375, 0.4751380615234375, 0.47556710815429687, 0.475146240234375, 0.47564901733398435, 0.4757647399902344, 0.47530291748046877, 0.4750807189941406, 0.47518719482421873, 0.47629925537109374, 0.4754646911621094, 0.4754155578613281, 0.4753121337890625, 0.4750469055175781, 0.47505612182617185, 0.4759122009277344, 0.47506842041015623, 0.4758763427734375, 0.9829324951171875, 0.4772618103027344, 0.47679385375976563, 0.4752302551269531, 0.47515850830078127, 0.4750141296386719, 0.4753879089355469, 0.47504486083984376, 0.47488204956054686, 0.47477862548828126, 0.4748114013671875, 0.47600946044921877, 0.4750940246582031, 0.4750837707519531, 0.4750745544433594, 0.4750592041015625, 0.47484109497070315, 0.4761292724609375, 0.4751933288574219, 0.4750182495117187, 0.475357177734375, 0.47510528564453125, 0.4762142639160156, 0.47762432861328125, 0.4771512451171875, 0.478097412109375, 0.47743179321289064, 0.4781097106933594, 0.4774246520996094, 0.47769497680664064, 0.4778270568847656, 0.4778741760253906, 0.47795404052734375, 0.47758233642578124, 0.47754238891601564, 0.47758438110351564, 0.4775854187011719, 0.4776581115722656, 0.4781363220214844, 0.47784140014648435, 0.4785827941894531, 0.4778670043945312, 0.478013427734375, 0.4813271179199219, 0.4787435607910156, 0.4778823547363281, 0.4775577697753906, 0.47766937255859376, 0.4776509399414062, 0.47802163696289063, 0.4775925903320313, 0.4771829833984375, 0.47707955932617185, 0.4779130859375, 0.47752191162109375, 0.4774225769042969, 0.47764581298828124, 0.47809228515625, 0.47725054931640626, 0.47956683349609375, 0.47532647705078124, 0.47634738159179685, 0.4757237854003906, 0.9846497192382813, 0.4779151306152344, 0.47775845336914063, 0.4778516540527344, 0.4773468017578125, 0.47776461791992186, 0.47770932006835937, 0.4769669189453125, 0.4777676696777344, 0.4777635803222656, 0.47618765258789064, 0.4749752197265625, 0.4755538024902344, 0.4747980651855469, 0.4747796630859375, 0.4748114013671875, 0.47680514526367185, 0.4753622741699219, 0.47532440185546876, 0.4750438537597656, 0.4750325622558594, 0.47742669677734373, 0.4790394897460937, 0.47991500854492186, 0.4776028137207031, 0.4770672607421875, 0.47781991577148436, 0.4773304443359375, 0.4778946533203125, 0.4778496398925781, 0.4774788818359375, 0.4779632568359375, 0.47771136474609377, 0.47775640869140623, 0.47768267822265625, 0.47796121215820314, 0.4775454711914062, 0.4780103759765625, 0.4779438171386719, 0.4773253173828125, 0.4780001220703125, 0.47749530029296877, 0.478482421875, 0.4767160339355469, 0.47870156860351565, 0.47761203002929686, 0.47860427856445314, 0.47783221435546874, 0.4779366149902344, 0.4776212463378906, 0.4775301208496094, 0.477328369140625, 0.478376953125, 0.4772362365722656, 0.47751168823242185, 0.47775955200195314, 0.47782699584960936, 0.47732632446289064, 0.47806362915039063, 0.4772454528808594, 0.4775782775878906, 0.47722698974609373, 0.477765625, 0.9892290649414063, 0.47758950805664063, 0.47766937255859376, 0.47711026000976564, 0.4785479736328125, 0.4779674072265625, 0.47848751831054687, 0.4773918762207031, 0.47714407348632815, 0.475968505859375, 0.47733248901367187, 0.4777830505371094, 0.4774912109375, 0.4774410095214844, 0.4776990661621094, 0.47711026000976564, 0.47647540283203127, 0.4762900390625, 0.47571044921875, 0.47531622314453126, 0.47745126342773436, 0.4778526611328125, 
0.4754595947265625, 0.4753295593261719, 0.47519024658203124, 0.47728536987304687, 0.4776847229003906, 0.47796734619140624, 0.47679693603515627, 0.4769525756835937, 0.47623678588867185, 0.47598489379882813, 0.47588760375976563, 0.47573504638671876, 0.4752127990722656, 0.47496600341796874, 0.4763709411621094, 0.47549542236328124, 0.4751493225097656, 0.4747591552734375, 0.47531109619140627, 0.47625933837890627, 0.47604122924804687, 0.4752691345214844, 0.478866455078125, 0.4751441650390625, 0.4752322692871094, 0.47499981689453125, 0.47643954467773436, 0.4755630187988281, 0.4757166137695312, 0.47565005493164064, 0.476015625, 0.4754155578613281, 0.4754124755859375, 0.47525070190429686, 0.47504281616210936, 0.4759347229003906, 0.47554763793945315, 0.47518923950195313, 0.47525274658203126, 0.475404296875, 0.47679489135742187, 0.9835653076171875, 0.4750100708007812, 0.47497518920898435, 0.47521792602539065, 0.47529168701171876, 0.47549435424804687, 0.47515341186523435, 0.47609548950195313, 0.47539712524414063, 0.47541351318359376, 0.47511859130859374, 0.47484622192382814, 0.47537664794921874, 0.475104248046875, 0.47634738159179685, 0.4752896118164063, 0.47548724365234374, 0.4761077880859375, 0.4751452026367188, 0.4771768188476562, 0.4755333251953125, 0.4755230712890625, 0.4750796813964844, 0.4751923217773438, 0.4752496643066406, 0.4748308410644531, 0.47501516723632814, 0.47498751831054686, 0.4759531555175781, 0.4754883117675781, 0.47538067626953123, 0.4753909912109375, 0.47539712524414063, 0.4753735656738281, 0.475494384765625, 0.47589376831054686, 0.4761006164550781, 0.47518206787109374, 0.4750602111816406, 0.47489434814453124, 0.47634228515625, 0.47604940795898437, 0.47567974853515627, 0.47519744873046876, 0.4751749267578125, 0.47551077270507813, 0.4754002685546875, 0.47505914306640623, 0.47493426513671877, 0.475788330078125, 0.4752465515136719, 0.47501516723632814, 0.47556915283203127, 0.4761507873535156, 0.47565823364257814, 0.47601458740234376, 0.47671194458007815, 0.478308349609375, 0.4755906677246094, 0.47542578125, 0.4754708557128906, 0.4771061706542969, 0.4754810791015625, 0.9827368774414063, 0.47548416137695315, 0.4752916564941406, 0.4750274658203125, 0.4750274658203125, 0.47477658081054686, 0.4755199890136719, 0.47510833740234376, 0.4752332763671875, 0.47511962890625, 0.4750837707519531, 0.47506329345703124, 0.4751718444824219, 0.477048828125, 0.4756756591796875, 0.475315185546875, 0.47508685302734377, 0.474925048828125, 0.476084228515625, 0.4754022521972656, 0.47531417846679686, 0.47490765380859373, 0.47505612182617185, 0.4752547912597656, 0.47493939208984376, 0.4751216735839844, 0.4752363586425781, 0.47553741455078125, 0.47550872802734373, 0.4752025451660156, 0.47503768920898437, 0.47521588134765624, 0.47512063598632814, 0.4751523742675781, 0.4767621154785156, 0.4764487609863281, 0.47532235717773436, 0.4752414855957031, 0.47862374877929686, 0.47638223266601565, 0.47562542724609375, 0.47544525146484373, 0.47627877807617186, 0.47506329345703124, 0.47538995361328124, 0.4754227294921875, 0.47551693725585936, 0.4754288635253906, 0.47596337890625, 0.4758896789550781, 0.4756899719238281, 0.4755988464355469, 0.4755916748046875, 0.4752066650390625, 0.4755640258789062, 0.47575653076171875, 0.47548416137695315, 0.4752998962402344, 0.4755332336425781, 0.4752363586425781, 0.4765552673339844, 0.4758056945800781, 0.47643853759765625, 0.9858508911132813, 0.4760975341796875, 0.4763627624511719, 0.47618048095703125, 0.47583026123046873, 0.4758814697265625, 0.47627365112304687, 0.47550360107421874, 
0.4748226623535156, 0.4754380798339844, 0.4754176025390625, 0.47634841918945314, 0.47666278076171875, 0.4759449462890625, 0.47538177490234373, 0.475430908203125, 0.4754606018066406, 0.4765736999511719, 0.47562957763671876, 0.47619277954101563, 0.4762623901367187, 0.47586407470703124, 0.4763156433105469, 0.47800421142578126, 0.47563058471679687, 0.47566949462890623, 0.4764661865234375, 0.47568487548828126, 0.4763525085449219, 0.47546881103515626, 0.4754503784179688, 0.47535821533203126, 0.4751155090332031, 0.476685302734375, 0.4755333251953125, 0.4750540771484375, 0.4750325622558594, 0.4753387451171875, 0.47632281494140627, 0.47543603515625, 0.47514727783203126, 0.47536947631835935, 0.4750110778808594, 0.4750960693359375, 0.47505816650390625, 0.47526605224609375, 0.47514215087890627, 0.47597158813476564, 0.47550054931640623, 0.47509503173828127, 0.47519845581054687, 0.4751452026367188, 0.4753704833984375, 0.474967041015625, 0.47536639404296877, 0.47516876220703125, 0.47490457153320315, 0.4751523742675781, 0.47530905151367187, 0.4764610595703125, 0.47634228515625, 0.47607601928710935, 0.4758845520019531, 0.9857208251953125, 0.4755548095703125, 0.47583026123046873, 0.4755302734375, 0.4761640625, 0.4756316223144531, 0.475904052734375, 0.4751758728027344, 0.47539813232421874, 0.475536376953125, 0.4755210266113281, 0.475514892578125, 0.4755937805175781, 0.47548818969726564, 0.47541656494140627, 0.47544216918945315, 0.477454345703125, 0.4756387939453125, 0.4758138732910156, 0.47603302001953124, 0.47580465698242186, 0.47644158935546876, 0.4751994934082031, 0.47532339477539065, 0.4750745544433594, 0.47546981811523437, 0.4754565124511719, 0.47503768920898437, 0.47527935791015624, 0.4750858154296875, 0.4753018798828125, 0.4747683715820312, 0.4749916076660156, 0.47602789306640625, 0.4755906677246094, 0.4751790161132812, 0.47570123291015626, 0.477876220703125, 0.477338623046875, 0.4760637512207031, 0.4767999877929687, 0.47610983276367186, 0.47703347778320315, 0.47675802612304685, 0.47620712280273436, 0.4752158508300781, 0.47609036254882814, 0.4771328125, 0.4762552185058594, 0.4760504455566406, 0.47554458618164064, 0.475283447265625, 0.47543603515625, 0.4751523742675781, 0.475335693359375, 0.4752138366699219, 0.47528140258789064, 0.475109375, 0.47681332397460935, 0.47562240600585937, 0.47532339477539065, 0.47533465576171874, 0.47510833740234376, 0.984848388671875, 0.4750960693359375, 0.47506024169921873, 0.4759930419921875, 0.47555789184570313, 0.4750254211425781, 0.47533978271484373, 0.4758394775390625, 0.475610107421875, 0.47590911865234375, 0.4757596130371094, 0.4766791687011719, 0.4766371765136719, 0.4757176208496094, 0.4764549255371094, 0.4773990478515625, 0.4758917236328125, 0.476790771484375, 0.47595416259765627, 0.4752404479980469, 0.47515545654296876, 0.475030517578125, 0.4752916564941406, 0.4751278076171875, 0.4755138549804688, 0.47549234008789065, 0.47554150390625, 0.4758425598144531, 0.47556607055664063, 0.4752015380859375, 0.47505612182617185, 0.4750540771484375, 0.47509811401367186, 0.4751697998046875, 0.4750817260742187, 0.4752066650390625, 0.4769587097167969, 0.4757596130371094, 0.47552410888671875, 0.4753387451171875, 0.4751247253417969, 0.47600946044921877, 0.47491787719726564, 0.47531417846679686, 0.47513394165039063, 0.4757739562988281, 0.4750469055175781, 0.475030517578125, 0.4758507385253906, 0.4755599365234375, 0.47600741577148437, 0.4752414855957031, 0.4751247253417969, 0.47610470581054687, 0.4753530883789063, 0.475404296875, 0.4752005004882813, 0.476626953125, 
0.4756654052734375, 0.47670578002929687, 0.4761466979980469, 0.4752547912597656, 0.4752209777832031, 0.9850972290039063, 0.47500698852539064, 0.47586099243164065, 0.475536376953125, 0.4751933288574219, 0.4753070068359375, 0.47523434448242186, 0.475157470703125, 0.4753039245605469, 0.4752762756347656, 0.4760606689453125, 0.47565216064453125, 0.4750796203613281, 0.47518106079101563, 0.4763607177734375, 0.47558758544921875, 0.47543499755859375, 0.47558349609375, 0.4752384033203125, 0.47536639404296877, 0.47527423095703125, 0.47547494506835936, 0.4750335998535156, 0.4753049621582031, 0.47516571044921874, 0.4754155578613281, 0.4772812805175781, 0.4751769714355469, 0.4758814697265625, 0.4761343994140625, 0.4757074279785156, 0.4766033630371094, 0.47504281616210936, 0.47508480834960937, 0.4752629699707031, 0.47697714233398436, 0.4764979248046875, 0.4756357421875, 0.4753837890625, 0.4751473083496094, 0.475273193359375, 0.475177978515625, 0.47527835083007813, 0.4753541259765625, 0.4753623046875, 0.4753930358886719, 0.4750796813964844, 0.47559576416015625, 0.47536334228515625, 0.4764241943359375, 0.47548724365234374, 0.47530087280273436, 0.47603302001953124, 0.4752906188964844, 0.47511962890625, 0.47501516723632814, 0.4768563232421875, 0.4758026123046875, 0.47588864135742187, 0.4753950805664062, 0.4752138366699219, 0.47535000610351563, 0.4750120849609375]",tokens/s,2.069224432025556,,,,,,main,False,False -4bit-awq-gemm-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,EleutherAI/gpt-neo-2.7B,EleutherAI/gpt-neo-2.7B,cuda,0,42,,,True,,,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run - report = scenario.run(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run - self.run_model_loading_tracking(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking - backend.load() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load - self.load_transformers_model() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model - self.load_transformers_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights - self.load_transformers_model_from_pretrained() - File 
""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained - self.pretrained_model = self.automodel_loader.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3826, in from_pretrained - config = cls._autoset_attn_implementation( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1565, in _autoset_attn_implementation - config = cls._check_and_enable_sdpa( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1731, in _check_and_enable_sdpa - raise ValueError( -ValueError: GPTNeoForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` - -",gpt_neo,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemm-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,facebook/opt-66b,facebook/opt-66b,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - self.load_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", 
line 3704, in from_pretrained - config = cls._autoset_attn_implementation( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1490, in _autoset_attn_implementation - config = cls._check_and_enable_sdpa( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1656, in _check_and_enable_sdpa - raise ValueError( -ValueError: OPTForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemm-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,0,0,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 304, in hf_raise_for_status - response.raise_for_status() - File ""/usr/local/lib/python3.10/dist-packages/requests/models.py"", line 1024, in raise_for_status - raise HTTPError(http_error_msg, response=self) -requests.exceptions.HTTPError: 404 Client Error: Not Found for url: https://huggingface.co/0/resolve/main/config.json - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1221, in hf_hub_download - return _hf_hub_download_to_cache_dir( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1325, in _hf_hub_download_to_cache_dir - _raise_on_head_call_error(head_call_error, force_download, local_files_only) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1823, in _raise_on_head_call_error - raise head_call_error - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", 
line 1722, in _get_metadata_or_catch_error - metadata = get_hf_file_metadata(url=url, proxies=proxies, timeout=etag_timeout, headers=headers) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1645, in get_hf_file_metadata - r = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 372, in _request_wrapper - response = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 396, in _request_wrapper - hf_raise_for_status(response) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status - raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-669494e0-06a5921e10bc036224ba3991;86ec66b0-7d04-4792-b49c-193cdbfcdaca) - -Repository Not Found for url: https://huggingface.co/0/resolve/main/config.json. -Please make sure you specified the correct `repo_id` and `repo_type`. -If you are trying to access a private or gated repo, make sure you are authenticated. - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 425, in cached_file - raise EnvironmentError( -OSError: 0 is not a local folder and is not a valid model identifier listed on 'https://huggingface.co/models' -If this is a private repository, make sure to pass a token having permission to this repo either by logging in with `huggingface-cli login` or by passing `token=` - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 
-4bit-awq-gemm-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,facebook/xglm-4.5B,facebook/xglm-4.5B,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - self.load_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3704, in from_pretrained - config = cls._autoset_attn_implementation( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1490, in _autoset_attn_implementation - config = cls._check_and_enable_sdpa( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1656, in _check_and_enable_sdpa - raise ValueError( -ValueError: XGLMForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. 
Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemm-sdpa,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,TencentARC/Mistral_Pro_8B_v0.1,TencentARC/Mistral_Pro_8B_v0.1,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.0,,0.30.1,,,,1.19.2,,,,0.11.1,,,,,,,,,,,,,,,,,,,,,,,,,,,MB,2452.258816,7298.613248,0.0,6652.166144,6323.221504,s,10,7.6798339843750005,0.7679833984375,0.0025974055850476797,0.7679208068847656,0.7711121887207032,0.7721169158935547,0.772920697631836,"[0.7685753173828125, 0.7731216430664063, 0.7647974853515624, 0.7675167846679688, 0.76627783203125, 0.7641251220703125, 0.7666953735351563, 0.7683248291015625, 0.770888916015625, 0.7695106811523438]",tokens/s,333.34053902837564,kWh,9.039419593792114e-06,4.95321018510627e-06,4.32610068310048e-05,5.725363660990319e-05,tokens/kWh,4471331.694513176,MB,2452.258816,7298.613248,0.0,6652.166144,6382.564864,s,10,455.74574609375,45.57457460937499,0.015546779743152406,45.57488671875,45.595179296874996,45.5973923828125,45.5991628515625,"[45.5867109375, 45.5946875, 45.59960546875, 45.58337109375, 45.55990625, 45.5529375, 45.558640625, 45.56011328125, 45.5776015625, 45.572171875]",tokens/s,1.3823497101175461,kWh,0.000537889561981909,0.0002948106017116671,0.0025152069288305857,0.0033479070925241613,tokens/kWh,18817.726495660016,,s,629,461.93301361083974,0.7343927084433065,0.09125959398126374,0.7233239135742188,0.7243169799804687,0.7247333496093751,1.4901356689453125,"[0.7238604736328125, 0.7241830444335937, 0.7225497436523437, 0.7234365234375, 0.7223142700195313, 0.7223203735351562, 0.7245137939453125, 0.7236771850585938, 0.7226992797851562, 0.723535888671875, 0.7234600830078125, 0.7246929931640625, 0.7229706420898437, 0.7237723999023438, 0.7233392944335938, 0.7235133666992187, 0.7225231323242187, 0.7234631958007812, 0.723367919921875, 0.7226572875976562, 0.7243929443359375, 0.7232921752929687, 0.7236546630859375, 0.7240488891601562, 0.7235286865234375, 0.7245383911132812, 0.7233167114257812, 0.7230576782226562, 0.7237959594726563, 0.7243140869140625, 0.722629638671875, 0.7226122436523438, 0.7224381713867187, 0.7233873901367187, 0.7249028930664062, 0.7240294189453125, 0.723904541015625, 0.7241912231445312, 0.7243069458007813, 0.7245864868164062, 0.7234006958007813, 0.723114990234375, 0.7231324462890625, 0.7226593017578125, 0.7241062622070312, 0.7237662963867187, 0.7239597778320312, 0.724094970703125, 0.7240611572265625, 0.7237017822265625, 0.7238123779296874, 0.72437451171875, 0.7245363159179687, 0.7248076782226562, 0.7224637451171875, 0.7232000122070312, 0.7239700317382812, 0.7240325317382813, 0.7229603881835938, 0.7228866577148437, 0.7229910888671875, 0.7245496215820313, 1.497416748046875, 0.7238062133789063, 0.7237662963867187, 0.7238911743164063, 0.7240550537109375, 0.7237857055664062, 0.7238358764648437, 0.7228784790039062, 0.72378369140625, 0.7233925170898438, 0.7229235229492188, 0.722534423828125, 0.7236341552734376, 
0.723399658203125, 0.7241830444335937, 0.7228897094726563, 0.7232071533203125, 0.7245209350585937, 0.7240325317382813, 0.7224688720703125, 0.7228262329101562, 0.7225323486328125, 0.7236700439453125, 0.7239026489257813, 0.72288037109375, 0.7225128784179687, 0.7245783081054687, 0.7235983276367187, 0.7235532836914063, 0.725017578125, 0.724094970703125, 0.7250708618164062, 0.7244400634765625, 0.7243571166992188, 0.7240745239257812, 0.7232041015625, 0.7236218872070312, 0.7237877807617188, 0.7239588012695313, 0.7241809692382812, 0.7240929565429688, 0.7230279541015625, 0.7229634399414062, 0.7229419555664063, 0.72318359375, 0.7240601806640625, 0.7232184448242187, 0.7226746826171875, 0.7235880737304687, 0.724453369140625, 0.7243407592773438, 0.7238184814453125, 0.7240242919921875, 0.7247308959960937, 0.7247349853515626, 0.7235543212890625, 0.7233402709960938, 0.7242977294921875, 0.7243202514648438, 0.723862548828125, 0.724084716796875, 0.723768310546875, 0.7227473754882813, 1.490913330078125, 0.7229655151367187, 0.7226480712890625, 0.7240775756835938, 0.7235686645507813, 0.7231815795898437, 0.72302490234375, 0.7227381591796875, 0.7236761474609374, 0.724653076171875, 0.724200439453125, 0.7239618530273437, 0.7246776123046875, 0.7250585327148438, 0.7243140869140625, 0.7238225708007813, 0.7248977661132813, 0.725359619140625, 0.7238379516601563, 0.7237171020507812, 0.724158447265625, 0.7239188232421875, 0.7248445434570312, 0.7251189575195313, 0.7247544555664063, 0.7250022583007812, 0.7248936767578125, 0.7236198120117188, 0.7234979858398437, 0.723462158203125, 0.7234559936523437, 0.724263916015625, 0.7227955322265625, 0.7225702514648438, 0.7241932983398438, 0.7234805908203125, 0.7234949340820312, 0.72275146484375, 0.7225385131835937, 0.7228804931640626, 0.7242403564453125, 0.7236218872070312, 0.7236986694335937, 0.723472412109375, 0.724126708984375, 0.7239505615234375, 0.7238901977539063, 0.7237283935546875, 0.7242025146484375, 0.724116455078125, 0.7241031494140625, 0.72374169921875, 0.7228671875, 0.7233228759765625, 0.7237857055664062, 0.7240171508789063, 0.72399462890625, 0.7229276123046875, 0.7241410522460937, 0.7236751098632812, 0.7226521606445313, 0.7233648681640625, 0.7235952758789063, 1.49184814453125, 0.7231047973632813, 0.7224780883789063, 0.7224258422851563, 0.72266650390625, 0.7234283447265625, 0.72353076171875, 0.7227269287109375, 0.7243991088867188, 0.7234119873046875, 0.72285595703125, 0.7229951782226562, 0.7234692993164062, 0.7242373046875, 0.7239915771484375, 0.7242465209960938, 0.7243253784179687, 0.7225200805664063, 0.7226972045898438, 0.7227289428710938, 0.7236956176757813, 0.7238881225585938, 0.7240274047851563, 0.7241492309570312, 0.7236741333007812, 0.7231661987304687, 0.723114990234375, 0.7243571166992188, 0.7238656005859375, 0.723240966796875, 0.7226122436523438, 0.7238615112304687, 0.7242168579101562, 0.7235440673828125, 0.7236239624023437, 0.723926025390625, 0.7243919067382812, 0.7229173583984375, 0.7226542358398438, 0.7227238159179687, 0.7229859619140625, 0.7234990234375, 0.7239567260742188, 0.7231416015625, 0.7239454956054687, 0.7230084838867188, 0.7233607788085937, 0.7238359375, 0.7240099487304688, 0.7241431274414063, 0.7246141357421875, 0.7241359252929688, 0.7245772705078125, 0.7245137939453125, 0.725411865234375, 0.723578857421875, 0.7241543579101563, 0.7230054321289062, 0.7233526000976562, 0.7226992797851562, 0.7232420043945312, 0.7230075073242187, 0.7230187377929688, 1.490323486328125, 0.7229685668945313, 0.723535888671875, 0.72338330078125, 0.723409912109375, 
0.7233341674804687, 0.7232112426757813, 0.72279345703125, 0.7230853271484375, 0.7230105590820313, 0.7226060791015625, 0.723346435546875, 0.7229296875, 0.72382568359375, 0.7241666259765625, 0.72303515625, 0.7230812377929687, 0.7226583251953125, 0.7230535888671875, 0.7229890747070312, 0.7228118896484375, 0.7222753295898438, 0.7228641357421876, 0.722740234375, 0.72254052734375, 0.7228671875, 0.7227391967773438, 0.7233505249023438, 0.722619384765625, 0.72300439453125, 0.7234150390625, 0.7230556030273437, 0.7235297241210937, 0.722819091796875, 0.723610595703125, 0.7228836059570313, 0.7235348510742188, 0.7226911010742187, 0.7227914428710938, 0.7233935546875, 0.7231590576171875, 0.7238717651367188, 0.72331982421875, 0.7227811889648438, 0.7233136596679688, 0.7229542236328125, 0.7231754150390625, 0.7233607788085937, 0.7233925170898438, 0.7229081420898438, 0.7231047973632813, 0.7224483642578124, 0.723252197265625, 0.722640869140625, 0.7238778686523437, 0.7232604370117187, 0.7231641845703125, 0.7244615478515625, 0.7236874389648438, 0.72321533203125, 0.7241400146484375, 0.7235686645507813, 0.7226695556640625, 1.48693603515625, 0.7224832153320313, 0.7234774780273437, 0.7231426391601562, 0.724173828125, 0.7240755004882813, 0.7229542236328125, 0.72241357421875, 0.7228876953125, 0.7233106079101562, 0.7236618041992188, 0.7231641845703125, 0.7225897216796875, 0.7224903564453125, 0.7225599975585938, 0.7225702514648438, 0.7227361450195312, 0.7226112060546875, 0.7231498413085937, 0.723304443359375, 0.722703369140625, 0.7227955322265625, 0.7233474731445313, 0.7234703369140625, 0.7267368774414062, 0.7227781982421875, 0.7225128173828125, 0.7224033203125, 0.7225589599609376, 0.7226634521484375, 0.7223306274414063, 0.7225364990234375, 0.7230811767578125, 0.72255078125, 0.7227003173828125, 0.7224832153320313, 0.7223961791992187, 0.7230310668945312, 0.7257907104492187, 0.7230904541015625, 0.7228538818359375, 0.7229788208007812, 0.7225180053710938, 0.7223818359375, 0.7229481201171875, 0.7229378662109375, 0.72367822265625, 0.7230084838867188, 0.7230146484375, 0.7226326904296875, 0.7231416015625, 0.7237335205078125, 0.7241860961914063, 0.7245127563476562, 0.722572265625, 0.7229020385742188, 0.72300341796875, 0.72266650390625, 0.7228159790039063, 0.7224207153320312, 0.7239270629882812, 0.72285693359375, 0.722966552734375, 1.4896527099609376, 0.7232348022460937, 0.7239567260742188, 0.7227739868164063, 0.723078125, 0.7226429443359375, 0.7226798095703125, 0.7228703002929687, 0.7228917846679688, 0.7229112548828125, 0.722935791015625, 0.7229030151367187, 0.7226050415039063, 0.7226798095703125, 0.7230105590820313, 0.7234058227539063, 0.7233239135742188, 0.7225077514648437, 0.7225938110351563, 0.7227412719726563, 0.7225303344726562, 0.72310888671875, 0.7248046264648438, 0.7248180541992187, 0.723907470703125, 0.72346826171875, 0.7232081909179687, 0.7231743774414062, 0.7237877807617188, 0.7234160766601563, 0.7235860595703125, 0.7227647705078125, 0.7225620727539063, 0.7228334350585938, 0.7226992797851562, 0.7227996215820313, 0.7227381591796875, 0.7233106079101562, 0.723267578125, 0.7230556030273437, 0.7226695556640625, 0.723061767578125, 0.7235317993164062, 0.7233689575195312, 0.7238010864257812, 0.7234078979492188, 0.7232849731445312, 0.7231876831054688, 0.7232327880859375, 0.7230422973632813, 0.7230341186523438, 0.7235563354492187, 0.7233157348632813, 0.7235266723632813, 0.723040283203125, 0.7234692993164062, 0.7238154296875, 0.7239547119140625, 0.7235758056640625, 0.7227924194335937, 0.7224013061523438, 0.7225753784179687, 
0.7224832153320313, 1.4909122314453125, 0.7237437744140625, 0.7234396362304687, 0.7226849365234375, 0.7229102172851563, 0.7227125854492188, 0.7236638793945313, 0.7237918701171875, 0.722882568359375, 0.7225845947265624, 0.7231959228515625, 0.7231826171875, 0.722893798828125, 0.7226316528320312, 0.7233382568359376, 0.7234171142578125, 0.7229818725585937, 0.722428955078125, 0.7228836059570313, 0.722682861328125, 0.7232973022460938, 0.7227125854492188, 0.7229685668945313, 0.723040283203125, 0.7242997436523437, 0.7234037475585937, 0.7232604370117187, 0.7234345092773438, 0.72426904296875, 0.7237816162109375, 0.7232747802734375, 0.7224575805664063, 0.7224043579101562, 0.7228845825195312, 0.7226439819335938, 0.7229317016601563, 0.722651123046875, 0.7223142700195313, 0.7236024169921875, 0.7237283935546875, 0.7242454833984375, 0.72339453125, 0.7232337646484375, 0.7232767944335937, 0.7239905395507813, 0.7230320434570312, 0.7228037109375, 0.7227996215820313, 0.722904052734375, 0.7226705932617188, 0.7241994018554687, 0.7242670288085937, 0.7234703369140625, 0.723314697265625, 0.7229102172851563, 0.722967529296875, 0.7240089721679688, 0.723979248046875, 0.7236167602539062, 0.7229450073242187, 0.7226654663085937, 0.7226122436523438, 0.722820068359375, 1.495736328125, 0.724738037109375, 0.7240570678710937, 0.72363623046875, 0.72394140625, 0.7237017822265625, 0.7253534545898438, 0.7251988525390625, 0.7241973876953125, 0.7238829956054688, 0.7238615112304687, 0.7239464721679687, 0.7240714111328125, 0.7241809692382812, 0.7237929077148437, 0.72289892578125, 0.7232706298828125, 0.722914306640625, 0.7235338134765625, 0.7231948852539063, 0.7239393310546876, 0.7230525512695313, 0.7231815795898437, 0.7229603881835938, 0.7230853271484375, 0.72344677734375, 0.7233054809570313, 0.72283544921875, 0.7226583251953125, 0.7230003051757813, 0.72287744140625, 0.722608154296875, 0.7231324462890625, 0.7233526000976562, 0.7234529418945312, 0.72300439453125, 0.7229020385742188, 0.7233484497070313, 0.723610595703125, 0.7233925170898438, 0.72308837890625, 0.7229317016601563, 0.7229389038085937, 0.7231682739257812, 0.7229910888671875, 0.7232286987304688, 0.72271875, 0.723019775390625, 0.723493896484375, 0.72239208984375, 0.722845703125, 0.7230084838867188, 0.7235686645507813, 0.7236188354492188, 0.7237345581054687, 0.72363623046875, 0.7234805908203125, 0.7234385986328125, 0.7238164672851563, 0.724041748046875, 0.723356689453125, 0.723030029296875, 0.7237140502929688, 1.4925987548828126, 0.7230996704101562, 0.72384716796875, 0.7239147338867188, 0.7256944580078125, 0.7247103881835938, 0.7244021606445312, 0.7243776245117187, 0.724316162109375, 0.7242546997070313, 0.7244994506835938, 0.724832275390625, 0.7239239501953125, 0.723041259765625, 0.7240745239257812, 0.7239096069335937, 0.723515380859375, 0.7226603393554687, 0.7232348022460937, 0.7224627075195312, 0.7226132202148438, 0.7228047485351563, 0.72265625, 0.7226470947265625, 0.7226756591796875, 0.7231713256835938, 0.72317236328125, 0.722787353515625, 0.7223971557617187, 0.7228057861328125, 0.7233065185546875, 0.7235245971679688, 0.7238748168945313, 0.72374169921875, 0.7234816284179687, 0.7235297241210937, 0.7231426391601562, 0.7237867431640626, 0.7235635375976562, 0.7232808837890625, 0.72345703125, 0.7230924682617188, 0.72296142578125, 0.7232747802734375, 0.7239485473632813, 0.724284423828125, 0.722783203125, 0.7224873046875, 0.7223971557617187, 0.7222200927734375, 0.7224954833984375, 0.7242076416015625, 0.722787353515625, 0.7238604736328125, 0.7227545776367188, 0.7224268798828125, 
0.7232020263671874, 0.7240253295898438, 0.722998291015625, 0.7229900512695312, 0.72289892578125, 0.7227340698242187, 0.7230996704101562]",tokens/s,1.36166929287697,,,,,,,, -4bit-awq-gemm-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,Salesforce/codegen-6B-nl,Salesforce/codegen-6B-nl,cuda,0,42,,,True,,,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run - report = scenario.run(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run - self.run_model_loading_tracking(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking - backend.load() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load - self.load_transformers_model() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model - self.load_transformers_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights - self.load_transformers_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained - self.pretrained_model = self.automodel_loader.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3826, in from_pretrained - config = cls._autoset_attn_implementation( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1565, in _autoset_attn_implementation - config = cls._check_and_enable_sdpa( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1731, in _check_and_enable_sdpa - raise ValueError( -ValueError: CodeGenForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. 
Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` - -",codegen,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemm-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,facebook/opt-125m,facebook/opt-125m,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - self.load_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3704, in from_pretrained - config = cls._autoset_attn_implementation( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1490, in _autoset_attn_implementation - config = cls._check_and_enable_sdpa( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1656, in _check_and_enable_sdpa - raise ValueError( -ValueError: OPTForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. 
Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemm-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,Qwen/Qwen1.5-110B,Qwen/Qwen1.5-110B,cuda,0,42,,,True,,,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run - report = scenario.run(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run - self.run_model_loading_tracking(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking - backend.load() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load - self.load_transformers_model() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model - self.load_transformers_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights - self.load_transformers_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained - self.pretrained_model = self.automodel_loader.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4034, in from_pretrained - dispatch_model(model, **device_map_kwargs) - File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 494, in dispatch_model - model.to(device) - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 2905, in to - return super().to(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1174, in to - return self._apply(convert) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply - module._apply(fn) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply - module._apply(fn) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply - 
module._apply(fn) - [Previous line repeated 2 more times] - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 854, in _apply - self._buffers[key] = fn(buf) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1160, in convert - return t.to( -torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 32.00 MiB. GPU 0 has a total capacity of 22.18 GiB of which 8.50 MiB is free. Process 87078 has 22.17 GiB memory in use. Of the allocated memory 21.91 GiB is allocated by PyTorch, and 17.87 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) - -",qwen2,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemm-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,B,B,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 304, in hf_raise_for_status - response.raise_for_status() - File ""/usr/local/lib/python3.10/dist-packages/requests/models.py"", line 1024, in raise_for_status - raise HTTPError(http_error_msg, response=self) -requests.exceptions.HTTPError: 404 Client Error: Not Found for url: https://huggingface.co/B/resolve/main/config.json - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1221, in hf_hub_download - return _hf_hub_download_to_cache_dir( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1325, in _hf_hub_download_to_cache_dir - _raise_on_head_call_error(head_call_error, force_download, local_files_only) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1823, in _raise_on_head_call_error - raise head_call_error - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1722, in _get_metadata_or_catch_error - 
metadata = get_hf_file_metadata(url=url, proxies=proxies, timeout=etag_timeout, headers=headers) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1645, in get_hf_file_metadata - r = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 372, in _request_wrapper - response = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 396, in _request_wrapper - hf_raise_for_status(response) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status - raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-669493ca-33a0f5b720e70cf7333a0e93;3193f618-aee8-4b60-a74d-5a6f8b0af6dd) - -Repository Not Found for url: https://huggingface.co/B/resolve/main/config.json. -Please make sure you specified the correct `repo_id` and `repo_type`. -If you are trying to access a private or gated repo, make sure you are authenticated. - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 425, in cached_file - raise EnvironmentError( -OSError: B is not a local folder and is not a valid model identifier listed on 'https://huggingface.co/models' -If this is a private repository, make sure to pass a token having permission to this repo either by logging in with `huggingface-cli login` or by passing `token=` - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 
-4bit-awq-gemm-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,t,t,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 304, in hf_raise_for_status - response.raise_for_status() - File ""/usr/local/lib/python3.10/dist-packages/requests/models.py"", line 1024, in raise_for_status - raise HTTPError(http_error_msg, response=self) -requests.exceptions.HTTPError: 404 Client Error: Not Found for url: https://huggingface.co/t/resolve/main/config.json - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1221, in hf_hub_download - return _hf_hub_download_to_cache_dir( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1325, in _hf_hub_download_to_cache_dir - _raise_on_head_call_error(head_call_error, force_download, local_files_only) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1823, in _raise_on_head_call_error - raise head_call_error - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1722, in _get_metadata_or_catch_error - metadata = get_hf_file_metadata(url=url, proxies=proxies, timeout=etag_timeout, headers=headers) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1645, in get_hf_file_metadata - r = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 372, in _request_wrapper - response = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 396, in _request_wrapper - hf_raise_for_status(response) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status - raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. 
(Request ID: Root=1-669490c0-127c6af3417ed277160f9e7f;5cef989b-7a9b-4aff-8cd8-9e0843e8f847) - -Repository Not Found for url: https://huggingface.co/t/resolve/main/config.json. -Please make sure you specified the correct `repo_id` and `repo_type`. -If you are trying to access a private or gated repo, make sure you are authenticated. - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 425, in cached_file - raise EnvironmentError( -OSError: t is not a local folder and is not a valid model identifier listed on 'https://huggingface.co/models' -If this is a private repository, make sure to pass a token having permission to this repo either by logging in with `huggingface-cli login` or by passing `token=` - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemm-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,stabilityai/stablelm-base-alpha-3b,stabilityai/stablelm-base-alpha-3b,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.42.3,,0.31.0,,,,1.20.0,,,,0.11.1,,,,,,,,,,,,,,,,,,,,,,,,,,,MB,1368.358912,4467.458048,0.0,3821.010944,3588.539392,s,10,2.8067558593750004,0.2806755859375,0.0006520085456073243,0.28042581176757814,0.28102059326171874,0.2818000457763672,0.28242360778808595,"[0.28257949829101564, 0.2808473815917969, 0.2804600830078125, 0.28033602905273436, 0.28030355834960935, 0.2803615417480469, 0.280387939453125, 0.2803915405273438, 0.2805464172363281, 
0.2805418701171875]",tokens/s,912.0850292159193,kWh,3.3146340721919215e-06,1.8157278611973394e-06,1.546479632244524e-05,2.05951582558345e-05,tokens/kWh,12430105.989958903,MB,1368.358912,4467.458048,0.0,3821.010944,3673.094656,s,10,163.922755859375,16.3922755859375,0.0036930693849752424,16.39014453125,16.397535546874998,16.3983634765625,16.399025820312502,"[16.39692578125, 16.3973515625, 16.3902265625, 16.3898671875, 16.389916015625, 16.38983203125, 16.388861328125, 16.390521484375, 16.3900625, 16.39919140625]",tokens/s,3.8432735997951397,kWh,0.00019353930944461886,0.00010607578792378326,0.0008956917659355523,0.0011953068633039545,tokens/kWh,52706.1308975181,,s,629,166.186866668701,0.26420805511717194,0.03331598698154965,0.2601615295410156,0.260431884765625,0.26055332641601564,0.5405375170898438,"[0.2604738464355469, 0.2601553955078125, 0.2602137451171875, 0.2602014770507812, 0.26009906005859373, 0.2600960083007812, 0.2601891784667969, 0.260052978515625, 0.26007244873046875, 0.26023117065429685, 0.26028134155273436, 0.2602239990234375, 0.260178955078125, 0.2603960266113281, 0.26044927978515625, 0.26019122314453125, 0.2603632507324219, 0.2602874755859375, 0.2603694152832031, 0.26010931396484377, 0.2601082763671875, 0.2601553955078125, 0.26012261962890626, 0.26017691040039065, 0.260384765625, 0.26000897216796875, 0.26050048828125, 0.2604451904296875, 0.2605189208984375, 0.2606510009765625, 0.26038885498046876, 0.26014208984375, 0.26037350463867187, 0.26012875366210936, 0.26038168334960937, 0.26016256713867186, 0.26014309692382814, 0.2602557373046875, 0.26028338623046876, 0.26035711669921874, 0.260173828125, 0.260274169921875, 0.26040423583984373, 0.2602352600097656, 0.2601103210449219, 0.26028646850585935, 0.26021273803710937, 0.2600837097167969, 0.2600867919921875, 0.26013287353515624, 0.26008062744140625, 0.2601809997558594, 0.26045849609375, 0.26033767700195315, 0.2603550720214844, 0.26027316284179686, 0.260358154296875, 0.26017791748046876, 0.26013287353515624, 0.26070016479492186, 0.2606714782714844, 0.2604800109863281, 0.5413314819335937, 0.26024755859375, 0.260279296875, 0.26044723510742185, 0.2606909484863281, 0.2603417663574219, 0.2602362976074219, 0.2602874755859375, 0.26026904296875, 0.26027825927734377, 0.2603980712890625, 0.2603550720214844, 0.26029876708984373, 0.2603970642089844, 0.2604031982421875, 0.2602977294921875, 0.2602874755859375, 0.2606069641113281, 0.2604349365234375, 0.2605721740722656, 0.26039398193359375, 0.2601553955078125, 0.26009088134765623, 0.26001202392578127, 0.26004583740234377, 0.2601062316894531, 0.2600140686035156, 0.26019122314453125, 0.2603263854980469, 0.26014822387695313, 0.26062744140625, 0.2606827392578125, 0.260453369140625, 0.2604513244628906, 0.2603345947265625, 0.260274169921875, 0.2604247131347656, 0.2603755493164063, 0.260379638671875, 0.26031512451171873, 0.26023834228515624, 0.260242431640625, 0.26064486694335937, 0.260105224609375, 0.26019839477539064, 0.2601584777832031, 0.26008779907226565, 0.2601379699707031, 0.26007858276367185, 0.26006427001953125, 0.2601451416015625, 0.26012261962890626, 0.2601871337890625, 0.260168701171875, 0.26012875366210936, 0.26016357421875, 0.260094970703125, 0.26018508911132815, 0.2601615295410156, 0.26018405151367185, 0.2601185302734375, 0.2602147827148438, 0.2601390075683594, 0.5406546020507812, 0.26011239624023436, 0.2602711181640625, 0.2600499267578125, 0.2601041870117187, 0.2603049011230469, 0.2602168273925781, 0.26013388061523435, 0.26018405151367185, 0.26004376220703124, 0.2601246643066406, 
0.2600919189453125, 0.260063232421875, 0.260063232421875, 0.26024551391601564, 0.26033050537109376, 0.2602772521972656, 0.26050970458984374, 0.26012057495117186, 0.26016973876953126, 0.26012979125976565, 0.260068359375, 0.2601308288574219, 0.26012875366210936, 0.2602158203125, 0.26017279052734377, 0.26003558349609374, 0.26014822387695313, 0.26015640258789063, 0.2601134033203125, 0.26012774658203125, 0.26010726928710937, 0.2601185302734375, 0.2603263854980469, 0.260105224609375, 0.26043597412109376, 0.26021273803710937, 0.26012979125976565, 0.2602721252441406, 0.26023117065429685, 0.2602301330566406, 0.2601082763671875, 0.26006732177734376, 0.2601082763671875, 0.2602874755859375, 0.26014208984375, 0.2600263671875, 0.2604247131347656, 0.26013592529296875, 0.26005401611328127, 0.2600181884765625, 0.260031494140625, 0.26013491821289064, 0.26009088134765623, 0.2600028076171875, 0.26041241455078123, 0.26015640258789063, 0.2601062316894531, 0.260031494140625, 0.2600693664550781, 0.26033151245117186, 0.2602291259765625, 0.2600919189453125, 0.5405234985351562, 0.25998745727539063, 0.260173828125, 0.26001202392578127, 0.26014105224609374, 0.2600028076171875, 0.26002944946289064, 0.2601031799316406, 0.26008984375, 0.26006527709960936, 0.26004583740234377, 0.26007757568359374, 0.2599424133300781, 0.25997927856445313, 0.26033355712890627, 0.26021170043945313, 0.2601973876953125, 0.26019839477539064, 0.2602711181640625, 0.260389892578125, 0.2600560607910156, 0.2602014770507812, 0.26017279052734377, 0.26028237915039065, 0.26020965576171873, 0.2603263854980469, 0.26026803588867187, 0.26026803588867187, 0.2602158203125, 0.2604656677246094, 0.2606540832519531, 0.26019021606445314, 0.2601605224609375, 0.260173828125, 0.2600714111328125, 0.260063232421875, 0.26014617919921873, 0.26014105224609374, 0.26012261962890626, 0.2600744934082031, 0.2600284118652344, 0.2600663146972656, 0.2600960083007812, 0.26000897216796875, 0.26003762817382814, 0.2601021423339844, 0.26015640258789063, 0.2602014770507812, 0.2601257019042969, 0.26007757568359374, 0.26020352172851563, 0.2601605224609375, 0.26020965576171873, 0.260463623046875, 0.26010009765625, 0.2601164855957031, 0.26010113525390627, 0.26030081176757813, 0.2601236572265625, 0.2600980529785156, 0.2600284118652344, 0.2602229614257813, 0.26016461181640627, 0.54054296875, 0.2599915466308594, 0.2600621948242188, 0.2600335388183594, 0.26055374145507815, 0.26001715087890626, 0.25995367431640626, 0.2599915466308594, 0.25993215942382814, 0.2599864196777344, 0.260073486328125, 0.2600755310058594, 0.2600130615234375, 0.26003250122070315, 0.25998233032226564, 0.26010726928710937, 0.26006427001953125, 0.2600693664550781, 0.2599362487792969, 0.26019021606445314, 0.2601021423339844, 0.26002432250976565, 0.25996697998046875, 0.26015640258789063, 0.26016256713867186, 0.2601584777832031, 0.26004376220703124, 0.2603673706054688, 0.2601922607421875, 0.26005401611328127, 0.26012774658203125, 0.26035711669921874, 0.260073486328125, 0.26015640258789063, 0.2600867919921875, 0.26006427001953125, 0.2600867919921875, 0.26011544799804687, 0.26028646850585935, 0.2602239990234375, 0.26033355712890627, 0.2601922607421875, 0.2602926025390625, 0.2606766052246094, 0.2603632507324219, 0.2603049011230469, 0.2601574401855469, 0.260431884765625, 0.26024038696289065, 0.2603397216796875, 0.260284423828125, 0.2601809997558594, 0.26024551391601564, 0.2603182067871094, 0.26009088134765623, 0.26022503662109375, 0.2602547302246094, 0.2602168273925781, 0.2601441345214844, 0.260168701171875, 0.26013491821289064, 
0.2601379699707031, 0.2601062316894531, 0.54051123046875, 0.2601390075683594, 0.2601922607421875, 0.25996697998046875, 0.260052978515625, 0.260136962890625, 0.260094970703125, 0.2600407104492187, 0.2599987182617188, 0.2599987182617188, 0.26010009765625, 0.26005914306640626, 0.26003558349609374, 0.2600110168457031, 0.26003045654296875, 0.26005401611328127, 0.26007858276367185, 0.2603427734375, 0.26014208984375, 0.2601748352050781, 0.2600499267578125, 0.26007757568359374, 0.2600714111328125, 0.26015435791015623, 0.26024856567382815, 0.26022503662109375, 0.26021990966796876, 0.26020452880859374, 0.260316162109375, 0.26029876708984373, 0.26020556640625, 0.260210693359375, 0.26008575439453124, 0.2604656677246094, 0.2604400634765625, 0.2601594848632813, 0.26012774658203125, 0.2602557373046875, 0.260242431640625, 0.26018405151367185, 0.2602342529296875, 0.26016769409179685, 0.260094970703125, 0.26014004516601563, 0.2603407287597656, 0.26011749267578127, 0.26007244873046875, 0.26017074584960936, 0.2601666564941406, 0.2601195373535156, 0.2601523132324219, 0.26002432250976565, 0.26015640258789063, 0.2602721252441406, 0.2601236572265625, 0.2602659912109375, 0.2604195861816406, 0.26026904296875, 0.260105224609375, 0.26010009765625, 0.26001715087890626, 0.26015435791015623, 0.26013592529296875, 0.5406597290039062, 0.25996798706054686, 0.26000384521484377, 0.25998233032226564, 0.26013388061523435, 0.26001611328125, 0.2601871337890625, 0.2602270812988281, 0.2600140686035156, 0.25994854736328127, 0.26010931396484377, 0.2600417175292969, 0.26012161254882815, 0.26007858276367185, 0.2603612060546875, 0.26019021606445314, 0.2600335388183594, 0.2600048522949219, 0.26028851318359375, 0.26029568481445314, 0.2600263671875, 0.2601584777832031, 0.26007244873046875, 0.26013287353515624, 0.26012161254882815, 0.2601451416015625, 0.25998849487304687, 0.2601113586425781, 0.26011444091796876, 0.26016256713867186, 0.26076263427734375, 0.2601973876953125, 0.26007962036132815, 0.26019122314453125, 0.2599557189941406, 0.26013388061523435, 0.26014822387695313, 0.2600744934082031, 0.26005709838867186, 0.2601041870117187, 0.2599925842285156, 0.2600140686035156, 0.260052978515625, 0.26012261962890626, 0.26014309692382814, 0.26016769409179685, 0.26023117065429685, 0.26024551391601564, 0.2600980529785156, 0.260031494140625, 0.26009088134765623, 0.2600816650390625, 0.2601891784667969, 0.26030694580078123, 0.26020660400390627, 0.2601471862792969, 0.26018405151367185, 0.26039398193359375, 0.2601134033203125, 0.2601236572265625, 0.26024551391601564, 0.2604605407714844, 0.2602229614257813, 0.54063818359375, 0.26014309692382814, 0.2600407104492187, 0.26005914306640626, 0.26034481811523436, 0.26052197265625, 0.26023321533203125, 0.26015435791015623, 0.26017691040039065, 0.2602239990234375, 0.26020660400390627, 0.260242431640625, 0.2601533508300781, 0.26014208984375, 0.2602905578613281, 0.26014309692382814, 0.260068359375, 0.2601471862792969, 0.2600407104492187, 0.2601021423339844, 0.2601871337890625, 0.2600888366699219, 0.26002227783203125, 0.25998541259765623, 0.2601748352050781, 0.2601195373535156, 0.26006732177734376, 0.2601502685546875, 0.26017691040039065, 0.2601451416015625, 0.2601615295410156, 0.26021170043945313, 0.26003250122070315, 0.26021786499023436, 0.2601871337890625, 0.2601533508300781, 0.2600130615234375, 0.2600744934082031, 0.260063232421875, 0.26030899047851563, 0.26002944946289064, 0.26009292602539064, 0.26021990966796876, 0.2603765869140625, 0.26019534301757813, 0.26014004516601563, 0.26009088134765623, 
0.26026190185546877, 0.26006527709960936, 0.26020452880859374, 0.2601113586425781, 0.2602567749023437, 0.26019021606445314, 0.2602219543457031, 0.2601748352050781, 0.2601943054199219, 0.26019534301757813, 0.26018508911132815, 0.2603212890625, 0.26023934936523435, 0.2601021423339844, 0.2602352600097656, 0.26024856567382815, 0.5407201538085937, 0.260105224609375, 0.260252685546875, 0.2601451416015625, 0.26007858276367185, 0.2602147827148438, 0.26014309692382814, 0.2600284118652344, 0.26005197143554687, 0.25995162963867186, 0.2601553955078125, 0.26008062744140625, 0.2599710693359375, 0.26005914306640626, 0.26005197143554687, 0.2600980529785156, 0.26014822387695313, 0.26045849609375, 0.2603468933105469, 0.26027621459960937, 0.2603212890625, 0.2601441345214844, 0.2602076110839844, 0.2600888366699219, 0.26023934936523435, 0.2602362976074219, 0.2600765380859375, 0.2601891784667969, 0.26019635009765624, 0.26020452880859374, 0.2600396728515625, 0.2601318359375, 0.2600939636230469, 0.26023321533203125, 0.26005810546875, 0.26002328491210935, 0.26006427001953125, 0.260068359375, 0.2600181884765625, 0.2600980529785156, 0.26037042236328123, 0.260126708984375, 0.26012261962890626, 0.2602014770507812, 0.26039910888671874, 0.2602444763183594, 0.2600447998046875, 0.2602997741699219, 0.260210693359375, 0.2601021423339844, 0.2600488891601562, 0.2600816650390625, 0.2602577819824219, 0.2600980529785156, 0.2601891784667969, 0.26029568481445314, 0.26044723510742185, 0.26029464721679685, 0.26002944946289064, 0.2600939636230469, 0.26015435791015623, 0.26023321533203125, 0.26021170043945313, 0.5409474487304687, 0.25998745727539063, 0.2600058898925781, 0.26006427001953125, 0.26001715087890626, 0.25991986083984375, 0.2600611877441406, 0.2601082763671875, 0.26007962036132815, 0.260021240234375, 0.2601134033203125, 0.2601533508300781, 0.2600079345703125, 0.26007858276367185, 0.2602997741699219, 0.26022604370117186, 0.2600488891601562, 0.260200439453125, 0.26017791748046876, 0.26030694580078123, 0.26000897216796875, 0.260173828125, 0.2601164855957031, 0.260173828125, 0.2601441345214844, 0.26056805419921875, 0.2605014953613281, 0.2605014953613281, 0.2604789733886719, 0.2605363159179688, 0.2607964172363281, 0.2606684265136719, 0.2602915954589844, 0.2604461975097656, 0.2603263854980469, 0.26024652099609374, 0.26027825927734377, 0.260305908203125, 0.2603970642089844, 0.26023834228515624, 0.26024856567382815, 0.26026904296875, 0.26051275634765625, 0.26044110107421875, 0.2605823974609375, 0.260569091796875, 0.2605455322265625, 0.26085888671875, 0.26055270385742185, 0.26060595703125, 0.26059674072265626, 0.2604236755371094, 0.2606295166015625, 0.26085479736328127, 0.26018304443359375, 0.26017279052734377, 0.2602342529296875, 0.260431884765625, 0.26019839477539064, 0.26014208984375, 0.2601103210449219, 0.2603325500488281, 0.2602669982910156]",tokens/s,3.7848959584389523,,,,,,,, -4bit-awq-gemm-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,Salesforce/codegen-16B-nl,Salesforce/codegen-16B-nl,cuda,0,42,,,True,,,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA 
A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run - report = scenario.run(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run - self.run_model_loading_tracking(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking - backend.load() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load - self.load_transformers_model() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model - self.load_transformers_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights - self.load_transformers_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained - self.pretrained_model = self.automodel_loader.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3826, in from_pretrained - config = cls._autoset_attn_implementation( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1565, in _autoset_attn_implementation - config = cls._check_and_enable_sdpa( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1731, in _check_and_enable_sdpa - raise ValueError( -ValueError: CodeGenForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. 
Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` - -",codegen,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemm-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,facebook/opt-6.7b,facebook/opt-6.7b,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemm,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - self.load_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3704, in from_pretrained - config = cls._autoset_attn_implementation( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1490, in _autoset_attn_implementation - config = cls._check_and_enable_sdpa( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1656, in _check_and_enable_sdpa - raise ValueError( -ValueError: OPTForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. 
Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-exllama-v1-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,.,.,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 373, in cached_file - raise EnvironmentError( -OSError: . does not appear to have a file named config.json. Checkout 'https://huggingface.co/./tree/None' for available files. 
- -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,1,64,1,,, -4bit-awq-exllama-v1-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,l,l,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 304, in hf_raise_for_status - response.raise_for_status() - File ""/usr/local/lib/python3.10/dist-packages/requests/models.py"", line 1024, in raise_for_status - raise HTTPError(http_error_msg, response=self) -requests.exceptions.HTTPError: 404 Client Error: Not Found for url: https://huggingface.co/l/resolve/main/config.json - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1221, in hf_hub_download - return _hf_hub_download_to_cache_dir( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1325, in _hf_hub_download_to_cache_dir - _raise_on_head_call_error(head_call_error, force_download, local_files_only) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1823, in _raise_on_head_call_error - raise head_call_error - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1722, in _get_metadata_or_catch_error - metadata = get_hf_file_metadata(url=url, proxies=proxies, timeout=etag_timeout, headers=headers) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1645, in get_hf_file_metadata - r = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 372, in _request_wrapper - response = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 396, in _request_wrapper - hf_raise_for_status(response) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status - raise 
RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-669491e2-6ca0ac411b0e535c5d450960;d366e3c5-63dc-4d39-b796-571c449dc05e) - -Repository Not Found for url: https://huggingface.co/l/resolve/main/config.json. -Please make sure you specified the correct `repo_id` and `repo_type`. -If you are trying to access a private or gated repo, make sure you are authenticated. - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 425, in cached_file - raise EnvironmentError( -OSError: l is not a local folder and is not a valid model identifier listed on 'https://huggingface.co/models' -If this is a private repository, make sure to pass a token having permission to this repo either by logging in with `huggingface-cli login` or by passing `token=` - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,1,64,1,,, -4bit-awq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,Qwen/Qwen1.5-32B,Qwen/Qwen1.5-32B,cuda,0,42,,,True,,,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA 
A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,,qwen2,MB,17833.086976,24058.003456,0.0,23420.993536,21732.465152,s,1,17.19055859375,17.19055859375,0.0,17.19055859375,17.19055859375,17.19055859375,17.19055859375,[17.19055859375],,kWh,0.00012227886416249557,6.700017496263871e-05,0.00027289855165199897,0.0004621775907771333,,MB,4473.028608,24143.986688,0.0,23492.296704,20639.09888,s,10,56.80982421875001,5.680982421874999,0.0005469417422302733,5.681064697265625,5.681471240234376,5.681702172851563,5.681886918945312,"[5.68193310546875, 5.6810205078125, 5.68088525390625, 5.68126708984375, 5.67999462890625, 5.6801240234375, 5.68084814453125, 5.68122265625, 5.681419921875, 5.68110888671875]",tokens/s,45.06262843100078,kWh,6.706942285694441e-05,3.6757945603866714e-05,0.00040948049425079957,0.0005133078627116107,tokens/kWh,498726.04453718895,MB,4477.292544,24146.08384,0.0,23492.296704,20639.10144,s,10,51.57804833984375,5.1578048339843745,0.011611266385491002,5.15583056640625,5.16867578125,5.1782119140625,5.1858408203124995,"[5.14957568359375, 5.1584765625, 5.15012646484375, 5.1586884765625, 5.15574853515625, 5.166556640625, 5.15591259765625, 5.15156640625, 5.187748046875, 5.14364892578125]",tokens/s,12.2144986147785,kWh,6.0925826731805726e-05,3.339277658485637e-05,0.00022184689969960083,0.00031616550301626306,tokens/kWh,199262.7260057508,,s,630,51.575424087524375,0.08186575251988003,0.0007055134113463695,0.08162201690673829,0.08281497955322266,0.08324976539611817,0.0841024235534668,"[0.08223846435546875, 0.08169369506835937, 0.08153804779052734, 0.081617919921875, 0.08143257904052735, 0.08198246765136719, 0.08152268981933594, 0.08148684692382813, 0.08131276702880859, 0.08142131042480469, 0.08193740844726563, 0.08164966583251954, 0.08181145477294922, 0.08179609680175781, 0.08247398376464844, 0.0830730209350586, 0.08147660827636719, 0.08143052673339844, 0.0812769317626953, 0.08143666839599609, 0.08132915496826172, 0.08154521942138672, 0.08130252838134766, 0.08176127624511718, 0.08201625823974609, 0.08228761291503907, 0.0815626220703125, 0.0815994873046875, 0.08135475158691406, 0.08175308990478515, 0.08168755340576171, 0.08160460662841797, 0.08142540740966797, 0.08144281768798828, 0.0817817611694336, 0.08134246063232421, 0.08130764770507813, 0.08176947021484375, 0.08142642974853516, 0.08189542388916016, 0.08141107177734375, 0.08163430023193359, 0.0812779541015625, 0.08145613098144532, 0.08140697479248046, 0.08153702545166015, 0.08360038757324219, 0.0824268798828125, 0.08230707550048828, 0.08209817504882813, 0.08190668487548829, 0.08155033874511719, 0.08130867004394532, 0.08142848205566407, 0.08133222198486328, 0.08150323486328125, 0.0814551010131836, 0.08144588470458984, 0.08172441864013671, 0.08157593536376953, 0.08190668487548829, 0.08342733001708984, 0.08275052642822266, 0.08166809844970703, 0.08199372863769532, 0.08249446105957031, 0.08249753570556641, 0.08152473449707032, 0.08146431732177735, 0.08206950378417968, 0.08211148834228515, 0.08238697814941406, 0.0824002227783203, 0.08199987030029297, 0.0822845458984375, 0.08228966522216796, 0.0828375015258789, 0.08277094268798828, 0.08295629119873046, 0.08193126678466797, 0.08158924865722657, 0.08185139465332031, 0.0816732177734375, 0.08142438507080078, 0.08202547454833985, 0.08134451293945312, 0.08136396789550782, 0.08122061157226562, 0.08143974304199218, 0.08132915496826172, 0.08247602844238282, 0.0813864974975586, 0.08148684692382813, 0.08124518585205077, 0.08202137756347656, 0.08167526245117188, 0.08156671905517578, 
0.08130048370361329, 0.08225791931152343, 0.08341094207763672, 0.08167935943603516, 0.08158719635009766, 0.08195993804931641, 0.08146534729003906, 0.08162201690673829, 0.08178892517089843, 0.08151859283447266, 0.08182278442382812, 0.08135059356689453, 0.08126258850097656, 0.08139263916015625, 0.08125337219238281, 0.08139263916015625, 0.08270336151123046, 0.08284159851074219, 0.0815841293334961, 0.08219033813476563, 0.08124620819091796, 0.08205209350585937, 0.08191693115234375, 0.08244838714599609, 0.08119808197021484, 0.08203673553466796, 0.08157798767089844, 0.08200908660888671, 0.08252518463134766, 0.0824791030883789, 0.08149298858642579, 0.08141619110107422, 0.08198143768310547, 0.0815257568359375, 0.08165888214111328, 0.08134349060058593, 0.08147660827636719, 0.08130560302734376, 0.08173056030273437, 0.08148889923095703, 0.08150220489501953, 0.08185958099365234, 0.08146636962890624, 0.08130150604248047, 0.0815288314819336, 0.08135372924804687, 0.081728515625, 0.08139981079101563, 0.08141619110107422, 0.08129843139648438, 0.08139263916015625, 0.08146739196777343, 0.08130150604248047, 0.08124723052978515, 0.08318566131591797, 0.08474419403076172, 0.08200704193115234, 0.08162303924560547, 0.0817100830078125, 0.08147353363037109, 0.08162815856933593, 0.081723388671875, 0.08148377227783203, 0.08157286071777343, 0.08160460662841797, 0.08129843139648438, 0.08170086669921875, 0.08205107116699219, 0.08180223846435547, 0.08272793579101563, 0.08326451110839844, 0.08146329498291016, 0.08146431732177735, 0.08141004943847656, 0.08123699188232422, 0.0814028778076172, 0.08153292846679687, 0.08146534729003906, 0.08153804779052734, 0.0818862075805664, 0.08164147186279297, 0.08140493011474609, 0.08241356658935547, 0.0815257568359375, 0.08153395080566406, 0.08150118255615234, 0.08144281768798828, 0.08181657409667968, 0.08184832000732421, 0.08153907012939453, 0.08180941009521485, 0.08423117065429687, 0.08177152252197266, 0.08159436798095702, 0.08156877136230468, 0.08173260498046875, 0.08169676971435547, 0.08148480224609375, 0.08172441864013671, 0.08205209350585937, 0.08179609680175781, 0.0818892822265625, 0.08133939361572265, 0.0814571533203125, 0.081438720703125, 0.08141311645507812, 0.08136192321777344, 0.08329727935791016, 0.08323174285888672, 0.08227123260498047, 0.08176947021484375, 0.08330137634277343, 0.08195481872558594, 0.08179097747802734, 0.0818155517578125, 0.08148377227783203, 0.08164864349365235, 0.08215449523925782, 0.08193331146240235, 0.08200498962402344, 0.08167935943603516, 0.08251392364501953, 0.0828037109375, 0.08176025390625, 0.08156877136230468, 0.08151551818847656, 0.0812564468383789, 0.0813680648803711, 0.08129945373535157, 0.08147968292236328, 0.08128921508789062, 0.0813803482055664, 0.08129535675048828, 0.0814551010131836, 0.08132710266113281, 0.08154828643798828, 0.0820848617553711, 0.08160050964355468, 0.08130252838134766, 0.08149913787841796, 0.08146329498291016, 0.08162201690673829, 0.08137010955810547, 0.08146841430664062, 0.08817356872558593, 0.08246784210205078, 0.08166092681884765, 0.08244121551513672, 0.08205619049072266, 0.08177254486083985, 0.08300543975830078, 0.081544189453125, 0.08132403564453125, 0.08153497314453124, 0.08152063751220703, 0.08201932525634766, 0.0816394271850586, 0.08138854217529297, 0.08165376281738282, 0.08156569671630859, 0.08327474975585937, 0.08206950378417968, 0.0823193588256836, 0.08211968231201172, 0.08232345581054687, 0.08167628479003906, 0.08105677032470703, 0.08145203399658203, 0.08155852508544922, 0.08096768188476562, 0.0808826904296875, 
0.08080691528320312, 0.08120114898681641, 0.08126873779296875, 0.08167424011230469, 0.08207872009277344, 0.08155238342285157, 0.08080998229980468, 0.08115814208984375, 0.08127180480957032, 0.08198553466796875, 0.08197119903564454, 0.08170393371582031, 0.0814725112915039, 0.08322252655029297, 0.0826593246459961, 0.081723388671875, 0.08165888214111328, 0.08378470611572265, 0.08294502258300782, 0.08184627532958984, 0.08137728118896484, 0.08148992156982422, 0.0823920669555664, 0.0818493423461914, 0.08144691467285156, 0.08138956451416016, 0.08237158203125, 0.08272589111328126, 0.082302978515625, 0.08217497253417969, 0.08166809844970703, 0.0832204818725586, 0.0835594253540039, 0.08161689758300782, 0.08165068817138672, 0.08193126678466797, 0.08144895935058594, 0.08156057739257813, 0.08174899291992188, 0.08169369506835937, 0.08154112243652344, 0.08182579040527344, 0.08175001525878907, 0.08164556884765625, 0.0814551010131836, 0.08150732421875, 0.08138956451416016, 0.08249549102783203, 0.08199167633056641, 0.08156774139404296, 0.08157491302490234, 0.0813864974975586, 0.08154828643798828, 0.08157798767089844, 0.08183193969726563, 0.08152063751220703, 0.08203878021240234, 0.0829849624633789, 0.08278425598144531, 0.08146022033691407, 0.08148992156982422, 0.08135884857177735, 0.0814940185546875, 0.08140902709960937, 0.0813117446899414, 0.08129945373535157, 0.08181964874267578, 0.08142745971679688, 0.08148172760009766, 0.08213919830322265, 0.08256915283203126, 0.08198963165283203, 0.08151347351074219, 0.08134758758544922, 0.08148070526123047, 0.08147353363037109, 0.08207257843017578, 0.08125542449951172, 0.08305561828613281, 0.08252006530761719, 0.08411341094970703, 0.08243609619140625, 0.08190156555175782, 0.08146329498291016, 0.08161280059814453, 0.08290201568603515, 0.08316825866699219, 0.08234086608886719, 0.08165478515625, 0.08229785919189453, 0.08220671844482422, 0.08257126617431641, 0.08243507385253906, 0.08149913787841796, 0.08273919677734375, 0.08139469146728516, 0.08199680328369141, 0.08143769836425781, 0.08146125030517579, 0.08125234985351562, 0.08208793640136719, 0.08243199920654297, 0.08198143768310547, 0.08298291015625, 0.08364339447021485, 0.08294092559814453, 0.08336486053466798, 0.08156057739257813, 0.08173670196533203, 0.0813864974975586, 0.08407552337646484, 0.08304946899414062, 0.08156979370117187, 0.08181759643554687, 0.08145203399658203, 0.08159846496582031, 0.08114080047607422, 0.08155948638916016, 0.08132403564453125, 0.08163021087646484, 0.08139059448242188, 0.08161177825927735, 0.08228253173828125, 0.0836454086303711, 0.08306790161132813, 0.08172953796386719, 0.08215142059326172, 0.08178482818603515, 0.08164147186279297, 0.08152371215820313, 0.08254463958740234, 0.08280268859863281, 0.08181350708007812, 0.0843868179321289, 0.08260198211669922, 0.08201522827148437, 0.08252416229248047, 0.08380620574951173, 0.08302796936035156, 0.08336077117919923, 0.08153702545166015, 0.08155033874511719, 0.08133837127685548, 0.08143666839599609, 0.08130457305908204, 0.08163839721679687, 0.08131584167480468, 0.08162611389160156, 0.08187494659423829, 0.08166706848144531, 0.08127487945556641, 0.08141824340820313, 0.0809574432373047, 0.08196505737304688, 0.08151347351074219, 0.08085504150390625, 0.08077311706542968, 0.08110694122314453, 0.08099225616455077, 0.08145305633544922, 0.08161996459960938, 0.08189849853515625, 0.08137728118896484, 0.0814571533203125, 0.08142745971679688, 0.08118988800048828, 0.08118476867675781, 0.08108646392822266, 0.08117657470703125, 0.08215961456298829, 
0.08135679626464844, 0.08158207702636719, 0.08161484527587891, 0.08186061096191406, 0.08153497314453124, 0.0816209945678711, 0.0844062728881836, 0.08227532958984375, 0.08157901000976563, 0.08143462371826173, 0.08165682983398438, 0.08140595245361328, 0.081512451171875, 0.08118886566162109, 0.081438720703125, 0.08120524597167969, 0.08320819091796874, 0.08329727935791016, 0.08167116546630859, 0.08127385711669922, 0.08257126617431641, 0.08174285125732422, 0.08203775787353515, 0.08137522888183593, 0.08154828643798828, 0.08119500732421875, 0.08143257904052735, 0.08108236694335938, 0.08194252777099609, 0.08226099395751953, 0.0831098861694336, 0.08266239929199219, 0.08279859161376953, 0.08119500732421875, 0.08144793701171875, 0.08186675262451172, 0.08166297912597656, 0.08113155364990235, 0.08148067474365234, 0.08123085021972656, 0.08144588470458984, 0.08130355072021485, 0.08142848205566407, 0.08129945373535157, 0.081512451171875, 0.0814039077758789, 0.08142131042480469, 0.08292249298095702, 0.08336691284179687, 0.08215756988525391, 0.08152166748046875, 0.0816732177734375, 0.08342835235595703, 0.08223846435546875, 0.08157491302490234, 0.08081510162353515, 0.08122470092773437, 0.08225177764892579, 0.08091545867919922, 0.08084889221191406, 0.0818493423461914, 0.08095641326904297, 0.08152371215820313, 0.08080486297607421, 0.08163839721679687, 0.08144076538085937, 0.08214527893066406, 0.08191590118408203, 0.08151142120361328, 0.08309452819824219, 0.08319692993164063, 0.0819056625366211, 0.08155340576171875, 0.08353382110595703, 0.08166604614257812, 0.08451481628417969, 0.08328806304931641, 0.08312319946289062, 0.08281497955322266, 0.08338841247558594, 0.08303001403808594, 0.08213094329833984, 0.08231423950195313, 0.08257638549804687, 0.0813096923828125, 0.08156774139404296, 0.08137830352783203, 0.08146636962890624, 0.081438720703125, 0.08128511810302734, 0.0814039077758789, 0.08207667541503906, 0.08240128326416016, 0.08235008239746094, 0.08281497955322266, 0.0829665298461914, 0.08266854095458985, 0.08217190551757812, 0.08155033874511719, 0.08194662475585937, 0.08230194854736328, 0.0822507553100586, 0.08076185607910157, 0.08213504028320312, 0.0826593246459961, 0.0832143325805664, 0.08261939239501953, 0.08265113830566406, 0.0812779541015625, 0.0827514877319336, 0.08282828521728515, 0.08146125030517579, 0.08280883026123047, 0.08272486114501953, 0.081870849609375, 0.08135372924804687, 0.08275251007080078, 0.08233369445800781, 0.0826081314086914, 0.08273101043701171, 0.0833955841064453, 0.08356658935546875, 0.08252006530761719, 0.08169676971435547, 0.08270642852783203, 0.08204390716552734, 0.08209203338623047, 0.08218931579589844, 0.08269312286376954, 0.08168141174316407, 0.08138240051269531, 0.08129535675048828, 0.08139571380615235, 0.080636962890625, 0.08091849517822265, 0.08078131103515625, 0.08114380645751954, 0.08056524658203125, 0.08085708618164063, 0.08125132751464843, 0.08128102111816406, 0.08163839721679687, 0.08229478454589843, 0.08117247772216797, 0.08141414642333984, 0.08219136047363282, 0.08138956451416016, 0.08132096099853516, 0.08207667541503906, 0.08181043243408204, 0.0816527328491211, 0.08135475158691406, 0.08159539031982421, 0.08135884857177735, 0.08135065460205078, 0.08140083312988282, 0.08157593536376953, 0.08140595245361328, 0.0825159683227539, 0.08276787567138671, 0.08326860809326173, 0.08203878021240234, 0.08161587524414063, 0.08117453002929688, 0.08141926574707031, 0.08130457305908204, 0.0816701431274414, 0.08143666839599609, 0.08180735778808594, 0.08127897644042968, 
0.08148786926269531, 0.08136396789550782, 0.0826081314086914, 0.08293068695068359, 0.08214630126953125, 0.08137113952636718, 0.0812943344116211, 0.08139775848388672, 0.08144895935058594, 0.08131378936767578, 0.08155955505371094, 0.08137010955810547, 0.08147763061523437, 0.08123289489746094, 0.0815615997314453, 0.08151347351074219, 0.08372940826416016, 0.08225177764892579, 0.08160460662841797, 0.08220467376708984, 0.08240332794189453, 0.08263475036621094]",tokens/s,12.215120110905511,,,1,64,1,,, -4bit-awq-exllama-v1-flash_attention_2,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,openai-community/gpt2,openai-community/gpt2,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.0,,0.30.1,,,,1.19.2,,,,0.11.1,,,,,,,,,,,,,,,,,,,,,,,,,,,MB,1214.447616,1005.060096,0.0,358.612992,318.913024,s,23,0.16858035135269164,0.007329580493595289,0.0002587299119465544,0.007231008052825927,0.007471584033966065,0.007511299371719361,0.00818978693008423,"[0.008380064010620117, 0.007363743782043457, 0.007418399810791016, 0.007246111869812012, 0.007178239822387696, 0.007166944026947022, 0.007157599925994873, 0.007199552059173584, 0.007136896133422852, 0.007476480007171631, 0.007105247974395752, 0.0071660480499267576, 0.007231008052825927, 0.007161407947540283, 0.007184447765350342, 0.007153855800628662, 0.007515168190002442, 0.007452000141143799, 0.007419456005096436, 0.007367104053497314, 0.007440927982330322, 0.007213119983673096, 0.00744652795791626]",tokens/s,34926.964813838546,kWh,8.452715794226453e-08,4.6316905321826644e-08,1.8026361583691486e-07,3.11107679101006e-07,tokens/kWh,822866220.2738029,MB,1214.447616,1005.060096,0.0,358.612992,328.804864,s,23,9.953621459960937,0.4327661504330842,0.010704234458509567,0.429077880859375,0.44030704345703126,0.44415710449218754,0.4681292211914063,"[0.47477725219726563, 0.4445589294433594, 0.42644979858398435, 0.4242642822265625, 0.42919677734375, 0.42827423095703127, 0.42818402099609376, 0.4242230834960937, 0.42831201171875, 0.429077880859375, 0.4333373718261719, 0.4317017822265625, 0.437157470703125, 0.4248154602050781, 0.42390850830078125, 0.43650335693359377, 0.4387818298339844, 0.4320978088378906, 0.43937249755859376, 0.4277911987304687, 0.42581710815429685, 0.4244781188964844, 0.44054067993164064]",tokens/s,145.5751563216155,kWh,5.077228334491569e-06,2.782076962048813e-06,8.839489976768277e-06,1.6698795273308658e-05,tokens/kWh,3772727.2518095453,,s,1448,10.106253310203536,0.0069794567059416794,0.000952161877616497,0.006773759841918945,0.0070439937591552735,0.007432703852653502,0.014228009071350098,"[0.007984127998352051, 0.007874559879302979, 0.007838719844818116, 0.00810905647277832, 0.007803904056549072, 0.0076360321044921875, 0.007531455993652344, 0.007589888095855713, 0.007426047801971435, 0.007560192108154297, 0.0076063361167907715, 0.007623616218566894, 0.0076574721336364745, 0.007613440036773681, 0.0075335679054260255, 0.007480319976806641, 0.0076472959518432615, 0.007611328125, 0.0075428481101989745, 0.0075550079345703125, 0.00753868818283081, 0.007555071830749512, 
0.007600128173828125, 0.0074065918922424315, 0.007469056129455566, 0.0074403839111328125, 0.007422976016998291, 0.0075304961204528805, 0.007518208026885987, 0.007624703884124756, 0.007686143875122071, 0.007682047843933106, 0.007307263851165771, 0.007299071788787842, 0.007363584041595459, 0.007319551944732666, 0.007326720237731933, 0.0074065918922424315, 0.0074741759300231934, 0.007550975799560547, 0.007709695816040039, 0.007379968166351319, 0.007514111995697022, 0.007488512039184571, 0.007510015964508057, 0.007669760227203369, 0.007569407939910889, 0.007693312168121338, 0.0076687359809875484, 0.0076277761459350585, 0.007525375843048096, 0.007512063980102539, 0.007461887836456299, 0.00764415979385376, 0.007400447845458984, 0.0072427520751953125, 0.007208960056304932, 0.007473152160644531, 0.00729702377319336, 0.00703385591506958, 0.007020544052124023, 0.007089183807373047, 0.014714847564697266, 0.007219200134277344, 0.007350272178649903, 0.007436287879943848, 0.007600128173828125, 0.007280640125274658, 0.007155712127685547, 0.007156735897064209, 0.007203839778900147, 0.007258111953735351, 0.00719974422454834, 0.007029759883880615, 0.007017471790313721, 0.007067647933959961, 0.007037951946258545, 0.0069918718338012695, 0.007010335922241211, 0.006995999813079834, 0.006998976230621338, 0.006979584217071533, 0.0070348801612854, 0.007015423774719238, 0.00698367977142334, 0.006958079814910889, 0.006994944095611572, 0.00698367977142334, 0.006978559970855713, 0.006947840213775635, 0.0070348801612854, 0.006961152076721191, 0.007020544052124023, 0.0069683518409729, 0.007003104209899903, 0.007001088142395019, 0.006972415924072266, 0.006957056045532227, 0.007001088142395019, 0.006966271877288818, 0.006965248107910156, 0.007002111911773682, 0.0069816322326660156, 0.007114751815795899, 0.007004159927368164, 0.007090176105499267, 0.007002111911773682, 0.006996992111206054, 0.007081984043121338, 0.007012351989746094, 0.006987775802612305, 0.006988800048828125, 0.0069928960800170895, 0.007177216053009033, 0.007015423774719238, 0.006977536201477051, 0.007003136157989502, 0.007126016139984131, 0.007016448020935059, 0.006986752033233643, 0.006990848064422607, 0.0069632000923156735, 0.0070041918754577635, 0.007000031948089599, 0.007019519805908203, 0.014730239868164062, 0.007007232189178467, 0.0069550080299377445, 0.007005216121673584, 0.0070368962287902835, 0.006977536201477051, 0.006851583957672119, 0.006823935985565186, 0.006765567779541016, 0.006767615795135498, 0.006763519763946534, 0.006762495994567871, 0.0068055038452148435, 0.006909952163696289, 0.006785024166107178, 0.00677785587310791, 0.006772736072540283, 0.0067420158386230465, 0.006738944053649902, 0.006725632190704346, 0.0067010560035705566, 0.006772736072540283, 0.006744063854217529, 0.006779903888702392, 0.006756351947784424, 0.006751232147216797, 0.006719488143920899, 0.006737919807434082, 0.0067758078575134275, 0.006755328178405762, 0.006710271835327148, 0.006716415882110595, 0.006723584175109864, 0.006725632190704346, 0.00672051191329956, 0.0067123198509216305, 0.006694911956787109, 0.006738944053649902, 0.006696959972381592, 0.00672051191329956, 0.006724607944488525, 0.006721536159515381, 0.00674508810043335, 0.006755328178405762, 0.006785024166107178, 0.006756351947784424, 0.006723584175109864, 0.006751232147216797, 0.0067573761940002445, 0.006761472225189209, 0.006666240215301514, 0.006661119937896728, 0.006723584175109864, 0.006661119937896728, 0.006654975891113281, 0.0066375679969787596, 0.006723584175109864, 0.006718463897705078, 
0.006703104019165039, 0.006754303932189941, 0.006707200050354004, 0.006717440128326416, 0.006744063854217529, 0.014316543579101563, 0.0067348480224609375, 0.006750207901000976, 0.0067870721817016606, 0.006694911956787109, 0.006708223819732666, 0.0067010560035705566, 0.00672051191329956, 0.0067276802062988285, 0.0066979842185974124, 0.006687744140625, 0.006713344097137451, 0.006761472225189209, 0.006749184131622315, 0.0067645440101623535, 0.006747136116027832, 0.006710271835327148, 0.006718463897705078, 0.006700032234191895, 0.006749184131622315, 0.006680575847625733, 0.006717440128326416, 0.00667852783203125, 0.006739967823028564, 0.006696959972381592, 0.0067010560035705566, 0.006673408031463623, 0.006703104019165039, 0.006800384044647217, 0.00672051191329956, 0.006776832103729248, 0.006722559928894043, 0.006737919807434082, 0.006756351947784424, 0.0067717118263244626, 0.006748159885406494, 0.006724607944488525, 0.00672870397567749, 0.006800384044647217, 0.006699007987976074, 0.0067276802062988285, 0.006737919807434082, 0.0067041277885437015, 0.006756351947784424, 0.006843391895294189, 0.006760447978973389, 0.006755328178405762, 0.006778880119323731, 0.006690815925598144, 0.006692863941192627, 0.006652927875518798, 0.006653952121734619, 0.006666240215301514, 0.006660096168518067, 0.0066375679969787596, 0.006724607944488525, 0.00674508810043335, 0.006793216228485107, 0.006758399963378906, 0.006758399963378906, 0.006765567779541016, 0.006729728221893311, 0.006829055786132812, 0.014258175849914552, 0.006754303932189941, 0.006767615795135498, 0.006770688056945801, 0.006714367866516113, 0.006759424209594727, 0.006788095951080322, 0.006724607944488525, 0.006732800006866455, 0.006737919807434082, 0.006708223819732666, 0.006724607944488525, 0.006731776237487793, 0.0067010560035705566, 0.0067420158386230465, 0.0067717118263244626, 0.006756351947784424, 0.00674508810043335, 0.0067358717918396, 0.00672870397567749, 0.006722623825073242, 0.006734784126281738, 0.006726655960083008, 0.006713344097137451, 0.00687718391418457, 0.006709248065948486, 0.0067051520347595215, 0.006795263767242431, 0.006717440128326416, 0.00669593620300293, 0.00674508810043335, 0.0067338237762451176, 0.00672051191329956, 0.0067758078575134275, 0.006737919807434082, 0.006726655960083008, 0.006694911956787109, 0.0067358717918396, 0.006706175804138184, 0.006700032234191895, 0.006766592025756836, 0.0067348480224609375, 0.0067348480224609375, 0.006829055786132812, 0.0067276802062988285, 0.006977536201477051, 0.006985727787017822, 0.007014400005340577, 0.0069816322326660156, 0.006931456089019775, 0.006980607986450196, 0.006937600135803222, 0.006938623905181885, 0.0069324798583984375, 0.006961152076721191, 0.006959104061126709, 0.006976511955261231, 0.0069847040176391605, 0.006971392154693603, 0.0069550080299377445, 0.006982656002044678, 0.007070720195770264, 0.007047167778015137, 0.014191616058349609, 0.0067123198509216305, 0.006912000179290771, 0.006758399963378906, 0.0068055038452148435, 0.006760447978973389, 0.006781951904296875, 0.006699007987976074, 0.0068280320167541505, 0.006811647891998291, 0.00673689603805542, 0.006723584175109864, 0.006774784088134766, 0.006723584175109864, 0.006739967823028564, 0.0067358717918396, 0.006739967823028564, 0.006717440128326416, 0.006739967823028564, 0.0067051520347595215, 0.006780928134918213, 0.006706175804138184, 0.006874112129211426, 0.006763519763946534, 0.006729728221893311, 0.006726655960083008, 0.006752255916595459, 0.0067010560035705566, 0.006756351947784424, 0.006718463897705078, 
0.006817791938781738, 0.007201791763305664, 0.006953983783721924, 0.007018496036529541, 0.006915071964263916, 0.006966271877288818, 0.006953983783721924, 0.006956031799316406, 0.006920191764831543, 0.00694374418258667, 0.00709939193725586, 0.006912000179290771, 0.006710271835327148, 0.006819839954376221, 0.006730751991271973, 0.006739967823028564, 0.0067276802062988285, 0.006729728221893311, 0.006692863941192627, 0.0067051520347595215, 0.0067358717918396, 0.006722559928894043, 0.006740992069244385, 0.006744063854217529, 0.006699007987976074, 0.006892543792724609, 0.006709248065948486, 0.006807551860809326, 0.0067983360290527345, 0.006761472225189209, 0.006715392112731934, 0.00672051191329956, 0.006797311782836914, 0.014218239784240723, 0.0067358717918396, 0.0069621758460998535, 0.007168000221252442, 0.007048192024230957, 0.007046144008636474, 0.006990848064422607, 0.006974463939666748, 0.006982656002044678, 0.006985727787017822, 0.006969344139099121, 0.006968319892883301, 0.006947840213775635, 0.0068249602317810056, 0.006723584175109864, 0.0067348480224609375, 0.0067276802062988285, 0.006848512172698974, 0.00674508810043335, 0.0067051520347595215, 0.006748223781585693, 0.006714303970336914, 0.0067276802062988285, 0.006804480075836182, 0.006754303932189941, 0.0067758078575134275, 0.006685696125030518, 0.006730751991271973, 0.006726655960083008, 0.0067276802062988285, 0.006749216079711914, 0.006711264133453369, 0.006721536159515381, 0.00672870397567749, 0.006696959972381592, 0.006729728221893311, 0.006738944053649902, 0.0067358717918396, 0.006718463897705078, 0.006738944053649902, 0.00674508810043335, 0.0067010560035705566, 0.00672051191329956, 0.0068055038452148435, 0.0067758078575134275, 0.006834176063537598, 0.00673689603805542, 0.0067051520347595215, 0.006752255916595459, 0.006779903888702392, 0.006749184131622315, 0.006716415882110595, 0.006848512172698974, 0.006749184131622315, 0.00676966381072998, 0.006834176063537598, 0.006716415882110595, 0.006725632190704346, 0.00672054386138916, 0.0067338237762451176, 0.006743008136749267, 0.0067686400413513184, 0.006751232147216797, 0.014236672401428223, 0.006725632190704346, 0.0067348480224609375, 0.006715456008911133, 0.006762432098388672, 0.006743040084838867, 0.006689792156219483, 0.006709248065948486, 0.006738944053649902, 0.006714431762695312, 0.006792128086090088, 0.0067686400413513184, 0.006730751991271973, 0.00672870397567749, 0.006724607944488525, 0.006722559928894043, 0.006778880119323731, 0.006765567779541016, 0.006713344097137451, 0.006707200050354004, 0.006724607944488525, 0.006759424209594727, 0.006701087951660157, 0.006740960121154785, 0.006726687908172608, 0.0067266240119934085, 0.006749184131622315, 0.006730751991271973, 0.006681600093841553, 0.006766592025756836, 0.006635519981384277, 0.006624256134033203, 0.006654975891113281, 0.006649856090545654, 0.006687744140625, 0.006709248065948486, 0.006715392112731934, 0.006750207901000976, 0.006752255916595459, 0.006715392112731934, 0.006703104019165039, 0.006806528091430664, 0.006752255916595459, 0.006755328178405762, 0.006747136116027832, 0.006730751991271973, 0.006707200050354004, 0.006713344097137451, 0.006737919807434082, 0.006721536159515381, 0.0067348480224609375, 0.006743040084838867, 0.006743040084838867, 0.006818816184997558, 0.0067358717918396, 0.006817791938781738, 0.006703104019165039, 0.006765567779541016, 0.006790143966674805, 0.0066938881874084475, 0.006729728221893311, 0.006721536159515381, 0.006699007987976074, 0.01420083236694336, 0.00677785587310791, 
0.006897664070129395, 0.006767615795135498, 0.0068392958641052244, 0.00672051191329956, 0.006792191982269287, 0.0068351998329162595, 0.006717504024505615, 0.006688704013824463, 0.006752255916595459, 0.006723584175109864, 0.0067870721817016606, 0.00678604793548584, 0.0068884482383728025, 0.007054336071014404, 0.006791168212890625, 0.006876160144805908, 0.007169023990631103, 0.006994944095611572, 0.006714367866516113, 0.006750207901000976, 0.006793216228485107, 0.006711296081542969, 0.006751232147216797, 0.00672870397567749, 0.00667955207824707, 0.006749184131622315, 0.006797311782836914, 0.006726655960083008, 0.006689792156219483, 0.006719488143920899, 0.006754303932189941, 0.0067010560035705566, 0.006744063854217529, 0.00672051191329956, 0.006690815925598144, 0.006833151817321777, 0.0067276802062988285, 0.006732800006866455, 0.00674508810043335, 0.006746111869812011, 0.006723584175109864, 0.006754303932189941, 0.00673689603805542, 0.006783008098602295, 0.006755296230316162, 0.006714367866516113, 0.006714367866516113, 0.006867968082427979, 0.006754303932189941, 0.006830143928527832, 0.006731711864471436, 0.006782976150512696, 0.0067717118263244626, 0.006706175804138184, 0.00684441614151001, 0.006978559970855713, 0.006996992111206054, 0.006918144226074219, 0.006971392154693603, 0.0068853759765625, 0.006964223861694336, 0.015036416053771973, 0.007065599918365479, 0.006990848064422607, 0.007023615837097168, 0.006948863983154297, 0.006990848064422607, 0.006958079814910889, 0.007014400005340577, 0.00693452787399292, 0.006964223861694336, 0.0069253120422363285, 0.006949888229370117, 0.006964223861694336, 0.006959104061126709, 0.006947840213775635, 0.006916096210479736, 0.006853631973266602, 0.006752255916595459, 0.006746111869812011, 0.006703104019165039, 0.006756351947784424, 0.006773759841918945, 0.00672051191329956, 0.006751232147216797, 0.00672051191329956, 0.0067123198509216305, 0.006843391895294189, 0.006763519763946534, 0.006758399963378906, 0.006754303932189941, 0.0067358717918396, 0.006692863941192627, 0.006746111869812011, 0.00674508810043335, 0.006715392112731934, 0.00674508810043335, 0.006738944053649902, 0.006743040084838867, 0.006750207901000976, 0.00672979211807251, 0.00669484806060791, 0.006716415882110595, 0.006749184131622315, 0.006749184131622315, 0.006710271835327148, 0.006737919807434082, 0.006803455829620361, 0.0067051520347595215, 0.006708223819732666, 0.006780928134918213, 0.006688767910003662, 0.0067348480224609375, 0.006717440128326416, 0.006715392112731934, 0.006817791938781738, 0.0067420158386230465, 0.0067870721817016606, 0.006800384044647217, 0.006766592025756836, 0.006739999771118164, 0.006708191871643066, 0.006755328178405762, 0.006746111869812011, 0.014143487930297852, 0.006750207901000976, 0.006739967823028564, 0.006726655960083008, 0.006748159885406494, 0.0067246718406677244, 0.006729663848876953, 0.0067717118263244626, 0.006810624122619629, 0.006706175804138184, 0.006749184131622315, 0.00673689603805542, 0.006711296081542969, 0.0067420158386230465, 0.006732800006866455, 0.006719488143920899, 0.0067645440101623535, 0.006759424209594727, 0.006724607944488525, 0.00672870397567749, 0.006749184131622315, 0.006729728221893311, 0.006677504062652588, 0.006738944053649902, 0.006686751842498779, 0.0066969280242919925, 0.006802432060241699, 0.006756351947784424, 0.007007232189178467, 0.007004159927368164, 0.006947840213775635, 0.007006207942962647, 0.006967296123504638, 0.006986752033233643, 0.006968319892883301, 0.0070553598403930665, 0.006969344139099121, 
0.006975488185882568, 0.006986752033233643, 0.0069632000923156735, 0.006961152076721191, 0.006985727787017822, 0.006971392154693603, 0.007068672180175781, 0.006986752033233643, 0.006994944095611572, 0.006972415924072266, 0.006941696166992187, 0.006953983783721924, 0.006989823818206787, 0.006973440170288086, 0.006994944095611572, 0.006929408073425293, 0.006985727787017822, 0.006967296123504638, 0.0070522880554199216, 0.006951935768127441, 0.007003136157989502, 0.006975488185882568, 0.006987775802612305, 0.006959104061126709, 0.00694374418258667, 0.00694271993637085, 0.014244864463806153, 0.006744063854217529, 0.006746111869812011, 0.0067276802062988285, 0.006718463897705078, 0.006841343879699707, 0.006713344097137451, 0.00678604793548584, 0.006906879901885986, 0.006791168212890625, 0.006716415882110595, 0.006849535942077637, 0.006762495994567871, 0.006692863941192627, 0.006747136116027832, 0.006708223819732666, 0.006703104019165039, 0.00679423999786377, 0.006711296081542969, 0.006789120197296142, 0.006779903888702392, 0.00676358413696289, 0.0067491202354431155, 0.006688767910003662, 0.0067420477867126466, 0.00675222396850586, 0.006725632190704346, 0.006752255916595459, 0.0067420158386230465, 0.006717504024505615, 0.006833087921142578, 0.007392255783081054, 0.007624703884124756, 0.007074816226959229, 0.00697654390335083, 0.007215072154998779, 0.006989823818206787, 0.006999040126800537, 0.007142399787902832, 0.006996992111206054, 0.006988800048828125, 0.006978559970855713, 0.006941696166992187, 0.006982656002044678, 0.00693555212020874, 0.006973440170288086, 0.006958079814910889, 0.006959104061126709, 0.006960127830505371, 0.0069959678649902345, 0.0069632000923156735, 0.006716415882110595, 0.006729728221893311, 0.0067062082290649415, 0.006729695796966553, 0.0067348480224609375, 0.006683648109436035, 0.006800384044647217, 0.006818816184997558, 0.006718463897705078, 0.006850560188293457, 0.0067420158386230465, 0.006748159885406494, 0.014536704063415527, 0.006968319892883301, 0.006966271877288818, 0.00695091199874878, 0.00704204797744751, 0.006987775802612305, 0.006970367908477783, 0.006851583957672119, 0.006754303932189941, 0.0068884482383728025, 0.006730751991271973, 0.006762495994567871, 0.006744063854217529, 0.006726655960083008, 0.00675328016281128, 0.006760447978973389, 0.006747136116027832, 0.006714367866516113, 0.006825984001159668, 0.006810624122619629, 0.006713344097137451, 0.006802432060241699, 0.0073820161819458, 0.0069918718338012695, 0.007038976192474365, 0.007009280204772949, 0.006915071964263916, 0.006946815967559815, 0.0069027838706970214, 0.006979584217071533, 0.007037951946258545, 0.0069847040176391605, 0.007095295906066895, 0.006979584217071533, 0.006982656002044678, 0.006957056045532227, 0.006939648151397705, 0.007016448020935059, 0.007017471790313721, 0.006959104061126709, 0.007007232189178467, 0.006953983783721924, 0.006958079814910889, 0.006985727787017822, 0.007005184173583984, 0.00695091199874878, 0.0069959678649902345, 0.006952960014343262, 0.006946815967559815, 0.006927360057830811, 0.006949888229370117, 0.006945792198181152, 0.006974463939666748, 0.007023615837097168, 0.006964223861694336, 0.006966271877288818, 0.006936575889587402, 0.007049215793609619, 0.007016448020935059, 0.006938623905181885, 0.006959104061126709, 0.006936575889587402, 0.006927360057830811, 0.014215167999267577, 0.006761472225189209, 0.006815743923187256, 0.006685696125030518, 0.006874112129211426, 0.006820864200592041, 0.006779903888702392, 0.006732800006866455, 0.006800384044647217, 
0.006703104019165039, 0.00673689603805542, 0.006716415882110595, 0.0066979842185974124, 0.006699007987976074, 0.006696959972381592, 0.006717440128326416, 0.006749184131622315, 0.006707200050354004, 0.00672870397567749, 0.006694911956787109, 0.006708223819732666, 0.0066938881874084475, 0.006688767910003662, 0.006703104019165039, 0.006685696125030518, 0.006864895820617676, 0.006916096210479736, 0.0067983360290527345, 0.006809599876403808, 0.006807551860809326, 0.00673689603805542, 0.006708223819732666, 0.006715392112731934, 0.006714367866516113, 0.00678607988357544, 0.006709216117858886, 0.006750207901000976, 0.0067123198509216305, 0.006722591876983643, 0.006701024055480957, 0.006739007949829102, 0.006764480113983154, 0.006756351947784424, 0.006732800006866455, 0.006713344097137451, 0.006716415882110595, 0.006711296081542969, 0.006681600093841553, 0.006714367866516113, 0.006699007987976074, 0.006694911956787109, 0.006732800006866455, 0.006689792156219483, 0.006703104019165039, 0.0067758078575134275, 0.006743040084838867, 0.006756351947784424, 0.006776832103729248, 0.006760447978973389, 0.006729728221893311, 0.006706175804138184, 0.00673689603805542, 0.00672051191329956, 0.014218239784240723, 0.0067010560035705566, 0.006730751991271973, 0.006715392112731934, 0.006696959972381592, 0.006707200050354004, 0.006708223819732666, 0.006744063854217529, 0.006721536159515381, 0.006673408031463623, 0.006746111869812011, 0.006691840171813965, 0.006708223819732666, 0.006749184131622315, 0.006730751991271973, 0.006725696086883545, 0.006710207939147949, 0.006713344097137451, 0.0067041277885437015, 0.006717440128326416, 0.0067051520347595215, 0.006686719894409179, 0.00669593620300293, 0.006739967823028564, 0.006729728221893311, 0.006884352207183838, 0.0067573761940002445, 0.006692863941192627, 0.006700032234191895, 0.006709248065948486, 0.006714367866516113, 0.006707200050354004, 0.006751232147216797, 0.006714367866516113, 0.0067870721817016606, 0.0067420158386230465, 0.006709248065948486, 0.006677504062652588, 0.0067123198509216305, 0.0067041277885437015, 0.0068351998329162595, 0.006729728221893311, 0.006744063854217529, 0.006694911956787109, 0.006737919807434082, 0.0067348480224609375, 0.00672870397567749, 0.006865920066833496, 0.006730751991271973, 0.0067041277885437015, 0.006690815925598144, 0.0067420158386230465, 0.006709248065948486, 0.006683648109436035, 0.006721536159515381, 0.006699007987976074, 0.006702079772949219, 0.006723584175109864, 0.006755328178405762, 0.0067573761940002445, 0.006694911956787109, 0.006703104019165039, 0.0066826238632202144, 0.014199808120727539, 0.006708223819732666, 0.006726655960083008, 0.006726655960083008, 0.0067051520347595215, 0.006708223819732666, 0.006747136116027832, 0.006862847805023193, 0.00672051191329956, 0.00674508810043335, 0.006713344097137451, 0.006717440128326416, 0.006710271835327148, 0.006726655960083008, 0.006721536159515381, 0.006719488143920899, 0.006707200050354004, 0.006739967823028564, 0.006916096210479736, 0.006788095951080322, 0.006837247848510742, 0.006850560188293457, 0.0070225920677185055, 0.007953407764434815, 0.007171072006225586, 0.006988800048828125, 0.0069632000923156735, 0.007014400005340577, 0.00724889612197876, 0.007015423774719238, 0.006952960014343262, 0.007001088142395019, 0.007005184173583984, 0.006944767951965332, 0.00694374418258667, 0.007057407855987549, 0.006978559970855713, 0.006953983783721924, 0.006904831886291504, 0.006892543792724609, 0.006957056045532227, 0.006884352207183838, 0.006863872051239014, 0.00689356803894043, 
0.006924287796020508, 0.006998015880584717, 0.006892543792724609, 0.006951935768127441, 0.007155712127685547, 0.007081984043121338, 0.006960127830505371, 0.007018496036529541, 0.0070522880554199216, 0.007150591850280762, 0.0070860800743103025, 0.0070522880554199216, 0.00693452787399292, 0.007058432102203369, 0.006945792198181152, 0.006958079814910889, 0.006944767951965332, 0.006970367908477783, 0.006941696166992187, 0.014789631843566894, 0.0069632000923156735, 0.006973440170288086, 0.006958079814910889, 0.0069632000923156735, 0.0069621758460998535, 0.006931456089019775, 0.006985727787017822, 0.006957056045532227, 0.006941696166992187, 0.006929408073425293, 0.006923264026641846, 0.006958079814910889, 0.006957056045532227, 0.006990848064422607, 0.006978559970855713, 0.00698367977142334, 0.006944767951965332, 0.006917119979858399, 0.00695091199874878, 0.006904831886291504, 0.006956031799316406, 0.006967296123504638, 0.006953983783721924, 0.006938623905181885, 0.006903808116912841, 0.006965248107910156, 0.006949888229370117, 0.00693555212020874, 0.006973440170288086, 0.006994944095611572, 0.006939648151397705, 0.0069816322326660156, 0.006960127830505371, 0.006980607986450196, 0.0070563840866088865, 0.006985727787017822, 0.006752287864685059, 0.0067337918281555175, 0.006853695869445801, 0.006733759880065918, 0.006716415882110595, 0.006744063854217529, 0.006700032234191895, 0.006732800006866455, 0.006743040084838867, 0.006779903888702392, 0.007061503887176514, 0.007062528133392334, 0.006999040126800537, 0.006979584217071533, 0.006915071964263916, 0.006968319892883301, 0.00692633581161499, 0.0069283838272094726, 0.006938623905181885, 0.00801689624786377, 0.007638016223907471, 0.00704307222366333, 0.007014400005340577, 0.006941696166992187, 0.0070225920677185055, 0.007076863765716553, 0.014768128395080566, 0.006933504104614257, 0.006961152076721191, 0.006966271877288818, 0.006978559970855713, 0.006912000179290771, 0.0067358717918396, 0.006732800006866455, 0.0066979842185974124, 0.006715392112731934, 0.006730751991271973, 0.006721536159515381, 0.006842368125915528, 0.00672870397567749, 0.006711296081542969, 0.006741055965423584, 0.00674399995803833, 0.0067758078575134275, 0.006959104061126709, 0.006964223861694336, 0.006971392154693603, 0.006915135860443115, 0.006940608024597168, 0.006872064113616944, 0.0069959678649902345, 0.007156735897064209, 0.006993919849395752, 0.006974463939666748, 0.006920191764831543, 0.006889472007751465, 0.00695091199874878, 0.006952960014343262, 0.00695091199874878, 0.006915071964263916, 0.0070830078125, 0.006978559970855713, 0.006952960014343262, 0.0069632000923156735, 0.006980607986450196, 0.007064576148986816, 0.0069918718338012695, 0.006924287796020508, 0.006948863983154297, 0.006966271877288818, 0.006948863983154297, 0.00679423999786377, 0.006752319812774658, 0.006724544048309326, 0.006639616012573242, 0.0066641921997070315, 0.006675456047058105, 0.006716415882110595, 0.006661119937896728, 0.006628352165222168, 0.006649856090545654, 0.006746111869812011, 0.006722559928894043, 0.006972415924072266, 0.006773759841918945, 0.0067686400413513184, 0.006724607944488525, 0.006721536159515381, 0.006733888149261474, 0.014669759750366211, 0.007174143791198731, 0.007038976192474365, 0.006959104061126709, 0.006977536201477051, 0.007017471790313721, 0.006961152076721191, 0.006960127830505371, 0.007027711868286133, 0.00693452787399292, 0.007000063896179199, 0.006951935768127441, 0.006958079814910889, 0.006951935768127441, 0.006952960014343262, 0.006959104061126709, 
0.006951935768127441, 0.006958079814910889, 0.0069816322326660156, 0.006946815967559815, 0.006924287796020508, 0.00698367977142334, 0.006944767951965332, 0.006986752033233643, 0.006969344139099121, 0.006968319892883301, 0.006915103912353515, 0.0069242558479309085, 0.006949888229370117, 0.007111680030822754, 0.007015423774719238, 0.006973440170288086, 0.006959104061126709, 0.007012351989746094, 0.006980607986450196, 0.006960127830505371, 0.00693555212020874, 0.006985727787017822, 0.006936575889587402, 0.006973440170288086, 0.006958079814910889, 0.0069959678649902345, 0.006958079814910889, 0.006933504104614257, 0.007006207942962647, 0.006975552082061767, 0.006967232227325439, 0.006929408073425293, 0.006957056045532227, 0.006936575889587402, 0.006938623905181885, 0.006901792049407959, 0.0069539518356323245, 0.006959104061126709, 0.006945792198181152, 0.006958079814910889, 0.007050240039825439, 0.006960127830505371, 0.00695091199874878, 0.006949888229370117, 0.006945792198181152, 0.006960127830505371, 0.006976511955261231, 0.014671872138977051, 0.00683622407913208, 0.006738944053649902, 0.006726655960083008, 0.006732800006866455, 0.006713344097137451, 0.006724607944488525, 0.006717440128326416, 0.00672051191329956, 0.006718463897705078, 0.006726655960083008, 0.006715392112731934, 0.006681600093841553, 0.006816768169403077, 0.006746111869812011, 0.006716415882110595, 0.006724607944488525, 0.00672051191329956, 0.006711296081542969, 0.006715392112731934, 0.0067123198509216305, 0.006715392112731934, 0.006703104019165039, 0.00672870397567749, 0.0068392958641052244, 0.0067348480224609375, 0.006721536159515381, 0.006716415882110595, 0.006694911956787109, 0.006722559928894043, 0.0067338237762451176, 0.006718463897705078, 0.0066979842185974124, 0.006731776237487793, 0.006715392112731934, 0.006724607944488525, 0.006729728221893311, 0.00672870397567749, 0.0066938881874084475, 0.006746111869812011, 0.006687744140625, 0.006737919807434082, 0.0067010560035705566, 0.0067041277885437015, 0.007002111911773682, 0.006776832103729248, 0.006823935985565186, 0.00672870397567749, 0.0067983360290527345, 0.006721536159515381, 0.006727744102478028, 0.006747072219848633, 0.007356416225433349, 0.006988800048828125, 0.006908927917480469, 0.0069304962158203125, 0.006955967903137207, 0.007030784130096435, 0.00693452787399292, 0.006971392154693603, 0.00694374418258667, 0.007005184173583984, 0.006920191764831543, 0.014754816055297852, 0.00694374418258667, 0.006967296123504638, 0.006953983783721924, 0.007035903930664063, 0.006876160144805908, 0.0067123198509216305, 0.006750207901000976, 0.0067276802062988285, 0.0067276802062988285, 0.006765567779541016, 0.006766592025756836, 0.006710271835327148, 0.006762495994567871, 0.006740992069244385, 0.006776832103729248, 0.006737919807434082, 0.0067276802062988285, 0.006760447978973389, 0.006696959972381592, 0.006744063854217529, 0.006744063854217529, 0.006723584175109864, 0.0068076162338256835, 0.006731711864471436, 0.0067358717918396, 0.006700064182281494, 0.006725599765777588, 0.006718463897705078, 0.006722591876983643, 0.0067337918281555175, 0.0066938881874084475, 0.006730751991271973, 0.006706175804138184, 0.006865920066833496, 0.006724607944488525, 0.006740992069244385, 0.006723584175109864, 0.0067041277885437015, 0.006717440128326416, 0.006737919807434082, 0.006715392112731934, 0.006823935985565186, 0.006739967823028564, 0.006714367866516113, 0.006762495994567871, 0.0067420477867126466, 0.006771679878234863, 0.006731776237487793, 0.006722559928894043, 0.006687744140625, 
0.0067338237762451176, 0.006725632190704346, 0.006715392112731934, 0.006706175804138184, 0.006725632190704346, 0.006715392112731934, 0.006710271835327148, 0.006723584175109864, 0.006709248065948486, 0.006684671878814697, 0.006709248065948486, 0.0067123198509216305, 0.01426643180847168, 0.0067255678176879885, 0.006780928134918213, 0.006709248065948486, 0.006739967823028564, 0.006708223819732666, 0.00673689603805542, 0.006763519763946534, 0.006716415882110595, 0.0067051520347595215, 0.006726655960083008, 0.006707200050354004, 0.006721536159515381, 0.00673689603805542, 0.0067348480224609375, 0.006732800006866455, 0.006763519763946534, 0.0067123198509216305, 0.006706175804138184, 0.006708223819732666, 0.0067010560035705566, 0.00676966381072998, 0.00679423999786377, 0.006707200050354004, 0.006684671878814697, 0.00669593620300293, 0.006684671878814697, 0.006699007987976074, 0.006675456047058105, 0.006808576107025147, 0.006714367866516113, 0.006790143966674805, 0.00673689603805542, 0.0067686400413513184, 0.006692863941192627, 0.006718463897705078, 0.0067123198509216305, 0.006731776237487793, 0.006737919807434082, 0.006729728221893311, 0.006708223819732666, 0.0067420158386230465, 0.0067358717918396, 0.006760447978973389, 0.00672870397567749, 0.006732800006866455, 0.006722559928894043, 0.006713344097137451, 0.006749184131622315, 0.006750207901000976, 0.006699007987976074, 0.0067420158386230465, 0.006740992069244385, 0.006699007987976074, 0.006751232147216797, 0.006732800006866455, 0.006831103801727295, 0.0068055038452148435, 0.006739967823028564, 0.006724607944488525, 0.006737919807434082, 0.006761504173278809, 0.006738912105560302, 0.01487667179107666, 0.006949888229370117, 0.00692633581161499, 0.006870016098022461, 0.007027711868286133, 0.006979584217071533, 0.0069632000923156735, 0.006959104061126709, 0.007003136157989502, 0.007009280204772949, 0.006994976043701172, 0.006966303825378418, 0.007005119800567627, 0.007029759883880615, 0.006985727787017822, 0.007004159927368164, 0.007013376235961914, 0.007021567821502686, 0.0069847040176391605, 0.007036928176879883, 0.007038976192474365, 0.006982656002044678, 0.006987775802612305, 0.006975488185882568, 0.007032832145690918, 0.006986752033233643, 0.006975488185882568, 0.006973440170288086, 0.006967296123504638, 0.006989823818206787, 0.006965248107910156, 0.006952960014343262, 0.006985727787017822, 0.006965248107910156, 0.006945792198181152, 0.006945824146270752, 0.0069508800506591795, 0.007271423816680909, 0.0069928960800170895, 0.006933504104614257, 0.0069621758460998535, 0.0069847040176391605, 0.006964223861694336, 0.006957056045532227, 0.007038976192474365, 0.006969344139099121, 0.0069816322326660156, 0.00694374418258667, 0.006977536201477051, 0.006970367908477783, 0.006953983783721924, 0.006970367908477783, 0.006973440170288086, 0.00693555212020874, 0.007002111911773682, 0.0070553598403930665, 0.0070256638526916505, 0.006982656002044678, 0.006973440170288086, 0.007006207942962647, 0.006969344139099121, 0.0069918718338012695, 0.006994944095611572]",tokens/s,143.27762777705752,,,1,64,1,,, 
-4bit-awq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,Qwen/Qwen1.5-4B,Qwen/Qwen1.5-4B,cuda,0,42,,,True,,,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,qwen2,MB,3147.558912,4836.5568,0.0,4206.886912,4087.771648,s,1,10.1425283203125,10.1425283203125,0.0,10.1425283203125,10.1425283203125,10.1425283203125,10.1425283203125,[10.1425283203125],,kWh,3.949090454028819e-05,2.1628224857267693e-05,6.20478274160341e-05,0.00012316695681358997,,MB,3065.659392,4853.334016,0.0,4366.270464,4273.705984,s,10,6.292238037109376,0.6292238037109374,0.0005730729691974808,0.6291887512207032,0.6298815307617188,0.6301056518554687,0.6302849487304688,"[0.6290366821289063, 0.6295560302734375, 0.62889111328125, 0.6285160522460937, 0.6286094970703126, 0.6298317260742188, 0.6285673217773438, 0.6303297729492188, 0.6295590209960937, 0.6293408203125]",tokens/s,406.8504695629812,kWh,7.438861410763468e-06,4.076124877328219e-06,4.386105939437057e-05,5.5376045682462255e-05,tokens/kWh,4622937.532736757,MB,3069.923328,5014.81472,0.0,4368.367616,4274.363904,s,10,23.436071044921878,2.3436071044921873,0.01991585136213405,2.3516604003906254,2.359374609375,2.3617239257812503,2.3636033789062503,"[2.358797119140625, 2.326142822265625, 2.30267822265625, 2.31655712890625, 2.350553955078125, 2.3588525390625, 2.346890380859375, 2.352766845703125, 2.3640732421875, 2.3587587890625]",tokens/s,26.88163893992412,kWh,2.8202151952637e-05,1.5456609765449937e-05,6.189960854743049e-05,0.00010555837026551745,tokens/kWh,596826.1904909315,,s,630,23.433934898376453,0.037196722060915025,0.0008082766267813201,0.03691059303283691,0.03819233207702637,0.03838208084106445,0.03901681690216065,"[0.03670016098022461, 0.03636838531494141, 0.0364769287109375, 0.03636633682250977, 0.03641756820678711, 0.0360109748840332, 0.03630694580078125, 0.03644825744628906, 0.036083713531494144, 0.03582259368896484, 0.03669606399536133, 0.03688243103027344, 0.03633356857299805, 0.039005184173583986, 0.03859763336181641, 0.03828224182128906, 0.03811942291259766, 0.036468734741210936, 0.03680255889892578, 0.0379576301574707, 0.038844417572021485, 0.03828531265258789, 0.0381921272277832, 0.03764633560180664, 0.03781222534179687, 0.03807846450805664, 0.03626803207397461, 0.03867136001586914, 0.038245376586914064, 0.03786547088623047, 0.037473281860351565, 0.03828121566772461, 0.037966846466064456, 0.03719270324707031, 0.03618099212646484, 0.036348926544189454, 0.036004863739013675, 0.03809382247924804, 0.03674009704589844, 0.03738214492797851, 0.0379156494140625, 0.03788185501098633, 0.037961727142333986, 0.037989376068115234, 0.03721830368041992, 0.03768832015991211, 0.03783168029785156, 0.037966846466064456, 0.03801599884033203, 0.03794739151000977, 0.03760537719726562, 0.03771596908569336, 0.03775692749023438, 0.03808665466308594, 0.038013950347900394, 0.038004737854003906, 0.03802422332763672, 0.03787158584594726, 0.03751116943359375, 0.03761459350585938, 0.037425151824951174, 0.03728179168701172, 0.03751628875732422, 0.03818188858032227, 0.037759998321533206, 
0.03807129669189453, 0.03787161636352539, 0.037814273834228515, 0.03786240005493164, 0.03779379272460937, 0.03789619064331055, 0.03802009582519531, 0.037771263122558595, 0.03768832015991211, 0.038437889099121096, 0.038079486846923825, 0.037776382446289065, 0.036636672973632815, 0.03619839859008789, 0.03668479919433594, 0.03721830368041992, 0.03792793655395508, 0.03657830429077148, 0.03679641723632812, 0.036549633026123046, 0.036795391082763675, 0.036519935607910156, 0.036598785400390625, 0.03680460739135742, 0.03642060852050781, 0.036567039489746093, 0.03676160049438477, 0.03659571075439453, 0.036550655364990234, 0.03688243103027344, 0.036789249420166016, 0.03652608108520508, 0.036860927581787106, 0.03661312103271484, 0.03639910507202149, 0.03682611083984375, 0.03679436874389649, 0.03649228668212891, 0.03683225631713867, 0.03676160049438477, 0.03656294250488281, 0.03645951843261719, 0.03654655838012695, 0.03617996978759765, 0.036634624481201174, 0.03638988876342773, 0.036219905853271485, 0.03655168151855469, 0.036391937255859375, 0.03647180938720703, 0.03652505493164063, 0.03619839859008789, 0.03636838531494141, 0.03706880187988281, 0.03765760040283203, 0.036549633026123046, 0.03695206451416016, 0.036370433807373044, 0.036206592559814454, 0.03644825744628906, 0.03617279815673828, 0.03694182586669922, 0.03624038314819336, 0.03619430541992188, 0.03652608108520508, 0.03659366226196289, 0.03665407943725586, 0.03686809539794922, 0.03618304061889648, 0.03646467208862305, 0.03653731155395508, 0.03632025527954102, 0.036001792907714845, 0.03638784027099609, 0.03650764846801758, 0.03706675338745117, 0.03662131118774414, 0.03693772888183594, 0.037937152862548826, 0.03808768081665039, 0.038029312133789066, 0.03644416046142578, 0.03621785736083984, 0.0364769287109375, 0.03638169479370117, 0.036465663909912106, 0.03627724838256836, 0.03649638366699219, 0.03650969696044922, 0.03586867141723633, 0.036359169006347655, 0.03625267028808594, 0.03662438583374023, 0.03628134536743164, 0.03650252914428711, 0.036149246215820316, 0.035931137084960936, 0.03654348754882813, 0.03644723129272461, 0.03663564682006836, 0.03662438583374023, 0.03662847900390625, 0.03638476943969727, 0.03633561706542969, 0.03646976089477539, 0.03667763137817383, 0.036512767791748044, 0.0362977294921875, 0.03648102569580078, 0.03644825744628906, 0.03639091110229492, 0.03640422439575195, 0.03653836822509766, 0.03659366226196289, 0.03668582534790039, 0.036615169525146485, 0.036531200408935545, 0.036388927459716794, 0.03661920166015625, 0.03659366226196289, 0.036340736389160154, 0.03671244812011719, 0.036631553649902344, 0.03661312103271484, 0.03671654510498047, 0.036482048034667966, 0.03654553604125976, 0.03605299377441406, 0.036531200408935545, 0.03668384170532227, 0.03660486221313477, 0.03661209487915039, 0.03646156692504883, 0.036452350616455076, 0.03637350463867187, 0.036601856231689454, 0.0364400634765625, 0.03642367935180664, 0.036552703857421875, 0.03653529739379883, 0.03649945449829101, 0.036514816284179685, 0.03654348754882813, 0.03647385787963867, 0.03646054458618164, 0.036560897827148435, 0.03634380722045898, 0.036574207305908206, 0.03633561706542969, 0.03643494415283203, 0.03614617538452149, 0.036495361328125, 0.036506622314453126, 0.03655475234985352, 0.036111358642578126, 0.0362608642578125, 0.036512767791748044, 0.036615169525146485, 0.03638886260986328, 0.036431873321533206, 0.03638988876342773, 0.0361420783996582, 0.03647078323364258, 0.03654655838012695, 0.036393985748291016, 0.03660287857055664, 0.03653324890136719, 
0.03641446304321289, 0.03670220947265625, 0.04041318511962891, 0.03859558486938477, 0.03669708633422852, 0.03668377685546875, 0.037884929656982425, 0.03893964767456055, 0.03830374526977539, 0.038267902374267575, 0.03811942291259766, 0.03730944061279297, 0.038150142669677735, 0.03650969696044922, 0.03656806564331055, 0.03666329574584961, 0.036603904724121096, 0.03646771240234375, 0.03655680084228516, 0.03658649444580078, 0.036857856750488284, 0.0363612174987793, 0.0365291519165039, 0.036482048034667966, 0.03654143905639649, 0.03615334320068359, 0.036552703857421875, 0.036367359161376955, 0.03649331283569336, 0.03651891326904297, 0.03637971115112305, 0.03641337585449219, 0.0362342414855957, 0.03648819351196289, 0.037889022827148434, 0.03855052947998047, 0.036618240356445314, 0.036567039489746093, 0.03737395095825195, 0.03816447830200195, 0.03814297485351562, 0.038160385131835936, 0.04084428787231445, 0.03917004776000976, 0.03848191833496094, 0.03885158538818359, 0.038529022216796875, 0.03819417572021484, 0.03837644958496094, 0.03812454223632813, 0.038215679168701173, 0.037628929138183595, 0.03808256149291992, 0.038263809204101565, 0.0365291519165039, 0.03625881576538086, 0.035988479614257815, 0.03712307357788086, 0.03689574432373047, 0.03647488021850586, 0.03655987167358398, 0.03672883224487305, 0.03967795181274414, 0.03847577667236328, 0.03805081558227539, 0.03668787384033203, 0.03710976028442383, 0.037997566223144534, 0.03653324890136719, 0.036705280303955076, 0.036511745452880856, 0.03666124725341797, 0.03629875183105469, 0.037667839050292966, 0.0367053108215332, 0.03669705581665039, 0.03798323059082031, 0.036566017150878906, 0.03663564682006836, 0.03660800170898437, 0.03750297546386719, 0.03799039840698242, 0.03810611343383789, 0.03696537780761719, 0.03693875122070313, 0.03676876831054687, 0.03808051300048828, 0.03787776184082031, 0.03653017425537109, 0.036674560546875, 0.037792766571044925, 0.03797094345092773, 0.03802828979492188, 0.037966846466064456, 0.03792486572265625, 0.037817344665527344, 0.03653529739379883, 0.036334590911865236, 0.037303295135498044, 0.0382033920288086, 0.037921791076660154, 0.03662335968017578, 0.03638169479370117, 0.03747020721435547, 0.03832627105712891, 0.03807743835449219, 0.03797196960449219, 0.036988929748535154, 0.039021568298339845, 0.03834982299804687, 0.038144001007080076, 0.03839590454101562, 0.038091777801513675, 0.03785932922363281, 0.037664768218994144, 0.03824127960205078, 0.03695206451416016, 0.03663564682006836, 0.03632128143310547, 0.03773747253417969, 0.038013950347900394, 0.03797094345092773, 0.037098560333251956, 0.03654751968383789, 0.0365291519165039, 0.038299648284912106, 0.037905406951904294, 0.03779072189331055, 0.03792281723022461, 0.03643084716796875, 0.03834368133544922, 0.03767091369628906, 0.03733913421630859, 0.03751628875732422, 0.03767295837402344, 0.03613798522949219, 0.03715584182739258, 0.03641446304321289, 0.03705558395385742, 0.037529502868652344, 0.036653057098388675, 0.03672371292114258, 0.036947967529296875, 0.038386688232421876, 0.03717529678344727, 0.036528129577636716, 0.037574657440185545, 0.03684864044189453, 0.036291584014892575, 0.0366192626953125, 0.037215232849121094, 0.03790848159790039, 0.03715584182739258, 0.038267902374267575, 0.03811328125, 0.038145023345947264, 0.03794124984741211, 0.03791974258422852, 0.03749683380126953, 0.03800064086914062, 0.037477375030517575, 0.0369766731262207, 0.038143966674804686, 0.03827507019042969, 0.03783270263671875, 0.03695513534545898, 0.036397056579589845, 
0.0366929931640625, 0.03805286407470703, 0.03783475112915039, 0.03617996978759765, 0.03627113723754883, 0.03688854217529297, 0.037495807647705076, 0.03763814544677734, 0.03726335906982422, 0.03802521514892578, 0.038624256134033204, 0.03836108779907227, 0.03811635208129883, 0.03810815811157227, 0.038188030242919925, 0.03733196640014649, 0.037648384094238284, 0.03809382247924804, 0.03708006286621094, 0.03649945449829101, 0.03645747375488281, 0.037141502380371096, 0.0379791374206543, 0.03784499359130859, 0.03632025527954102, 0.03592704010009766, 0.03653324890136719, 0.036378623962402344, 0.03657318496704102, 0.0363612174987793, 0.03648614501953125, 0.03645542526245117, 0.03643084716796875, 0.03650457763671875, 0.036371486663818356, 0.03599766540527344, 0.03631923294067383, 0.03657932662963867, 0.03692544174194336, 0.03661004638671875, 0.038316032409667966, 0.03820236968994141, 0.037579776763916016, 0.03808051300048828, 0.038179840087890625, 0.037820415496826174, 0.03797510528564453, 0.03799033737182617, 0.03812966537475586, 0.03871846389770508, 0.03838873672485352, 0.03804467010498047, 0.037615615844726565, 0.037986305236816405, 0.037917697906494144, 0.03781017684936523, 0.0377968635559082, 0.03743641662597656, 0.03827199935913086, 0.0370063362121582, 0.03784703826904297, 0.03807436752319336, 0.037648384094238284, 0.03778047943115234, 0.036345855712890625, 0.03646976089477539, 0.037012481689453126, 0.03744153594970703, 0.036364288330078126, 0.03632844924926758, 0.036334590911865236, 0.03643801498413086, 0.03643084716796875, 0.03645951843261719, 0.03667148971557617, 0.036738048553466796, 0.036928512573242187, 0.03870515060424805, 0.0379422721862793, 0.03799347305297852, 0.036754432678222655, 0.03656499099731445, 0.03658444976806641, 0.03650048065185547, 0.03657625579833984, 0.037996543884277346, 0.038144001007080076, 0.038042625427246096, 0.03811840057373047, 0.03772415924072266, 0.03700121688842774, 0.03788185501098633, 0.03793817520141601, 0.03778355026245117, 0.03632128143310547, 0.03624755096435547, 0.036357120513916014, 0.036596736907958984, 0.036517887115478515, 0.036675582885742186, 0.03646771240234375, 0.036606975555419925, 0.03778355026245117, 0.03648102569580078, 0.038166526794433595, 0.03801497650146484, 0.03772518539428711, 0.03644723129272461, 0.03733299255371094, 0.03809280014038086, 0.03682304000854492, 0.03740467071533203, 0.03771596908569336, 0.036713470458984376, 0.0381399040222168, 0.03880243301391602, 0.03829248046875, 0.03782451248168945, 0.03822489547729492, 0.03750297546386719, 0.037921791076660154, 0.036977664947509765, 0.038231040954589846, 0.03807027053833008, 0.03793612670898437, 0.03882086563110351, 0.03799961471557617, 0.037408767700195314, 0.03637964630126953, 0.036468734741210936, 0.036528129577636716, 0.03806515121459961, 0.037394432067871096, 0.0372305908203125, 0.03613183975219727, 0.03649126434326172, 0.036160511016845705, 0.03620454406738281, 0.03695206451416016, 0.03789311981201172, 0.03818598556518555, 0.0382105598449707, 0.03827302551269531, 0.038539264678955076, 0.03769548797607422, 0.03815116882324219, 0.03785728073120117, 0.037751808166503906, 0.03631923294067383, 0.03692959976196289, 0.036218814849853516, 0.036397056579589845, 0.03808358383178711, 0.03786444854736328, 0.037884929656982425, 0.037773311614990236, 0.03786649703979492, 0.0380549430847168, 0.039410655975341796, 0.03853823852539062, 0.0378869743347168, 0.03781836700439453, 0.03803955078125, 0.03669913482666016, 0.036490238189697266, 0.036357120513916014, 0.0366827507019043, 
0.0375551986694336, 0.03803238296508789, 0.037359615325927735, 0.037700607299804685, 0.03783987045288086, 0.03822079849243164, 0.0381952018737793, 0.0380313606262207, 0.03789823913574219, 0.03815731048583984, 0.03806003189086914, 0.03791155242919922, 0.03816550445556641, 0.03816960144042969, 0.038553600311279294, 0.03810815811157227, 0.03683020782470703, 0.03662643051147461, 0.03712921524047851, 0.03727872085571289, 0.03680767822265625, 0.0376995849609375, 0.03828736114501953, 0.039344127655029294, 0.03692544174194336, 0.03647385787963867, 0.03667660903930664, 0.03647385787963867, 0.036664321899414064, 0.037766143798828124, 0.03788185501098633, 0.0378603515625, 0.0380211181640625, 0.03776204681396484, 0.03724697494506836, 0.03761971282958984, 0.036808704376220705, 0.03787776184082031, 0.036691967010498046, 0.03689471817016601, 0.0381952018737793, 0.03853004837036133, 0.038577152252197267, 0.03791974258422852, 0.03803647994995117, 0.03810201644897461, 0.0379576301574707, 0.03786547088623047, 0.03640729522705078, 0.03651897430419922, 0.03651168060302734, 0.03648921585083008, 0.03646361541748047, 0.03650252914428711, 0.03632332611083984, 0.03650252914428711, 0.036452350616455076, 0.036359169006347655, 0.036604927062988284, 0.03642777633666992, 0.036555774688720705, 0.03718860626220703, 0.037917697906494144, 0.03808768081665039]",tokens/s,26.884089365787528,,,1,64,1,,, -4bit-awq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,Qwen/Qwen1.5-0.5B,Qwen/Qwen1.5-0.5B,cuda,0,42,,,True,,,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,qwen2,MB,1325.285376,1361.575936,0.0,731.906048,703.86944,s,1,7.779115234375,7.779115234375,0.0,7.779115234375,7.779115234375,7.779115234375,7.779115234375,[7.779115234375],,kWh,1.1786741974303067e-05,6.444096100601005e-06,1.7085847001996335e-05,3.5316685076900406e-05,,MB,1538.588672,1644.691456,0.0,998.244352,942.610432,s,10,0.7398313674926758,0.07398313674926757,0.00013526273691919044,0.07393272018432617,0.07416210250854492,0.07421676292419434,0.07426049125671387,"[0.07414995574951172, 0.07387673950195313, 0.0738927993774414, 0.07395378875732422, 0.07381123352050781, 0.07406301116943359, 0.07391165161132812, 0.07401554870605469, 0.07388521575927734, 0.07427142333984375]",tokens/s,3460.247986883773,kWh,8.738589379340233e-07,4.788318986848077e-07,4.678880377088014e-06,6.031571213706845e-06,tokens/kWh,42443335.39795332,MB,1573.916672,1653.080064,0.0,1006.63296,942.612992,s,10,13.997427734375002,1.3997427734375,0.01709655374208761,1.4020272827148437,1.4183528930664062,1.4191603332519531,1.4198062854003906,"[1.3957142333984376, 1.3733094482421875, 1.37800830078125, 1.41550048828125, 1.414289794921875, 1.4181734619140625, 1.3964873046875, 1.4199677734375, 1.4075672607421874, 
1.37840966796875]",tokens/s,45.00826951603691,kWh,1.7135744638870212e-05,9.390393827364846e-06,3.0215591982915075e-05,5.6741730449150125e-05,tokens/kWh,1110293.9494673733,,s,630,13.99549542808533,0.02221507210807195,0.0005090536556977834,0.022147583961486816,0.02278707275390625,0.023014144039154052,0.02377857000350952,"[0.02244095993041992, 0.02264575958251953, 0.022749183654785156, 0.022578176498413087, 0.022483968734741212, 0.022595584869384764, 0.022090751647949217, 0.022602752685546876, 0.022558719635009765, 0.02226585578918457, 0.02214297676086426, 0.022254592895507814, 0.022311935424804686, 0.022411264419555665, 0.022187007904052734, 0.022391807556152343, 0.022535167694091796, 0.023045120239257814, 0.02245734405517578, 0.02230169677734375, 0.022634496688842775, 0.022200319290161134, 0.022041599273681642, 0.022169599533081053, 0.02226688003540039, 0.022495231628417968, 0.02220134353637695, 0.021767168045043944, 0.021549055099487305, 0.02164735984802246, 0.022288383483886717, 0.0226693115234375, 0.021765119552612306, 0.021569536209106444, 0.02142720031738281, 0.02165760040283203, 0.02164121627807617, 0.02161664009094238, 0.021687295913696288, 0.021644287109375, 0.022339584350585938, 0.02272768020629883, 0.022641664505004884, 0.022383615493774413, 0.02189107131958008, 0.021733375549316408, 0.021924863815307616, 0.02184601593017578, 0.021583871841430666, 0.02183782386779785, 0.021938175201416017, 0.02185215950012207, 0.02192793655395508, 0.02185625648498535, 0.021906431198120118, 0.021902336120605468, 0.021816320419311523, 0.021857280731201172, 0.021987327575683592, 0.02188595199584961, 0.022548479080200197, 0.02264473533630371, 0.022517759323120116, 0.0216494083404541, 0.021588991165161133, 0.02187468719482422, 0.021833728790283204, 0.021924863815307616, 0.021824512481689453, 0.021554176330566405, 0.021615615844726564, 0.02186751937866211, 0.021967872619628907, 0.02188800048828125, 0.021917695999145507, 0.021595136642456055, 0.021497856140136717, 0.021560319900512694, 0.023623680114746092, 0.02287308883666992, 0.022977535247802734, 0.022255615234375, 0.0214968318939209, 0.021865472793579102, 0.021804031372070314, 0.021828607559204103, 0.02183782386779785, 0.02145075225830078, 0.021583871841430666, 0.021807104110717773, 0.021679103851318358, 0.021572608947753907, 0.021772287368774415, 0.02168832015991211, 0.02147123146057129, 0.021529600143432616, 0.02147327995300293, 0.02146406364440918, 0.021396480560302734, 0.021349376678466796, 0.021597183227539063, 0.02145894432067871, 0.021582847595214845, 0.021783552169799804, 0.02182246398925781, 0.021769216537475586, 0.021627904891967774, 0.02146816062927246, 0.02145996856689453, 0.021638143539428712, 0.021540864944458008, 0.021489664077758788, 0.021791744232177734, 0.021710847854614256, 0.022358015060424806, 0.022722560882568358, 0.021791744232177734, 0.021707775115966797, 0.022581247329711913, 0.022429695129394533, 0.021614591598510743, 0.02183782386779785, 0.02166169548034668, 0.02166988754272461, 0.021513216018676756, 0.02153267288208008, 0.021610496520996093, 0.02168934440612793, 0.021525503158569336, 0.021732351303100587, 0.021635072708129883, 0.021786624908447266, 0.021727231979370116, 0.02182963180541992, 0.021779455184936524, 0.02182655906677246, 0.023774208068847655, 0.023780351638793946, 0.02281065559387207, 0.022018016815185545, 0.021818368911743165, 0.021805055618286134, 0.021797887802124022, 0.021823488235473632, 0.021755903244018555, 0.022074367523193358, 0.02248806381225586, 0.021922815322875978, 0.021833728790283204, 
0.02187980842590332, 0.02191360092163086, 0.021893119812011717, 0.021772287368774415, 0.02191360092163086, 0.022108160018920898, 0.021893119812011717, 0.021827583312988282, 0.021599231719970705, 0.02186956787109375, 0.02249625587463379, 0.022397951126098634, 0.02187161636352539, 0.02169036865234375, 0.021607423782348634, 0.021833728790283204, 0.021787647247314454, 0.022500352859497072, 0.02246348762512207, 0.02180607986450195, 0.021574655532836915, 0.021635072708129883, 0.021560319900512694, 0.021818368911743165, 0.021938175201416017, 0.02185318374633789, 0.021547008514404296, 0.021424127578735352, 0.02146201515197754, 0.02150297546386719, 0.02145894432067871, 0.02149990463256836, 0.021549055099487305, 0.021497856140136717, 0.021505023956298826, 0.02147327995300293, 0.02162073516845703, 0.021850112915039063, 0.021622783660888673, 0.02147020721435547, 0.021917695999145507, 0.02187264060974121, 0.0218787841796875, 0.02283622360229492, 0.022768640518188478, 0.022929407119750975, 0.02265190315246582, 0.02251468849182129, 0.023254016876220703, 0.02286079978942871, 0.022541311264038084, 0.022569984436035157, 0.021936128616333008, 0.02200377655029297, 0.02174048042297363, 0.02182246398925781, 0.021544960021972655, 0.021529600143432616, 0.021644287109375, 0.021549055099487305, 0.021569536209106444, 0.02173030471801758, 0.021828607559204103, 0.02171801567077637, 0.02169036865234375, 0.023400447845458985, 0.02308403205871582, 0.02269081687927246, 0.02269491195678711, 0.022544384002685547, 0.022567935943603516, 0.022585344314575196, 0.022553600311279298, 0.022552576065063477, 0.022617088317871094, 0.022968320846557616, 0.024285184860229493, 0.022897663116455077, 0.0227194881439209, 0.022603776931762694, 0.022550527572631835, 0.022618112564086915, 0.022748159408569335, 0.022708223342895507, 0.02263654327392578, 0.022543359756469726, 0.02257516860961914, 0.022454208374023437, 0.022566911697387695, 0.022218751907348632, 0.022335487365722655, 0.022500352859497072, 0.02259660720825195, 0.02302566337585449, 0.022502399444580077, 0.022443008422851563, 0.02264473533630371, 0.022970367431640625, 0.02270207977294922, 0.022647808074951172, 0.02260479927062988, 0.022618112564086915, 0.0224399356842041, 0.0219238395690918, 0.02168115234375, 0.021563392639160156, 0.021800960540771484, 0.02185318374633789, 0.022742015838623047, 0.023026687622070312, 0.022601728439331056, 0.022569023132324218, 0.02257094383239746, 0.022598655700683593, 0.022592512130737305, 0.022443071365356445, 0.022619071960449218, 0.022545408248901368, 0.023189504623413085, 0.02302566337585449, 0.022413312911987306, 0.022565887451171874, 0.022626304626464845, 0.02253926467895508, 0.022647808074951172, 0.02247372817993164, 0.02247987174987793, 0.02255564880371094, 0.02252390480041504, 0.022487039566040038, 0.022558719635009765, 0.02265292739868164, 0.022534143447875975, 0.022437887191772463, 0.022982656478881838, 0.022987775802612305, 0.022594560623168947, 0.022517759323120116, 0.023154687881469727, 0.022724607467651366, 0.022579200744628908, 0.0225218563079834, 0.021788671493530275, 0.02185420799255371, 0.02183475112915039, 0.02186649513244629, 0.022684671401977538, 0.022562816619873048, 0.022603776931762694, 0.02265907287597656, 0.022769664764404295, 0.022609920501708985, 0.022572032928466795, 0.022709247589111328, 0.021989376068115234, 0.021411840438842773, 0.021767168045043944, 0.021794815063476563, 0.021935104370117187, 0.021769216537475586, 0.022174720764160157, 0.022595584869384764, 0.022459392547607423, 0.02254643249511719, 
0.02308095932006836, 0.023157760620117186, 0.021994495391845705, 0.02192076873779297, 0.021792768478393554, 0.021943296432495117, 0.022606847763061523, 0.022557695388793944, 0.02244710350036621, 0.023589887619018556, 0.022409215927124023, 0.02292736053466797, 0.022607872009277344, 0.02273587226867676, 0.022626304626464845, 0.022607872009277344, 0.022115327835083007, 0.021813247680664064, 0.022622207641601562, 0.02255462455749512, 0.02246348762512207, 0.022427648544311524, 0.022336511611938475, 0.02215116882324219, 0.02229248046875, 0.022684671401977538, 0.022684671401977538, 0.022625280380249024, 0.022569984436035157, 0.02262937545776367, 0.022160383224487306, 0.022801408767700194, 0.02259660720825195, 0.022633472442626954, 0.022624256134033204, 0.023088127136230468, 0.022687744140625, 0.02265292739868164, 0.02266316795349121, 0.022563840866088865, 0.022614015579223632, 0.022583295822143554, 0.02221670341491699, 0.022633472442626954, 0.02211327934265137, 0.02204979133605957, 0.02265190315246582, 0.02265395164489746, 0.022730752944946288, 0.022631423950195313, 0.022641664505004884, 0.022147071838378905, 0.02188697624206543, 0.022714368820190428, 0.02229964828491211, 0.02166681671142578, 0.02297043228149414, 0.02283616065979004, 0.022632448196411133, 0.022799360275268556, 0.022847488403320314, 0.02264371109008789, 0.022759424209594727, 0.022541311264038084, 0.022520832061767578, 0.02190438461303711, 0.02189516830444336, 0.022025215148925782, 0.021816320419311523, 0.021823488235473632, 0.022508544921875, 0.022278144836425783, 0.022518783569335937, 0.022615039825439453, 0.022331392288208008, 0.02262937545776367, 0.02228428840637207, 0.022534143447875975, 0.02229145622253418, 0.02185830307006836, 0.02184806442260742, 0.021747711181640626, 0.022311935424804686, 0.021966848373413086, 0.021941247940063476, 0.022996992111206056, 0.023382015228271484, 0.02290995216369629, 0.022418432235717774, 0.02170982360839844, 0.02184806442260742, 0.021797887802124022, 0.021784576416015625, 0.021816320419311523, 0.021966848373413086, 0.02252288055419922, 0.02148454475402832, 0.021728256225585937, 0.021857280731201172, 0.021827583312988282, 0.021902336120605468, 0.021898239135742188, 0.0218470401763916, 0.0218603515625, 0.0219289608001709, 0.02182246398925781, 0.021752832412719726, 0.021796863555908205, 0.02165350341796875, 0.021770240783691407, 0.021777408599853516, 0.02183782386779785, 0.022120447158813478, 0.023162879943847657, 0.02291916847229004, 0.022724607467651366, 0.022572032928466795, 0.02269593620300293, 0.02269491195678711, 0.022494207382202147, 0.0224849910736084, 0.02244710350036621, 0.02351923179626465, 0.02210508728027344, 0.021857280731201172, 0.021801984786987305, 0.021793792724609375, 0.02188390350341797, 0.022137855529785155, 0.022477823257446287, 0.022156288146972656, 0.02184601593017578, 0.021799936294555664, 0.02185420799255371, 0.021800960540771484, 0.021538816452026367, 0.021884927749633788, 0.021794815063476563, 0.021721088409423828, 0.0224849910736084, 0.022932479858398438, 0.022642688751220705, 0.02254745674133301, 0.022611967086791994, 0.023233535766601563, 0.022807552337646485, 0.02269081687927246, 0.022675455093383787, 0.022589439392089843, 0.022526975631713866, 0.022390783309936522, 0.02184601593017578, 0.022408191680908202, 0.022360063552856444, 0.02250649642944336, 0.022591487884521484, 0.022600704193115235, 0.022606847763061523, 0.022565887451171874, 0.02270207977294922, 0.022556671142578123, 0.02288332748413086, 0.02385408020019531, 0.02290176010131836, 0.022585344314575196, 
0.02329702377319336, 0.02248806381225586, 0.022594560623168947, 0.022773759841918945, 0.022574079513549804, 0.023788543701171876, 0.02285670471191406, 0.02248806381225586, 0.02250752067565918, 0.022796287536621093, 0.02255462455749512, 0.02228326416015625, 0.022786048889160155, 0.022540288925170897, 0.022571008682250978, 0.022816768646240236, 0.022631423950195313, 0.02246143913269043, 0.023965696334838867, 0.023037952423095705, 0.022625280380249024, 0.022526975631713866, 0.022466560363769532, 0.022544384002685547, 0.021952512741088868, 0.021739519119262696, 0.02184499168395996, 0.02186751937866211, 0.021816320419311523, 0.0218603515625, 0.021820415496826173, 0.021760000228881835, 0.021752832412719726, 0.021818368911743165, 0.021775360107421874, 0.021735424041748046, 0.021955583572387697, 0.022354944229125977, 0.022916095733642578, 0.022674432754516603, 0.02227609634399414, 0.02206515121459961, 0.022542335510253905, 0.022478847503662108, 0.022594560623168947, 0.022563840866088865, 0.022622207641601562, 0.022994943618774414, 0.02243071937561035, 0.022565887451171874, 0.02252390480041504, 0.02211123275756836, 0.021966848373413086, 0.02246553611755371, 0.022578176498413087, 0.02213478469848633, 0.02203647994995117, 0.021744640350341796, 0.021780479431152345, 0.021808128356933593, 0.023182336807250976, 0.024048639297485352, 0.02311884880065918, 0.022762496948242186, 0.022164480209350586, 0.023000064849853515, 0.02285670471191406, 0.022556671142578123, 0.02265190315246582, 0.02269388771057129, 0.02253004837036133, 0.022598655700683593, 0.022656000137329102, 0.022619136810302733, 0.022651968002319337, 0.02181011199951172, 0.02268671989440918, 0.022682687759399415, 0.022693824768066407, 0.021807104110717773, 0.021769216537475586, 0.02184499168395996, 0.021783552169799804, 0.02225868797302246, 0.021790719985961913, 0.021569536209106444, 0.02148454475402832, 0.023152639389038086, 0.023586816787719726, 0.021984256744384766, 0.021760000228881835, 0.02187161636352539, 0.02182963180541992, 0.022030336380004883, 0.021901311874389647, 0.022425600051879883, 0.022218751907348632, 0.021921791076660157, 0.022076416015625, 0.022418432235717774, 0.023822336196899413, 0.0219238395690918, 0.022197248458862305, 0.021929983139038087, 0.021789695739746092, 0.02188697624206543, 0.021813247680664064, 0.021751808166503905, 0.021782527923583983, 0.021812223434448243, 0.021755903244018555, 0.021719039916992186, 0.021555200576782226, 0.021737472534179687, 0.021753856658935547, 0.021760000228881835, 0.021745664596557617, 0.021734399795532225, 0.021763071060180664, 0.021720064163208007, 0.021788671493530275, 0.021756927490234376, 0.021526527404785157, 0.02162483215332031, 0.021716991424560548, 0.021796863555908205, 0.021809152603149414, 0.02185625648498535, 0.021760000228881835, 0.02187571144104004, 0.021749759674072267, 0.021906431198120118, 0.021765119552612306, 0.021825536727905274, 0.021841920852661133, 0.02169753646850586, 0.02165452766418457, 0.02181427192687988, 0.021773311614990236, 0.021831680297851562, 0.021792768478393554, 0.021840896606445313, 0.021960704803466798, 0.022402048110961914, 0.022148096084594726, 0.021788671493530275, 0.021839872360229492, 0.021584896087646483, 0.021761024475097656, 0.021740543365478517, 0.021782527923583983, 0.021827583312988282]",tokens/s,45.014483641340306,,,1,64,1,,, 
-4bit-awq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,EleutherAI/pythia-1.3b,EleutherAI/pythia-1.3b,cuda,0,42,,,True,,,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,gpt_neox,MB,1573.82656,2057.8304,0.0,1428.160512,1322.516992,s,1,8.09356591796875,8.09356591796875,0.0,8.09356591796875,8.09356591796875,8.09356591796875,8.09356591796875,[8.09356591796875],,kWh,1.579196925695088e-05,8.6202569087742e-06,2.2510295785982404e-05,4.692252195170748e-05,,MB,1622.069248,2082.996224,0.0,1436.54912,1322.072064,s,10,2.372907119750977,0.23729071197509768,0.000131167902568593,0.23726171112060546,0.2373512725830078,0.23750363922119141,0.23762553253173826,"[0.23731475830078125, 0.237656005859375, 0.2372058868408203, 0.23731741333007814, 0.23722618103027343, 0.23716383361816407, 0.23725209045410156, 0.2372980194091797, 0.23720159912109376, 0.23727133178710938]",tokens/s,1078.8454291749345,kWh,2.8067164272933416e-06,1.53732544280315e-06,1.5824069506790704e-05,2.0168111376887196e-05,tokens/kWh,12693305.546367515,MB,1626.394624,2082.996224,0.0,1436.54912,1374.94528,s,10,11.10891943359375,1.1108919433593751,0.008143854214417953,1.1089250488281248,1.1232188598632813,1.1251892028808594,1.126765477294922,"[1.1067989501953126, 1.1007611083984374, 1.1080592041015624, 1.1057093505859374, 1.1145169677734375, 1.1097908935546874, 1.122781005859375, 1.1271595458984376, 1.111818359375, 1.1015240478515624]",tokens/s,56.71118633688696,kWh,1.3516999008887453e-05,7.4073722703443045e-06,2.7304131662405098e-05,4.8228502941636864e-05,tokens/kWh,1306281.4758367823,,s,630,11.105185813903805,0.017627279069688584,0.0003167984387979978,0.017487360000610352,0.018052505111694334,0.018184242820739747,0.018727147674560546,"[0.01746124839782715, 0.018027519226074217, 0.017733631134033204, 0.017921024322509766, 0.0175861759185791, 0.017539072036743163, 0.017921024322509766, 0.01739263916015625, 0.01738444709777832, 0.017917951583862304, 0.017597440719604493, 0.017356800079345702, 0.01742336082458496, 0.01723289680480957, 0.017500160217285156, 0.01740390396118164, 0.017524736404418945, 0.017460224151611328, 0.01765171241760254, 0.017670143127441407, 0.01797427177429199, 0.01741823959350586, 0.017510400772094727, 0.01743155288696289, 0.017385471343994142, 0.01765888023376465, 0.01780633544921875, 0.017771520614624024, 0.01780940818786621, 0.01789030456542969, 0.017451007843017577, 0.017753087997436523, 0.0180316162109375, 0.017966079711914062, 0.01760051155090332, 0.01746124839782715, 0.01747865676879883, 0.01741209602355957, 0.01743155288696289, 0.01748684883117676, 0.017517568588256836, 0.0174704647064209, 0.017525760650634766, 0.01744076728820801, 0.01740492820739746, 0.017523712158203125, 0.017456127166748048, 0.01742438316345215, 0.01745408058166504, 0.01741004753112793, 0.017543167114257813, 0.017364992141723632, 0.01742336082458496, 0.01742540740966797, 0.01745715141296387, 0.017366016387939453, 0.017426431655883787, 0.01743257522583008, 0.017574911117553712, 0.0174653434753418, 0.017941503524780272, 
0.01747148895263672, 0.017476608276367187, 0.017558528900146485, 0.017496063232421876, 0.017385471343994142, 0.017337343215942384, 0.017375232696533204, 0.017345535278320313, 0.01741209602355957, 0.01738444709777832, 0.01741209602355957, 0.01744691276550293, 0.01744076728820801, 0.01741312026977539, 0.017460224151611328, 0.017338367462158204, 0.017314815521240236, 0.017369087219238282, 0.017343488693237305, 0.017320959091186524, 0.01741721534729004, 0.01742438316345215, 0.017344512939453126, 0.017352703094482422, 0.01739366340637207, 0.017589248657226563, 0.017475584030151366, 0.01759436798095703, 0.01741721534729004, 0.01741926383972168, 0.01740595245361328, 0.01743667221069336, 0.01744486427307129, 0.017364992141723632, 0.017353727340698243, 0.01742336082458496, 0.017497087478637697, 0.017580032348632812, 0.01743052864074707, 0.017364992141723632, 0.01762611198425293, 0.01738444709777832, 0.017352703094482422, 0.01749504089355469, 0.01744895935058594, 0.017714176177978515, 0.017467391967773437, 0.01740287971496582, 0.017427455902099608, 0.017390592575073242, 0.01741414451599121, 0.01742233657836914, 0.01737113571166992, 0.01740902328491211, 0.01745305633544922, 0.017331199645996095, 0.018155519485473632, 0.018144256591796876, 0.01807257652282715, 0.017754112243652344, 0.017779712677001954, 0.01740083122253418, 0.017356800079345702, 0.017269760131835937, 0.017468416213989257, 0.017354751586914064, 0.017458175659179686, 0.017305599212646485, 0.01735577583312988, 0.01742540740966797, 0.017406976699829102, 0.017565696716308594, 0.01739468765258789, 0.017324031829833983, 0.017328128814697266, 0.017383424758911133, 0.017358848571777344, 0.01741926383972168, 0.017368064880371094, 0.017367040634155274, 0.01742336082458496, 0.017480703353881837, 0.017508352279663086, 0.017485824584960938, 0.017871871948242187, 0.01803878402709961, 0.018018304824829103, 0.017905664443969727, 0.017913856506347657, 0.01761894416809082, 0.01785856056213379, 0.01802956771850586, 0.01762918472290039, 0.017487871170043946, 0.018519039154052733, 0.01782579231262207, 0.0174704647064209, 0.017926143646240233, 0.017494016647338868, 0.0173885440826416, 0.017649663925170898, 0.019204095840454103, 0.018242559432983398, 0.01800499153137207, 0.01744486427307129, 0.01742438316345215, 0.01740492820739746, 0.017487871170043946, 0.017386495590209963, 0.017512447357177736, 0.01740287971496582, 0.017589248657226563, 0.01770086479187012, 0.017442815780639647, 0.01759334373474121, 0.017406976699829102, 0.01742336082458496, 0.017434623718261717, 0.017456127166748048, 0.017398784637451172, 0.01743052864074707, 0.017406976699829102, 0.017368064880371094, 0.017537023544311522, 0.01737932777404785, 0.017373184204101562, 0.017476608276367187, 0.01740185546875, 0.017902591705322265, 0.017992704391479493, 0.017507328033447265, 0.01740390396118164, 0.01779097557067871, 0.017357824325561523, 0.017386495590209963, 0.017715200424194336, 0.017905664443969727, 0.0174704647064209, 0.01741414451599121, 0.01756979179382324, 0.01747148895263672, 0.01743667221069336, 0.017357824325561523, 0.01739468765258789, 0.017386495590209963, 0.01738751983642578, 0.017551359176635743, 0.017376256942749024, 0.017451007843017577, 0.017375232696533204, 0.017307647705078123, 0.017337343215942384, 0.017596416473388672, 0.01741004753112793, 0.01738751983642578, 0.017390592575073242, 0.01739468765258789, 0.017297407150268555, 0.017376256942749024, 0.017324031829833983, 0.017352703094482422, 0.017320959091186524, 0.017435647964477538, 0.0176629753112793, 
0.018717695236206054, 0.01839308738708496, 0.017971200942993162, 0.017488895416259767, 0.017509376525878906, 0.01820159912109375, 0.017764352798461915, 0.018280448913574218, 0.017957887649536132, 0.017487871170043946, 0.017369087219238282, 0.017382400512695313, 0.017377279281616212, 0.017318912506103516, 0.017337343215942384, 0.017315839767456053, 0.017920000076293945, 0.017544191360473634, 0.017324031829833983, 0.017435647964477538, 0.017708032608032227, 0.017811456680297853, 0.017324031829833983, 0.01743667221069336, 0.017087488174438475, 0.01759846305847168, 0.017085439682006837, 0.017666048049926757, 0.01747865676879883, 0.01774284744262695, 0.017747968673706056, 0.017334272384643554, 0.017368064880371094, 0.01741721534729004, 0.017383424758911133, 0.017456127166748048, 0.017435647964477538, 0.017369087219238282, 0.01763430404663086, 0.01740083122253418, 0.01739673614501953, 0.017415168762207032, 0.017359872817993165, 0.01744691276550293, 0.017443840026855468, 0.017375232696533204, 0.01743667221069336, 0.01760665512084961, 0.018189311981201172, 0.018249727249145507, 0.01807974433898926, 0.018206720352172853, 0.018128896713256838, 0.017950719833374023, 0.017978368759155275, 0.017548288345336914, 0.017970176696777345, 0.017978368759155275, 0.017482751846313475, 0.017682432174682617, 0.01763020706176758, 0.017501184463500977, 0.017358848571777344, 0.017665023803710937, 0.01821388816833496, 0.01804083251953125, 0.017724416732788087, 0.01741414451599121, 0.017773567199707033, 0.01739468765258789, 0.01747865676879883, 0.017712127685546874, 0.017978368759155275, 0.018128896713256838, 0.017937408447265626, 0.018689023971557618, 0.01843814468383789, 0.018139135360717772, 0.017912832260131836, 0.017683456420898438, 0.017572864532470703, 0.017641471862792968, 0.017466367721557616, 0.017663999557495116, 0.017449983596801756, 0.01741312026977539, 0.01744588851928711, 0.01743769645690918, 0.01745408058166504, 0.01742950439453125, 0.018025472640991212, 0.01761177635192871, 0.017957887649536132, 0.01749504089355469, 0.01745305633544922, 0.017451007843017577, 0.017257471084594727, 0.01719193649291992, 0.01787392044067383, 0.017386495590209963, 0.01738035202026367, 0.017368064880371094, 0.01740287971496582, 0.017353727340698243, 0.01741004753112793, 0.017333248138427734, 0.01747148895263672, 0.01744076728820801, 0.017467391967773437, 0.017506303787231444, 0.01740083122253418, 0.017777664184570312, 0.01742336082458496, 0.017374208450317383, 0.01741107177734375, 0.01741926383972168, 0.017449983596801756, 0.017359872817993165, 0.017427455902099608, 0.01741107177734375, 0.017460224151611328, 0.01741619110107422, 0.01739776039123535, 0.017434623718261717, 0.018214912414550782, 0.01868185615539551, 0.01899929618835449, 0.018272256851196288, 0.018074623107910158, 0.018001920700073244, 0.017861631393432616, 0.01779097557067871, 0.018141183853149414, 0.01797324752807617, 0.017942527770996093, 0.01744179153442383, 0.01741107177734375, 0.017864704132080078, 0.017504255294799806, 0.01742131233215332, 0.01741414451599121, 0.01742336082458496, 0.017533952713012696, 0.017466367721557616, 0.017503231048583985, 0.01748684883117676, 0.017526784896850587, 0.017515520095825195, 0.0176629753112793, 0.017484800338745117, 0.01756876754760742, 0.01763737678527832, 0.017588224411010742, 0.018509824752807616, 0.018174976348876954, 0.01842790412902832, 0.01816985511779785, 0.018145280838012694, 0.01822719955444336, 0.018070528030395508, 0.01801318359375, 0.018070528030395508, 0.01817804718017578, 0.01805721664428711, 
0.017724416732788087, 0.017566719055175782, 0.017496063232421876, 0.017687551498413084, 0.017519615173339845, 0.017746944427490235, 0.01817087936401367, 0.01779814338684082, 0.017442815780639647, 0.017456127166748048, 0.017661951065063478, 0.017910783767700195, 0.017472511291503907, 0.017804288864135744, 0.017649663925170898, 0.018149375915527344, 0.018001920700073244, 0.01756876754760742, 0.0173885440826416, 0.017485824584960938, 0.01778278350830078, 0.01742950439453125, 0.017724416732788087, 0.017546239852905272, 0.01803980827331543, 0.017931264877319338, 0.017543167114257813, 0.018036735534667968, 0.01779199981689453, 0.01756979179382324, 0.018882560729980468, 0.017757183074951173, 0.01761689567565918, 0.01883033561706543, 0.017622016906738282, 0.017531904220581054, 0.01740185546875, 0.01742848014831543, 0.017596416473388672, 0.01789030456542969, 0.017466367721557616, 0.01789030456542969, 0.018000896453857423, 0.017513471603393553, 0.017492992401123047, 0.01802649688720703, 0.01743667221069336, 0.017888256072998047, 0.01741209602355957, 0.017508352279663086, 0.018008064270019532, 0.01802239990234375, 0.017682432174682617, 0.017583103179931642, 0.01805619239807129, 0.017969152450561524, 0.017953792572021485, 0.017496063232421876, 0.01738444709777832, 0.017543167114257813, 0.01754521560668945, 0.017532928466796875, 0.017881088256835938, 0.017733631134033204, 0.017571840286254883, 0.018731008529663085, 0.018984960556030273, 0.019187711715698243, 0.01824665641784668, 0.017879039764404296, 0.018126848220825196, 0.01817087936401367, 0.017913856506347657, 0.018685951232910156, 0.01788313674926758, 0.018130943298339842, 0.01821183967590332, 0.018559999465942383, 0.018126848220825196, 0.018052095413208007, 0.01801420783996582, 0.018025472640991212, 0.017501184463500977, 0.017383424758911133, 0.017993728637695314, 0.017555456161499023, 0.01763839912414551, 0.017932287216186525, 0.017458175659179686, 0.01794047927856445, 0.017488895416259767, 0.01787494468688965, 0.018070528030395508, 0.017921024322509766, 0.01809715270996094, 0.018044927597045898, 0.017507328033447265, 0.017580032348632812, 0.01804800033569336, 0.017787904739379884, 0.01775103950500488, 0.018494464874267577, 0.018412544250488282, 0.01802444839477539, 0.01796403121948242, 0.0174335994720459, 0.017879039764404296, 0.017508352279663086, 0.017415168762207032, 0.017761280059814453, 0.017735679626464843, 0.01742233657836914, 0.01744895935058594, 0.017443840026855468, 0.01739776039123535, 0.018068479537963866, 0.018172927856445312, 0.017959936141967774, 0.017364992141723632, 0.01761587142944336, 0.018147327423095702, 0.01745408058166504, 0.01743769645690918, 0.01740595245361328, 0.017406976699829102, 0.01749504089355469, 0.017138687133789063, 0.017522687911987304, 0.017871871948242187, 0.017885183334350584, 0.017839103698730468, 0.01787596893310547, 0.01741004753112793, 0.017434623718261717, 0.01742336082458496, 0.017426431655883787, 0.017458175659179686, 0.01743155288696289, 0.017889280319213868, 0.017438720703125, 0.017375232696533204, 0.017386495590209963, 0.017836032867431642, 0.017526784896850587, 0.01744895935058594, 0.01759539222717285, 0.017552383422851564, 0.018092031478881835, 0.018117631912231445, 0.017908735275268553, 0.018008064270019532, 0.017696767807006835, 0.017481727600097655, 0.01747865676879883, 0.01739673614501953, 0.01777561569213867, 0.017952768325805665, 0.017469440460205078, 0.017408000946044923, 0.017531904220581054, 0.017302528381347656, 0.017288192749023438, 0.01756979179382324, 0.017542144775390626, 
0.017864704132080078, 0.017960960388183594, 0.017529855728149413, 0.017541120529174805, 0.017469440460205078, 0.017993728637695314, 0.018010112762451173, 0.017509376525878906, 0.017955839157104494, 0.018318336486816408, 0.01761689567565918, 0.017467391967773437, 0.017532928466796875, 0.017370111465454103, 0.017613824844360353, 0.017589248657226563, 0.017459199905395507, 0.01744588851928711, 0.01743769645690918, 0.01740185546875, 0.01739366340637207, 0.017438720703125, 0.017672191619873046, 0.01797324752807617, 0.017991680145263672, 0.01760358428955078, 0.01741004753112793, 0.018114559173583983, 0.017605632781982423, 0.01746124839782715, 0.01741209602355957, 0.01741209602355957, 0.017533952713012696, 0.017498111724853514, 0.017547264099121093, 0.01742950439453125, 0.01740287971496582, 0.017435647964477538, 0.017500160217285156, 0.01735577583312988, 0.017498111724853514, 0.01740287971496582, 0.01739468765258789, 0.017375232696533204, 0.017443840026855468, 0.017376256942749024, 0.017359872817993165, 0.017340415954589843, 0.01744486427307129, 0.017378303527832033, 0.0174202880859375, 0.017473535537719728, 0.01746227264404297, 0.01743974494934082, 0.01737215995788574, 0.017447935104370118, 0.017427455902099608, 0.01743974494934082, 0.01742540740966797, 0.01738956832885742, 0.01743769645690918, 0.017364992141723632, 0.01743257522583008, 0.01744588851928711, 0.01742848014831543, 0.017481727600097655, 0.01738956832885742, 0.01742131233215332, 0.017460224151611328, 0.017765375137329103, 0.017482751846313475, 0.01740595245361328, 0.017378303527832033, 0.017434623718261717, 0.01743667221069336, 0.01742848014831543, 0.017496063232421876]",tokens/s,56.73025292483026,,,1,64,1,,, -4bit-awq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,Qwen/Qwen1.5-72B,Qwen/Qwen1.5-72B,cuda,0,42,,,True,,,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run - report = scenario.run(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run - self.run_model_loading_tracking(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking - backend.load() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load - self.load_transformers_model() - File 
""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model - self.load_transformers_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights - self.load_transformers_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained - self.pretrained_model = self.automodel_loader.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4034, in from_pretrained - dispatch_model(model, **device_map_kwargs) - File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 494, in dispatch_model - model.to(device) - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 2905, in to - return super().to(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1174, in to - return self._apply(convert) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply - module._apply(fn) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply - module._apply(fn) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply - module._apply(fn) - [Previous line repeated 2 more times] - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 854, in _apply - self._buffers[key] = fn(buf) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1160, in convert - return t.to( -torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 96.00 MiB. GPU 0 has a total capacity of 22.18 GiB of which 68.50 MiB is free. Process 83102 has 22.11 GiB memory in use. Of the allocated memory 21.86 GiB is allocated by PyTorch, and 10.74 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) - -",qwen2,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,1,64,1,,, -4bit-awq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,EleutherAI/pythia-1.4b,EleutherAI/pythia-1.4b,cuda,0,42,,,True,,,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,gpt_neox,MB,1576.230912,2057.8304,0.0,1428.160512,1322.516992,s,1,8.2200546875,8.2200546875,0.0,8.2200546875,8.2200546875,8.2200546875,8.2200546875,[8.2200546875],,kWh,1.693116930208589e-05,9.263907532129525e-06,2.5121131208005743e-05,5.131620804222116e-05,,MB,1684.938752,2082.996224,0.0,1436.54912,1322.072064,s,10,2.3725580749511717,0.23725580749511715,7.047874751137718e-05,0.2372340774536133,0.2373291305541992,0.23736702651977537,0.23739734329223633,"[0.23732070922851561, 0.23728195190429688, 0.23718377685546874, 0.2371961669921875, 0.23721250915527345, 0.2372208251953125, 0.2373170166015625, 0.23717286682128907, 0.23724732971191406, 0.23740492248535155]",tokens/s,1079.0041462115469,kWh,2.806903211772995e-06,1.5380489617049838e-06,1.6031143315860803e-05,2.037609548933878e-05,tokens/kWh,12563741.671408284,MB,1691.611136,2082.996224,0.0,1436.54912,1374.94528,s,10,11.137457275390624,1.1137457275390623,0.01686088946428159,1.1174526367187498,1.1319287597656251,1.1327123046875,1.133339140625,"[1.0763741455078124, 1.122857177734375, 1.119144775390625, 1.0943671875, 1.131754638671875, 1.133495849609375, 1.1271658935546875, 1.1083621826171874, 1.1081749267578125, 1.115760498046875]",tokens/s,56.56587355823586,kWh,1.2846854184615152e-05,7.03994499054834e-06,2.621680714913561e-05,4.610360632429911e-05,tokens/kWh,1366487.4621054444,,s,630,11.133532173156746,0.01767227329072498,0.0003913211042527055,0.01779916763305664,0.01799884796142578,0.018091520214080812,0.018695976753234866,"[0.016915456771850586, 0.017076223373413087, 0.017120256423950195, 0.017076223373413087, 0.017160192489624023, 0.017063936233520507, 0.01704140853881836, 0.01704140853881836, 0.017069055557250978, 0.017106943130493164, 0.01702604866027832, 0.01700249671936035, 0.01705369567871094, 0.016947200775146484, 0.01703321647644043, 0.017044479370117188, 0.01720524787902832, 0.01706188774108887, 0.017111040115356444, 0.017089536666870117, 0.017143808364868163, 0.017093631744384767, 0.017124351501464845, 0.01718272018432617, 0.01701273536682129, 0.01702604866027832, 0.017022975921630858, 0.017048576354980468, 0.01703321647644043, 0.0170199031829834, 0.017060863494873048, 0.017102848052978514, 0.01705062484741211, 0.0170383358001709, 0.017082368850708008, 0.017159168243408202, 0.017129472732543945, 0.017069055557250978, 0.017081344604492187, 0.017079296112060546, 0.017146879196166993, 0.017114112854003907, 0.017159168243408202, 0.017051647186279297, 0.017117183685302736, 0.017083391189575196, 0.017084415435791016, 0.017056768417358398, 0.017107967376708985, 0.017117183685302736, 0.017116159439086915, 0.017085439682006837, 
0.017098751068115235, 0.017088512420654296, 0.017062911987304686, 0.01703424072265625, 0.017123327255249024, 0.01704243278503418, 0.01704550361633301, 0.017129472732543945, 0.017179647445678712, 0.017097728729248047, 0.017072128295898437, 0.016906240463256835, 0.01704960060119629, 0.01700044822692871, 0.01740595245361328, 0.017838079452514647, 0.01783500862121582, 0.017822719573974608, 0.01781350326538086, 0.017846271514892577, 0.017740800857543947, 0.01783193588256836, 0.017741823196411134, 0.017908735275268553, 0.017699840545654297, 0.017789951324462892, 0.017705984115600586, 0.01780735969543457, 0.018514944076538087, 0.01863884735107422, 0.018520063400268554, 0.01801625633239746, 0.017853439331054686, 0.018058240890502928, 0.018130943298339842, 0.017927167892456054, 0.017689599990844726, 0.017812480926513673, 0.017715200424194336, 0.017811456680297853, 0.017778688430786133, 0.017821695327758787, 0.01774284744262695, 0.01780633544921875, 0.017773567199707033, 0.017727487564086913, 0.01787494468688965, 0.017724416732788087, 0.017777664184570312, 0.017739776611328126, 0.017780736923217775, 0.0178657283782959, 0.017762304306030274, 0.01776742362976074, 0.017819648742675782, 0.01789030456542969, 0.01781760025024414, 0.017872896194458008, 0.01780940818786621, 0.017978368759155275, 0.01778278350830078, 0.017804288864135744, 0.017778688430786133, 0.01784217643737793, 0.017894399642944335, 0.017884159088134767, 0.017854463577270507, 0.017886207580566405, 0.01785651206970215, 0.017933311462402343, 0.01781760025024414, 0.017844224929809572, 0.017872896194458008, 0.017846271514892577, 0.01696767997741699, 0.017085439682006837, 0.017044479370117188, 0.017878015518188475, 0.017943552017211914, 0.01785241508483887, 0.01776639938354492, 0.017762304306030274, 0.01780531120300293, 0.017756160736083985, 0.01781350326538086, 0.01785856056213379, 0.017864704132080078, 0.017747968673706056, 0.01784320068359375, 0.017756160736083985, 0.017743871688842772, 0.01773465538024902, 0.017936384201049805, 0.017913856506347657, 0.01784115219116211, 0.017771520614624024, 0.017745920181274414, 0.017869823455810546, 0.01782476806640625, 0.01782476806640625, 0.017757183074951173, 0.017765375137329103, 0.01784115219116211, 0.01781452751159668, 0.01779814338684082, 0.017819648742675782, 0.017797119140625, 0.017773567199707033, 0.017844224929809572, 0.018083839416503905, 0.017960960388183594, 0.017773567199707033, 0.01779916763305664, 0.017672191619873046, 0.017771520614624024, 0.017717248916625978, 0.01777459144592285, 0.017723392486572266, 0.017719295501708983, 0.01775923156738281, 0.017716224670410157, 0.01775923156738281, 0.017733631134033204, 0.017789951324462892, 0.017710079193115236, 0.017747968673706056, 0.01783500862121582, 0.017754112243652344, 0.017810432434082032, 0.01765990447998047, 0.017810432434082032, 0.01778483200073242, 0.017836032867431642, 0.01779302406311035, 0.017758207321166994, 0.017683456420898438, 0.017640447616577147, 0.01697689628601074, 0.017040384292602538, 0.01724006462097168, 0.01698918342590332, 0.01698406410217285, 0.016963584899902344, 0.017048576354980468, 0.01697177505493164, 0.017031167984008787, 0.01700556755065918, 0.017006591796875, 0.016935935974121095, 0.01700864028930664, 0.017059839248657227, 0.01701888084411621, 0.017006591796875, 0.017894399642944335, 0.018543615341186523, 0.018559999465942383, 0.01784320068359375, 0.017879039764404296, 0.01785241508483887, 0.017862655639648437, 0.01782579231262207, 0.017727487564086913, 0.01775103950500488, 0.017737728118896484, 
0.01866035270690918, 0.018710527420043945, 0.017943552017211914, 0.017747968673706056, 0.01779302406311035, 0.017830911636352538, 0.017795072555541993, 0.01785651206970215, 0.017242111206054688, 0.017125375747680666, 0.017048576354980468, 0.017126399993896483, 0.01716223907470703, 0.01719705581665039, 0.017074176788330078, 0.017142784118652343, 0.017077247619628907, 0.017128448486328125, 0.017302528381347656, 0.01721855926513672, 0.01704652786254883, 0.017096704483032226, 0.017125375747680666, 0.017075199127197266, 0.01703424072265625, 0.017366016387939453, 0.017140735626220704, 0.017119232177734374, 0.017091583251953125, 0.01715814399719238, 0.017107967376708985, 0.017136640548706054, 0.017069055557250978, 0.017160192489624023, 0.017217536926269532, 0.017111040115356444, 0.017509376525878906, 0.017923072814941408, 0.017952768325805665, 0.017960960388183594, 0.01798963165283203, 0.01785036849975586, 0.017969152450561524, 0.01779302406311035, 0.017918975830078124, 0.017952768325805665, 0.017908735275268553, 0.017904640197753906, 0.017933311462402343, 0.017893375396728514, 0.01795686340332031, 0.017872896194458008, 0.017870847702026366, 0.017861631393432616, 0.017879039764404296, 0.017917951583862304, 0.01816268730163574, 0.018069503784179687, 0.017924095153808595, 0.017881088256835938, 0.01796505546569824, 0.017896448135375977, 0.017932287216186525, 0.01787494468688965, 0.017854463577270507, 0.017885183334350584, 0.017904640197753906, 0.017931264877319338, 0.01876479911804199, 0.01820057678222656, 0.017985536575317384, 0.017897472381591797, 0.017942527770996093, 0.01799884796142578, 0.017977344512939454, 0.017898496627807618, 0.018101247787475586, 0.017902591705322265, 0.018010112762451173, 0.018008064270019532, 0.017947647094726564, 0.017999872207641602, 0.017960960388183594, 0.017888256072998047, 0.017979391098022462, 0.018019327163696287, 0.017984512329101563, 0.01800294494628906, 0.0180316162109375, 0.017945600509643556, 0.01804800033569336, 0.017975296020507812, 0.017958911895751953, 0.017949695587158202, 0.017949695587158202, 0.017936384201049805, 0.01799884796142578, 0.01798963165283203, 0.017975296020507812, 0.018931711196899414, 0.018120704650878908, 0.018083839416503905, 0.017963008880615236, 0.01803468894958496, 0.01801523208618164, 0.018085887908935547, 0.017950719833374023, 0.01803878402709961, 0.017945600509643556, 0.01798860740661621, 0.017946624755859376, 0.018318336486816408, 0.017960960388183594, 0.017905664443969727, 0.018177024841308592, 0.018033664703369142, 0.017935359954833984, 0.018060287475585936, 0.01825279998779297, 0.01804595184326172, 0.017892351150512697, 0.017946624755859376, 0.017967103958129883, 0.018096128463745118, 0.017833984375, 0.017951744079589844, 0.01781350326538086, 0.01799884796142578, 0.017927167892456054, 0.017953792572021485, 0.01822105598449707, 0.018249727249145507, 0.01799577522277832, 0.018053119659423827, 0.017947647094726564, 0.017912832260131836, 0.01797427177429199, 0.017957887649536132, 0.01782067108154297, 0.017885183334350584, 0.01802342414855957, 0.017952768325805665, 0.017904640197753906, 0.017915903091430666, 0.017844224929809572, 0.017881088256835938, 0.01789030456542969, 0.017915903091430666, 0.017966079711914062, 0.017803264617919923, 0.017917951583862304, 0.017921024322509766, 0.017954816818237306, 0.017819648742675782, 0.017958911895751953, 0.017928192138671875, 0.01785856056213379, 0.018062335968017578, 0.017736703872680663, 0.017927167892456054, 0.017906688690185548, 0.01782476806640625, 0.01842790412902832, 
0.01804083251953125, 0.01785651206970215, 0.017914880752563478, 0.01781350326538086, 0.01801523208618164, 0.0178657283782959, 0.017838079452514647, 0.017887231826782226, 0.017897472381591797, 0.01781350326538086, 0.018103296279907227, 0.017967103958129883, 0.01782476806640625, 0.01780531120300293, 0.01788313674926758, 0.01780940818786621, 0.017900543212890627, 0.0178657283782959, 0.0178155517578125, 0.017804288864135744, 0.01780940818786621, 0.017811456680297853, 0.01782374382019043, 0.018865152359008788, 0.01908940887451172, 0.018131967544555663, 0.017760255813598632, 0.01779814338684082, 0.01763942337036133, 0.017769472122192383, 0.017760255813598632, 0.01782681655883789, 0.017691648483276368, 0.017761280059814453, 0.017731584548950196, 0.0178155517578125, 0.017880064010620117, 0.017976320266723633, 0.018134016036987305, 0.01789030456542969, 0.017811456680297853, 0.017797119140625, 0.017773567199707033, 0.01779814338684082, 0.017690624237060547, 0.017773567199707033, 0.017756160736083985, 0.01777459144592285, 0.017724416732788087, 0.017952768325805665, 0.01789030456542969, 0.017895423889160156, 0.017717248916625978, 0.0178155517578125, 0.01784217643737793, 0.01786675262451172, 0.017748992919921876, 0.0177838077545166, 0.017718271255493166, 0.01779916763305664, 0.017938432693481447, 0.017764352798461915, 0.016933887481689454, 0.016933887481689454, 0.017043455123901367, 0.017113088607788086, 0.01706188774108887, 0.01703628730773926, 0.01699635124206543, 0.017068031311035157, 0.017111040115356444, 0.017055744171142577, 0.01706188774108887, 0.01702604866027832, 0.017115135192871094, 0.017051647186279297, 0.017091583251953125, 0.01702809524536133, 0.01701273536682129, 0.01699839973449707, 0.017754112243652344, 0.01778892707824707, 0.017861631393432616, 0.01782579231262207, 0.017739776611328126, 0.017730560302734375, 0.01779916763305664, 0.017794048309326172, 0.017724416732788087, 0.017758207321166994, 0.017755136489868165, 0.017778688430786133, 0.017747968673706056, 0.01789952087402344, 0.01785139274597168, 0.017770496368408203, 0.01782681655883789, 0.017888256072998047, 0.01784012794494629, 0.01782681655883789, 0.017821695327758787, 0.017727487564086913, 0.017822719573974608, 0.01780944061279297, 0.01788412857055664, 0.01780121612548828, 0.01780633544921875, 0.01780633544921875, 0.01779097557067871, 0.01743257522583008, 0.018102272033691406, 0.017903615951538086, 0.017827840805053712, 0.017896448135375977, 0.017779712677001954, 0.017745920181274414, 0.01780531120300293, 0.01787494468688965, 0.01781657600402832, 0.017876991271972655, 0.01781350326538086, 0.017755136489868165, 0.017839103698730468, 0.01780121612548828, 0.017739776611328126, 0.016974847793579103, 0.017130495071411133, 0.01702707290649414, 0.01701580810546875, 0.017072128295898437, 0.017107967376708985, 0.017075199127197266, 0.017054719924926756, 0.017089536666870117, 0.01705062484741211, 0.01724825668334961, 0.018791423797607423, 0.017910783767700195, 0.017827840805053712, 0.01783500862121582, 0.017923072814941408, 0.017753087997436523, 0.01779199981689453, 0.017812480926513673, 0.017649663925170898, 0.017122304916381836, 0.017043455123901367, 0.01705369567871094, 0.01704243278503418, 0.01707827186584473, 0.017006591796875, 0.01705779266357422, 0.01700864028930664, 0.01707827186584473, 0.017086463928222655, 0.017074176788330078, 0.017074176788330078, 0.01759539222717285, 0.017760255813598632, 0.01782374382019043, 0.01775923156738281, 0.01780019187927246, 0.017716224670410157, 0.017787904739379884, 0.017687551498413084, 
0.017779712677001954, 0.017731584548950196, 0.017755136489868165, 0.01774284744262695, 0.017771520614624024, 0.017789951324462892, 0.017979391098022462, 0.018552831649780274, 0.01877299118041992, 0.01804697608947754, 0.01786675262451172, 0.01785036849975586, 0.01784832000732422, 0.01778483200073242, 0.017755136489868165, 0.017844224929809572, 0.01780531120300293, 0.01782681655883789, 0.01778892707824707, 0.017731584548950196, 0.01816166305541992, 0.01802956771850586, 0.017773567199707033, 0.017364992141723632, 0.017099775314331055, 0.017104896545410156, 0.017094655990600584, 0.01700966453552246, 0.01698918342590332, 0.01704960060119629, 0.01701171112060547, 0.017382400512695313, 0.01800294494628906, 0.01775103950500488, 0.017712127685546874, 0.017812480926513673, 0.017311744689941407, 0.01717043113708496, 0.017744895935058593, 0.01775103950500488, 0.017735679626464843, 0.017894399642944335, 0.0178155517578125, 0.017812480926513673, 0.017780736923217775, 0.017810432434082032, 0.018587648391723634, 0.01804287910461426, 0.017960960388183594, 0.01781862449645996, 0.01783500862121582, 0.01782374382019043, 0.017718271255493166, 0.01778278350830078, 0.01777459144592285, 0.017901567459106444, 0.017695743560791014, 0.01776742362976074, 0.017715200424194336, 0.017795072555541993, 0.017772544860839845, 0.01782476806640625, 0.01768448066711426, 0.017719295501708983, 0.017863679885864257, 0.01780838394165039, 0.0178155517578125, 0.017780736923217775, 0.017724416732788087, 0.017872896194458008, 0.018051071166992186, 0.017950719833374023, 0.01781862449645996, 0.017771520614624024, 0.01770086479187012, 0.01780940818786621, 0.01780633544921875, 0.017789951324462892, 0.01784320068359375, 0.017836032867431642, 0.017771520614624024, 0.01787596893310547, 0.017718271255493166, 0.017762304306030274, 0.01776742362976074, 0.017768447875976562]",tokens/s,56.585815732310714,,,1,64,1,,, -4bit-awq-exllama-v1-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,facebook/xglm-7.5B,facebook/xglm-7.5B,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File 
""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - self.load_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3704, in from_pretrained - config = cls._autoset_attn_implementation( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1481, in _autoset_attn_implementation - cls._check_and_enable_flash_attn_2( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1572, in _check_and_enable_flash_attn_2 - raise ValueError( -ValueError: XGLMForCausalLM does not support Flash Attention 2.0 yet. Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmpckq6u6ex/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,1,64,1,,, -4bit-awq-exllama-v1-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,huggyllama/llama-30b,huggyllama/llama-30b,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - self.load_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3907, in 
from_pretrained - hf_quantizer.postprocess_model(model) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/base.py"", line 195, in postprocess_model - return self._process_model_after_weight_loading(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/quantizer_awq.py"", line 107, in _process_model_after_weight_loading - model = post_init_awq_exllama_modules(model, self.quantization_config.exllama_config) - File ""/usr/local/lib/python3.10/dist-packages/transformers/integrations/awq.py"", line 462, in post_init_awq_exllama_modules - model = exllama_post_init(model) - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllama.py"", line 144, in exllama_post_init - submodule.post_init() - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllama.py"", line 77, in post_init - self.q4 = exl_ext.make_q4( -NameError: name 'exl_ext' is not defined - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,1,64,1,,, -4bit-awq-exllama-v1-flash_attention_2,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,openai-community/gpt2-large,openai-community/gpt2-large,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.1,,0.30.1,,,,1.19.2,,,,0.11.1,,,,,,,,,,,,,,,,,,,,,,,,,,,MB,1241.440256,2645.03296,0.0,1998.585856,1692.285952,s,10,0.1919048309326172,0.01919048309326172,0.0005813408225469491,0.019010607719421386,0.019647625350952148,0.020231876564025877,0.020699277534484865,"[0.02081612777709961, 0.019026687622070312, 0.018811967849731444, 0.01893507194519043, 0.019049951553344727, 0.018704191207885742, 0.01899452781677246, 0.019173408508300783, 0.018875104904174805, 0.019517791748046874]",tokens/s,13339.945573850004,kWh,2.2137678517830954e-07,1.2130415742105693e-07,6.751076978052327e-07,1.0177886404045992e-06,tokens/kWh,251525699.77420157,MB,1241.735168,2645.03296,0.0,1998.585856,1740.085248,s,10,11.541304443359374,1.1541304443359375,0.013489751503469302,1.149205810546875,1.1732497924804688,1.1755724670410155,1.177430606689453,"[1.1778951416015624, 1.1547576904296875, 1.1516905517578124, 1.172733642578125, 1.1467210693359375, 1.1696463623046875, 1.1408421630859376, 1.139661865234375, 1.142564697265625, 1.144791259765625]",tokens/s,54.586550687733464,kWh,1.3797132453780056e-05,7.558356126426566e-06,2.913202725899442e-05,5.048751583920103e-05,tokens/kWh,1247833.230706979,,s,629,11.692877828598027,0.01858963088807317,0.002323445722503397,0.018134016036987305,0.01885880355834961,0.019136306762695315,0.03729248260498047,"[0.019385343551635743, 0.0192225284576416, 0.018934783935546876, 0.01901055908203125, 0.01903001594543457, 0.018994176864624023, 0.019083263397216797, 0.019182592391967773, 0.01884671974182129, 0.018938880920410156, 0.019106815338134766, 0.019335168838500977, 0.01993011283874512, 0.01969254493713379, 0.019323904037475585, 0.018969663619995115, 0.018957248687744142, 0.01906790351867676, 0.01939455986022949, 0.01918976020812988, 0.018973695755004884, 0.018967552185058592, 0.018592767715454102, 
0.01882931137084961, 0.01986457633972168, 0.019759103775024413, 0.01927577590942383, 0.018947071075439453, 0.018954240798950195, 0.018718751907348632, 0.01868899154663086, 0.018881536483764647, 0.018890752792358398, 0.01879449653625488, 0.018750463485717773, 0.018704383850097657, 0.018181119918823242, 0.018052095413208007, 0.01804902458190918, 0.018082815170288084, 0.01805414390563965, 0.018017280578613282, 0.018066432952880858, 0.01804287910461426, 0.01808076858520508, 0.01808793640136719, 0.018129919052124025, 0.01807155227661133, 0.017999872207641602, 0.018085887908935547, 0.01802444839477539, 0.018131967544555663, 0.018059263229370116, 0.018091007232666014, 0.018020351409912108, 0.018124799728393554, 0.017999872207641602, 0.01805516815185547, 0.018081792831420897, 0.01821183967590332, 0.018096128463745118, 0.018970624923706055, 0.039049217224121094, 0.01886207962036133, 0.018746368408203123, 0.018647039413452148, 0.01864089584350586, 0.018724863052368163, 0.01864396858215332, 0.018092031478881835, 0.01817190361022949, 0.018177024841308592, 0.018156543731689453, 0.018041856765747072, 0.018233343124389647, 0.018233343124389647, 0.018225151062011717, 0.01824358367919922, 0.018241535186767577, 0.018158592224121094, 0.018133056640625, 0.01810220718383789, 0.01814630317687988, 0.018093055725097656, 0.01806540870666504, 0.018025472640991212, 0.01816166305541992, 0.018155519485473632, 0.018131967544555663, 0.01815449523925781, 0.01820159912109375, 0.018340864181518556, 0.018150400161743165, 0.018102272033691406, 0.018126848220825196, 0.018134016036987305, 0.01820364761352539, 0.018164735794067383, 0.01820876884460449, 0.018197504043579102, 0.01820159912109375, 0.01819443130493164, 0.01808896064758301, 0.018144256591796876, 0.018129919052124025, 0.018091007232666014, 0.018099199295043944, 0.018150400161743165, 0.018698272705078126, 0.018225120544433593, 0.018116607666015624, 0.018067455291748045, 0.01820057678222656, 0.018164735794067383, 0.018551807403564453, 0.018906112670898437, 0.018751487731933594, 0.01883033561706543, 0.018760704040527345, 0.018741247177124023, 0.018735103607177735, 0.0186746883392334, 0.018655231475830078, 0.018259967803955078, 0.018106367111206053, 0.037272575378417966, 0.01804287910461426, 0.018233343124389647, 0.01804697608947754, 0.018033664703369142, 0.018062335968017578, 0.018127872467041017, 0.018066432952880858, 0.01813711929321289, 0.018199520111083986, 0.01845452880859375, 0.018223104476928712, 0.01807360076904297, 0.018113536834716795, 0.018190336227416993, 0.01820979118347168, 0.018121728897094725, 0.018137088775634767, 0.018068479537963866, 0.018098175048828127, 0.018137088775634767, 0.018147327423095702, 0.018077695846557617, 0.018140159606933593, 0.018127872467041017, 0.018028543472290038, 0.018081792831420897, 0.01804697608947754, 0.018106367111206053, 0.018052095413208007, 0.018062335968017578, 0.01801523208618164, 0.018132991790771484, 0.018094079971313477, 0.018069503784179687, 0.018031648635864258, 0.018113504409790038, 0.018044927597045898, 0.018437120437622072, 0.01925632095336914, 0.01884774398803711, 0.018707456588745116, 0.018890752792358398, 0.01904640007019043, 0.018824192047119142, 0.018703359603881836, 0.018787328720092773, 0.018679807662963867, 0.018689023971557618, 0.018731008529663085, 0.018735103607177735, 0.018665504455566407, 0.018631647109985352, 0.01864396858215332, 0.018618368148803712, 0.018663423538208008, 0.018375680923461913, 0.01799884796142578, 0.017819648742675782, 0.01783193588256836, 0.01783296012878418, 
0.017921024322509766, 0.01779916763305664, 0.037171199798583986, 0.01801215934753418, 0.01789952087402344, 0.017904640197753906, 0.018226175308227538, 0.01882828712463379, 0.018754560470581053, 0.018746368408203123, 0.018753536224365236, 0.01880985641479492, 0.0188723201751709, 0.01885798454284668, 0.018932735443115235, 0.018494464874267577, 0.018233343124389647, 0.018856960296630858, 0.018811904907226562, 0.01864908790588379, 0.018693119049072265, 0.018749439239501953, 0.018752511978149415, 0.01867263984680176, 0.01923686408996582, 0.019318784713745117, 0.01998028755187988, 0.018965503692626954, 0.018785280227661134, 0.018815999984741212, 0.018700288772583007, 0.01884262466430664, 0.018718719482421875, 0.018868223190307617, 0.018815999984741212, 0.019164159774780275, 0.01888768005371094, 0.018765823364257812, 0.018686975479125977, 0.018481151580810547, 0.018116607666015624, 0.018117631912231445, 0.018100223541259765, 0.018150400161743165, 0.01819545555114746, 0.018114591598510744, 0.018619359970092772, 0.018117631912231445, 0.018189311981201172, 0.01824051284790039, 0.018177024841308592, 0.018135040283203126, 0.01820262336730957, 0.018158624649047852, 0.01811452865600586, 0.018732032775878905, 0.01821696090698242, 0.018974720001220705, 0.01884979248046875, 0.018716672897338867, 0.018817024230957033, 0.01866444778442383, 0.018647039413452148, 0.019155967712402345, 0.019417087554931642, 0.03750092697143555, 0.018118656158447266, 0.01828659248352051, 0.018125823974609375, 0.01805414390563965, 0.018811904907226562, 0.018735103607177735, 0.0184770565032959, 0.018504703521728515, 0.018662399291992187, 0.018761728286743166, 0.018754560470581053, 0.018148351669311523, 0.018076671600341796, 0.018160640716552736, 0.018207744598388673, 0.018172927856445312, 0.018312192916870116, 0.018480127334594726, 0.018217983245849608, 0.018116607666015624, 0.01823641586303711, 0.018152448654174806, 0.018135040283203126, 0.01826304054260254, 0.01798963165283203, 0.01804595184326172, 0.018339839935302735, 0.018109439849853515, 0.018084863662719726, 0.01803264045715332, 0.01802956771850586, 0.018092031478881835, 0.01806438446044922, 0.01800396728515625, 0.018047008514404297, 0.018133983612060547, 0.018128896713256838, 0.018109439849853515, 0.018044927597045898, 0.018116607666015624, 0.018112512588500978, 0.018077695846557617, 0.018052095413208007, 0.018108415603637695, 0.018172927856445312, 0.018084863662719726, 0.018033664703369142, 0.01804595184326172, 0.018159616470336915, 0.018166784286499024, 0.01809715270996094, 0.018106367111206053, 0.018134016036987305, 0.018110464096069336, 0.01809715270996094, 0.018217983245849608, 0.01862451171875, 0.018117631912231445, 0.018137088775634767, 0.018086912155151368, 0.018139135360717772, 0.018082815170288084, 0.037372928619384765, 0.019594240188598632, 0.01887846374511719, 0.018699264526367186, 0.018685951232910156, 0.018745344161987306, 0.0186562557220459, 0.018723840713500976, 0.018711551666259766, 0.018807807922363282, 0.018708480834960937, 0.018749439239501953, 0.018676736831665038, 0.018697216033935548, 0.018767871856689454, 0.018767871856689454, 0.018817024230957033, 0.018683904647827147, 0.018645023345947264, 0.01865622329711914, 0.01864089584350586, 0.018694143295288086, 0.018293760299682618, 0.018084863662719726, 0.018115583419799804, 0.018192384719848635, 0.01815449523925781, 0.018799615859985352, 0.01866035270690918, 0.018714624404907225, 0.01863884735107422, 0.018683904647827147, 0.018699264526367186, 0.0186746883392334, 0.018709503173828124, 
0.01882521629333496, 0.018779136657714843, 0.018700288772583007, 0.01929113578796387, 0.01887027168273926, 0.018775039672851563, 0.018670591354370117, 0.018784255981445314, 0.01879859161376953, 0.01863987159729004, 0.018677759170532226, 0.01880985641479492, 0.01878937530517578, 0.018669567108154296, 0.01821696090698242, 0.018050048828125, 0.018114559173583983, 0.018089984893798827, 0.01817190361022949, 0.018095104217529297, 0.01802444839477539, 0.01803059196472168, 0.018067455291748045, 0.018166784286499024, 0.018106367111206053, 0.018116607666015624, 0.018066432952880858, 0.01820979118347168, 0.03734732818603516, 0.01810534477233887, 0.01814630317687988, 0.01808896064758301, 0.018041856765747072, 0.01805619239807129, 0.018103296279907227, 0.018070528030395508, 0.017984512329101563, 0.018099199295043944, 0.018062335968017578, 0.018096128463745118, 0.01801215934753418, 0.018069503784179687, 0.01802444839477539, 0.018127872467041017, 0.018061311721801757, 0.017991680145263672, 0.01801625633239746, 0.01803468894958496, 0.018074623107910158, 0.01801113510131836, 0.018058240890502928, 0.018307071685791015, 0.01817804718017578, 0.018010112762451173, 0.018112512588500978, 0.018100223541259765, 0.018069503784179687, 0.018035711288452147, 0.01807360076904297, 0.01804800033569336, 0.01803264045715332, 0.01805414390563965, 0.018062335968017578, 0.018094079971313477, 0.018051071166992186, 0.01806540870666504, 0.018123775482177733, 0.017978368759155275, 0.018130943298339842, 0.018027519226074217, 0.018078720092773438, 0.01800294494628906, 0.018018304824829103, 0.01806438446044922, 0.018033695220947266, 0.018548704147338866, 0.018318336486816408, 0.018074623107910158, 0.01804287910461426, 0.018137088775634767, 0.018116607666015624, 0.018299903869628906, 0.018448383331298827, 0.018397184371948243, 0.01822105598449707, 0.018160640716552736, 0.018083839416503905, 0.01819340705871582, 0.018143232345581056, 0.018141183853149414, 0.01808896064758301, 0.037615615844726565, 0.01807360076904297, 0.018068479537963866, 0.018083839416503905, 0.018127872467041017, 0.01803775978088379, 0.018241535186767577, 0.01798963165283203, 0.018557952880859374, 0.018586624145507814, 0.018172927856445312, 0.018067455291748045, 0.01803878402709961, 0.018145280838012694, 0.018100223541259765, 0.018101247787475586, 0.018332672119140626, 0.018173952102661133, 0.01820057678222656, 0.017988639831542967, 0.01809404754638672, 0.018164735794067383, 0.018033664703369142, 0.017838079452514647, 0.017934335708618163, 0.018098175048828127, 0.018100223541259765, 0.018076671600341796, 0.018108415603637695, 0.018108415603637695, 0.018044927597045898, 0.01805721664428711, 0.018168832778930662, 0.018124799728393554, 0.017839103698730468, 0.0178657283782959, 0.01810742378234863, 0.018242528915405273, 0.01802649688720703, 0.018190336227416993, 0.018053119659423827, 0.01803468894958496, 0.017968128204345703, 0.017933311462402343, 0.01803878402709961, 0.018069503784179687, 0.0180316162109375, 0.018095104217529297, 0.01797532844543457, 0.018125823974609375, 0.018138080596923827, 0.01808076858520508, 0.01804287910461426, 0.01801215934753418, 0.01802649688720703, 0.01809715270996094, 0.018018304824829103, 0.01804287910461426, 0.017999872207641602, 0.018067455291748045, 0.018019327163696287, 0.01802137565612793, 0.01806540870666504, 0.03730022430419922, 0.018094079971313477, 0.018059263229370116, 0.01804287910461426, 0.018068479537963866, 0.01806540870666504, 0.01783705520629883, 0.017780736923217775, 0.017881088256835938, 0.018093055725097656, 
0.018077695846557617, 0.018033664703369142, 0.01805721664428711, 0.018053119659423827, 0.018106367111206053, 0.018098175048828127, 0.01809715270996094, 0.018293760299682618, 0.018572288513183592, 0.018217983245849608, 0.018103296279907227, 0.018078720092773438, 0.01804595184326172, 0.01818009567260742, 0.01806540870666504, 0.018141183853149414, 0.018036735534667968, 0.01805721664428711, 0.01779199981689453, 0.017710079193115236, 0.017810432434082032, 0.017822719573974608, 0.01781760025024414, 0.01787392044067383, 0.01781760025024414, 0.017908735275268553, 0.018152448654174806, 0.018028543472290038, 0.01804902458190918, 0.01803980827331543, 0.018033664703369142, 0.018020351409912108, 0.01802137565612793, 0.018041856765747072, 0.018025472640991212, 0.018141183853149414, 0.018959360122680666, 0.019607551574707033, 0.01887948799133301, 0.01866444778442383, 0.01859174346923828, 0.01863680076599121, 0.018182144165039063, 0.01803980827331543, 0.018092031478881835, 0.01807257652282715, 0.018306047439575195, 0.018158592224121094, 0.018062335968017578, 0.018176000595092775, 0.01822105598449707, 0.018059263229370116, 0.018125823974609375, 0.03789209747314453, 0.018148351669311523, 0.01803468894958496, 0.018094079971313477, 0.017999872207641602, 0.018111488342285157, 0.01804902458190918, 0.018020351409912108, 0.018093055725097656, 0.01810534477233887, 0.018127872467041017, 0.018068511962890624, 0.0180664005279541, 0.018096128463745118, 0.01821696090698242, 0.018103296279907227, 0.018053119659423827, 0.018055200576782227, 0.018034656524658202, 0.01801113510131836, 0.018131967544555663, 0.01807360076904297, 0.01802649688720703, 0.01801420783996582, 0.01799679946899414, 0.018027519226074217, 0.01835212707519531, 0.018299903869628906, 0.018121728897094725, 0.018103296279907227, 0.01825484848022461, 0.018123775482177733, 0.01818623924255371, 0.018493440628051756, 0.01862451171875, 0.01944371223449707, 0.01863167953491211, 0.018110464096069336, 0.018092031478881835, 0.018112512588500978, 0.018126848220825196, 0.01802649688720703, 0.018069503784179687, 0.0180316162109375, 0.018068479537963866, 0.018126848220825196, 0.018089984893798827, 0.018099199295043944, 0.018084863662719726, 0.01804697608947754, 0.01818623924255371, 0.01801625633239746, 0.01804697608947754, 0.018050048828125, 0.018163711547851562, 0.01810534477233887, 0.017994752883911135, 0.018119680404663087, 0.01820057678222656, 0.01805619239807129, 0.018568191528320312, 0.018714624404907225, 0.018725887298583984]",tokens/s,53.79342957484891,,,1,64,1,,, -4bit-awq-exllama-v1-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,tiiuae/falcon-180B,tiiuae/falcon-180B,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, 
worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 304, in hf_raise_for_status - response.raise_for_status() - File ""/usr/local/lib/python3.10/dist-packages/requests/models.py"", line 1024, in raise_for_status - raise HTTPError(http_error_msg, response=self) -requests.exceptions.HTTPError: 403 Client Error: Forbidden for url: https://huggingface.co/tiiuae/falcon-180B/resolve/main/config.json - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1722, in _get_metadata_or_catch_error - metadata = get_hf_file_metadata(url=url, proxies=proxies, timeout=etag_timeout, headers=headers) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1645, in get_hf_file_metadata - r = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 372, in _request_wrapper - response = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 396, in _request_wrapper - hf_raise_for_status(response) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 367, in hf_raise_for_status - raise HfHubHTTPError(message, response=response) from e -huggingface_hub.utils._errors.HfHubHTTPError: (Request ID: Root=1-66949950-2a69c88c0d96ccfe10b0664e;78d50d99-e0dd-4404-b05f-d052d8ae4bdd) - -403 Forbidden: Please enable access to public gated repositories in your fine-grained token settings to view this repository.. -Cannot access content at: https://huggingface.co/tiiuae/falcon-180B/resolve/main/config.json. -If you are trying to create or update content,make sure you have a token with the `write` role. - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1221, in hf_hub_download - return _hf_hub_download_to_cache_dir( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1325, in _hf_hub_download_to_cache_dir - _raise_on_head_call_error(head_call_error, force_download, local_files_only) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1826, in _raise_on_head_call_error - raise LocalEntryNotFoundError( -huggingface_hub.utils._errors.LocalEntryNotFoundError: An error happened while trying to locate the file on the Hub and we cannot find the requested files in the local cache. Please check your connection and try again or make sure your Internet connection is on. 
- -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 445, in cached_file - raise EnvironmentError( -OSError: We couldn't connect to 'https://huggingface.co' to load this file, couldn't find it in the cached files and it looks like tiiuae/falcon-180B is not the path to a directory containing a file named config.json. -Checkout your internet connection or see how to run the library in offline mode at 'https://huggingface.co/docs/transformers/installation#offline-mode'. 
- -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,1,64,1,,, -4bit-awq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,Deci/DeciLM-7B,Deci/DeciLM-7B,cuda,0,42,,,True,,,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,deci,MB,4385.292288,5589.434368,0.0,4959.76448,4769.731072,s,1,10.541263671875,10.541263671875,0.0,10.541263671875,10.541263671875,10.541263671875,10.541263671875,[10.541263671875],,kWh,4.41740639263906e-05,2.4179605210214362e-05,8.139867623002672e-05,0.00014975234536663168,,MB,1695.014912,5612.50304,0.0,4966.055936,4251.027456,s,10,12.880404541015626,1.2880404541015626,0.00010681566956238591,1.2880263671874999,1.2881905029296874,1.2882199462890624,1.2882435009765625,"[1.2880411376953125, 1.2879517822265625, 1.2879365234375, 1.2879400634765625, 1.2880115966796875, 1.2879180908203125, 1.2881099853515625, 1.2881839599609375, 1.28806201171875, 1.2882493896484375]",tokens/s,198.75152149515816,kWh,1.522564269770846e-05,8.340867073582643e-06,8.631604127500025e-05,0.00010988255104629136,tokens/kWh,2329760.2536744187,MB,1702.5024,5627.183104,0.0,4980.736,4251.030016,s,10,16.435052856445314,1.6435052856445314,0.006636722169766304,1.6420415649414064,1.6524386108398437,1.6547501892089844,1.6565994519042968,"[1.6387630615234374, 1.64347412109375, 1.6358819580078126, 1.6375252685546875, 1.63692578125, 1.6459522705078125, 1.6519249267578124, 1.6406090087890626, 1.6469346923828125, 1.657061767578125]",tokens/s,38.33270300392942,kWh,1.990120298111132e-05,1.0910124310869577e-05,5.691496219860094e-05,8.772628949058181e-05,tokens/kWh,718142.7638833808,,s,630,16.432270357131966,0.02608296882084438,0.0004455750345238374,0.02592204761505127,0.02663649253845215,0.026884504795074465,0.027765473937988283,"[0.026696704864501954, 0.026045440673828125, 0.025903104782104492, 0.025805824279785155, 0.025806848526000976, 0.025845760345458983, 0.025829376220703124, 0.026183679580688478, 0.025843711853027345, 0.025983999252319336, 0.02572697639465332, 0.0259051513671875, 0.02587648010253906, 0.02592767906188965, 0.025863168716430664, 0.025787391662597657, 0.025870336532592773, 0.02588569641113281, 0.02634137535095215, 0.026480640411376953, 0.025851903915405275, 0.025980928421020507, 0.026013696670532226, 0.02591744041442871, 0.02590003204345703, 0.025981952667236328, 0.02590003204345703, 0.02591436767578125, 0.02591641616821289, 0.025808895111083984, 0.02590822410583496, 0.026021888732910156, 0.026010623931884767, 0.025989120483398437, 0.025969663619995118, 0.026038272857666016, 0.025882623672485353, 0.02590105628967285, 0.025975807189941406, 0.025816064834594726, 0.02588569641113281, 0.02594508743286133, 0.02592051124572754, 0.02589286422729492, 0.026217472076416014, 0.027632640838623046, 0.026714111328125, 0.026186752319335937, 0.025959423065185547, 0.02593075180053711, 0.02597068786621094, 0.02593075180053711, 0.025868288040161135, 0.025883647918701173, 0.02588876724243164, 0.02590924835205078, 0.02590412712097168, 0.026072063446044923, 
0.026500095367431642, 0.02632192039489746, 0.025911296844482422, 0.025873407363891602, 0.025838592529296874, 0.02666700744628906, 0.026043392181396483, 0.025825279235839844, 0.025976831436157227, 0.025627647399902344, 0.02588467216491699, 0.025816064834594726, 0.025800703048706054, 0.025823232650756835, 0.02596147155761719, 0.026247167587280275, 0.025948160171508788, 0.02596147155761719, 0.02590003204345703, 0.025784320831298828, 0.025975807189941406, 0.025995264053344725, 0.025887744903564453, 0.026070016860961914, 0.025976831436157227, 0.025891839981079103, 0.02611404800415039, 0.026088447570800782, 0.02594918441772461, 0.02589286422729492, 0.026452991485595705, 0.026294271469116212, 0.025818111419677735, 0.02612735939025879, 0.02590105628967285, 0.025871360778808594, 0.025838592529296874, 0.025867263793945314, 0.026284032821655274, 0.02609561538696289, 0.025972736358642577, 0.025878528594970703, 0.02671001625061035, 0.02774220848083496, 0.026491903305053712, 0.026027008056640624, 0.025963520050048827, 0.026005504608154296, 0.025832447052001953, 0.02593791961669922, 0.025998336791992187, 0.02591436767578125, 0.025944063186645508, 0.025935871124267578, 0.026263551712036134, 0.0263874568939209, 0.025987071990966795, 0.026550271987915038, 0.02652876853942871, 0.026234880447387695, 0.026437631607055666, 0.026063871383666993, 0.02590924835205078, 0.026821632385253907, 0.02633830451965332, 0.025853952407836913, 0.025963520050048827, 0.025882623672485353, 0.026599424362182617, 0.026104831695556642, 0.025842687606811524, 0.02589593505859375, 0.025859071731567384, 0.025765888214111327, 0.025847808837890625, 0.025853952407836913, 0.025811967849731447, 0.02590617561340332, 0.02777497673034668, 0.0277391357421875, 0.026606592178344726, 0.025975807189941406, 0.02592563247680664, 0.02590208053588867, 0.025819135665893556, 0.025869312286376952, 0.025584640502929686, 0.025556991577148438, 0.025820159912109376, 0.02587648010253906, 0.02615705680847168, 0.02648678398132324, 0.025857023239135742, 0.02591334342956543, 0.02574847984313965, 0.025810943603515626, 0.025845760345458983, 0.02590822410583496, 0.025833471298217774, 0.0263505916595459, 0.026227712631225586, 0.026084352493286132, 0.02577305603027344, 0.02588979148864746, 0.02614886474609375, 0.025903104782104492, 0.026000383377075196, 0.026035200119018553, 0.02587238311767578, 0.02593075180053711, 0.025841663360595703, 0.025831424713134765, 0.025826303482055665, 0.025862144470214843, 0.02574131202697754, 0.025851903915405275, 0.025805824279785155, 0.025870336532592773, 0.025832447052001953, 0.025663488388061522, 0.026084352493286132, 0.025833471298217774, 0.025873407363891602, 0.025816064834594726, 0.02576896095275879, 0.025858047485351563, 0.02554572868347168, 0.02553548812866211, 0.025837568283081053, 0.025811967849731447, 0.025835519790649415, 0.026656768798828126, 0.026735616683959962, 0.027158527374267577, 0.02676121520996094, 0.025969663619995118, 0.025861120223999022, 0.02589695930480957, 0.025761791229248047, 0.025796607971191408, 0.02590105628967285, 0.025874431610107423, 0.02587238311767578, 0.0259102725982666, 0.026072063446044923, 0.025899007797241212, 0.0259051513671875, 0.025894912719726562, 0.02588467216491699, 0.02587648010253906, 0.026418176651000977, 0.02637926483154297, 0.025848831176757812, 0.02596147155761719, 0.0259051513671875, 0.02595020866394043, 0.02586419105529785, 0.02589695930480957, 0.02593791961669922, 0.025873407363891602, 0.02631884765625, 0.025799680709838867, 0.025939968109130858, 0.025891839981079103, 
0.025915391921997072, 0.025823232650756835, 0.025873407363891602, 0.025871360778808594, 0.025854976654052734, 0.026437631607055666, 0.026284032821655274, 0.025734144210815428, 0.02592051124572754, 0.025886720657348632, 0.025993215560913087, 0.025967615127563477, 0.02590617561340332, 0.02587648010253906, 0.025849855422973633, 0.025881599426269532, 0.02592665672302246, 0.025744384765625, 0.02587238311767578, 0.02591436767578125, 0.025835519790649415, 0.025883647918701173, 0.0258570556640625, 0.025927648544311524, 0.025969663619995118, 0.02591948890686035, 0.025815040588378906, 0.02590412712097168, 0.02588876724243164, 0.025816064834594726, 0.026564607620239256, 0.02608332824707031, 0.02592665672302246, 0.025979904174804686, 0.02589593505859375, 0.026594303131103517, 0.026216447830200194, 0.025830400466918944, 0.025851903915405275, 0.02590412712097168, 0.025867263793945314, 0.02590003204345703, 0.025856000900268555, 0.025894912719726562, 0.025862144470214843, 0.025853952407836913, 0.026436607360839845, 0.026215423583984376, 0.025979904174804686, 0.02592051124572754, 0.025874431610107423, 0.025915391921997072, 0.02588057518005371, 0.025881599426269532, 0.025774080276489256, 0.02592767906188965, 0.02592051124572754, 0.02593894386291504, 0.02589593505859375, 0.02591436767578125, 0.02588979148864746, 0.025870336532592773, 0.025849855422973633, 0.02590105628967285, 0.025830400466918944, 0.02592972755432129, 0.025911296844482422, 0.0259051513671875, 0.025935871124267578, 0.0259051513671875, 0.02591641616821289, 0.02593894386291504, 0.02614271926879883, 0.026185728073120116, 0.026223615646362306, 0.027143167495727538, 0.02611712074279785, 0.025862144470214843, 0.025847808837890625, 0.025955327987670897, 0.025862144470214843, 0.025851903915405275, 0.0259102725982666, 0.025791488647460937, 0.02591846466064453, 0.02588467216491699, 0.025854976654052734, 0.025986047744750978, 0.026060800552368164, 0.025963520050048827, 0.02596249580383301, 0.02590412712097168, 0.02591641616821289, 0.026673152923583986, 0.02609152030944824, 0.02588057518005371, 0.025804800033569338, 0.025829376220703124, 0.025869312286376952, 0.026250240325927734, 0.025983999252319336, 0.025799680709838867, 0.025881599426269532, 0.026047487258911133, 0.028063743591308594, 0.027554815292358398, 0.026646528244018555, 0.02587238311767578, 0.025793535232543945, 0.025814016342163085, 0.02615910339355469, 0.026638336181640625, 0.02657177543640137, 0.025948160171508788, 0.025868288040161135, 0.02686566352844238, 0.026202112197875976, 0.02597068786621094, 0.025850879669189454, 0.02588467216491699, 0.02593791961669922, 0.025816064834594726, 0.025831424713134765, 0.025782272338867186, 0.02574131202697754, 0.025786367416381836, 0.025881599426269532, 0.025862144470214843, 0.02609561538696289, 0.025899007797241212, 0.02586419105529785, 0.02591641616821289, 0.02588467216491699, 0.025867263793945314, 0.025964544296264647, 0.025949216842651366, 0.026045408248901367, 0.026208255767822267, 0.02613248062133789, 0.026017791748046876, 0.025871360778808594, 0.02596659278869629, 0.025981952667236328, 0.026191871643066408, 0.026465280532836914, 0.0265031681060791, 0.02617651176452637, 0.025923583984375, 0.025987071990966795, 0.026193920135498046, 0.026257408142089843, 0.02759884834289551, 0.026416128158569335, 0.025968639373779297, 0.02594713592529297, 0.0259420166015625, 0.027388927459716796, 0.027040767669677734, 0.026792959213256837, 0.026636287689208983, 0.026712064743041993, 0.026385408401489258, 0.025804800033569338, 0.02577305603027344, 
0.025809919357299805, 0.027272192001342774, 0.02637926483154297, 0.02592563247680664, 0.025874431610107423, 0.02587648010253906, 0.02595327949523926, 0.02595840072631836, 0.02588057518005371, 0.026307584762573243, 0.026801151275634767, 0.02616524887084961, 0.026008575439453126, 0.02588979148864746, 0.02597068786621094, 0.02591948890686035, 0.025983999252319336, 0.026010623931884767, 0.026003456115722655, 0.02595737648010254, 0.025907199859619142, 0.02593382453918457, 0.025874431610107423, 0.025964544296264647, 0.025903104782104492, 0.02591948890686035, 0.02597478485107422, 0.02591436767578125, 0.02590924835205078, 0.025989120483398437, 0.025967615127563477, 0.02592870330810547, 0.025846784591674804, 0.025983999252319336, 0.02594304084777832, 0.026042367935180662, 0.025988096237182616, 0.02597478485107422, 0.025973760604858398, 0.026589183807373046, 0.027441152572631834, 0.027448320388793947, 0.026064895629882814, 0.02673459243774414, 0.02634854316711426, 0.027219968795776366, 0.02652057647705078, 0.02631679916381836, 0.026659839630126952, 0.026651647567749022, 0.02637107276916504, 0.02608230400085449, 0.02591436767578125, 0.026009599685668947, 0.025874431610107423, 0.027320320129394532, 0.02634854316711426, 0.025863168716430664, 0.02587648010253906, 0.025814016342163085, 0.025820159912109376, 0.025834495544433594, 0.025766912460327147, 0.02574745559692383, 0.025819135665893556, 0.025874431610107423, 0.02633830451965332, 0.026573823928833007, 0.025856000900268555, 0.025849855422973633, 0.02570342445373535, 0.025818111419677735, 0.025794559478759766, 0.025935871124267578, 0.025809919357299805, 0.025866239547729493, 0.025832447052001953, 0.025967615127563477, 0.026049535751342775, 0.02686566352844238, 0.02772377586364746, 0.027892736434936522, 0.02695782470703125, 0.026879999160766603, 0.026056703567504884, 0.0261079044342041, 0.025853952407836913, 0.02576486396789551, 0.025838592529296874, 0.02569113540649414, 0.026021888732910156, 0.026019840240478515, 0.02591436767578125, 0.026009599685668947, 0.025969663619995118, 0.025802751541137696, 0.025887744903564453, 0.02587238311767578, 0.025816064834594726, 0.02576896095275879, 0.02587648010253906, 0.025799680709838867, 0.025805824279785155, 0.025833471298217774, 0.02591948890686035, 0.02592870330810547, 0.025826303482055665, 0.0259051513671875, 0.025903104782104492, 0.02593075180053711, 0.025874431610107423, 0.02609152030944824, 0.025890815734863282, 0.02588569641113281, 0.025853952407836913, 0.02589286422729492, 0.025964544296264647, 0.02595020866394043, 0.02667622375488281, 0.026108928680419922, 0.025990144729614258, 0.026060800552368164, 0.025944063186645508, 0.026042367935180662, 0.026039295196533203, 0.0259420166015625, 0.02596249580383301, 0.025847808837890625, 0.025952255249023438, 0.026100736618041992, 0.026071039199829102, 0.02592051124572754, 0.026809343338012694, 0.027421695709228516, 0.026888191223144533, 0.026874879837036132, 0.02672640037536621, 0.02710323143005371, 0.026755071640014647, 0.027453439712524414, 0.028521472930908204, 0.026403839111328126, 0.026006528854370117, 0.025824256896972656, 0.02591436767578125, 0.025931776046752928, 0.025852928161621092, 0.026063871383666993, 0.026695680618286134, 0.02635775947570801, 0.025836544036865236, 0.025854976654052734, 0.025803775787353517, 0.025848831176757812, 0.025837568283081053, 0.025744384765625, 0.025860095977783205, 0.025808895111083984, 0.02590003204345703, 0.026014720916748047, 0.025907199859619142, 0.0259368953704834, 0.02609561538696289, 0.025991167068481445, 
0.0259420166015625, 0.02575257682800293, 0.02591334342956543, 0.025882623672485353, 0.025812992095947264, 0.02591744041442871, 0.025948160171508788, 0.02590208053588867, 0.025992191314697266, 0.025967615127563477, 0.025968639373779297, 0.02565017509460449, 0.025805824279785155, 0.025899007797241212, 0.025820159912109376, 0.025881599426269532, 0.02593484878540039, 0.026600448608398438, 0.02608639907836914, 0.0257126407623291, 0.025939968109130858, 0.025787391662597657, 0.025769983291625977, 0.02590105628967285, 0.025789440155029295, 0.025785343170166015, 0.02594918441772461, 0.02588979148864746, 0.025804800033569338, 0.025827327728271485, 0.025785343170166015, 0.0259420166015625, 0.026063871383666993, 0.026736640930175783, 0.02612735939025879, 0.025992191314697266, 0.02588057518005371, 0.02592870330810547, 0.02612326431274414, 0.02627276802062988, 0.02608332824707031, 0.025952255249023438, 0.025894912719726562, 0.02587238311767578, 0.02610585594177246, 0.02670796775817871, 0.026059776306152343, 0.02590105628967285, 0.02572492790222168, 0.025842687606811524, 0.028288000106811522, 0.026855424880981447, 0.02614681625366211, 0.02759782409667969, 0.027792383193969726, 0.027212799072265623, 0.02595430374145508, 0.025837568283081053, 0.026619903564453123, 0.0267827205657959, 0.026208255767822267, 0.02611199951171875, 0.02634239959716797, 0.027263999938964844, 0.026198015213012696, 0.025976831436157227, 0.026481664657592774, 0.02589388847351074, 0.02592665672302246, 0.02594508743286133, 0.03037696075439453, 0.02755072021484375, 0.026925056457519532, 0.02593484878540039, 0.0265533447265625, 0.026696704864501954, 0.02588467216491699, 0.02589798355102539, 0.025839616775512695, 0.02588572883605957]",tokens/s,38.33919393412161,,,1,64,1,,, -4bit-awq-exllama-v1-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,facebook/opt-2.7b,facebook/opt-2.7b,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - self.load_model_from_pretrained() - File 
""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3907, in from_pretrained - hf_quantizer.postprocess_model(model) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/base.py"", line 195, in postprocess_model - return self._process_model_after_weight_loading(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/quantizer_awq.py"", line 107, in _process_model_after_weight_loading - model = post_init_awq_exllama_modules(model, self.quantization_config.exllama_config) - File ""/usr/local/lib/python3.10/dist-packages/transformers/integrations/awq.py"", line 462, in post_init_awq_exllama_modules - model = exllama_post_init(model) - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllama.py"", line 144, in exllama_post_init - submodule.post_init() - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllama.py"", line 77, in post_init - self.q4 = exl_ext.make_q4( -NameError: name 'exl_ext' is not defined - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,1,64,1,,, -4bit-awq-exllama-v1-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,huggyllama/llama-7b,huggyllama/llama-7b,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - self.load_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - 
File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3907, in from_pretrained - hf_quantizer.postprocess_model(model) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/base.py"", line 195, in postprocess_model - return self._process_model_after_weight_loading(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/quantizer_awq.py"", line 107, in _process_model_after_weight_loading - model = post_init_awq_exllama_modules(model, self.quantization_config.exllama_config) - File ""/usr/local/lib/python3.10/dist-packages/transformers/integrations/awq.py"", line 462, in post_init_awq_exllama_modules - model = exllama_post_init(model) - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllama.py"", line 144, in exllama_post_init - submodule.post_init() - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllama.py"", line 77, in post_init - self.q4 = exl_ext.make_q4( -NameError: name 'exl_ext' is not defined - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,1,64,1,,, -4bit-awq-exllama-v1-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,facebook/opt-13b,facebook/opt-13b,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - self.load_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3907, in from_pretrained - hf_quantizer.postprocess_model(model) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/base.py"", line 195, in postprocess_model - return self._process_model_after_weight_loading(model, **kwargs) - 
File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/quantizer_awq.py"", line 107, in _process_model_after_weight_loading - model = post_init_awq_exllama_modules(model, self.quantization_config.exllama_config) - File ""/usr/local/lib/python3.10/dist-packages/transformers/integrations/awq.py"", line 462, in post_init_awq_exllama_modules - model = exllama_post_init(model) - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllama.py"", line 144, in exllama_post_init - submodule.post_init() - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllama.py"", line 77, in post_init - self.q4 = exl_ext.make_q4( -NameError: name 'exl_ext' is not defined - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,1,64,1,,, -4bit-awq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,EleutherAI/pythia-70m,EleutherAI/pythia-70m,cuda,0,42,,,True,,,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,gpt_neox,MB,875.855872,793.247744,0.0,163.577856,152.009216,s,1,7.24583837890625,7.24583837890625,0.0,7.24583837890625,7.24583837890625,7.24583837890625,7.24583837890625,[7.24583837890625],,kWh,5.099652515277992e-06,2.778960245098232e-06,6.502505202010678e-06,1.4381117962386902e-05,,MB,1397.518336,847.773696,0.0,201.326592,184.525824,s,31,0.20886140775680542,0.006737464766348563,0.00012045594605294933,0.00671017599105835,0.006796544075012207,0.006829599857330322,0.007200140714645385,"[0.0068089919090271, 0.00671017599105835, 0.006714719772338867, 0.006763264179229737, 0.006708831787109375, 0.006716127872467041, 0.006698016166687012, 0.006674784183502197, 0.006694591999053955, 0.006850207805633545, 0.006735199928283691, 0.0066752638816833494, 0.0066929922103881835, 0.006672383785247803, 0.006675903797149658, 0.006674015998840332, 0.006681312084197998, 0.006704832077026367, 0.00664899206161499, 0.006725312232971192, 0.0067151360511779785, 0.006660031795501709, 0.006758975982666016, 0.0067630081176757815, 0.006678400039672852, 0.006709568023681641, 0.00674121618270874, 0.006747903823852539, 0.006714591979980469, 0.007350111961364746, 0.006796544075012207]",tokens/s,37996.48812690441,kWh,7.902785006132476e-08,4.330337667751709e-08,3.4558964845217314e-07,4.67920875191015e-07,tokens/kWh,547101045.4395639,MB,1432.809472,847.773696,0.0,201.326592,184.528384,s,31,9.96047979736328,0.3213057999149445,0.0033227152836767223,0.3208233642578125,0.3259227294921875,0.32672499084472656,0.32750346679687503,"[0.32417041015625, 0.3239255676269531, 0.32375250244140624, 0.3276725463867188, 0.32393780517578125, 0.31850643920898436, 0.31734002685546875, 0.3221502685546875, 0.3259227294921875, 0.32634103393554686, 0.3183990173339844, 0.3162735595703125, 0.321565673828125, 0.31956283569335936, 0.32710894775390625, 0.31806814575195314, 0.31866281127929685, 0.31702615356445313, 0.3253908081054688, 0.3175795593261719, 0.3208233642578125, 0.31867181396484373, 0.31971380615234374, 0.3206449890136719, 0.3178792724609375, 0.31732672119140626, 
0.32330572509765626, 0.325634033203125, 0.321748046875, 0.32218246459960936, 0.3191927185058594]",tokens/s,196.07489194616855,kWh,3.930622751905975e-06,2.1529137146103628e-06,6.378517582681603e-06,1.2462054049197943e-05,tokens/kWh,5055346.394044462,,s,1953,9.947984775543219,0.005093694201507021,0.00014234003328421517,0.005054463863372802,0.005255167961120606,0.005300652694702148,0.0056228249740600585,"[0.0049192957878112795, 0.005087232112884522, 0.005097472190856934, 0.0050800638198852536, 0.005066751956939697, 0.005436416149139404, 0.005306367874145508, 0.0052336640357971195, 0.005131264209747314, 0.00511078405380249, 0.005056511878967285, 0.005071872234344482, 0.005056511878967285, 0.005086207866668701, 0.0050841598510742185, 0.005054463863372802, 0.005053440093994141, 0.0050841598510742185, 0.0050503678321838375, 0.004986879825592041, 0.0050657281875610355, 0.0050124797821044925, 0.004989952087402344, 0.004977663993835449, 0.005005311965942383, 0.005017600059509277, 0.005442560195922852, 0.005726208209991455, 0.005410816192626953, 0.005295104026794434, 0.00536678409576416, 0.00531763219833374, 0.0052930560111999515, 0.005384191989898681, 0.00532480001449585, 0.005294079780578613, 0.005303296089172363, 0.005164031982421875, 0.005154816150665284, 0.005220352172851562, 0.005212160110473632, 0.0051794562339782715, 0.005151679992675781, 0.0052111358642578124, 0.00517632007598877, 0.005040128231048584, 0.004971519947052002, 0.005086207866668701, 0.0050852479934692385, 0.0050533761978149416, 0.0050432000160217285, 0.0050032639503479, 0.004984831809997559, 0.004993087768554687, 0.005138368129730225, 0.005081088066101074, 0.00506879997253418, 0.00506060791015625, 0.005091328144073487, 0.005055488109588623, 0.005044223785400391, 0.005061632156372071, 0.005164031982421875, 0.004888576030731201, 0.00506060791015625, 0.0050503678321838375, 0.005035007953643799, 0.0050728960037231445, 0.005037055969238281, 0.00505241584777832, 0.005018623828887939, 0.004999167919158935, 0.005005311965942383, 0.004968448162078858, 0.005137407779693603, 0.004996096134185791, 0.004988927841186524, 0.0052295680046081545, 0.005294079780578613, 0.005234687805175781, 0.0051363840103149415, 0.005158912181854248, 0.005153791904449463, 0.005149695873260498, 0.005214208126068115, 0.005261312007904053, 0.005223423957824707, 0.005230591773986816, 0.005239808082580567, 0.005237760066986084, 0.00522547197341919, 0.0053821439743042, 0.005189631938934326, 0.0050657281875610355, 0.005426176071166992, 0.005276671886444092, 0.005222400188446045, 0.005220352172851562, 0.005275648117065429, 0.005203968048095703, 0.005221375942230225, 0.00536575984954834, 0.005202943801879883, 0.005194752216339111, 0.005169151782989502, 0.005134335994720459, 0.005128191947937012, 0.0051363840103149415, 0.005169151782989502, 0.0052152638435363765, 0.00522441577911377, 0.005248000144958496, 0.005173247814178467, 0.005033984184265137, 0.005067776203155518, 0.00506060791015625, 0.005048319816589355, 0.005031936168670655, 0.00501043176651001, 0.0049797120094299315, 0.004986879825592041, 0.004998144149780274, 0.004996096134185791, 0.005085184097290039, 0.0050360321998596195, 0.005054463863372802, 0.004846591949462891, 0.005087232112884522, 0.00506982421875, 0.005058559894561767, 0.0050657281875610355, 0.005089280128479004, 0.005063680171966553, 0.005033984184265137, 0.005026815891265869, 0.005449728012084961, 0.005253119945526123, 0.005222400188446045, 0.0052899842262268066, 0.005262400150299072, 0.0052306241989135745, 0.005374879837036133, 
0.005206016063690186, 0.005132287979125977, 0.005078015804290771, 0.005062655925750732, 0.005051392078399658, 0.005057536125183106, 0.005123072147369385, 0.0050503678321838375, 0.004983808040618896, 0.004994048118591309, 0.005017600059509277, 0.004992000102996826, 0.005171199798583984, 0.005148672103881836, 0.005178368091583252, 0.0051333122253417966, 0.0051363840103149415, 0.0051701760292053225, 0.005137407779693603, 0.005230591773986816, 0.005243904113769531, 0.0052367358207702636, 0.0052930560111999515, 0.005359615802764893, 0.005250048160552978, 0.00522649621963501, 0.005213183879852295, 0.005264383792877197, 0.005132287979125977, 0.005135359764099121, 0.0051773438453674315, 0.005079040050506592, 0.004998144149780274, 0.005000192165374756, 0.00502784013748169, 0.004980735778808594, 0.004974592208862305, 0.0050094079971313476, 0.005017600059509277, 0.004999167919158935, 0.004976640224456787, 0.005112832069396973, 0.0052705278396606445, 0.005197824001312256, 0.00526643180847168, 0.00521830415725708, 0.005206016063690186, 0.0048865280151367185, 0.005202976226806641, 0.005378015995025635, 0.005204991817474365, 0.005219327926635742, 0.005258240222930908, 0.005332992076873779, 0.00520908784866333, 0.005277696132659912, 0.0052715520858764645, 0.005230591773986816, 0.005234687805175781, 0.005941247940063477, 0.005775360107421875, 0.005362688064575195, 0.0052899842262268066, 0.005757952213287353, 0.005588992118835449, 0.005280767917633057, 0.005306367874145508, 0.005286911964416504, 0.005255167961120606, 0.005237760066986084, 0.005280767917633057, 0.005228544235229492, 0.005222464084625244, 0.005277632236480713, 0.005336063861846924, 0.0051363840103149415, 0.005094399929046631, 0.0050462718009948735, 0.005645311832427978, 0.005313536167144775, 0.005244927883148193, 0.005222400188446045, 0.005104640007019043, 0.005114880084991455, 0.005053440093994141, 0.005045248031616211, 0.005171199798583984, 0.005063680171966553, 0.00506982421875, 0.005056511878967285, 0.005083136081695557, 0.004994048118591309, 0.004989952087402344, 0.004985856056213379, 0.0050063362121582035, 0.004904960155487061, 0.004957183837890625, 0.00499507188796997, 0.005067776203155518, 0.0050657281875610355, 0.005045248031616211, 0.0050800638198852536, 0.005178368091583252, 0.005053440093994141, 0.005053440093994141, 0.005059584140777588, 0.005067776203155518, 0.005047296047210693, 0.0050360321998596195, 0.005091328144073487, 0.0048261117935180665, 0.005037055969238281, 0.005038080215454102, 0.005049344062805176, 0.0050800638198852536, 0.005038080215454102, 0.005026815891265869, 0.005044223785400391, 0.00506879997253418, 0.005064703941345215, 0.0050360321998596195, 0.005064703941345215, 0.005208064079284668, 0.005203968048095703, 0.005342207908630371, 0.005339136123657226, 0.005210112094879151, 0.005214208126068115, 0.0051998720169067385, 0.005097472190856934, 0.0051066880226135255, 0.00511897611618042, 0.005146624088287354, 0.005186560153961181, 0.005201920032501221, 0.005957632064819336, 0.005228544235229492, 0.00515993595123291, 0.005258240222930908, 0.005212160110473632, 0.005206016063690186, 0.005139455795288086, 0.005294079780578613, 0.005257215976715088, 0.005230591773986816, 0.005193727970123291, 0.005195775985717774, 0.005224448204040527, 0.005224448204040527, 0.005202943801879883, 0.005194752216339111, 0.005141503810882568, 0.005161983966827393, 0.005017600059509277, 0.005035007953643799, 0.005066751956939697, 0.005035007953643799, 0.005038080215454102, 0.0050657281875610355, 0.005047296047210693, 0.00502784013748169, 
0.005057536125183106, 0.005182464122772217, 0.005032959938049316, 0.005039103984832764, 0.005040128231048584, 0.00506060791015625, 0.0050432000160217285, 0.0050432000160217285, 0.005038080215454102, 0.005078015804290771, 0.005134335994720459, 0.005028863906860351, 0.004787199974060059, 0.005048319816589355, 0.005074944019317627, 0.0049797120094299315, 0.004971519947052002, 0.0049797120094299315, 0.005018623828887939, 0.004979775905609131, 0.00497657585144043, 0.005126143932342529, 0.005074944019317627, 0.005059584140777588, 0.005075967788696289, 0.0051138558387756345, 0.005044223785400391, 0.005045248031616211, 0.005044223785400391, 0.005071872234344482, 0.004983808040618896, 0.005017600059509277, 0.005022751808166504, 0.005086175918579102, 0.005040128231048584, 0.005041152000427246, 0.005049344062805176, 0.005092351913452148, 0.005094399929046631, 0.005051392078399658, 0.005089280128479004, 0.005096447944641113, 0.005074944019317627, 0.005040128231048584, 0.0050841598510742185, 0.005054463863372802, 0.005047296047210693, 0.0050503678321838375, 0.0050657281875610355, 0.005056511878967285, 0.00505241584777832, 0.005032959938049316, 0.0050657281875610355, 0.005040128231048584, 0.0050800638198852536, 0.005082111835479736, 0.005041152000427246, 0.0050135040283203125, 0.005053440093994141, 0.005091328144073487, 0.005032959938049316, 0.005038080215454102, 0.005061632156372071, 0.005079040050506592, 0.005037055969238281, 0.005033984184265137, 0.005055488109588623, 0.00506060791015625, 0.005048319816589355, 0.005037087917327881, 0.005058527946472168, 0.005152768135070801, 0.0050728960037231445, 0.005081088066101074, 0.005100543975830078, 0.004825088024139404, 0.0050462718009948735, 0.005030911922454834, 0.005035007953643799, 0.00502784013748169, 0.004994048118591309, 0.005028863906860351, 0.0050094079971313476, 0.0050022401809692385, 0.005047296047210693, 0.005028863906860351, 0.005040128231048584, 0.005073919773101807, 0.005024767875671386, 0.005041152000427246, 0.005041152000427246, 0.005074944019317627, 0.005054463863372802, 0.004983808040618896, 0.004974592208862305, 0.00499513578414917, 0.004961215972900391, 0.0049725441932678225, 0.004973567962646484, 0.0050135040283203125, 0.004966400146484375, 0.00502784013748169, 0.00502784013748169, 0.005079040050506592, 0.00505241584777832, 0.005023744106292725, 0.005117951869964599, 0.005062655925750732, 0.005058559894561767, 0.005014527797698975, 0.005041152000427246, 0.005046304225921631, 0.005022687911987305, 0.005055488109588623, 0.0050657281875610355, 0.005033984184265137, 0.005033984184265137, 0.005035007953643799, 0.005074944019317627, 0.005127168178558349, 0.004977663993835449, 0.00496230411529541, 0.004993023872375488, 0.004994048118591309, 0.004967423915863037, 0.004956160068511963, 0.004992000102996826, 0.004968448162078858, 0.004980735778808594, 0.0049909758567810054, 0.005444608211517334, 0.005058559894561767, 0.005035007953643799, 0.00506879997253418, 0.0051404800415039064, 0.005059584140777588, 0.005037055969238281, 0.005057536125183106, 0.004837376117706299, 0.005041152000427246, 0.005044223785400391, 0.005028895854949951, 0.005062623977661133, 0.004969471931457519, 0.00497049617767334, 0.004973567962646484, 0.00499513578414917, 0.0049642882347106934, 0.004976640224456787, 0.004951039791107178, 0.00501043176651001, 0.004978687763214112, 0.004968448162078858, 0.0049797120094299315, 0.005100543975830078, 0.005039167881011963, 0.005089216232299805, 0.00517632007598877, 0.005325856208801269, 0.005200863838195801, 0.005241856098175048, 
0.005167103767395019, 0.005093376159667969, 0.00495411205291748, 0.005026815891265869, 0.005066751956939697, 0.005033984184265137, 0.005061632156372071, 0.005184512138366699, 0.005131264209747314, 0.005053440093994141, 0.005058559894561767, 0.005075967788696289, 0.0050186882019042965, 0.005174208164215088, 0.005422080039978027, 0.005350399971008301, 0.005223423957824707, 0.0051138558387756345, 0.0051998720169067385, 0.00521830415725708, 0.005327871799468994, 0.005320703983306885, 0.005223423957824707, 0.005302271842956543, 0.0052070398330688475, 0.0051435518264770505, 0.005123104095458984, 0.00511075210571289, 0.005157887935638428, 0.005127168178558349, 0.005125120162963868, 0.005203968048095703, 0.005116928100585938, 0.005108736038208008, 0.005101568222045898, 0.005213183879852295, 0.005138432025909424, 0.005048319816589355, 0.005048319816589355, 0.005053440093994141, 0.004781055927276612, 0.00511897611618042, 0.00497049617767334, 0.004985856056213379, 0.004977663993835449, 0.004976640224456787, 0.004960256099700928, 0.004989952087402344, 0.004980735778808594, 0.004966400146484375, 0.004973567962646484, 0.004964352130889893, 0.004998144149780274, 0.004965375900268554, 0.005030911922454834, 0.005190656185150146, 0.005035007953643799, 0.005067776203155518, 0.005037055969238281, 0.00506982421875, 0.005031936168670655, 0.005049344062805176, 0.00506982421875, 0.0050728960037231445, 0.005135359764099121, 0.005282815933227539, 0.005190656185150146, 0.005238783836364746, 0.005223423957824707, 0.00511078405380249, 0.0050841598510742185, 0.005040128231048584, 0.005048319816589355, 0.005165056228637695, 0.005232639789581299, 0.005056511878967285, 0.005067776203155518, 0.0050769920349121095, 0.005031936168670655, 0.0050503678321838375, 0.005035039901733399, 0.005093344211578369, 0.00517632007598877, 0.00506879997253418, 0.005061632156372071, 0.005063680171966553, 0.005054463863372802, 0.005044223785400391, 0.00520908784866333, 0.005049344062805176, 0.00505241584777832, 0.005037055969238281, 0.005989376068115234, 0.00618393611907959, 0.006304768085479737, 0.006418432235717773, 0.005412864208221436, 0.005273600101470947, 0.005320703983306885, 0.0053012480735778805, 0.0053043198585510255, 0.005444608211517334, 0.005305344104766845, 0.004895743846893311, 0.00506169605255127, 0.005095359802246094, 0.0050657281875610355, 0.005030911922454834, 0.005092351913452148, 0.005096447944641113, 0.005280767917633057, 0.005245952129364013, 0.005186560153961181, 0.005195775985717774, 0.0052971520423889164, 0.005239808082580567, 0.0052674560546875, 0.005249023914337158, 0.005237760066986084, 0.00526643180847168, 0.00521727991104126, 0.005117951869964599, 0.005180416107177735, 0.00515174388885498, 0.005126143932342529, 0.005049344062805176, 0.0050503678321838375, 0.005017600059509277, 0.0050247998237609864, 0.005022687911987305, 0.00506879997253418, 0.005044223785400391, 0.0050432000160217285, 0.005049344062805176, 0.005056511878967285, 0.005031936168670655, 0.005134335994720459, 0.005056511878967285, 0.005023744106292725, 0.00502784013748169, 0.005030911922454834, 0.0050769920349121095, 0.0050432000160217285, 0.005066751956939697, 0.0060999679565429685, 0.005860352039337159, 0.006098944187164307, 0.00573747205734253, 0.005272575855255127, 0.0053309440612792965, 0.0053043198585510255, 0.0051998720169067385, 0.005265408039093018, 0.0052408318519592285, 0.005195775985717774, 0.005249023914337158, 0.00515174388885498, 0.005041152000427246, 0.00506060791015625, 0.005063680171966553, 0.005018623828887939, 
0.004931583881378174, 0.005032959938049316, 0.005161983966827393, 0.005026815891265869, 0.005088255882263183, 0.004795392036437988, 0.005056511878967285, 0.005033984184265137, 0.0050165758132934574, 0.0050094079971313476, 0.005070847988128662, 0.005055488109588623, 0.005035007953643799, 0.005021696090698242, 0.005070847988128662, 0.005022719860076904, 0.005282815933227539, 0.005135359764099121, 0.005100543975830078, 0.005030911922454834, 0.0050360321998596195, 0.005041152000427246, 0.005149695873260498, 0.005035007953643799, 0.005038080215454102, 0.0052111358642578124, 0.005051392078399658, 0.005026815891265869, 0.005023744106292725, 0.005057536125183106, 0.005037055969238281, 0.00501043176651001, 0.005074944019317627, 0.005058559894561767, 0.005035007953643799, 0.00501964807510376, 0.005030911922454834, 0.00506879997253418, 0.0050421757698059086, 0.005028863906860351, 0.005070847988128662, 0.005127168178558349, 0.005033984184265137, 0.005047327995300293, 0.005054431915283203, 0.005032959938049316, 0.005028863906860351, 0.005053440093994141, 0.005057536125183106, 0.004965375900268554, 0.005079040050506592, 0.005019711971282959, 0.005075903892517089, 0.005029920101165771, 0.00510972785949707, 0.0049725441932678225, 0.0050032639503479, 0.004969471931457519, 0.00497049617767334, 0.005006400108337403, 0.005173183917999267, 0.005039103984832764, 0.00503715181350708, 0.005046175956726074, 0.005059584140777588, 0.005031936168670655, 0.005026847839355469, 0.005057504177093506, 0.004799488067626953, 0.005032959938049316, 0.005064703941345215, 0.004986879825592041, 0.004987904071807861, 0.004978687763214112, 0.004978687763214112, 0.00495411205291748, 0.005038144111633301, 0.005065663814544678, 0.005031936168670655, 0.005026815891265869, 0.005061632156372071, 0.0050360321998596195, 0.0050462718009948735, 0.005061632156372071, 0.005073919773101807, 0.005048319816589355, 0.0050206718444824215, 0.0050462718009948735, 0.005055488109588623, 0.005032959938049316, 0.005030911922454834, 0.005037055969238281, 0.005055488109588623, 0.0050432000160217285, 0.005082111835479736, 0.005064703941345215, 0.005057536125183106, 0.004995168209075928, 0.005032864093780518, 0.005054463863372802, 0.005048319816589355, 0.005037055969238281, 0.004998144149780274, 0.0050432000160217285, 0.004960256099700928, 0.004958208084106445, 0.004976640224456787, 0.004998144149780274, 0.004992000102996826, 0.004977663993835449, 0.004974592208862305, 0.005081088066101074, 0.005033984184265137, 0.005033984184265137, 0.005029888153076172, 0.00505241584777832, 0.0049797120094299315, 0.004967423915863037, 0.004980735778808594, 0.0050432000160217285, 0.004987904071807861, 0.0049827837944030765, 0.004980735778808594, 0.005001215934753418, 0.004996096134185791, 0.004963327884674072, 0.004977663993835449, 0.005005311965942383, 0.004981760025024414, 0.004981760025024414, 0.0049827837944030765, 0.004798463821411133, 0.005066751956939697, 0.005272575855255127, 0.005238783836364746, 0.005210112094879151, 0.005292031764984131, 0.0052111358642578124, 0.005276671886444092, 0.005260287761688232, 0.005224448204040527, 0.005206079959869385, 0.0052848000526428225, 0.005219327926635742, 0.005237760066986084, 0.005220352172851562, 0.0051701760292053225, 0.005081088066101074, 0.005137407779693603, 0.0051476478576660155, 0.005188608169555664, 0.005230591773986816, 0.005256192207336426, 0.005216256141662597, 0.005122047901153564, 0.005035039901733399, 0.005056479930877686, 0.005038080215454102, 0.004974592208862305, 0.005007359981536865, 0.004994048118591309, 
0.004964352130889893, 0.004966400146484375, 0.004952064037322998, 0.005024767875671386, 0.004975615978240967, 0.004976640224456787, 0.004993023872375488, 0.005201920032501221, 0.0050800638198852536, 0.005039103984832764, 0.0051066880226135255, 0.005044223785400391, 0.00505241584777832, 0.0050360321998596195, 0.005081088066101074, 0.0050728960037231445, 0.005040128231048584, 0.005051392078399658, 0.005073919773101807, 0.005025856018066406, 0.005043136119842529, 0.005040128231048584, 0.005083136081695557, 0.005066783905029297, 0.005045216083526611, 0.0050657281875610355, 0.005124095916748047, 0.005049344062805176, 0.005037087917327881, 0.005077983856201172, 0.005041152000427246, 0.005028863906860351, 0.005045248031616211, 0.004827136039733886, 0.005043327808380127, 0.004939648151397705, 0.004997119903564453, 0.004977663993835449, 0.005025792121887207, 0.004977663993835449, 0.0049725441932678225, 0.00501145601272583, 0.005058559894561767, 0.005231616020202637, 0.005243904113769531, 0.005273600101470947, 0.005192704200744629, 0.004977663993835449, 0.004992000102996826, 0.0054241280555725096, 0.005085184097290039, 0.0050503678321838375, 0.005073919773101807, 0.005090303897857666, 0.0050769920349121095, 0.0050432000160217285, 0.005070847988128662, 0.005045248031616211, 0.005135359764099121, 0.005021696090698242, 0.0050503678321838375, 0.004993023872375488, 0.0050032639503479, 0.005024767875671386, 0.00506982421875, 0.0050462718009948735, 0.0050381760597229, 0.00504310417175293, 0.005099520206451416, 0.005031936168670655, 0.005070847988128662, 0.005037055969238281, 0.005197824001312256, 0.005031936168670655, 0.005057536125183106, 0.00506879997253418, 0.005031936168670655, 0.005031007766723633, 0.004972447872161865, 0.005032959938049316, 0.005129216194152832, 0.0051701760292053225, 0.005139455795288086, 0.005149695873260498, 0.004956160068511963, 0.004983808040618896, 0.0050360321998596195, 0.00506060791015625, 0.0050800638198852536, 0.005228544235229492, 0.00530841588973999, 0.005165056228637695, 0.004987967967987061, 0.00497657585144043, 0.0050124797821044925, 0.004994048118591309, 0.004794367790222168, 0.005053440093994141, 0.005053440093994141, 0.005090303897857666, 0.005047296047210693, 0.005075967788696289, 0.0050657281875610355, 0.005098495960235596, 0.005059584140777588, 0.005047296047210693, 0.005096447944641113, 0.005053440093994141, 0.005237760066986084, 0.005059584140777588, 0.005100543975830078, 0.005061632156372071, 0.00501145601272583, 0.004989952087402344, 0.0050124797821044925, 0.0049797120094299315, 0.004986911773681641, 0.005421088218688965, 0.005273536205291748, 0.005265408039093018, 0.005310463905334473, 0.005274623870849609, 0.005255167961120606, 0.005223423957824707, 0.0052715520858764645, 0.0052336640357971195, 0.005252096176147461, 0.00530841588973999, 0.0052367358207702636, 0.005234687805175781, 0.005278719902038574, 0.005228544235229492, 0.005173247814178467, 0.005189631938934326, 0.005228544235229492, 0.005261312007904053, 0.005251071929931641, 0.005294079780578613, 0.005244927883148193, 0.0052367358207702636, 0.005274623870849609, 0.00522547197341919, 0.005230591773986816, 0.005283840179443359, 0.005230591773986816, 0.005252096176147461, 0.005284863948822022, 0.005222400188446045, 0.005231616020202637, 0.0052705278396606445, 0.005232639789581299, 0.005235712051391602, 0.005319680213928223, 0.005265408039093018, 0.0052367358207702636, 0.005214208126068115, 0.005281792163848877, 0.005265408039093018, 0.00522547197341919, 0.004789247989654541, 0.00506879997253418, 
0.005062655925750732, 0.005030911922454834, 0.00502784013748169, 0.005172224044799805, 0.005067776203155518, 0.005049344062805176, 0.0050769920349121095, 0.005064703941345215, 0.004973567962646484, 0.0049797120094299315, 0.004967423915863037, 0.005007359981536865, 0.005041152000427246, 0.0050360321998596195, 0.0050462718009948735, 0.005045248031616211, 0.005063680171966553, 0.0050503678321838375, 0.005063680171966553, 0.004956160068511963, 0.004984831809997559, 0.005005311965942383, 0.005056511878967285, 0.005023744106292725, 0.005017600059509277, 0.005044223785400391, 0.005081088066101074, 0.005049344062805176, 0.0050360321998596195, 0.005055488109588623, 0.0050769920349121095, 0.005045248031616211, 0.005037055969238281, 0.0050432000160217285, 0.0050769920349121095, 0.00502784013748169, 0.005054463863372802, 0.005061632156372071, 0.0051036162376403805, 0.005056511878967285, 0.005026815891265869, 0.005082111835479736, 0.005049344062805176, 0.005048319816589355, 0.005049344062805176, 0.005057536125183106, 0.005048319816589355, 0.005039103984832764, 0.0050503678321838375, 0.005073919773101807, 0.005054463863372802, 0.0050421757698059086, 0.005049344062805176, 0.005074944019317627, 0.005025824069976806, 0.005040095806121826, 0.0050728960037231445, 0.005058591842651367, 0.005050335884094238, 0.005025792121887207, 0.005082111835479736, 0.004859903812408447, 0.005022719860076904, 0.005022719860076904, 0.005064703941345215, 0.005048319816589355, 0.005056511878967285, 0.0050094079971313476, 0.004946944236755371, 0.005041152000427246, 0.005028863906860351, 0.0050165758132934574, 0.005025792121887207, 0.005066751956939697, 0.005055488109588623, 0.004956160068511963, 0.004965375900268554, 0.004981760025024414, 0.004981760025024414, 0.004955135822296143, 0.00496127986907959, 0.005451776027679443, 0.00521833610534668, 0.005138400077819824, 0.005053440093994141, 0.005033984184265137, 0.005031936168670655, 0.005015552043914795, 0.0050841598510742185, 0.0052111358642578124, 0.005635072231292724, 0.005216256141662597, 0.005054463863372802, 0.005031936168670655, 0.005045248031616211, 0.005066751956939697, 0.005102591991424561, 0.005054463863372802, 0.005104640007019043, 0.0050954241752624516, 0.005038080215454102, 0.004993023872375488, 0.005024767875671386, 0.004964352130889893, 0.004975615978240967, 0.004974592208862305, 0.0049909758567810054, 0.005131264209747314, 0.005025792121887207, 0.005055488109588623, 0.005058559894561767, 0.004977663993835449, 0.004965375900268554, 0.005004288196563721, 0.005070847988128662, 0.005031936168670655, 0.005051392078399658, 0.004984831809997559, 0.005035007953643799, 0.0050206718444824215, 0.005054463863372802, 0.005038080215454102, 0.005078015804290771, 0.0050135040283203125, 0.004784128189086914, 0.005144576072692871, 0.005045248031616211, 0.005081088066101074, 0.005022719860076904, 0.005051392078399658, 0.005071872234344482, 0.005039103984832764, 0.005031936168670655, 0.005031936168670655, 0.005058591842651367, 0.005062623977661133, 0.004984831809997559, 0.004981760025024414, 0.005001215934753418, 0.004981760025024414, 0.00496230411529541, 0.004974592208862305, 0.004997119903564453, 0.004974592208862305, 0.004967423915863037, 0.00499507188796997, 0.005039103984832764, 0.005063680171966553, 0.005044223785400391, 0.005063680171966553, 0.005062655925750732, 0.005024767875671386, 0.005036064147949219, 0.005024735927581787, 0.0050841598510742185, 0.005040128231048584, 0.005023744106292725, 0.005047296047210693, 0.0050769920349121095, 0.005031936168670655, 
0.00501964807510376, 0.005040128231048584, 0.00506879997253418, 0.0050503678321838375, 0.005039103984832764, 0.0050657281875610355, 0.004975615978240967, 0.00506060791015625, 0.005047296047210693, 0.00505241584777832, 0.0050432000160217285, 0.0050360321998596195, 0.005035071849822998, 0.005059519767761231, 0.005056511878967285, 0.0050462718009948735, 0.00502784013748169, 0.005082111835479736, 0.005018623828887939, 0.004978687763214112, 0.004983808040618896, 0.0050032639503479, 0.004969471931457519, 0.004978687763214112, 0.004974592208862305, 0.005028863906860351, 0.004993023872375488, 0.004802495956420899, 0.005031936168670655, 0.005024767875671386, 0.005064703941345215, 0.005033984184265137, 0.005014527797698975, 0.004960256099700928, 0.004998144149780274, 0.00496230411529541, 0.004959231853485108, 0.0050954241752624516, 0.005101568222045898, 0.005030911922454834, 0.005023744106292725, 0.005029888153076172, 0.0050728960037231445, 0.005044223785400391, 0.005031936168670655, 0.005035007953643799, 0.005075007915496826, 0.005035967826843262, 0.005115903854370117, 0.005088255882263183, 0.005073919773101807, 0.00522547197341919, 0.0051333122253417966, 0.0051066880226135255, 0.005417984008789062, 0.005611519813537597, 0.0060631041526794435, 0.0052715520858764645, 0.005285888195037842, 0.00521830415725708, 0.005215231895446777, 0.005251071929931641, 0.005250048160552978, 0.005213183879852295, 0.0052295680046081545, 0.005219327926635742, 0.005126143932342529, 0.005213183879852295, 0.005292031764984131, 0.005338111877441406, 0.005837823867797852, 0.0053309440612792965, 0.005248064041137695, 0.00522438383102417, 0.0052715520858764645, 0.005290016174316407, 0.005218272209167481, 0.005260287761688232, 0.005224448204040527, 0.005134335994720459, 0.005081088066101074, 0.005064703941345215, 0.005058559894561767, 0.005058559894561767, 0.005063680171966553, 0.00505241584777832, 0.005037055969238281, 0.005053440093994141, 0.0050728960037231445, 0.005029888153076172, 0.004810751914978028, 0.005058559894561767, 0.005045248031616211, 0.00505347204208374, 0.00501859188079834, 0.004947968006134033, 0.004965375900268554, 0.004988927841186524, 0.0049827837944030765, 0.0049582719802856445, 0.00496940803527832, 0.005007391929626465, 0.004956128120422364, 0.005030911922454834, 0.005029888153076172, 0.005058559894561767, 0.005039103984832764, 0.005030911922454834, 0.005045248031616211, 0.005031936168670655, 0.004964352130889893, 0.0049797120094299315, 0.004971519947052002, 0.004999167919158935, 0.004985856056213379, 0.004944896221160889, 0.004960256099700928, 0.004999199867248535, 0.004973536014556885, 0.004976640224456787, 0.004973567962646484, 0.004994048118591309, 0.005296127796173096, 0.005181503772735596, 0.0051803522109985355, 0.005129216194152832, 0.005041152000427246, 0.00536678409576416, 0.005073919773101807, 0.005033984184265137, 0.005029888153076172, 0.005026815891265869, 0.005081088066101074, 0.005047296047210693, 0.005022719860076904, 0.0050503678321838375, 0.004958208084106445, 0.004993023872375488, 0.004964384078979492, 0.005170144081115722, 0.00505241584777832, 0.005070847988128662, 0.005049344062805176, 0.005061632156372071, 0.005029888153076172, 0.005028863906860351, 0.0051404800415039064, 0.0050800638198852536, 0.005192704200744629, 0.005082111835479736, 0.005038080215454102, 0.004997151851654053, 0.004965343952178955, 0.004793280124664306, 0.005035071849822998, 0.005041088104248047, 0.005114880084991455, 0.005048351764678955, 0.005047264099121093, 0.005079040050506592, 0.005051392078399658, 
0.005022719860076904, 0.0050432000160217285, 0.005057536125183106, 0.005056575775146484, 0.00502675199508667, 0.005029888153076172, 0.005079040050506592, 0.005047359943389893, 0.00506873607635498, 0.005055488109588623, 0.005055488109588623, 0.005047296047210693, 0.004977663993835449, 0.004997119903564453, 0.0050094079971313476, 0.004993023872375488, 0.004985856056213379, 0.004994048118591309, 0.005015552043914795, 0.004977695941925049, 0.005336031913757324, 0.0050769920349121095, 0.00506879997253418, 0.005070847988128662, 0.0051066880226135255, 0.005074944019317627, 0.005053440093994141, 0.005026815891265869, 0.005055488109588623, 0.005075967788696289, 0.0050421757698059086, 0.004980735778808594, 0.0049797120094299315, 0.004992000102996826, 0.004976640224456787, 0.0049725441932678225, 0.004975679874420166, 0.005027775764465332, 0.0049827837944030765, 0.005154816150665284, 0.005252096176147461, 0.005259263992309571, 0.005265408039093018, 0.0052715520858764645, 0.005288959980010987, 0.005263391971588135, 0.0052592320442199705, 0.005313536167144775, 0.005274623870849609, 0.005272575855255127, 0.005307392120361328, 0.005254144191741943, 0.005259263992309571, 0.00511078405380249, 0.0050032639503479, 0.0047861762046813965, 0.005044223785400391, 0.005071872234344482, 0.0050657281875610355, 0.0050421757698059086, 0.005048319816589355, 0.005055488109588623, 0.005075967788696289, 0.005040128231048584, 0.005054463863372802, 0.005049344062805176, 0.005088255882263183, 0.005056511878967285, 0.0050432000160217285, 0.0050728960037231445, 0.005045248031616211, 0.005051392078399658, 0.00505241584777832, 0.005090303897857666, 0.005099520206451416, 0.005047296047210693, 0.005056511878967285, 0.005104640007019043, 0.0050421757698059086, 0.005041152000427246, 0.005037055969238281, 0.005061632156372071, 0.005130239963531494, 0.0050462718009948735, 0.005061632156372071, 0.005058559894561767, 0.004921343803405762, 0.0049162240028381345, 0.0050421757698059086, 0.0050769920349121095, 0.005032959938049316, 0.0050462718009948735, 0.0050657281875610355, 0.0050432000160217285, 0.005026815891265869, 0.005063680171966553, 0.0050657281875610355, 0.0050432000160217285, 0.005062655925750732, 0.005047296047210693, 0.0050769920349121095, 0.005051392078399658, 0.005040192127227784, 0.005075903892517089, 0.005122047901153564, 0.005004288196563721, 0.004989952087402344, 0.00511078405380249, 0.005070847988128662, 0.0050769920349121095, 0.005070847988128662, 0.005085184097290039, 0.005079040050506592, 0.005056511878967285, 0.005071872234344482, 0.005111807823181152, 0.005049344062805176, 0.005056511878967285, 0.00479744005203247, 0.005040128231048584, 0.00506879997253418, 0.005053440093994141, 0.005029888153076172, 0.005081088066101074, 0.005058559894561767, 0.00505241584777832, 0.005041152000427246, 0.005071872234344482, 0.005033984184265137, 0.005051392078399658, 0.00502784013748169, 0.005061632156372071, 0.005022719860076904, 0.005029888153076172, 0.005021696090698242, 0.004983871936798096, 0.005018559932708741, 0.00502784013748169, 0.005024767875671386, 0.005055488109588623, 0.005024767875671386, 0.005030911922454834, 0.005031936168670655, 0.00506060791015625, 0.005018623828887939, 0.004976640224456787, 0.004968448162078858, 0.005018623828887939, 0.004971519947052002, 0.004959263801574707, 0.004964320182800293, 0.005000192165374756, 0.005004288196563721, 0.0054271998405456545, 0.005172224044799805, 0.00506982421875, 0.005026815891265869, 0.004975615978240967, 0.005094399929046631, 0.005033984184265137, 0.005045248031616211, 
0.005040128231048584, 0.00506879997253418, 0.004998144149780274, 0.0049827837944030765, 0.004959231853485108, 0.00501964807510376, 0.004989952087402344, 0.0050728960037231445, 0.005142528057098389, 0.005341184139251709, 0.005202943801879883, 0.005222400188446045, 0.00530944013595581, 0.005259263992309571, 0.005192704200744629, 0.00516096019744873, 0.0051435518264770505, 0.005149695873260498, 0.0052336640357971195, 0.005283840179443359, 0.0049827837944030765, 0.005151872158050537, 0.005192575931549072, 0.005255167961120606, 0.00521727991104126, 0.0052367358207702636, 0.0053647360801696775, 0.00521830415725708, 0.005216256141662597, 0.005258240222930908, 0.0052111358642578124, 0.005213183879852295, 0.005224448204040527, 0.005298175811767578, 0.0052008957862854, 0.0050841598510742185, 0.005079040050506592, 0.005044223785400391, 0.004980735778808594, 0.0049797120094299315, 0.0049909758567810054, 0.004977663993835449, 0.004965375900268554, 0.004969471931457519, 0.0050206718444824215, 0.004971519947052002, 0.004973567962646484, 0.005035007953643799, 0.005073919773101807, 0.005023744106292725, 0.0050728960037231445, 0.005299200057983398, 0.005040128231048584, 0.005038080215454102, 0.005031936168670655, 0.0050728960037231445, 0.00510975980758667, 0.005045248031616211, 0.005048319816589355, 0.005083136081695557, 0.005032959938049316, 0.005047296047210693, 0.005037055969238281, 0.005061632156372071, 0.005032959938049316, 0.005023744106292725, 0.00506982421875, 0.005049344062805176, 0.005030911922454834, 0.0050360321998596195, 0.005070847988128662, 0.004946944236755371, 0.005005311965942383, 0.005040128231048584, 0.005089280128479004, 0.0050360321998596195, 0.005044223785400391, 0.005033984184265137, 0.005091328144073487, 0.005026815891265869, 0.0050462718009948735, 0.005023744106292725, 0.005088255882263183, 0.004828159809112549, 0.005031936168670655, 0.005014527797698975, 0.005044223785400391, 0.005083136081695557, 0.005058559894561767, 0.005041152000427246, 0.005051392078399658, 0.005063680171966553, 0.005047296047210693, 0.0050503678321838375, 0.0050063362121582035, 0.004996096134185791, 0.0049827837944030765, 0.004959231853485108, 0.0050462718009948735, 0.0053002238273620605, 0.005044223785400391, 0.00502784013748169, 0.00506982421875, 0.005041152000427246, 0.005048319816589355, 0.00502784013748169, 0.005074944019317627, 0.005031936168670655, 0.004960256099700928, 0.004974592208862305, 0.005008384227752686, 0.004969471931457519, 0.004980735778808594, 0.004965375900268554, 0.005067776203155518, 0.00505241584777832, 0.005056511878967285, 0.005049344062805176, 0.0050800638198852536, 0.004985856056213379, 0.004981760025024414, 0.005057536125183106, 0.0050503678321838375, 0.005051392078399658, 0.0050421757698059086, 0.0050841598510742185, 0.005044223785400391, 0.005039103984832764, 0.005045248031616211, 0.005090303897857666, 0.005045248031616211, 0.005044223785400391, 0.005035007953643799, 0.005075967788696289, 0.005045248031616211, 0.005047296047210693, 0.005039103984832764, 0.005089280128479004, 0.005040128231048584, 0.005049344062805176, 0.005070847988128662, 0.005053440093994141, 0.0050503678321838375, 0.005067776203155518, 0.005085279941558838, 0.005041056156158448, 0.00480460786819458, 0.005026815891265869, 0.004974592208862305, 0.0050360321998596195, 0.0049827837944030765, 0.004974592208862305, 0.004987904071807861, 0.004994048118591309, 0.004986879825592041, 0.0049797120094299315, 0.004959231853485108, 0.005023744106292725, 0.0049879360198974605, 0.0050319042205810545, 0.005057536125183106, 
0.00506982421875, 0.005031936168670655, 0.005041152000427246, 0.005036096096038819, 0.005012415885925293, 0.0049827837944030765, 0.004985856056213379, 0.004888576030731201, 0.004965375900268554, 0.004960288047790527, 0.004959199905395508, 0.004963327884674072, 0.005021696090698242, 0.0050421757698059086, 0.005048319816589355, 0.005057536125183106, 0.005082111835479736, 0.005022719860076904, 0.0050657281875610355, 0.0050432000160217285, 0.005062655925750732, 0.005029888153076172, 0.005030911922454834, 0.005026847839355469, 0.0050687680244445804, 0.005100607872009277, 0.005115839958190918, 0.005098495960235596, 0.0050432000160217285, 0.0049827837944030765, 0.0050032639503479, 0.004989952087402344, 0.005414912223815918, 0.005062655925750732, 0.0050421757698059086, 0.005100543975830078, 0.005001215934753418, 0.004987904071807861, 0.004968448162078858, 0.005117951869964599, 0.005045248031616211, 0.005038080215454102, 0.005083136081695557, 0.0050094079971313476, 0.004985856056213379, 0.005264383792877197, 0.005104640007019043, 0.005063680171966553, 0.004800511837005615, 0.0050432000160217285, 0.005044223785400391, 0.005134367942810058, 0.005046239852905273, 0.005037055969238281, 0.004986879825592041, 0.004951039791107178, 0.005152768135070801, 0.005302271842956543, 0.005303296089172363, 0.0052347202301025395, 0.0052326078414916995, 0.005249120235443115, 0.005178271770477295, 0.005058559894561767, 0.005044223785400391, 0.0050728960037231445, 0.005035007953643799, 0.005039103984832764, 0.005312511920928955, 0.005250048160552978, 0.005040128231048584, 0.0050657281875610355, 0.005081088066101074, 0.005058559894561767, 0.005041152000427246, 0.005062655925750732, 0.005082111835479736, 0.005047296047210693, 0.0050503678321838375, 0.005047296047210693, 0.005063680171966553, 0.00506879997253418, 0.005189631938934326, 0.00517632007598877, 0.005567488193511963, 0.005412864208221436, 0.005311488151550293, 0.005326848030090332, 0.005484543800354004, 0.005280767917633057, 0.005260287761688232, 0.005075967788696289, 0.005059584140777588, 0.005108736038208008, 0.005092351913452148, 0.005082111835479736, 0.005175295829772949, 0.005067776203155518, 0.005051392078399658, 0.005099520206451416, 0.005054463863372802, 0.005001215934753418, 0.005028895854949951, 0.0051662402153015135, 0.005193600177764893, 0.0051608958244323735, 0.0050135040283203125, 0.005059584140777588, 0.0050841598510742185, 0.005041152000427246, 0.005066751956939697, 0.004784128189086914, 0.005096447944641113, 0.005182464122772217, 0.006260735988616943, 0.005259263992309571, 0.0052295680046081545, 0.005245952129364013, 0.00530025577545166, 0.005226463794708252, 0.005238783836364746, 0.005089280128479004, 0.004975615978240967, 0.00496230411529541, 0.004969471931457519, 0.005164031982421875, 0.0051998720169067385, 0.005041152000427246, 0.005028863906860351, 0.005074944019317627, 0.005038080215454102, 0.00502784013748169, 0.005215231895446777, 0.00517632007598877, 0.005017600059509277, 0.00502784013748169, 0.005075967788696289, 0.005031936168670655, 0.005026815891265869, 0.00502784013748169, 0.005071872234344482, 0.005025792121887207, 0.005040128231048584, 0.005033984184265137, 0.0050800638198852536, 0.006095871925354004, 0.005154816150665284, 0.0050728960037231445, 0.005030911922454834, 0.0054906878471374515, 0.005294079780578613, 0.005175295829772949, 0.005256192207336426, 0.005269504070281982, 0.005201920032501221, 0.005264383792877197, 0.0052295680046081545, 0.005198847770690918, 0.005188608169555664, 0.005082111835479736, 0.005075967788696289, 
0.004957183837890625, 0.004960256099700928, 0.004977663993835449, 0.005056511878967285, 0.0051968002319335935, 0.0051476478576660155, 0.005235775947570801, 0.005195712089538574, 0.005206016063690186, 0.005238783836364746, 0.0052899842262268066, 0.005203968048095703, 0.005197824001312256, 0.004810751914978028, 0.005064735889434815, 0.005025760173797607, 0.00511078405380249, 0.0050503678321838375, 0.00505241584777832, 0.005036096096038819, 0.0050646400451660155, 0.005156864166259765, 0.005277696132659912, 0.005089280128479004, 0.0050503678321838375, 0.005048319816589355, 0.0050360321998596195, 0.005031936168670655, 0.0050657281875610355, 0.00502784013748169, 0.005036064147949219, 0.005030879974365235, 0.005064703941345215, 0.0050432000160217285, 0.005044223785400391, 0.005032959938049316, 0.005078015804290771, 0.005026815891265869, 0.0048895998001098635, 0.004907008171081543, 0.004998144149780274, 0.004958208084106445, 0.004973567962646484, 0.004977663993835449, 0.005010496139526368, 0.004982719898223877, 0.004960256099700928, 0.0049530878067016604, 0.004965375900268554, 0.004989952087402344, 0.004984831809997559, 0.0050032639503479, 0.004976640224456787, 0.0049797120094299315, 0.00497049617767334, 0.004988927841186524, 0.005245952129364013, 0.005250048160552978, 0.00517632007598877, 0.005119999885559082, 0.0051968002319335935, 0.005545983791351319, 0.0052899842262268066, 0.0052930560111999515, 0.005328896045684814, 0.005286911964416504, 0.005252096176147461, 0.0051701760292053225, 0.00516096019744873, 0.005257215976715088, 0.005203968048095703, 0.005221375942230225, 0.005256192207336426, 0.00522547197341919, 0.0053534722328186036, 0.005535744190216065, 0.0050124797821044925, 0.005204991817474365, 0.00532480001449585, 0.005371903896331787, 0.005231616020202637, 0.005231616020202637, 0.005322751998901368, 0.005148672103881836, 0.005139455795288086, 0.0051160001754760745, 0.005064608097076416, 0.005119999885559082, 0.0050432000160217285, 0.00506879997253418, 0.0049725441932678225, 0.00506879997253418, 0.004974592208862305, 0.005033984184265137, 0.00496230411529541, 0.00496230411529541, 0.00496127986907959, 0.005054463863372802, 0.005028863906860351, 0.0050432000160217285, 0.005033984184265137, 0.005056511878967285, 0.005030911922454834, 0.0050206718444824215, 0.005033984184265137, 0.005067776203155518, 0.005030911922454834, 0.004968448162078858, 0.004980735778808594, 0.005005311965942383, 0.00499507188796997, 0.004986879825592041, 0.004966400146484375, 0.005533696174621582, 0.005237760066986084, 0.005269536018371582, 0.005210080146789551, 0.0052008957862854, 0.005286911964416504, 0.005265408039093018, 0.005221375942230225, 0.00522649621963501, 0.005201920032501221, 0.005161983966827393, 0.005129216194152832, 0.005153791904449463, 0.005161983966827393, 0.005131264209747314, 0.005099520206451416, 0.0051435518264770505, 0.0051333122253417966, 0.005224448204040527, 0.005274623870849609, 0.005104640007019043, 0.0049387521743774416, 0.004894720077514648, 0.004980735778808594, 0.004984831809997559, 0.004977663993835449, 0.004723711967468262, 0.00497049617767334, 0.004947968006134033, 0.0050462718009948735, 0.005028863906860351, 0.0050206718444824215, 0.0050503678321838375, 0.0050800638198852536, 0.005031936168670655, 0.005033984184265137, 0.004986879825592041, 0.004978687763214112, 0.004968448162078858, 0.00497049617767334, 0.00502784013748169, 0.0050432319641113285, 0.005025760173797607, 0.005030911922454834, 0.0050432000160217285, 0.005037055969238281, 0.005031936168670655, 0.005028863906860351, 
0.005161983966827393, 0.004977663993835449, 0.004975615978240967, 0.0049459199905395506, 0.004988927841186524, 0.005018623828887939, 0.0050135040283203125, 0.0050503678321838375, 0.005049344062805176, 0.005031936168670655, 0.0050063362121582035, 0.005038080215454102, 0.005045248031616211, 0.005037055969238281, 0.004998144149780274, 0.005073919773101807, 0.005022719860076904, 0.0050135040283203125, 0.005044223785400391, 0.005025792121887207, 0.005067776203155518, 0.005044223785400391, 0.004963327884674072, 0.0049797120094299315, 0.005044223785400391, 0.005028863906860351, 0.004998144149780274, 0.0049489917755126955, 0.004989952087402344, 0.004951039791107178, 0.004973567962646484, 0.005024767875671386, 0.005194752216339111, 0.0052070398330688475, 0.005216256141662597, 0.00538316822052002, 0.006074368000030517, 0.005378047943115235, 0.005235712051391602, 0.005214208126068115, 0.005198847770690918]",tokens/s,196.32116896694345,,,1,64,1,,, -4bit-awq-exllama-v1-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,meta-llama/Llama-2-13b-hf,meta-llama/Llama-2-13b-hf,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - self.load_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3907, in from_pretrained - hf_quantizer.postprocess_model(model) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/base.py"", line 195, in postprocess_model - return self._process_model_after_weight_loading(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/quantizer_awq.py"", line 107, in _process_model_after_weight_loading - model = 
post_init_awq_exllama_modules(model, self.quantization_config.exllama_config) - File ""/usr/local/lib/python3.10/dist-packages/transformers/integrations/awq.py"", line 462, in post_init_awq_exllama_modules - model = exllama_post_init(model) - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllama.py"", line 144, in exllama_post_init - submodule.post_init() - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllama.py"", line 77, in post_init - self.q4 = exl_ext.make_q4( -NameError: name 'exl_ext' is not defined - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,1,64,1,,, -4bit-awq-exllama-v1-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,x,x,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 304, in hf_raise_for_status - response.raise_for_status() - File ""/usr/local/lib/python3.10/dist-packages/requests/models.py"", line 1024, in raise_for_status - raise HTTPError(http_error_msg, response=self) -requests.exceptions.HTTPError: 404 Client Error: Not Found for url: https://huggingface.co/x/resolve/main/config.json - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1221, in hf_hub_download - return _hf_hub_download_to_cache_dir( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1325, in _hf_hub_download_to_cache_dir - _raise_on_head_call_error(head_call_error, force_download, local_files_only) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1823, in _raise_on_head_call_error - raise head_call_error - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1722, in _get_metadata_or_catch_error - metadata = get_hf_file_metadata(url=url, proxies=proxies, timeout=etag_timeout, headers=headers) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File 
""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1645, in get_hf_file_metadata - r = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 372, in _request_wrapper - response = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 396, in _request_wrapper - hf_raise_for_status(response) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status - raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-669492ec-390f1a4214ed187e48fc3ce4;4970d307-6071-4f1a-9cca-1cc4a8d6c351) - -Repository Not Found for url: https://huggingface.co/x/resolve/main/config.json. -Please make sure you specified the correct `repo_id` and `repo_type`. -If you are trying to access a private or gated repo, make sure you are authenticated. - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 425, in cached_file - raise EnvironmentError( -OSError: x is not a local folder and is not a valid model identifier listed on 'https://huggingface.co/models' -If this is a private repository, make sure to pass a token having permission to this repo either by logging in with `huggingface-cli login` or by passing `token=` - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,1,64,1,,, -4bit-awq-exllama-v1-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,/,/,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA 
A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 373, in cached_file - raise EnvironmentError( -OSError: / does not appear to have a file named config.json. Checkout 'https://huggingface.co///tree/None' for available files. 
- -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,1,64,1,,, -4bit-awq-exllama-v1-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,togethercomputer/RedPajama-INCITE-Base-7B-v0.1,togethercomputer/RedPajama-INCITE-Base-7B-v0.1,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - self.load_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3907, in from_pretrained - hf_quantizer.postprocess_model(model) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/base.py"", line 195, in postprocess_model - return self._process_model_after_weight_loading(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/quantizer_awq.py"", line 107, in _process_model_after_weight_loading - model = post_init_awq_exllama_modules(model, self.quantization_config.exllama_config) - File ""/usr/local/lib/python3.10/dist-packages/transformers/integrations/awq.py"", line 462, in post_init_awq_exllama_modules - model = exllama_post_init(model) - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllama.py"", line 144, in exllama_post_init - submodule.post_init() - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllama.py"", line 77, in post_init - self.q4 = exl_ext.make_q4( -NameError: name 'exl_ext' is not defined - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,1,64,1,,, 
-4bit-awq-exllama-v1-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,facebook/xglm-564M,facebook/xglm-564M,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - self.load_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3704, in from_pretrained - config = cls._autoset_attn_implementation( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1481, in _autoset_attn_implementation - cls._check_and_enable_flash_attn_2( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1572, in _check_and_enable_flash_attn_2 - raise ValueError( -ValueError: XGLMForCausalLM does not support Flash Attention 2.0 yet. 
Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmpizvfil_q/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,1,64,1,,, -4bit-awq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,EleutherAI/gpt-neo-125m,EleutherAI/gpt-neo-125m,cuda,0,42,,,True,,,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,gpt_neo,MB,893.300736,851.968,0.0,222.298112,199.93344,s,1,7.4846494140625,7.4846494140625,0.0,7.4846494140625,7.4846494140625,7.4846494140625,7.4846494140625,[7.4846494140625],,kWh,7.252737294438955e-06,3.947901182706794e-06,9.812507850009977e-06,2.101314632715573e-05,,MB,1397.735424,908.591104,0.0,262.144,220.883456,s,22,0.43489858818054195,0.01976811764457009,4.1801451190025135e-05,0.019757040023803712,0.019807674026489256,0.01987541198730469,0.019886001434326173,"[0.019878847122192383, 0.019887903213500976, 0.01974198341369629, 0.019810144424438476, 0.019724639892578125, 0.019754560470581054, 0.019728479385375978, 0.019749792098999023, 0.019743711471557616, 0.01975935935974121, 0.019738271713256837, 0.01978544044494629, 0.01975654411315918, 0.01976198387145996, 0.019762336730957033, 0.01975753593444824, 0.019773216247558595, 0.019782495498657227, 0.019753664016723634, 0.019749792098999023, 0.01971971130371094, 0.019778175354003905]",tokens/s,12950.145512226762,kWh,2.3278435814605887e-07,1.2750570978407006e-07,1.2447997709920881e-06,1.6050898389222172e-06,tokens/kWh,159492630.12710765,MB,1432.84224,921.174016,0.0,274.726912,220.886016,s,22,9.797046447753907,0.44532029307972304,0.005803419767543434,0.4451862487792969,0.45232828674316405,0.4523575469970703,0.4554164904785156,"[0.4523500671386719, 0.4470330810546875, 0.4507646484375, 0.43758203125, 0.4372237243652344, 0.43644888305664065, 0.4407872924804688, 0.4404221801757813, 0.44488217163085936, 0.4505748291015625, 0.43943402099609374, 0.43922308349609374, 0.4498250427246094, 0.439134521484375, 0.45213226318359373, 0.44917098999023436, 0.4451618347167969, 0.4422179870605469, 0.4562295227050781, 0.4488796691894531, 0.4523579406738281, 0.4452106628417969]",tokens/s,141.4712084291238,kWh,5.181443631658014e-06,2.8392230837285536e-06,9.119236298008052e-06,1.7139903013394616e-05,tokens/kWh,3675633.400653802,,s,1386,9.78788764047623,0.007061967994571591,0.00018015791335627529,0.0070000319480896,0.0072468481063842774,0.00729804801940918,0.007629926490783693,"[0.00674508810043335, 0.007990272045135497, 0.007158783912658692, 0.007205887794494629, 0.00728985595703125, 0.00719155216217041, 0.007185408115386963, 0.007123968124389648, 0.007195648193359375, 0.007192607879638672, 0.007207903861999512, 0.00719155216217041, 0.007127039909362793, 0.007261184215545655, 0.0074414081573486324, 0.007238656044006348, 0.007180287837982178, 0.0071792640686035155, 0.007256063938140869, 0.007197696208953858, 0.007237631797790528, 
0.007261184215545655, 0.007160831928253173, 0.007195648193359375, 0.007184383869171143, 0.007223296165466309, 0.007205887794494629, 0.007217152118682861, 0.007159808158874512, 0.007175168037414551, 0.007180287837982178, 0.007170048236846924, 0.007214079856872559, 0.007169023990631103, 0.007102464199066162, 0.0069550080299377445, 0.006885439872741699, 0.006845503807067871, 0.0069303040504455566, 0.007268383979797364, 0.007180255889892578, 0.007323647975921631, 0.007171072006225586, 0.007228415966033935, 0.007185440063476562, 0.007201759815216064, 0.007124991893768311, 0.007171072006225586, 0.007165984153747558, 0.007173088073730469, 0.007036928176879883, 0.006945792198181152, 0.007129087924957276, 0.0072130560874938965, 0.007156735897064209, 0.007153664112091064, 0.0071495680809020995, 0.007136256217956543, 0.007144447803497315, 0.007135231971740722, 0.007101439952850342, 0.007174143791198731, 0.007228415966033935, 0.00676966381072998, 0.006914112091064453, 0.006973375797271728, 0.006990848064422607, 0.006937600135803222, 0.006914048194885254, 0.006882304191589355, 0.006909952163696289, 0.0068986878395080565, 0.006924287796020508, 0.006948863983154297, 0.006982656002044678, 0.006908927917480469, 0.006953983783721924, 0.006965248107910156, 0.006930431842803955, 0.006907904148101806, 0.006895616054534912, 0.007069695949554444, 0.006973440170288086, 0.00687718391418457, 0.006927360057830811, 0.006929408073425293, 0.006916096210479736, 0.00688640022277832, 0.006952960014343262, 0.0069324798583984375, 0.006876160144805908, 0.006931456089019775, 0.007035903930664063, 0.006946815967559815, 0.006887423992156983, 0.006945792198181152, 0.00708403205871582, 0.007548927783966064, 0.008026111602783203, 0.00733081579208374, 0.008258560180664062, 0.007319583892822266, 0.007199711799621582, 0.007236608028411865, 0.007211008071899414, 0.007176191806793213, 0.00719155216217041, 0.006944767951965332, 0.007131135940551757, 0.00729804801940918, 0.007186431884765625, 0.007156735897064209, 0.007176191806793213, 0.007201791763305664, 0.007210015773773193, 0.007192543983459473, 0.007150591850280762, 0.007171103954315186, 0.007250912189483642, 0.007060480117797851, 0.007301119804382325, 0.007193664073944092, 0.007216063976287842, 0.007238656044006348, 0.007178239822387696, 0.0070830078125, 0.006677504062652588, 0.007080959796905518, 0.00694374418258667, 0.006944767951965332, 0.006900735855102539, 0.006929408073425293, 0.006973440170288086, 0.006927360057830811, 0.00694374418258667, 0.006959104061126709, 0.006945792198181152, 0.007652416229248047, 0.007201727867126465, 0.007216127872467041, 0.007225344181060791, 0.007181312084197998, 0.007181312084197998, 0.007137279987335205, 0.007164927959442138, 0.007160831928253173, 0.007139328002929687, 0.007172095775604248, 0.007187456130981445, 0.007233535766601563, 0.007178239822387696, 0.007258111953735351, 0.007156735897064209, 0.007164927959442138, 0.007247871875762939, 0.007160831928253173, 0.007157760143280029, 0.007158783912658692, 0.007122943878173828, 0.007138304233551026, 0.007206912040710449, 0.007171072006225586, 0.0071833600997924804, 0.007165952205657959, 0.007180287837982178, 0.007208960056304932, 0.007139359951019287, 0.007208928108215332, 0.0071823358535766605, 0.007166975975036621, 0.007168000221252442, 0.007156735897064209, 0.0071107840538024904, 0.007152544021606445, 0.007195615768432617, 0.007268352031707764, 0.0074332160949707035, 0.007155712127685547, 0.007203839778900147, 0.0072468481063842774, 0.007172128200531006, 0.007189472198486328, 
0.007163904190063477, 0.007238656044006348, 0.00719155216217041, 0.0071905279159545895, 0.007165952205657959, 0.007131135940551757, 0.007178239822387696, 0.006767615795135498, 0.006959104061126709, 0.006920191764831543, 0.006948863983154297, 0.00694374418258667, 0.006930463790893555, 0.006994912147521973, 0.0069918718338012695, 0.006967296123504638, 0.006996992111206054, 0.006946815967559815, 0.006946815967559815, 0.006959104061126709, 0.006977536201477051, 0.006917119979858399, 0.006953983783721924, 0.006886464118957519, 0.006864831924438476, 0.006910975933074951, 0.00694271993637085, 0.007000063896179199, 0.006933504104614257, 0.006947840213775635, 0.006968319892883301, 0.006964223861694336, 0.00693555212020874, 0.006945792198181152, 0.0070677118301391605, 0.006916031837463379, 0.006896639823913574, 0.006923264026641846, 0.0069212160110473635, 0.006892543792724609, 0.006896639823913574, 0.006929408073425293, 0.0069283838272094726, 0.006922239780426025, 0.006895616054534912, 0.006929408073425293, 0.006912000179290771, 0.00694374418258667, 0.0069621758460998535, 0.006969344139099121, 0.006976511955261231, 0.006927360057830811, 0.006938623905181885, 0.0069212160110473635, 0.006919167995452881, 0.006896639823913574, 0.00693555212020874, 0.006957056045532227, 0.006953983783721924, 0.006870016098022461, 0.006864895820617676, 0.006938623905181885, 0.0068884482383728025, 0.007036928176879883, 0.0070522880554199216, 0.006993919849395752, 0.006884352207183838, 0.006945792198181152, 0.006977536201477051, 0.006964223861694336, 0.006690815925598144, 0.00694374418258667, 0.006906879901885986, 0.006920191764831543, 0.006897664070129395, 0.006945792198181152, 0.006913023948669434, 0.0068884482383728025, 0.006927360057830811, 0.0069283838272094726, 0.006909952163696289, 0.0068884482383728025, 0.007047167778015137, 0.007031807899475098, 0.006930431842803955, 0.006920191764831543, 0.0069928960800170895, 0.006917119979858399, 0.007172128200531006, 0.006964191913604736, 0.006999040126800537, 0.006936575889587402, 0.006957151889801025, 0.006785952091217041, 0.006917119979858399, 0.006933504104614257, 0.0068884482383728025, 0.006924352169036865, 0.006911935806274414, 0.00689356803894043, 0.0068853759765625, 0.007036928176879883, 0.006923264026641846, 0.006969344139099121, 0.007003168106079102, 0.00694268798828125, 0.006929408073425293, 0.006970367908477783, 0.0070256638526916505, 0.006968319892883301, 0.007050240039825439, 0.006912000179290771, 0.006802432060241699, 0.0068280320167541505, 0.006895616054534912, 0.00679423999786377, 0.006817791938781738, 0.006910975933074951, 0.006980607986450196, 0.006910975933074951, 0.006953983783721924, 0.00694271993637085, 0.006988800048828125, 0.006947840213775635, 0.0069253120422363285, 0.006947840213775635, 0.006899712085723877, 0.007031807899475098, 0.006920191764831543, 0.00687718391418457, 0.006904831886291504, 0.0069253120422363285, 0.007098368167877197, 0.006732800006866455, 0.006894591808319092, 0.006929408073425293, 0.006922239780426025, 0.0068986878395080565, 0.006880256175994873, 0.0069253120422363285, 0.006915071964263916, 0.00690176010131836, 0.006933504104614257, 0.006907904148101806, 0.006871039867401123, 0.006799359798431396, 0.006905856132507324, 0.006940671920776367, 0.006913023948669434, 0.006946815967559815, 0.006915071964263916, 0.007032832145690918, 0.006918144226074219, 0.007106560230255127, 0.007186431884765625, 0.007161856174468994, 0.007244800090789795, 0.007147520065307617, 0.006990848064422607, 0.0069212160110473635, 0.006854656219482422, 
0.006796288013458252, 0.00679423999786377, 0.00678604793548584, 0.0068853759765625, 0.006931456089019775, 0.006856704235076904, 0.006825984001159668, 0.006801407814025879, 0.006830080032348633, 0.006818816184997558, 0.0068055038452148435, 0.0069253120422363285, 0.007045119762420654, 0.006903808116912841, 0.006887455940246582, 0.006897632122039795, 0.006918144226074219, 0.006870016098022461, 0.006913023948669434, 0.0069212160110473635, 0.006912000179290771, 0.0068986878395080565, 0.0069847040176391605, 0.006938623905181885, 0.006944767951965332, 0.006912000179290771, 0.006945792198181152, 0.0069212160110473635, 0.006972415924072266, 0.0068915200233459475, 0.006923264026641846, 0.006937600135803222, 0.006879231929779053, 0.0069324798583984375, 0.006938623905181885, 0.006658048152923584, 0.006896639823913574, 0.006927360057830811, 0.007002111911773682, 0.0069324798583984375, 0.007013376235961914, 0.0069847040176391605, 0.006929408073425293, 0.0069324798583984375, 0.006944767951965332, 0.00692633581161499, 0.006953983783721924, 0.006901823997497559, 0.006937535762786865, 0.006959104061126709, 0.006940671920776367, 0.0068986878395080565, 0.006988800048828125, 0.007201791763305664, 0.006849535942077637, 0.006788095951080322, 0.006910975933074951, 0.006837247848510742, 0.006912000179290771, 0.007004159927368164, 0.006938623905181885, 0.00692633581161499, 0.006884352207183838, 0.00694271993637085, 0.006973440170288086, 0.006910975933074951, 0.006913023948669434, 0.006949888229370117, 0.006947840213775635, 0.007390207767486572, 0.007200767993927002, 0.00704307222366333, 0.0071833600997924804, 0.007178239822387696, 0.007154687881469727, 0.0071188478469848635, 0.007206912040710449, 0.007146527767181396, 0.007153632164001465, 0.007137279987335205, 0.007097343921661377, 0.007308288097381592, 0.007172095775604248, 0.007180287837982178, 0.007019519805908203, 0.006879231929779053, 0.006940671920776367, 0.00692633581161499, 0.007121920108795166, 0.006908927917480469, 0.006814720153808594, 0.006817791938781738, 0.006964223861694336, 0.006932544231414795, 0.006963136196136474, 0.006973440170288086, 0.006905856132507324, 0.006922239780426025, 0.006762495994567871, 0.006918144226074219, 0.006913023948669434, 0.006938623905181885, 0.006910975933074951, 0.006967296123504638, 0.006847487926483154, 0.006859776020050049, 0.006930463790893555, 0.007283679962158203, 0.0071823358535766605, 0.007187456130981445, 0.007155712127685547, 0.007146495819091797, 0.0071188478469848635, 0.007225344181060791, 0.007223296165466309, 0.007175168037414551, 0.007128064155578613, 0.007134208202362061, 0.007159808158874512, 0.0071823358535766605, 0.007158783912658692, 0.007197696208953858, 0.007146495819091797, 0.006982656002044678, 0.00694271993637085, 0.006915071964263916, 0.006899712085723877, 0.007054336071014404, 0.0069324798583984375, 0.006967296123504638, 0.006976511955261231, 0.006933504104614257, 0.006912000179290771, 0.006996992111206054, 0.006922239780426025, 0.006965248107910156, 0.006939648151397705, 0.006881279945373535, 0.006917119979858399, 0.006904831886291504, 0.0067983360290527345, 0.006876160144805908, 0.006922239780426025, 0.0069283838272094726, 0.0069283838272094726, 0.0069407038688659665, 0.0069437122344970705, 0.00692633581161499, 0.006854656219482422, 0.007035903930664063, 0.006854656219482422, 0.006830080032348633, 0.006785024166107178, 0.006940671920776367, 0.006947840213775635, 0.0068853759765625, 0.0069324798583984375, 0.006968319892883301, 0.0069918718338012695, 0.006903808116912841, 0.00692633581161499, 
0.00673689603805542, 0.006993919849395752, 0.006946815967559815, 0.006936575889587402, 0.006940671920776367, 0.00692633581161499, 0.006920191764831543, 0.006923264026641846, 0.006945792198181152, 0.006918144226074219, 0.006957056045532227, 0.006922239780426025, 0.006882304191589355, 0.00687820816040039, 0.006937600135803222, 0.007326720237731933, 0.007195648193359375, 0.0071526398658752445, 0.007150591850280762, 0.007319551944732666, 0.007227392196655274, 0.00714137601852417, 0.007160831928253173, 0.007127039909362793, 0.007177216053009033, 0.007159808158874512, 0.007160831928253173, 0.007234560012817383, 0.007169023990631103, 0.007154687881469727, 0.007128064155578613, 0.0071792640686035155, 0.007105535984039306, 0.007177279949188232, 0.007118783950805664, 0.0071526398658752445, 0.007151616096496582, 0.007123007774353028, 0.007162816047668457, 0.00714035177230835, 0.007173151969909668, 0.0071116480827331545, 0.007172095775604248, 0.007221248149871826, 0.0072120318412780765, 0.0071792640686035155, 0.007106560230255127, 0.007119967937469483, 0.007060383796691895, 0.007097343921661377, 0.006906879901885986, 0.0068853759765625, 0.006978559970855713, 0.006908927917480469, 0.00687718391418457, 0.006912000179290771, 0.006936575889587402, 0.007016479969024658, 0.006908895969390869, 0.006908927917480469, 0.006916096210479736, 0.006929408073425293, 0.006874112129211426, 0.006732800006866455, 0.006924287796020508, 0.006899712085723877, 0.006920191764831543, 0.0069324798583984375, 0.006973440170288086, 0.006863872051239014, 0.006910975933074951, 0.006933568000793457, 0.006932415962219238, 0.0068670082092285156, 0.006906816005706787, 0.006906879901885986, 0.0068915200233459475, 0.006905856132507324, 0.0069283838272094726, 0.00709939193725586, 0.007122943878173828, 0.007062528133392334, 0.007146495819091797, 0.007366655826568603, 0.0072325119972229, 0.007450623989105225, 0.007468031883239746, 0.0072540159225463864, 0.0074403839111328125, 0.007294976234436036, 0.007535615921020508, 0.007610432147979736, 0.007244736194610596, 0.007200767993927002, 0.007193600177764893, 0.007223296165466309, 0.007354368209838867, 0.00724070405960083, 0.007154687881469727, 0.007234560012817383, 0.007361536026000977, 0.007265279769897461, 0.007372799873352051, 0.007112703800201416, 0.007201791763305664, 0.007185408115386963, 0.007228415966033935, 0.007177216053009033, 0.007107583999633789, 0.0071495680809020995, 0.00719974422454834, 0.007137279987335205, 0.007214079856872559, 0.007153664112091064, 0.007178239822387696, 0.007168000221252442, 0.007112703800201416, 0.007172095775604248, 0.007144447803497315, 0.00713427209854126, 0.007181248188018799, 0.0071823358535766605, 0.007099423885345459, 0.007133152008056641, 0.007209983825683594, 0.0072130560874938965, 0.006717440128326416, 0.0068986878395080565, 0.006931456089019775, 0.0069550080299377445, 0.006946815967559815, 0.006919167995452881, 0.006931456089019775, 0.006924287796020508, 0.006904831886291504, 0.0069621758460998535, 0.006980607986450196, 0.00694374418258667, 0.006890495777130127, 0.0069212160110473635, 0.006930431842803955, 0.006938623905181885, 0.006871039867401123, 0.006912000179290771, 0.0069918718338012695, 0.006907904148101806, 0.006889472007751465, 0.00694374418258667, 0.006895616054534912, 0.006811647891998291, 0.007019519805908203, 0.0069918718338012695, 0.006940671920776367, 0.006900735855102539, 0.0069027838706970214, 0.007233535766601563, 0.006980607986450196, 0.006880256175994873, 0.006907936096191406, 0.006906847953796386, 0.006892543792724609, 
0.006973440170288086, 0.006941696166992187, 0.00692633581161499, 0.006900735855102539, 0.006994944095611572, 0.006910975933074951, 0.006918144226074219, 0.006909952163696289, 0.00690176010131836, 0.006916096210479736, 0.006914048194885254, 0.006881279945373535, 0.0068986878395080565, 0.006894591808319092, 0.006904831886291504, 0.006972415924072266, 0.006945792198181152, 0.006931456089019775, 0.00691923189163208, 0.006989759922027588, 0.007745535850524903, 0.007271423816680909, 0.0072724480628967286, 0.007168000221252442, 0.007069695949554444, 0.007143424034118652, 0.0071792640686035155, 0.007145472049713135, 0.006711296081542969, 0.0069928960800170895, 0.006958079814910889, 0.00694374418258667, 0.0068731198310852054, 0.006935520172119141, 0.006890495777130127, 0.006951935768127441, 0.006892543792724609, 0.006936575889587402, 0.00693452787399292, 0.006924287796020508, 0.006923264026641846, 0.006969344139099121, 0.00693555212020874, 0.006903808116912841, 0.0069621758460998535, 0.006904831886291504, 0.006927360057830811, 0.006881279945373535, 0.006896639823913574, 0.006912000179290771, 0.00687824010848999, 0.006906847953796386, 0.0069283838272094726, 0.0069253120422363285, 0.007006207942962647, 0.0069027838706970214, 0.006916096210479736, 0.006823935985565186, 0.006957056045532227, 0.006985727787017822, 0.00694271993637085, 0.006905856132507324, 0.006894591808319092, 0.006923264026641846, 0.006929408073425293, 0.006918144226074219, 0.0069253120422363285, 0.00693555212020874, 0.006919167995452881, 0.006907904148101806, 0.006931456089019775, 0.006944767951965332, 0.00692633581161499, 0.0069928960800170895, 0.006924287796020508, 0.0069253120422363285, 0.006985727787017822, 0.006936575889587402, 0.006988863945007324, 0.007, 0.006908927917480469, 0.007223296165466309, 0.007208960056304932, 0.007195712089538574, 0.007176127910614014, 0.0071485438346862796, 0.0071792640686035155, 0.007207935810089112, 0.00714035177230835, 0.007120895862579346, 0.007122943878173828, 0.006635519981384277, 0.006927360057830811, 0.007271423816680909, 0.0076277761459350585, 0.007121920108795166, 0.007145472049713135, 0.007154687881469727, 0.00722431993484497, 0.007168000221252442, 0.0071280961036682125, 0.007166944026947022, 0.007157760143280029, 0.007161856174468994, 0.007112703800201416, 0.0071485438346862796, 0.007177216053009033, 0.007278592109680176, 0.007158783912658692, 0.007161856174468994, 0.007146495819091797, 0.007150591850280762, 0.007261184215545655, 0.007311359882354736, 0.007236608028411865, 0.007136256217956543, 0.007162879943847656, 0.007173120021820068, 0.007165952205657959, 0.007139328002929687, 0.007162879943847656, 0.00714035177230835, 0.00724070405960083, 0.007016448020935059, 0.00695091199874878, 0.006858751773834228, 0.007139328002929687, 0.007164927959442138, 0.007113728046417236, 0.007157760143280029, 0.007175168037414551, 0.007196671962738037, 0.0072120318412780765, 0.007121920108795166, 0.007134208202362061, 0.007142399787902832, 0.007217152118682861, 0.007139328002929687, 0.007145472049713135, 0.0071485438346862796, 0.0071792640686035155, 0.007194623947143554, 0.007117824077606201, 0.007176191806793213, 0.007164927959442138, 0.007123968124389648, 0.007157760143280029, 0.007009280204772949, 0.007128064155578613, 0.006965248107910156, 0.006978559970855713, 0.0069202880859375, 0.006963103771209717, 0.006947840213775635, 0.00689356803894043, 0.006908927917480469, 0.006939648151397705, 0.006964223861694336, 0.006845439910888672, 0.006867968082427979, 0.006905856132507324, 0.0069324798583984375, 
0.006947840213775635, 0.006990848064422607, 0.006975488185882568, 0.0069816322326660156, 0.006986752033233643, 0.006988800048828125, 0.006949888229370117, 0.006967296123504638, 0.006916096210479736, 0.006948863983154297, 0.00693452787399292, 0.007023615837097168, 0.006916096210479736, 0.006958079814910889, 0.006940671920776367, 0.006929408073425293, 0.006922239780426025, 0.006956031799316406, 0.006966271877288818, 0.006889472007751465, 0.006949888229370117, 0.00694374418258667, 0.006927360057830811, 0.006892543792724609, 0.006903808116912841, 0.006930431842803955, 0.006923264026641846, 0.006896639823913574, 0.006927360057830811, 0.007016448020935059, 0.0069283838272094726, 0.006895616054534912, 0.006881279945373535, 0.006948863983154297, 0.0068884482383728025, 0.0069212160110473635, 0.006929408073425293, 0.006931456089019775, 0.006938623905181885, 0.006924287796020508, 0.006916096210479736, 0.0069621758460998535, 0.0069253120422363285, 0.006941696166992187, 0.006957056045532227, 0.006881279945373535, 0.006908927917480469, 0.0071485438346862796, 0.0072427520751953125, 0.007249951839447022, 0.007106527805328369, 0.007165952205657959, 0.007170048236846924, 0.007150623798370361, 0.007136223793029785, 0.0067983360290527345, 0.0071905279159545895, 0.00713318395614624, 0.0072120318412780765, 0.007153664112091064, 0.0071823358535766605, 0.0071526398658752445, 0.007177216053009033, 0.007139328002929687, 0.007112703800201416, 0.00706774377822876, 0.007126944065093994, 0.007168000221252442, 0.007120895862579346, 0.007192575931549072, 0.007192575931549072, 0.007158783912658692, 0.007173120021820068, 0.007155712127685547, 0.007130112171173096, 0.007129087924957276, 0.007120895862579346, 0.007165952205657959, 0.007114751815795899, 0.007153664112091064, 0.008437760353088379, 0.007261184215545655, 0.007255040168762207, 0.007196671962738037, 0.007135231971740722, 0.007123968124389648, 0.007169023990631103, 0.007287807941436767, 0.007145472049713135, 0.0071526398658752445, 0.007155712127685547, 0.0071495680809020995, 0.007126016139984131, 0.007172095775604248, 0.007110655784606934, 0.0071792640686035155, 0.007160831928253173, 0.007168000221252442, 0.007131135940551757, 0.00713318395614624, 0.007137279987335205, 0.007173120021820068, 0.007165952205657959, 0.00712502384185791, 0.0071147198677062986, 0.007337984085083008, 0.00723967981338501, 0.0071485438346862796, 0.007139328002929687, 0.007111711978912354, 0.007186399936676025, 0.0071157760620117185, 0.006979584217071533, 0.007001088142395019, 0.007186431884765625, 0.007166975975036621, 0.007126016139984131, 0.007173120021820068, 0.006708223819732666, 0.006940671920776367, 0.006936575889587402, 0.006900735855102539, 0.006919167995452881, 0.006904831886291504, 0.006922239780426025, 0.007017471790313721, 0.007184383869171143, 0.007811103820800781, 0.007531487941741943, 0.007483391761779785, 0.00758681583404541, 0.00765337610244751, 0.007255040168762207, 0.007218175888061523, 0.007275519847869873, 0.0071495680809020995, 0.007093247890472412, 0.007185408115386963, 0.007233535766601563, 0.007355391979217529, 0.0072468481063842774, 0.007243775844573975, 0.007364607810974121, 0.007204864025115967, 0.007215104103088379, 0.007201791763305664, 0.007236608028411865, 0.007218175888061523, 0.007194623947143554, 0.0072120318412780765, 0.007243775844573975, 0.007222271919250488, 0.007243775844573975, 0.007147520065307617, 0.0071833600997924804, 0.007225344181060791, 0.007211008071899414, 0.0072120318412780765, 0.007163904190063477, 0.00719155216217041, 
0.007208960056304932, 0.007207935810089112, 0.007214079856872559, 0.007316480159759522, 0.0069324798583984375, 0.006978559970855713, 0.00693555212020874, 0.0068351998329162595, 0.0068915200233459475, 0.006920191764831543, 0.006920191764831543, 0.006910975933074951, 0.00688640022277832, 0.006916096210479736, 0.006922239780426025, 0.006882304191589355, 0.006899712085723877, 0.006923264026641846, 0.006790143966674805, 0.006797311782836914, 0.0068280320167541505, 0.007162879943847656, 0.007243775844573975, 0.007204864025115967, 0.007261184215545655, 0.007252992153167725, 0.007359488010406494, 0.007226367950439453, 0.006941728115081787, 0.006915103912353515, 0.006926271915435791, 0.006882304191589355, 0.006951935768127441, 0.0069212160110473635, 0.006929408073425293, 0.006905856132507324, 0.006938623905181885, 0.006964223861694336, 0.0069212160110473635, 0.006890495777130127, 0.006847487926483154, 0.0069816322326660156, 0.0068986878395080565, 0.0069324798583984375, 0.00692633581161499, 0.00689356803894043, 0.0069550080299377445, 0.006930431842803955, 0.006916096210479736, 0.006906879901885986, 0.006874112129211426, 0.006896639823913574, 0.0069027838706970214, 0.006863872051239014, 0.0068997759819030766, 0.006916031837463379, 0.006895616054534912, 0.006865920066833496, 0.006917119979858399, 0.006982656002044678, 0.0068986878395080565, 0.006867968082427979, 0.007070720195770264, 0.007262207984924316, 0.007207935810089112, 0.007172095775604248, 0.007165952205657959, 0.007223296165466309, 0.00719974422454834, 0.0071833600997924804, 0.0071485438346862796, 0.007200767993927002, 0.00719155216217041, 0.0072120318412780765, 0.007189504146575928, 0.007144447803497315, 0.007255040168762207, 0.007427072048187256, 0.007362559795379638, 0.0073134078979492185, 0.007277567863464355, 0.007241727828979493, 0.00729702377319336, 0.007221248149871826, 0.006711296081542969, 0.006862847805023193, 0.006920191764831543, 0.006929408073425293, 0.006931456089019775, 0.006895616054534912, 0.006875135898590088, 0.006905856132507324, 0.006908927917480469, 0.00690176010131836, 0.006933504104614257, 0.006914048194885254, 0.006883327960968018, 0.006931456089019775, 0.006900735855102539, 0.006987840175628662, 0.00689247989654541, 0.00781824016571045, 0.00744755220413208, 0.007310336112976074, 0.007288832187652588, 0.007267327785491944, 0.007269375801086426, 0.006910975933074951, 0.006931456089019775, 0.0068351998329162595, 0.00682700777053833, 0.006924287796020508, 0.006918144226074219, 0.006965248107910156, 0.006872096061706543, 0.006898655891418457, 0.00688640022277832, 0.006850560188293457, 0.006906879901885986, 0.006908927917480469, 0.006881279945373535, 0.006858751773834228, 0.006929408073425293, 0.006936575889587402, 0.00728166389465332, 0.007326720237731933, 0.007228415966033935, 0.007215104103088379, 0.007188543796539306, 0.007240640163421631, 0.007174143791198731, 0.00722431993484497, 0.007202816009521484, 0.007200767993927002, 0.007223296165466309, 0.007198719978332519, 0.0069324798583984375, 0.006934559822082519, 0.00693449592590332, 0.007021567821502686, 0.006930431842803955, 0.006920191764831543, 0.006916096210479736, 0.006895616054534912, 0.0069283838272094726, 0.0068392958641052244, 0.0069069118499755855, 0.006841343879699707, 0.00725708818435669, 0.007260159969329834, 0.007241727828979493, 0.007247871875762939, 0.007206912040710449, 0.007222271919250488, 0.007211008071899414, 0.007329792022705078, 0.007324672222137451, 0.007200767993927002, 0.007244800090789795, 0.007245823860168457, 0.007443456172943115, 
0.007256063938140869, 0.0073768959045410155, 0.007235583782196045, 0.007241727828979493, 0.007235583782196045, 0.007215104103088379, 0.007192575931549072, 0.007173120021820068, 0.007209983825683594, 0.007211008071899414, 0.007208960056304932, 0.007287807941436767, 0.007184383869171143, 0.007211008071899414, 0.007193600177764893, 0.007200767993927002, 0.00723967981338501, 0.007163904190063477, 0.007228415966033935, 0.007236608028411865, 0.0072499198913574215, 0.007252992153167725, 0.007192575931549072, 0.007266304016113281, 0.007219200134277344, 0.0072468481063842774, 0.007203839778900147, 0.007416831970214844, 0.007193600177764893, 0.007203839778900147, 0.007204864025115967, 0.007181312084197998, 0.00719155216217041, 0.007156735897064209, 0.0071823358535766605, 0.00729804801940918, 0.007683072090148926, 0.007319551944732666, 0.00714137601852417, 0.007204864025115967, 0.007203839778900147, 0.007198719978332519, 0.007241727828979493, 0.007290880203247071, 0.007216127872467041, 0.007223296165466309, 0.007219200134277344, 0.007227392196655274, 0.007187456130981445, 0.006689792156219483, 0.006905856132507324, 0.006931456089019775, 0.006937600135803222, 0.006930431842803955, 0.006923359870910644, 0.006880159854888916, 0.006866943836212158, 0.006924287796020508, 0.00690176010131836, 0.006882304191589355, 0.00688640022277832, 0.0068884482383728025, 0.006897664070129395, 0.006847487926483154, 0.006914048194885254, 0.006906879901885986, 0.006920191764831543, 0.007045119762420654, 0.007227392196655274, 0.0072468481063842774, 0.007323679924011231, 0.007244768142700195, 0.007164927959442138, 0.007233535766601563, 0.007241727828979493, 0.0072325119972229, 0.007702527999877929, 0.007349247932434082, 0.00725708818435669, 0.0072468481063842774, 0.007221248149871826, 0.007245823860168457, 0.00723967981338501, 0.007154687881469727, 0.007200767993927002, 0.00722431993484497, 0.007226367950439453, 0.007260159969329834, 0.007193600177764893, 0.0072837119102478025, 0.00724070405960083, 0.007259136199951172, 0.007252992153167725, 0.0072120318412780765, 0.0071833600997924804, 0.00733900785446167, 0.007244800090789795, 0.00724070405960083, 0.007233535766601563, 0.007185408115386963, 0.007206912040710449, 0.00724070405960083, 0.007236608028411865, 0.007241727828979493, 0.007188479900360107, 0.007227392196655274, 0.0072325119972229, 0.007080959796905518, 0.006898719787597657, 0.0068853440284729, 0.006909952163696289, 0.006929408073425293, 0.006667263984680176, 0.006915071964263916, 0.006930431842803955, 0.0069253120422363285, 0.007094272136688232, 0.007181312084197998, 0.007229440212249756, 0.007203839778900147, 0.007207935810089112, 0.007291903972625732, 0.00801587200164795, 0.007601151943206787, 0.007642111778259277, 0.008120320320129394, 0.007384064197540283, 0.0073062400817871095, 0.007435264110565186, 0.007258111953735351, 0.00724889612197876, 0.007291903972625732, 0.007284736156463623, 0.007277567863464355, 0.007237631797790528, 0.007302144050598145, 0.007227392196655274, 0.007258111953735351, 0.007234560012817383, 0.007227392196655274, 0.007180287837982178, 0.0071792640686035155, 0.007203839778900147, 0.007216127872467041, 0.007235583782196045, 0.007185408115386963, 0.007156735897064209, 0.00719155216217041, 0.007216127872467041, 0.007226367950439453, 0.007127039909362793, 0.007211071968078613, 0.007194560050964355, 0.007219200134277344, 0.007014400005340577, 0.006763519763946534, 0.006825024127960205, 0.006907872200012207, 0.0069549760818481444, 0.0068884482383728025, 0.006909952163696289, 
0.006920191764831543, 0.00688640022277832, 0.006910975933074951, 0.006916096210479736, 0.006936575889587402, 0.006867968082427979, 0.006909952163696289, 0.00690176010131836, 0.00713318395614624, 0.007431168079376221, 0.0074106879234313965, 0.007278592109680176, 0.007231488227844239, 0.007189504146575928, 0.006842368125915528, 0.00694374418258667, 0.006903808116912841, 0.0069621758460998535, 0.006896639823913574, 0.00694374418258667, 0.006929408073425293, 0.006917119979858399, 0.006858751773834228, 0.006825984001159668, 0.006909952163696289, 0.00695091199874878, 0.006929408073425293, 0.006944767951965332, 0.006965248107910156, 0.0068986878395080565, 0.00693452787399292, 0.006913023948669434, 0.006921279907226563, 0.006880191802978516, 0.006890495777130127, 0.006913023948669434, 0.006915071964263916, 0.006900735855102539, 0.006923264026641846, 0.006927360057830811, 0.006971392154693603, 0.006924287796020508, 0.006909984111785889, 0.006915040016174316, 0.006879295825958252, 0.006904767990112305, 0.006910975933074951, 0.0069324798583984375, 0.007047167778015137, 0.007265279769897461, 0.007287807941436767, 0.007184383869171143, 0.007193600177764893, 0.007211008071899414, 0.007209983825683594, 0.00719974422454834, 0.007202816009521484, 0.007223296165466309, 0.007127039909362793, 0.007163904190063477, 0.0072130560874938965, 0.007255040168762207, 0.007260159969329834, 0.007207935810089112, 0.007245823860168457, 0.0073471999168395995, 0.007262239933013916, 0.007310304164886474, 0.007233535766601563, 0.007291903972625732, 0.007250944137573242, 0.007233535766601563, 0.007304192066192627, 0.007285759925842285, 0.007193600177764893, 0.007211008071899414, 0.007277567863464355]",tokens/s,141.6035870976309,,,1,64,1,,, -4bit-awq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,EleutherAI/gpt-j-6b,EleutherAI/gpt-j-6b,cuda,0,42,,,True,,,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,gptj,MB,3841.032192,5463.605248,0.0,4833.93536,4546.659328,s,1,9.6448310546875,9.6448310546875,0.0,9.6448310546875,9.6448310546875,9.6448310546875,9.6448310546875,[9.6448310546875],,kWh,3.399129549236162e-05,1.8614283855845525e-05,6.279143912199525e-05,0.00011539701847020238,,MB,2106.740736,5499.256832,0.0,4852.809728,4095.21408,s,10,10.69994921875,1.069994921875,0.0003820383036143575,1.0699613647460937,1.0703675537109376,1.0706032104492187,1.0707917358398438,"[1.0696763916015626, 1.070089111328125, 1.070047607421875, 1.070315185546875, 1.0698458251953125, 1.069434326171875, 1.0698751220703124, 1.0708388671875, 1.0702027587890626, 1.0696240234375]",tokens/s,239.25347192433375,kWh,1.2644577642638083e-05,6.928540980028382e-06,7.129430703539941e-05,9.086742565806588e-05,tokens/kWh,2817291.214602337,MB,2112.012288,5501.353984,0.0,4852.809728,4202.507264,s,10,25.723919921874995,2.5723919921875003,0.03170192501043793,2.5836536865234376,2.60007939453125,2.6104792724609376,2.6187991748046873,"[2.551198974609375, 2.620879150390625, 2.597768310546875, 2.575357177734375, 2.597622802734375, 2.5919501953125, 2.59327099609375, 
2.540228271484375, 2.519615234375, 2.53602880859375]",tokens/s,24.490824178948838,kWh,2.9987507535972914e-05,1.6435938233742835e-05,7.748920088020417e-05,0.00012391264664991987,tokens/kWh,508422.6808421636,,s,630,25.71593009948731,0.04081893666585286,0.0009962796389494275,0.04124415969848633,0.04164559860229492,0.04198702163696289,0.042644383468627936,"[0.03952640151977539, 0.03925503921508789, 0.03931545639038086, 0.03945779037475586, 0.03929292678833008, 0.03946086502075195, 0.039539710998535156, 0.03910246276855469, 0.03955199813842773, 0.03988172912597656, 0.039357440948486325, 0.03926630401611328, 0.03938611221313477, 0.039087104797363284, 0.03928166580200195, 0.03926323318481445, 0.039218177795410154, 0.03919257736206055, 0.03920793533325195, 0.03926220703125, 0.039196670532226564, 0.0392355842590332, 0.03923660659790039, 0.03933900833129883, 0.03971891021728516, 0.039362560272216796, 0.0391536636352539, 0.03926835250854492, 0.03945369720458984, 0.04169318389892578, 0.04177407836914063, 0.04137881469726563, 0.04129280090332031, 0.041388031005859374, 0.0415467529296875, 0.04205055999755859, 0.04146380615234375, 0.041450496673583984, 0.041178112030029294, 0.04135321426391601, 0.04134502410888672, 0.04139212799072266, 0.04136447906494141, 0.04130099105834961, 0.041183231353759765, 0.041265151977539063, 0.04137062454223633, 0.041299968719482424, 0.041472000122070314, 0.04188467025756836, 0.04146380615234375, 0.04148940658569336, 0.04150374221801758, 0.04134092712402344, 0.0413573112487793, 0.04147814559936523, 0.041247745513916016, 0.04131123352050781, 0.041460735321044925, 0.04175360107421875, 0.04170649719238281, 0.04151910400390625, 0.041442302703857424, 0.0412334098815918, 0.041285633087158206, 0.042229759216308595, 0.04126105499267578, 0.0413675537109375, 0.0413829116821289, 0.04126924896240235, 0.042464256286621094, 0.041436161041259766, 0.04135935974121094, 0.04117299270629883, 0.04144844818115234, 0.041644065856933594, 0.04135011291503906, 0.04151193618774414, 0.04137984085083008, 0.041207809448242184, 0.04141567993164062, 0.04120678329467774, 0.04139519882202149, 0.04194611358642578, 0.041596927642822266, 0.04163891220092773, 0.04134912109375, 0.04134400177001953, 0.041296897888183595, 0.04365619277954102, 0.04207923126220703, 0.041409534454345705, 0.04122521591186523, 0.04209766387939453, 0.041611263275146484, 0.0414648323059082, 0.041180160522460936, 0.04128768157958984, 0.04138905715942383, 0.0425082893371582, 0.04329574584960937, 0.04238134384155273, 0.04141052627563477, 0.04131737518310547, 0.041280513763427736, 0.04093753433227539, 0.04127328109741211, 0.04141567993164062, 0.04210892868041992, 0.04156934356689453, 0.041363391876220706, 0.04139519882202149, 0.042028030395507815, 0.043156478881835936, 0.04153139114379883, 0.041319423675537106, 0.041322494506835936, 0.04128768157958984, 0.04168294525146484, 0.04150476837158203, 0.04133375930786133, 0.041381889343261716, 0.041560062408447264, 0.041523200988769535, 0.04132863998413086, 0.04181401443481445, 0.039626750946044925, 0.03930112075805664, 0.039218177795410154, 0.03928985595703125, 0.039256065368652344, 0.03941888046264649, 0.041708545684814455, 0.04167679977416992, 0.04125286483764649, 0.04128768157958984, 0.041210880279541014, 0.04117094421386719, 0.04134297561645508, 0.04131532669067383, 0.04141056060791016, 0.04189286422729492, 0.04129177474975586, 0.04126822280883789, 0.04130099105834961, 0.04135424041748047, 0.04133171081542969, 0.04127436828613281, 0.04127334213256836, 0.041365505218505856, 0.04125696182250976, 
0.041247745513916016, 0.04108806228637695, 0.041484222412109376, 0.04130918502807617, 0.04138598251342773, 0.041632766723632815, 0.043222015380859374, 0.041627647399902344, 0.041644065856933594, 0.04132553482055664, 0.041373695373535156, 0.04117606353759766, 0.041193473815917966, 0.041366527557373044, 0.042505214691162106, 0.04145663833618164, 0.041335807800292966, 0.04137881469726563, 0.04132044982910156, 0.041452545166015625, 0.04133990478515625, 0.04158259201049805, 0.04125183868408203, 0.041398273468017575, 0.04129894256591797, 0.041289726257324216, 0.0412231674194336, 0.04113817596435547, 0.04117401504516602, 0.04128870391845703, 0.041204734802246096, 0.04185702514648437, 0.04144025421142578, 0.04154470443725586, 0.04134400177001953, 0.041150463104248046, 0.041312255859375, 0.04143820953369141, 0.03956326293945313, 0.039258113861083986, 0.04025446319580078, 0.04231679916381836, 0.041390079498291016, 0.04151500701904297, 0.04134092712402344, 0.04099379348754883, 0.04073984146118164, 0.040943614959716795, 0.04128460693359375, 0.0411514892578125, 0.041188350677490236, 0.041181182861328124, 0.04084428787231445, 0.04058828735351563, 0.04108492660522461, 0.04148633575439453, 0.0416255989074707, 0.041180160522460936, 0.04112076950073242, 0.04128566360473633, 0.04138800048828125, 0.0413757438659668, 0.0412938232421875, 0.041164798736572264, 0.041171966552734376, 0.0408350715637207, 0.04129587173461914, 0.04074291229248047, 0.04130815887451172, 0.04126009750366211, 0.041221118927001955, 0.04108793640136719, 0.041201663970947267, 0.041109504699707033, 0.041280513763427736, 0.039206912994384766, 0.03926528167724609, 0.03931340789794922, 0.039408641815185545, 0.03916287994384766, 0.039316478729248046, 0.04006604766845703, 0.03936665725708008, 0.03926220703125, 0.03932364654541016, 0.04054323196411133, 0.041744384765625, 0.04128460693359375, 0.041133056640625, 0.04118425750732422, 0.041388031005859374, 0.041312255859375, 0.04122009658813477, 0.041371646881103515, 0.041163776397705076, 0.04130508804321289, 0.04134502410888672, 0.04121395111083984, 0.04107980728149414, 0.0411514892578125, 0.04219084930419922, 0.040174591064453126, 0.039347198486328124, 0.03927552032470703, 0.039293952941894535, 0.039863296508789066, 0.04000460815429688, 0.03931340789794922, 0.04132556915283203, 0.04139519882202149, 0.04140236663818359, 0.041267200469970705, 0.04127231979370117, 0.04122623825073242, 0.04120064163208008, 0.043747329711914064, 0.04158464050292969, 0.041332736968994144, 0.041171966552734376, 0.04132761764526367, 0.04124883270263672, 0.04105414581298828, 0.041237503051757815, 0.0412006721496582, 0.041229278564453124, 0.04117814254760742, 0.04141257476806641, 0.041180160522460936, 0.04142387390136719, 0.04125286483764649, 0.041981952667236325, 0.041523200988769535, 0.041420799255371094, 0.041204734802246096, 0.04127948760986328, 0.041294849395751954, 0.04090265655517578, 0.04134707260131836, 0.041294849395751954, 0.041420799255371094, 0.04248473739624024, 0.0415098876953125, 0.041335807800292966, 0.04130201721191406, 0.041245697021484375, 0.041285633087158206, 0.04132044982910156, 0.04152524948120117, 0.041003009796142575, 0.04133375930786133, 0.04213350296020508, 0.04120576095581055, 0.041299968719482424, 0.04136243057250977, 0.04127641677856445, 0.04199116897583008, 0.041883647918701174, 0.041760768890380856, 0.04131840133666992, 0.041534465789794923, 0.041350143432617184, 0.04123648071289063, 0.041229312896728515, 0.04125491333007812, 0.03945471954345703, 0.0392355842590332, 0.0392806396484375, 
0.03934105682373047, 0.03910358428955078, 0.03919862365722656, 0.03981318283081055, 0.04268742370605469, 0.041839614868164066, 0.04123347091674805, 0.04135212707519531, 0.04191641616821289, 0.041452545166015625, 0.04137779235839844, 0.041212928771972655, 0.04141260910034179, 0.0422553596496582, 0.041202686309814454, 0.04122623825073242, 0.04134400177001953, 0.04157440185546875, 0.04154982376098633, 0.04149555206298828, 0.04153036880493164, 0.041373695373535156, 0.041406463623046875, 0.04132147216796875, 0.04140031814575195, 0.04116275024414062, 0.04094668960571289, 0.04117401504516602, 0.04138700866699219, 0.041452545166015625, 0.041404415130615234, 0.04120678329467774, 0.04157440185546875, 0.041596927642822266, 0.04134809494018555, 0.04117708969116211, 0.0417781753540039, 0.04149964904785156, 0.04164198303222656, 0.04130815887451172, 0.04146278381347656, 0.04123040008544922, 0.04236383819580078, 0.04128255844116211, 0.040997886657714845, 0.04130201721191406, 0.04128153610229492, 0.04134707260131836, 0.041285633087158206, 0.04115865707397461, 0.041060352325439455, 0.041336830139160154, 0.04122828674316406, 0.04119756698608398, 0.041565185546875, 0.04111667251586914, 0.04114944076538086, 0.04130713653564453, 0.0399441909790039, 0.039231487274169925, 0.03993088150024414, 0.03914137649536133, 0.04008857727050781, 0.03930828857421875, 0.03923865509033203, 0.039367679595947266, 0.040313854217529296, 0.04119039916992188, 0.041280513763427736, 0.04135424041748047, 0.041169921875, 0.04135424041748047, 0.041319423675537106, 0.04174335861206055, 0.04134707260131836, 0.04130815887451172, 0.04139929580688476, 0.04120883178710937, 0.04124262237548828, 0.04084838485717773, 0.04137472152709961, 0.04122009658813477, 0.04144844818115234, 0.041599998474121096, 0.04163174438476563, 0.041275390625, 0.04157952117919922, 0.04206796646118164, 0.041543678283691404, 0.041264129638671876, 0.041614334106445314, 0.0413941764831543, 0.04125286483764649, 0.04117401504516602, 0.041193473815917966, 0.041106433868408204, 0.04119756698608398, 0.04122214508056641, 0.04134092712402344, 0.042145790100097655, 0.041708545684814455, 0.041442302703857424, 0.04129587173461914, 0.041128959655761715, 0.042006526947021484, 0.041322494506835936, 0.04115660858154297, 0.04118937683105469, 0.039570430755615234, 0.041659393310546876, 0.041215999603271485, 0.041365505218505856, 0.042148929595947265, 0.04166342544555664, 0.04127334213256836, 0.04122012710571289, 0.04117500686645508, 0.041207809448242184, 0.04128870391845703, 0.041232383728027344, 0.041275390625, 0.041128959655761715, 0.041003009796142575, 0.03967795181274414, 0.03927040100097656, 0.03926937484741211, 0.039395328521728515, 0.039172096252441405, 0.03927347183227539, 0.03940249633789063, 0.03942195129394531, 0.039332862854003905, 0.03936460876464844, 0.03909222412109375, 0.0392171516418457, 0.03928678512573242, 0.0393891830444336, 0.041896961212158204, 0.04253900909423828, 0.04155084609985352, 0.041393150329589845, 0.041196544647216796, 0.041201663970947267, 0.04116787338256836, 0.041171966552734376, 0.04110540771484375, 0.04118425750732422, 0.041219070434570314, 0.04116275024414062, 0.04246527862548828, 0.0413941764831543, 0.04120883178710937, 0.041132030487060545, 0.0412119026184082, 0.04133375930786133, 0.04143308639526367, 0.04150067138671875, 0.04115763092041016, 0.04112486267089844, 0.04147916793823242, 0.041365505218505856, 0.04194614410400391, 0.041964511871337894, 0.041319423675537106, 0.04109414291381836, 0.041240577697753904, 0.04081356811523437, 
0.04116070556640625, 0.03917926406860352, 0.039229438781738284, 0.039054336547851565, 0.03919257736206055, 0.039137279510498044, 0.03914956665039063, 0.0392355842590332, 0.03926425552368164, 0.03902873611450195, 0.03922739028930664, 0.039139328002929685, 0.03935027313232422, 0.03936153411865234, 0.039180286407470705, 0.03958169555664062, 0.039049217224121094, 0.03913216018676758, 0.03920076751708984, 0.04019507217407227, 0.040151039123535154, 0.03979776000976563, 0.039376895904541014, 0.0393175048828125, 0.039367679595947266, 0.03934310531616211, 0.039188480377197264, 0.03921920013427734, 0.039215103149414066, 0.039289886474609376, 0.03922633743286133, 0.039256065368652344, 0.03930931091308594, 0.03907788848876953, 0.03932364654541016, 0.039229438781738284, 0.03950387191772461, 0.039362560272216796, 0.03928166580200195, 0.03926015853881836, 0.03929600143432617, 0.03926630401611328, 0.03954073715209961, 0.03926937484741211, 0.03981414413452149, 0.03924070358276367, 0.039981056213378906, 0.03923865509033203, 0.03926323318481445, 0.03920896148681641, 0.03932467269897461, 0.039242752075195314, 0.03927552032470703, 0.03914342498779297, 0.0390830078125, 0.03930214309692383, 0.03984588623046875, 0.038882305145263675, 0.03897651290893555, 0.03899903869628906, 0.03926937484741211, 0.039218177795410154, 0.03992268753051758, 0.04127743911743164, 0.04133990478515625, 0.04109414291381836, 0.041411582946777346, 0.04134092712402344, 0.04181196975708008, 0.041816062927246093, 0.04158566284179688, 0.04146688079833984, 0.04192256164550781, 0.04130815887451172, 0.04134502410888672, 0.041264129638671876, 0.04134912109375, 0.04110028839111328, 0.04106137466430664, 0.041128959655761715, 0.04189184188842773, 0.04128870391845703, 0.03931545639038086, 0.03916185760498047, 0.039382015228271484, 0.03931852722167969, 0.039332862854003905, 0.03930419158935547, 0.039109630584716795, 0.039193599700927735, 0.03922227096557617, 0.03927859115600586, 0.03922227096557617, 0.039185409545898435, 0.03915059280395508, 0.039090175628662106, 0.040010753631591796, 0.03990937423706055, 0.03930419158935547, 0.039725055694580076, 0.039926784515380856, 0.03924889755249023, 0.039798782348632815, 0.039359489440917966, 0.04032716751098633, 0.041204734802246096, 0.04127231979370117, 0.04104806518554688, 0.04124160003662109, 0.04146585464477539, 0.04205875015258789, 0.04129792022705078, 0.04163891220092773, 0.041073665618896485, 0.041204734802246096, 0.04123648071289063, 0.04130201721191406, 0.0416255989074707, 0.041289726257324216, 0.04113715362548828, 0.04127641677856445, 0.04132966232299805, 0.04231577682495117, 0.041306110382080076, 0.04127436828613281, 0.04110847854614258, 0.0412149772644043, 0.041186336517333985, 0.039282657623291015, 0.03920588684082031, 0.03926835250854492, 0.039229438781738284, 0.03905535888671875, 0.03923763275146484, 0.039416831970214845, 0.03945062255859375, 0.0392806396484375, 0.03917107009887695, 0.039067649841308595, 0.03924070358276367, 0.03927654266357422, 0.040264705657958984, 0.04188467025756836, 0.043225120544433594, 0.0415846061706543]",tokens/s,24.49843336650539,,,1,64,1,,, 
-4bit-awq-exllama-v1-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,stabilityai/stablelm-2-12b,stabilityai/stablelm-2-12b,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - self.load_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3907, in from_pretrained - hf_quantizer.postprocess_model(model) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/base.py"", line 195, in postprocess_model - return self._process_model_after_weight_loading(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/quantizer_awq.py"", line 107, in _process_model_after_weight_loading - model = post_init_awq_exllama_modules(model, self.quantization_config.exllama_config) - File ""/usr/local/lib/python3.10/dist-packages/transformers/integrations/awq.py"", line 462, in post_init_awq_exllama_modules - model = exllama_post_init(model) - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllama.py"", line 144, in exllama_post_init - submodule.post_init() - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllama.py"", line 77, in post_init - self.q4 = exl_ext.make_q4( -NameError: name 'exl_ext' is not defined - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,1,64,1,,, 
-4bit-awq-exllama-v1-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,google/gemma-2b,google/gemma-2b,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 304, in hf_raise_for_status - response.raise_for_status() - File ""/usr/local/lib/python3.10/dist-packages/requests/models.py"", line 1024, in raise_for_status - raise HTTPError(http_error_msg, response=self) -requests.exceptions.HTTPError: 403 Client Error: Forbidden for url: https://huggingface.co/google/gemma-2b/resolve/main/config.json - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1722, in _get_metadata_or_catch_error - metadata = get_hf_file_metadata(url=url, proxies=proxies, timeout=etag_timeout, headers=headers) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1645, in get_hf_file_metadata - r = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 372, in _request_wrapper - response = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 396, in _request_wrapper - hf_raise_for_status(response) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 367, in hf_raise_for_status - raise HfHubHTTPError(message, response=response) from e -huggingface_hub.utils._errors.HfHubHTTPError: (Request ID: Root=1-66948139-2e5cac304163c3a310aca370;1e056ae5-db30-4221-8c89-a45b3dbb08a2) - -403 Forbidden: Please enable access to public gated repositories in your fine-grained token settings to view this repository.. -Cannot access content at: https://huggingface.co/google/gemma-2b/resolve/main/config.json. -If you are trying to create or update content,make sure you have a token with the `write` role. 
- -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1221, in hf_hub_download - return _hf_hub_download_to_cache_dir( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1325, in _hf_hub_download_to_cache_dir - _raise_on_head_call_error(head_call_error, force_download, local_files_only) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1826, in _raise_on_head_call_error - raise LocalEntryNotFoundError( -huggingface_hub.utils._errors.LocalEntryNotFoundError: An error happened while trying to locate the file on the Hub and we cannot find the requested files in the local cache. Please check your connection and try again or make sure your Internet connection is on. - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 445, in cached_file - raise EnvironmentError( -OSError: We couldn't connect to 'https://huggingface.co' to load this file, couldn't find it in the cached files and it looks like google/gemma-2b is not the path to a directory containing a file named config.json. -Checkout your internet connection or see how to run the library in offline mode at 'https://huggingface.co/docs/transformers/installation#offline-mode'. 
- -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,1,64,1,,, -4bit-awq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,EleutherAI/polyglot-ko-12.8b,EleutherAI/polyglot-ko-12.8b,cuda,0,42,,,True,,,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,gpt_neox,MB,7421.128704,9691.46368,0.0,9061.793792,8463.626752,s,1,11.7207158203125,11.7207158203125,0.0,11.7207158203125,11.7207158203125,11.7207158203125,11.7207158203125,[11.7207158203125],,kWh,5.8419946472921925e-05,3.200354085903299e-05,0.00011869842829204114,0.00020912191562399605,,MB,1786.355712,9708.240896,0.0,9061.793792,7981.770752,s,10,23.695999267578127,2.3695999267578123,0.00029233305156627456,2.3694516601562503,2.3700342041015623,2.370054089355469,2.370069997558594,"[2.36999951171875, 2.370073974609375, 2.37002978515625, 2.369398681640625, 2.369565673828125, 2.36945654296875, 2.369369873046875, 2.36934033203125, 2.36944677734375, 2.369318115234375]",tokens/s,108.0351147504761,kWh,2.7993941554375402e-05,1.534050643689369e-05,0.00016053193398100075,0.00020386638197226982,tokens/kWh,1255724.4481575263,MB,1797.402624,9708.240896,0.0,9061.793792,8267.815936,s,10,17.752324462890627,1.7752324462890627,0.016548791522396755,1.7707942504882812,1.78956201171875,1.8031971435546874,1.8141052490234375,"[1.76678759765625, 1.7531986083984374, 1.7733011474609375, 1.76299072265625, 1.7816817626953125, 1.786531982421875, 1.776363037109375, 1.816832275390625, 1.768287353515625, 1.7663499755859375]",tokens/s,35.488310351523204,kWh,2.091660045541655e-05,1.1465140950445376e-05,8.009381407499404e-05,0.00011247555548085593,tokens/kWh,560121.705828988,,s,630,17.749883913040172,0.028174418909587565,0.0006436458263497911,0.027877887725830077,0.02887813186645508,0.029266892623901367,0.03038768617630005,"[0.029578239440917968, 0.027816959381103516, 0.027704320907592773, 0.028240896224975585, 0.027997184753417968, 0.0275599365234375, 0.027268096923828124, 0.02752511978149414, 0.02775449562072754, 0.027644927978515626, 0.027658239364624023, 0.027609088897705077, 0.02756710433959961, 0.027622400283813478, 0.02774323272705078, 0.027479040145874024, 0.027637760162353517, 0.027667455673217774, 0.0277391357421875, 0.02814975929260254, 0.028239871978759764, 0.027670528411865233, 0.02816716766357422, 0.027517951965332032, 0.027496448516845705, 0.027666431427001953, 0.0276889591217041, 0.027572223663330078, 0.027619327545166016, 0.027603967666625977, 0.027664384841918944, 0.02768998336791992, 0.027440128326416017, 0.027271167755126953, 0.027661312103271486, 0.027655168533325194, 0.027711488723754882, 0.02794598388671875, 0.02860748863220215, 0.02771455955505371, 0.030692352294921874, 0.029077503204345705, 0.02774323272705078, 0.02772377586364746, 0.027797504425048827, 0.028257280349731444, 0.028085248947143555, 0.027991039276123047, 0.02977689552307129, 0.02917888069152832, 0.028820480346679687, 0.02960383987426758, 0.028636159896850585, 0.027571199417114257, 0.027701248168945314, 0.02791219139099121, 
0.028391424179077147, 0.028180479049682617, 0.02834534454345703, 0.028419071197509766, 0.028513280868530274, 0.028821504592895508, 0.02873958396911621, 0.02875801658630371, 0.027893760681152343, 0.02728447914123535, 0.027478015899658204, 0.027778047561645508, 0.02774630355834961, 0.027806720733642577, 0.0287324161529541, 0.027882495880126954, 0.027612159729003907, 0.027656192779541015, 0.02758143997192383, 0.027447296142578126, 0.027666431427001953, 0.02775551986694336, 0.02755379295349121, 0.02769715118408203, 0.02771251106262207, 0.028224512100219725, 0.02833203125, 0.027850751876831056, 0.02772275161743164, 0.02794495964050293, 0.027681791305541992, 0.027636735916137696, 0.027650047302246093, 0.027720703125, 0.027580415725708008, 0.027637760162353517, 0.028268543243408203, 0.027819007873535157, 0.02778316879272461, 0.027535360336303712, 0.027645952224731447, 0.0276889591217041, 0.027673599243164062, 0.027679744720458983, 0.027640832901000976, 0.027570175170898437, 0.027664384841918944, 0.027681791305541992, 0.02771251106262207, 0.02778828811645508, 0.027838464736938476, 0.027732992172241212, 0.027599872589111327, 0.02772684860229492, 0.027587583541870117, 0.027704320907592773, 0.027671552658081053, 0.02774015998840332, 0.027554815292358398, 0.027646976470947264, 0.027681791305541992, 0.02769817543029785, 0.027669504165649415, 0.028008447647094727, 0.02856857681274414, 0.028050432205200194, 0.028597248077392577, 0.02852659225463867, 0.0286167049407959, 0.02855833625793457, 0.028670976638793946, 0.028252159118652344, 0.031253503799438476, 0.029466623306274413, 0.02882252883911133, 0.02872422409057617, 0.02880614471435547, 0.028056575775146485, 0.027666431427001953, 0.027625471115112304, 0.02779545593261719, 0.027685888290405275, 0.027613183975219727, 0.027768831253051757, 0.027682815551757813, 0.028048383712768556, 0.028227584838867188, 0.028597248077392577, 0.028735488891601563, 0.029314048767089845, 0.028660736083984374, 0.027711488723754882, 0.027709440231323244, 0.027720703125, 0.02770534324645996, 0.027599872589111327, 0.027846656799316406, 0.027657215118408202, 0.027716608047485353, 0.027812864303588865, 0.02780467224121094, 0.02770636749267578, 0.027670528411865233, 0.027571199417114257, 0.027623424530029295, 0.02772275161743164, 0.02770636749267578, 0.027704320907592773, 0.027778047561645508, 0.028424192428588867, 0.028791807174682618, 0.02871603202819824, 0.028676095962524413, 0.028496896743774414, 0.02858393669128418, 0.02872422409057617, 0.02880102348327637, 0.028404735565185548, 0.02778828811645508, 0.02778009605407715, 0.02791116714477539, 0.02817433547973633, 0.02771455955505371, 0.027864063262939453, 0.027863040924072265, 0.028012544631958007, 0.02872319984436035, 0.028035072326660155, 0.027876352310180662, 0.028026880264282225, 0.027646976470947264, 0.027320320129394532, 0.028465152740478516, 0.028818431854248046, 0.028727296829223634, 0.028092416763305664, 0.027535360336303712, 0.027659263610839844, 0.02769817543029785, 0.027874303817749024, 0.027640832901000976, 0.027674623489379883, 0.02755583953857422, 0.028282880783081055, 0.02774323272705078, 0.02755379295349121, 0.027634687423706054, 0.027669504165649415, 0.027757568359375, 0.027839487075805663, 0.027749376296997072, 0.027938816070556642, 0.028515327453613282, 0.028011520385742186, 0.028737535476684572, 0.028957696914672853, 0.027709440231323244, 0.027586559295654296, 0.027624448776245116, 0.02776371192932129, 0.027650047302246093, 0.0277258243560791, 0.027716608047485353, 0.027694080352783205, 0.027671552658081053, 
0.027681791305541992, 0.027653120040893556, 0.02778316879272461, 0.027700223922729493, 0.02818355178833008, 0.02775449562072754, 0.027668479919433595, 0.027619327545166016, 0.027683839797973633, 0.027778047561645508, 0.02772275161743164, 0.028283903121948242, 0.027821056365966795, 0.02775654411315918, 0.02775142478942871, 0.02773196792602539, 0.027608064651489257, 0.027633663177490234, 0.027682815551757813, 0.027611135482788086, 0.027621376037597657, 0.027741184234619142, 0.027694080352783205, 0.027679744720458983, 0.028252159118652344, 0.028997631072998048, 0.030697471618652345, 0.02953011131286621, 0.028933120727539063, 0.028846080780029298, 0.028819456100463867, 0.028693504333496093, 0.027852800369262694, 0.027752447128295898, 0.027703296661376952, 0.027640832901000976, 0.028282880783081055, 0.031084543228149415, 0.029104127883911132, 0.028690431594848635, 0.029241344451904298, 0.02895359992980957, 0.02878361511230469, 0.02857574462890625, 0.028366847991943358, 0.02854092788696289, 0.028430335998535155, 0.02874163246154785, 0.028678144454956055, 0.029410303115844725, 0.029258752822875978, 0.03000934410095215, 0.028725248336791992, 0.028648448944091798, 0.028618751525878908, 0.028626943588256838, 0.028194816589355468, 0.027593727111816405, 0.027851776123046876, 0.027846656799316406, 0.02773094367980957, 0.02772275161743164, 0.027857919692993165, 0.027639808654785155, 0.027675647735595704, 0.028068864822387695, 0.027634687423706054, 0.02854707145690918, 0.02855014419555664, 0.028668928146362304, 0.028260351181030274, 0.0289617919921875, 0.028852224349975586, 0.02872831916809082, 0.028624895095825196, 0.027626495361328125, 0.027609088897705077, 0.027775999069213866, 0.027752447128295898, 0.027691007614135742, 0.027849727630615235, 0.027668479919433595, 0.02754457664489746, 0.02787942314147949, 0.027703296661376952, 0.028211200714111328, 0.02838118362426758, 0.028140544891357422, 0.027651071548461914, 0.02771251106262207, 0.027658239364624023, 0.027648000717163085, 0.02778009605407715, 0.02773196792602539, 0.030263296127319338, 0.029261823654174804, 0.02876108741760254, 0.028402687072753906, 0.028658687591552736, 0.0288143367767334, 0.028545024871826172, 0.028358655929565428, 0.02862387275695801, 0.028638208389282226, 0.028839935302734376, 0.02872831916809082, 0.028601343154907227, 0.028734464645385743, 0.028710912704467774, 0.028306432723999023, 0.028244991302490235, 0.030737407684326173, 0.029945856094360353, 0.028900352478027344, 0.028625919342041017, 0.02916044807434082, 0.02954751968383789, 0.02944000053405762, 0.0287825927734375, 0.028880895614624022, 0.027810815811157227, 0.02755072021484375, 0.027671552658081053, 0.02779545593261719, 0.027650047302246093, 0.027591680526733397, 0.028342271804809572, 0.02776268768310547, 0.027683839797973633, 0.02836070442199707, 0.02858700752258301, 0.02832793617248535, 0.028653568267822265, 0.02797772789001465, 0.027427839279174804, 0.027669504165649415, 0.02732748794555664, 0.02752511978149414, 0.027437055587768554, 0.02756608009338379, 0.028865535736083983, 0.02820403289794922, 0.027676671981811524, 0.027614208221435548, 0.027631616592407225, 0.02760704040527344, 0.02768076705932617, 0.027782144546508788, 0.028189695358276368, 0.028232704162597655, 0.027593727111816405, 0.02754457664489746, 0.028066816329956053, 0.028848127365112306, 0.028808191299438478, 0.028648448944091798, 0.028058624267578124, 0.028726272583007813, 0.02780467224121094, 0.027668479919433595, 0.02751590347290039, 0.027629568099975587, 0.02791219139099121, 0.028851200103759765, 
0.02874675178527832, 0.02858700752258301, 0.02837299156188965, 0.02815795135498047, 0.02789990425109863, 0.027809791564941407, 0.027694080352783205, 0.02773504066467285, 0.027672576904296874, 0.02774220848083496, 0.028411903381347657, 0.028308479309082032, 0.02796236801147461, 0.027588607788085938, 0.027675647735595704, 0.027769855499267578, 0.027686912536621092, 0.027608064651489257, 0.028417024612426758, 0.02838835144042969, 0.027631616592407225, 0.027717632293701173, 0.027923456192016603, 0.028005376815795898, 0.027830272674560546, 0.027643903732299805, 0.028003328323364256, 0.027801599502563477, 0.02776166343688965, 0.02757734489440918, 0.02774732780456543, 0.027840511322021484, 0.027632640838623046, 0.02772889518737793, 0.02852556800842285, 0.028795904159545898, 0.028281856536865234, 0.028677120208740234, 0.028490751266479493, 0.0287457275390625, 0.02872319984436035, 0.028625919342041017, 0.028589056015014647, 0.028831743240356447, 0.028528640747070313, 0.02775859260559082, 0.027808767318725586, 0.031473663330078124, 0.029502464294433595, 0.028906496047973632, 0.028676095962524413, 0.02876313591003418, 0.02775142478942871, 0.027893760681152343, 0.02877644729614258, 0.02880512046813965, 0.030003200531005858, 0.029149183273315428, 0.029016063690185546, 0.028645376205444335, 0.028580863952636718, 0.028663808822631837, 0.028799999237060548, 0.028753919601440428, 0.028733440399169922, 0.028727296829223634, 0.02878156852722168, 0.028827648162841796, 0.02879283142089844, 0.028657663345336915, 0.028719104766845704, 0.028832767486572267, 0.029437952041625977, 0.029253631591796874, 0.028876800537109375, 0.028907520294189453, 0.028725248336791992, 0.028694528579711914, 0.028648448944091798, 0.028832767486572267, 0.02892185592651367, 0.02875289535522461, 0.029057024002075195, 0.02874470329284668, 0.028829696655273438, 0.02877542304992676, 0.0287324161529541, 0.028655616760253907, 0.029691904067993165, 0.029276159286499022, 0.028893184661865235, 0.02929254341125488, 0.0303636474609375, 0.02814873504638672, 0.02778828811645508, 0.027486207962036133, 0.02772787284851074, 0.029037567138671876, 0.028697599411010744, 0.028619775772094725, 0.028857343673706053, 0.028579839706420897, 0.02876518440246582, 0.02892902374267578, 0.02858291244506836, 0.028703744888305665, 0.028916736602783204, 0.028606464385986328, 0.02922598457336426, 0.030113792419433592, 0.029050880432128907, 0.028795904159545898, 0.028637184143066406, 0.028291072845458985, 0.02916044807434082, 0.028810239791870116, 0.028674047470092775, 0.028669952392578125, 0.028664831161499024, 0.028820480346679687, 0.02793984031677246, 0.027627519607543945, 0.02772275161743164, 0.027652095794677735, 0.027669504165649415, 0.027658239364624023, 0.027610111236572265, 0.027437055587768554, 0.02756915283203125, 0.027701248168945314, 0.027693056106567384, 0.027621376037597657, 0.02800127983093262, 0.028358655929565428, 0.02840985679626465, 0.027987968444824218, 0.02758143997192383, 0.027975679397583008, 0.028613632202148437, 0.028527616500854492, 0.028472320556640625, 0.02857574462890625, 0.027860992431640624, 0.027599872589111327, 0.02768998336791992, 0.02754252815246582, 0.027671552658081053, 0.027651071548461914, 0.02930790328979492, 0.030397504806518556, 0.029383615493774416, 0.0285614070892334, 0.028433408737182617, 0.028398591995239256, 0.027469823837280274, 0.027615232467651366, 0.027616256713867186, 0.027656192779541015, 0.027720703125, 0.02768998336791992, 0.02795315170288086, 0.028656639099121094, 0.028396543502807618, 0.027646976470947264, 
0.027691007614135742, 0.027828224182128908, 0.02793369674682617, 0.027797504425048827, 0.02779545593261719, 0.02775551986694336, 0.028088319778442384, 0.029271039962768555, 0.027666431427001953, 0.028793855667114256, 0.028702720642089844, 0.02820812797546387, 0.0285296630859375, 0.028914688110351562, 0.027874303817749024, 0.02770227241516113, 0.027669504165649415, 0.027678720474243163, 0.028693504333496093, 0.02795622444152832, 0.027437055587768554, 0.02756403160095215, 0.027620351791381836, 0.027665407180786132, 0.027660287857055665, 0.027594751358032226, 0.027658239364624023, 0.027502592086791993, 0.02773196792602539, 0.027625471115112304, 0.027627519607543945, 0.027885568618774413, 0.02936832046508789, 0.02925056076049805, 0.028494848251342773, 0.02856755256652832, 0.02798182487487793, 0.027632640838623046, 0.027621376037597657, 0.02759782409667969, 0.028499967575073244, 0.028579839706420897, 0.028869632720947266, 0.028065792083740236, 0.02875187110900879, 0.028491775512695314, 0.028663808822631837, 0.028662784576416016, 0.028684288024902343, 0.02850099182128906, 0.027905023574829102, 0.0283371524810791, 0.029921279907226563, 0.02813030433654785, 0.02759782409667969, 0.0275732479095459, 0.027717632293701173, 0.027602943420410156, 0.02752204895019531, 0.027660287857055665, 0.027732992172241212, 0.027663360595703124, 0.027635711669921875, 0.027551744461059572, 0.02771455955505371, 0.0276889591217041, 0.02757734489440918, 0.02772377586364746, 0.02776678466796875, 0.02815590476989746, 0.02810572814941406, 0.027711488723754882, 0.027586559295654296, 0.027628543853759766, 0.027628543853759766, 0.02756915283203125, 0.027673599243164062, 0.027675647735595704, 0.028877824783325196, 0.029154304504394532, 0.02879795265197754]",tokens/s,35.49318987585958,,,1,64,1,,, -4bit-awq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,Qwen/Qwen-72B,Qwen/Qwen-72B,cuda,0,42,,,True,,,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run - report = scenario.run(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run - self.run_model_loading_tracking(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking - backend.load() - File 
""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load - self.load_transformers_model() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model - self.load_transformers_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights - self.load_transformers_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained - self.pretrained_model = self.automodel_loader.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 551, in from_pretrained - model_class = get_class_from_dynamic_module( - File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 502, in get_class_from_dynamic_module - final_module = get_cached_module_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 327, in get_cached_module_file - modules_needed = check_imports(resolved_module_file) - File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 182, in check_imports - raise ImportError( -ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. Run `pip install transformers_stream_generator` - -",qwen,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,1,64,1,,, -4bit-awq-exllama-v1-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,i,i,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 304, in hf_raise_for_status - response.raise_for_status() - File ""/usr/local/lib/python3.10/dist-packages/requests/models.py"", line 1024, in raise_for_status - raise HTTPError(http_error_msg, response=self) -requests.exceptions.HTTPError: 404 Client Error: Not Found for url: https://huggingface.co/i/resolve/main/config.json - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return 
fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1221, in hf_hub_download - return _hf_hub_download_to_cache_dir( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1325, in _hf_hub_download_to_cache_dir - _raise_on_head_call_error(head_call_error, force_download, local_files_only) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1823, in _raise_on_head_call_error - raise head_call_error - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1722, in _get_metadata_or_catch_error - metadata = get_hf_file_metadata(url=url, proxies=proxies, timeout=etag_timeout, headers=headers) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1645, in get_hf_file_metadata - r = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 372, in _request_wrapper - response = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 396, in _request_wrapper - hf_raise_for_status(response) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status - raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-6694903f-36aa65bd66fc1618112f4c74;29748bff-44b6-49df-828e-0890bfcfdea6) - -Repository Not Found for url: https://huggingface.co/i/resolve/main/config.json. -Please make sure you specified the correct `repo_id` and `repo_type`. -If you are trying to access a private or gated repo, make sure you are authenticated. 
- -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 425, in cached_file - raise EnvironmentError( -OSError: i is not a local folder and is not a valid model identifier listed on 'https://huggingface.co/models' -If this is a private repository, make sure to pass a token having permission to this repo either by logging in with `huggingface-cli login` or by passing `token=` - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,1,64,1,,, -4bit-awq-exllama-v1-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,stabilityai/stablelm-3b-4e1t,stabilityai/stablelm-3b-4e1t,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - 
self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - self.load_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3907, in from_pretrained - hf_quantizer.postprocess_model(model) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/base.py"", line 195, in postprocess_model - return self._process_model_after_weight_loading(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/quantizer_awq.py"", line 107, in _process_model_after_weight_loading - model = post_init_awq_exllama_modules(model, self.quantization_config.exllama_config) - File ""/usr/local/lib/python3.10/dist-packages/transformers/integrations/awq.py"", line 462, in post_init_awq_exllama_modules - model = exllama_post_init(model) - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllama.py"", line 144, in exllama_post_init - submodule.post_init() - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllama.py"", line 77, in post_init - self.q4 = exl_ext.make_q4( -NameError: name 'exl_ext' is not defined - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,1,64,1,,, -4bit-awq-exllama-v1-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,tiiuae/falcon-rw-1b,tiiuae/falcon-rw-1b,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - self.load_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = 
self.automodel_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 559, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3704, in from_pretrained - config = cls._autoset_attn_implementation( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1481, in _autoset_attn_implementation - cls._check_and_enable_flash_attn_2( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1572, in _check_and_enable_flash_attn_2 - raise ValueError( -ValueError: FalconForCausalLM does not support Flash Attention 2.0 yet. Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmprcrhg91d/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,1,64,1,,, -4bit-awq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,Qwen/Qwen-14B,Qwen/Qwen-14B,cuda,0,42,,,True,,,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run - report = scenario.run(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run - self.run_model_loading_tracking(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking - backend.load() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load - self.load_transformers_model() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model - self.load_transformers_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights - self.load_transformers_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained - self.pretrained_model = self.automodel_loader.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 551, in from_pretrained - model_class 
= get_class_from_dynamic_module( - File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 502, in get_class_from_dynamic_module - final_module = get_cached_module_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 327, in get_cached_module_file - modules_needed = check_imports(resolved_module_file) - File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 182, in check_imports - raise ImportError( -ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. Run `pip install transformers_stream_generator` - -",qwen,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,1,64,1,,, -4bit-awq-exllama-v1-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,stabilityai/stablelm-2-1_6b,stabilityai/stablelm-2-1_6b,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - self.load_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3907, in from_pretrained - hf_quantizer.postprocess_model(model) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/base.py"", line 195, in postprocess_model - return self._process_model_after_weight_loading(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/quantizer_awq.py"", line 107, in _process_model_after_weight_loading - model = post_init_awq_exllama_modules(model, self.quantization_config.exllama_config) - File 
""/usr/local/lib/python3.10/dist-packages/transformers/integrations/awq.py"", line 462, in post_init_awq_exllama_modules - model = exllama_post_init(model) - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllama.py"", line 144, in exllama_post_init - submodule.post_init() - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllama.py"", line 77, in post_init - self.q4 = exl_ext.make_q4( -NameError: name 'exl_ext' is not defined - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,1,64,1,,, -4bit-awq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,EleutherAI/pythia-6.7b,EleutherAI/pythia-6.7b,cuda,0,42,,,True,,,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,gpt_neox,MB,4285.382656,6019.350528,0.0,5389.68064,5000.446464,s,1,10.1308857421875,10.1308857421875,0.0,10.1308857421875,10.1308857421875,10.1308857421875,10.1308857421875,[10.1308857421875],,kWh,3.927882649444807e-05,2.1506906507279964e-05,7.509978230196834e-05,0.00013588551530369636,,MB,1504.768,6040.322048,0.0,5393.874944,4700.829696,s,10,11.91363720703125,1.1913637207031251,9.31727951083712e-05,1.1913497314453125,1.1915127319335939,1.191520037841797,1.1915258825683595,"[1.19152734375, 1.19124072265625, 1.1915111083984375, 1.191333740234375, 1.19128173828125, 1.1912508544921876, 1.1913475341796875, 1.1913519287109375, 1.19138330078125, 1.191408935546875]",tokens/s,214.879801651936,kWh,1.4076326137916592e-05,7.713446608190165e-06,8.102403704139949e-05,0.00010281380978750625,tokens/kWh,2489937.884113975,MB,1538.953216,6040.322048,0.0,5393.874944,4877.49632,s,10,15.086487426757811,1.508648742675781,0.018665656543717334,1.5076434936523437,1.533105078125,1.53652666015625,1.53926392578125,"[1.5399482421875, 1.4827049560546874, 1.487322265625, 1.5227332763671875, 1.4853779296875, 1.5067100830078124, 1.508576904296875, 1.5323447265625, 1.5160992431640625, 1.5046697998046874]",tokens/s,41.759223481180555,kWh,1.8197236319166576e-05,9.973668538861612e-06,5.6097294877799345e-05,8.426819973582754e-05,tokens/kWh,747612.9809050005,,s,630,15.084146699905405,0.023943089999849836,0.0006418801673082948,0.023727120399475096,0.02459555759429932,0.02488089599609375,0.025841213550567628,"[0.02457907295227051, 0.023442432403564452, 0.023448575973510744, 0.024218624114990234, 0.024507392883300783, 0.02451046371459961, 0.024423423767089843, 0.024521728515625, 0.024374271392822267, 0.02451353645324707, 0.024626176834106447, 0.024401920318603516, 0.02470604705810547, 0.02450227165222168, 0.024667135238647463, 0.024412160873413087, 0.024147968292236328, 0.02515660858154297, 0.024826879501342772, 0.024588287353515623, 0.02496512031555176, 0.024460287094116212, 0.024558591842651366, 0.024170495986938476, 0.024518655776977538, 0.02452070426940918, 0.024409088134765625, 0.02449305534362793, 0.02453606414794922, 0.02452479934692383, 0.0245032958984375, 0.02448691177368164, 0.0245534725189209, 0.0245483512878418, 0.02454528045654297, 0.02503167915344238, 0.024887296676635744, 
0.024425472259521484, 0.024600576400756836, 0.0245534725189209, 0.02452377510070801, 0.02463641548156738, 0.024398847579956053, 0.023459840774536132, 0.02345062446594238, 0.023451648712158202, 0.02349158477783203, 0.02335436820983887, 0.024423423767089843, 0.024716287612915038, 0.024213504791259766, 0.024408063888549804, 0.02447052764892578, 0.024791040420532227, 0.024556543350219725, 0.02456268882751465, 0.024408063888549804, 0.024367103576660155, 0.024525823593139647, 0.02508185577392578, 0.025019392013549805, 0.025028608322143556, 0.02451148796081543, 0.024412160873413087, 0.023398399353027344, 0.0233123836517334, 0.023392255783081056, 0.023302143096923827, 0.02347520065307617, 0.02392064094543457, 0.023468032836914062, 0.02332569694519043, 0.02347417640686035, 0.023358463287353515, 0.02344960021972656, 0.023599103927612306, 0.02349260711669922, 0.02332467269897461, 0.02348646354675293, 0.0237127685546875, 0.02351513671875, 0.02294988822937012, 0.023378944396972655, 0.023266304016113282, 0.02323865509033203, 0.023368703842163087, 0.023444480895996093, 0.023388160705566406, 0.023439359664916993, 0.023372800827026367, 0.02348748779296875, 0.02353152084350586, 0.02330009651184082, 0.02332979202270508, 0.023393280029296876, 0.023390207290649414, 0.02391756820678711, 0.023682048797607422, 0.023441408157348635, 0.023299072265625, 0.023572479248046875, 0.023333887100219726, 0.023412736892700195, 0.02331340789794922, 0.023442432403564452, 0.02332979202270508, 0.023421951293945312, 0.023378944396972655, 0.023372800827026367, 0.023357440948486328, 0.023415807723999024, 0.02348953628540039, 0.023569408416748046, 0.023069696426391603, 0.023370752334594725, 0.02369331169128418, 0.02390323257446289, 0.024215551376342775, 0.024213504791259766, 0.02424115180969238, 0.02434764862060547, 0.023957504272460937, 0.023408639907836915, 0.023384063720703126, 0.024300544738769532, 0.02391347122192383, 0.02429439926147461, 0.023358463287353515, 0.023444480895996093, 0.023399423599243165, 0.02373324775695801, 0.0234967041015625, 0.023556095123291015, 0.024024063110351563, 0.023752704620361328, 0.023347200393676756, 0.023438335418701172, 0.023198720932006835, 0.023426048278808592, 0.023440383911132814, 0.023374847412109375, 0.023386112213134767, 0.023386112213134767, 0.02330112075805664, 0.023384063720703126, 0.023613439559936524, 0.023472127914428712, 0.02332467269897461, 0.02329804801940918, 0.02330521583557129, 0.023404544830322265, 0.023343103408813477, 0.023573503494262696, 0.023358463287353515, 0.023021568298339845, 0.024202239990234374, 0.025851903915405275, 0.025869312286376952, 0.02443059158325195, 0.0243507194519043, 0.02430771255493164, 0.023592960357666014, 0.02325503921508789, 0.023403520584106444, 0.023274496078491212, 0.023550975799560548, 0.023597055435180665, 0.02347724723815918, 0.02329599952697754, 0.023576576232910155, 0.023622655868530275, 0.023330816268920897, 0.02372505569458008, 0.023400447845458985, 0.023411712646484374, 0.023427072525024413, 0.023538688659667968, 0.023988256454467772, 0.023648223876953124, 0.02328883171081543, 0.023408639907836915, 0.023394304275512694, 0.023117824554443358, 0.023235584259033205, 0.023402496337890624, 0.02346188735961914, 0.023402528762817384, 0.02340656089782715, 0.024368127822875976, 0.024498176574707032, 0.02346700859069824, 0.023455743789672853, 0.02389811134338379, 0.0241582088470459, 0.02437222480773926, 0.02446950340270996, 0.024403968811035157, 0.024379392623901368, 0.024541183471679686, 0.024300544738769532, 0.024426496505737305, 
0.024230911254882814, 0.028031007766723633, 0.025435104370117187, 0.02454732894897461, 0.023606271743774415, 0.024349695205688478, 0.024394752502441407, 0.024311807632446288, 0.024431615829467773, 0.024421375274658205, 0.0242739200592041, 0.024412160873413087, 0.0247193603515625, 0.024705024719238283, 0.024393728256225586, 0.02433126449584961, 0.024357887268066408, 0.024217599868774413, 0.023367679595947266, 0.023421951293945312, 0.02433024024963379, 0.02428927993774414, 0.024187904357910156, 0.02429952049255371, 0.02434252738952637, 0.024250368118286132, 0.024260608673095704, 0.024260608673095704, 0.023990272521972656, 0.024138751983642577, 0.023620607376098633, 0.023369728088378908, 0.023411712646484374, 0.023339008331298827, 0.02408857536315918, 0.025610240936279297, 0.02488319969177246, 0.024435712814331056, 0.024436735153198243, 0.02434252738952637, 0.0237260799407959, 0.023455743789672853, 0.023384063720703126, 0.02326016044616699, 0.023349248886108398, 0.023456768035888673, 0.023617536544799804, 0.023576576232910155, 0.023428096771240234, 0.023448575973510744, 0.023571456909179687, 0.024571903228759767, 0.023464960098266603, 0.023413759231567383, 0.02456985664367676, 0.02433433532714844, 0.02348236846923828, 0.023908351898193358, 0.023468032836914062, 0.02369740867614746, 0.02346598434448242, 0.023378944396972655, 0.02347315216064453, 0.023451648712158202, 0.023403520584106444, 0.02348543930053711, 0.023435264587402343, 0.02349260711669922, 0.023427072525024413, 0.023500799179077148, 0.02310041618347168, 0.02309222412109375, 0.02305740737915039, 0.023307264328002928, 0.023433216094970705, 0.02352025604248047, 0.02342911911010742, 0.023402496337890624, 0.023241727828979493, 0.02345369529724121, 0.02333695983886719, 0.024363008499145508, 0.024341503143310548, 0.02445414352416992, 0.02448793601989746, 0.024352767944335937, 0.024255487442016603, 0.02446335983276367, 0.024187904357910156, 0.023274496078491212, 0.02335436820983887, 0.023410688400268553, 0.02405580711364746, 0.023758848190307616, 0.02349977684020996, 0.02349977684020996, 0.023804927825927736, 0.023384063720703126, 0.02306662368774414, 0.022976512908935546, 0.02324684715270996, 0.023195648193359376, 0.023396352767944335, 0.023380992889404296, 0.023345151901245118, 0.023428096771240234, 0.02302463912963867, 0.02329599952697754, 0.023347200393676756, 0.023162879943847657, 0.023376895904541017, 0.023387136459350585, 0.023371776580810546, 0.023400447845458985, 0.024616960525512696, 0.0242227840423584, 0.023731136322021486, 0.023504896163940428, 0.023603200912475586, 0.023402496337890624, 0.023379968643188476, 0.023415807723999024, 0.023440383911132814, 0.023417856216430662, 0.023455743789672853, 0.023347200393676756, 0.023397375106811523, 0.0233123836517334, 0.02325299263000488, 0.023790592193603514, 0.023900159835815428, 0.023426048278808592, 0.023441408157348635, 0.023326719284057617, 0.0232806396484375, 0.023323648452758788, 0.023398399353027344, 0.023358463287353515, 0.023405567169189453, 0.023374847412109375, 0.023371776580810546, 0.023318527221679687, 0.023423999786376954, 0.023405567169189453, 0.026421247482299806, 0.02854195213317871, 0.0247511043548584, 0.024366079330444337, 0.024391679763793944, 0.024351743698120116, 0.024375295639038085, 0.024415231704711913, 0.02454732894897461, 0.024198144912719727, 0.02434252738952637, 0.024242176055908202, 0.024352767944335937, 0.024373247146606446, 0.024424448013305664, 0.02435686492919922, 0.02447974395751953, 0.02433126449584961, 0.023547903060913086, 0.02348236846923828, 
0.02349363136291504, 0.0233123836517334, 0.02347110366821289, 0.023362592697143556, 0.023448543548583986, 0.023384063720703126, 0.02350182342529297, 0.02347315216064453, 0.02453811264038086, 0.024953855514526366, 0.024421375274658205, 0.02429132843017578, 0.02348236846923828, 0.024506368637084962, 0.0236943359375, 0.024311807632446288, 0.02431692886352539, 0.02346700859069824, 0.02345881652832031, 0.023342079162597656, 0.02387455940246582, 0.02431283187866211, 0.024360960006713867, 0.024377376556396484, 0.024302560806274413, 0.024184831619262694, 0.02344550323486328, 0.024567808151245117, 0.024345600128173828, 0.024340511322021485, 0.024434656143188477, 0.02447257614135742, 0.023447551727294923, 0.024369152069091796, 0.02429132843017578, 0.024254463195800782, 0.024384511947631835, 0.02449305534362793, 0.023558143615722657, 0.023566335678100587, 0.02343731117248535, 0.023979007720947267, 0.024988672256469727, 0.025556991577148438, 0.024989696502685548, 0.024671232223510742, 0.024349695205688478, 0.024397823333740236, 0.023575551986694337, 0.023439359664916993, 0.023415807723999024, 0.023430143356323242, 0.023362560272216795, 0.023433216094970705, 0.02345369529724121, 0.023541759490966797, 0.02329497528076172, 0.023411712646484374, 0.023427072525024413, 0.023444480895996093, 0.023478271484375, 0.02346188735961914, 0.02350387191772461, 0.02329804801940918, 0.02347929573059082, 0.02353152084350586, 0.023186431884765626, 0.02346188735961914, 0.02346291160583496, 0.023536640167236327, 0.023456768035888673, 0.02347520065307617, 0.025472000122070314, 0.024550399780273437, 0.024411136627197266, 0.02449407958984375, 0.025332735061645507, 0.02350284767150879, 0.02447667121887207, 0.02448793601989746, 0.02446233558654785, 0.025043968200683595, 0.024832000732421877, 0.024517631530761717, 0.024392704010009765, 0.024423423767089843, 0.024439807891845702, 0.024596479415893553, 0.024420352935791017, 0.024878080368041993, 0.025366527557373047, 0.025815040588378906, 0.02476851272583008, 0.024740863800048828, 0.025985023498535157, 0.024650751113891603, 0.024397823333740236, 0.0243558406829834, 0.02472755241394043, 0.024459264755249024, 0.024640512466430665, 0.02475212860107422, 0.025060352325439454, 0.024498176574707032, 0.024627199172973634, 0.024186880111694335, 0.024203264236450195, 0.024344575881958007, 0.024358911514282225, 0.024387584686279298, 0.024244224548339844, 0.024424448013305664, 0.024414207458496092, 0.024363008499145508, 0.024426496505737305, 0.024392704010009765, 0.024707071304321288, 0.02452479934692383, 0.024381439208984376, 0.024363008499145508, 0.023843839645385743, 0.02447052764892578, 0.02434048080444336, 0.02505523109436035, 0.02466815948486328, 0.02351513671875, 0.023553024291992186, 0.023430143356323242, 0.023390207290649414, 0.023334911346435547, 0.023431167602539063, 0.02326835250854492, 0.023406591415405274, 0.023395328521728515, 0.023427072525024413, 0.023435264587402343, 0.023153663635253906, 0.023179264068603517, 0.023446527481079102, 0.024327167510986326, 0.023416831970214845, 0.02332876777648926, 0.023370752334594725, 0.023409664154052736, 0.023206911087036132, 0.02326835250854492, 0.023371776580810546, 0.023362560272216795, 0.023413759231567383, 0.023326719284057617, 0.024614912033081054, 0.026073087692260744, 0.024823808670043947, 0.02451968002319336, 0.024452096939086915, 0.024137727737426756, 0.024035327911376952, 0.024337408065795898, 0.024369152069091796, 0.024723455429077147, 0.025068544387817384, 0.02467532730102539, 0.02452479934692383, 0.024416255950927734, 
0.024434688568115235, 0.024246271133422852, 0.024365055084228517, 0.02445516777038574, 0.024464384078979492, 0.024460287094116212, 0.024397823333740236, 0.024551424026489257, 0.024595455169677736, 0.024105983734130858, 0.024328191757202147, 0.024393728256225586, 0.024602624893188478, 0.024481792449951172, 0.024588287353515623, 0.023521280288696288, 0.02346086311340332, 0.023380992889404296, 0.023431167602539063, 0.023465024948120118, 0.023498687744140625, 0.023427072525024413, 0.023394304275512694, 0.023413759231567383, 0.023448575973510744, 0.023404544830322265, 0.023426048278808592, 0.023430143356323242, 0.023998464584350586, 0.024386560440063477, 0.02447769546508789, 0.024534015655517577, 0.02451353645324707, 0.024412160873413087, 0.024258560180664062, 0.02456985664367676, 0.02347315216064453, 0.02349056053161621, 0.024382463455200197, 0.023547903060913086, 0.02345062446594238, 0.02348236846923828, 0.023390207290649414, 0.02345779228210449, 0.023394304275512694, 0.023315456390380858, 0.023384063720703126, 0.023809024810791016, 0.023987199783325194, 0.02332159996032715, 0.023136255264282226, 0.023228416442871092, 0.023362560272216795, 0.023396352767944335, 0.02372403144836426, 0.023371776580810546, 0.023274496078491212, 0.023413759231567383, 0.023421951293945312, 0.023368703842163087, 0.023355392456054686, 0.023350271224975586, 0.02351923179626465, 0.023435264587402343, 0.023443456649780273, 0.023728160858154296, 0.024460256576538084, 0.024303615570068358, 0.02434252738952637, 0.024434688568115235, 0.024427520751953126, 0.02448691177368164, 0.024444927215576173, 0.02437222480773926, 0.02425753593444824, 0.02502348709106445, 0.024805376052856445, 0.024486944198608397, 0.024467424392700197, 0.025133056640625, 0.02513408088684082, 0.024411136627197266, 0.023928831100463867, 0.02389811134338379, 0.024186880111694335, 0.024142847061157227, 0.023946239471435548, 0.024049663543701173, 0.024442880630493165, 0.024591360092163086, 0.02408550453186035, 0.024453119277954103, 0.02391244888305664, 0.02394726371765137, 0.0239052791595459, 0.023933952331542968, 0.023759872436523437, 0.023196672439575194, 0.023000064849853515, 0.023456768035888673, 0.023155712127685548]",tokens/s,41.7657035915695,,,1,64,1,,, -4bit-awq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,01-ai/Yi-34B,01-ai/Yi-34B,cuda,0,42,,,True,,,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,llama,MB,17657.061376,22207.266816,0.0,21577.596928,20724.970496,s,1,18.358060546875,18.358060546875,0.0,18.358060546875,18.358060546875,18.358060546875,18.358060546875,[18.358060546875],,kWh,0.00013670052892847008,7.490777089511016e-05,0.00029014217655799757,0.0005017504763815778,,MB,1749.782528,22286.958592,0.0,21640.511488,19428.81536,s,10,63.228764648437505,6.32287646484375,0.0010487599952775446,6.323275634765626,6.324061376953125,6.3241442138671875,6.324210483398438,"[6.3236201171875, 6.32356982421875, 6.32422705078125, 6.32404296875, 6.32370263671875, 6.3229814453125, 6.3217724609375, 6.32140673828125, 6.3215859375, 
6.32185546875]",tokens/s,40.48790157824572,kWh,7.465308476277733e-05,4.09125852687979e-05,0.0004302240941790006,0.0005457897642105758,tokens/kWh,469045.0733723003,MB,1769.55392,22314.221568,0.0,21665.677312,19428.81792,s,10,37.862713867187495,3.78627138671875,0.008993870237976966,3.7825434570312497,3.7951511962890625,3.802124548339844,3.807703229980469,"[3.7936015625, 3.789929443359375, 3.809097900390625, 3.779288818359375, 3.77664111328125, 3.783013671875, 3.780538330078125, 3.78202001953125, 3.7820732421875, 3.786509765625]",tokens/s,16.639060850468226,kWh,4.4780012408056e-05,2.4545419833814492e-05,0.00019451785005859947,0.00026384328230047015,tokens/kWh,238778.10892397218,,s,630,37.860096931457534,0.060095391954694474,0.00034256641282519654,0.05995315170288086,0.060615782928466796,0.0607723518371582,0.06117512336730957,"[0.06053171157836914, 0.061023231506347655, 0.06087475204467773, 0.06013337707519531, 0.060439552307128906, 0.05988556671142578, 0.06005759811401367, 0.05988044738769531, 0.060034046173095705, 0.06038323211669922, 0.06054604721069336, 0.06025523376464844, 0.06007910537719727, 0.05991321563720703, 0.05996134567260742, 0.059886592864990235, 0.05989273452758789, 0.05984460830688477, 0.06005350494384765, 0.06056959915161133, 0.06112768173217773, 0.06058086395263672, 0.06055731201171875, 0.06038425445556641, 0.060060672760009766, 0.060508159637451174, 0.06080204772949219, 0.06016614532470703, 0.06039039993286133, 0.06022553634643555, 0.060477439880371096, 0.06037606430053711, 0.06038016128540039, 0.059832321166992185, 0.05991731262207031, 0.06046822357177734, 0.06046105575561524, 0.06030131149291992, 0.06024499130249023, 0.06001356887817383, 0.05992652893066406, 0.06022655868530274, 0.06050611114501953, 0.060219390869140625, 0.06035865783691406, 0.06012211227416992, 0.06022860717773437, 0.05997158432006836, 0.05995008087158203, 0.05998591995239258, 0.06009446334838867, 0.059930622100830076, 0.05992243194580078, 0.05978112030029297, 0.05991321563720703, 0.060034046173095705, 0.06017433547973633, 0.05992550277709961, 0.05998796844482422, 0.06041907119750976, 0.060286975860595705, 0.05990195083618164, 0.059958271026611325, 0.0617922248840332, 0.060728321075439455, 0.06062694549560547, 0.06042726516723633, 0.0597760009765625, 0.05983846282958984, 0.05997772979736328, 0.05967359924316406, 0.06014566421508789, 0.060270591735839846, 0.06039449691772461, 0.06055731201171875, 0.059834369659423826, 0.05999513626098633, 0.05989376068115235, 0.05989580917358398, 0.06018355178833008, 0.06032588958740234, 0.061459457397460934, 0.060590080261230465, 0.06015283203125, 0.05998899078369141, 0.05979852676391602, 0.06050099182128906, 0.059870208740234375, 0.05979852676391602, 0.05977088165283203, 0.060609535217285154, 0.060199935913085936, 0.05993471908569336, 0.059819007873535154, 0.05991219329833984, 0.059840511322021485, 0.06027571105957031, 0.05981081771850586, 0.05990092849731445, 0.05994598388671875, 0.059769855499267575, 0.0597657585144043, 0.05986611175537109, 0.06056857681274414, 0.05989273452758789, 0.05990911865234375, 0.060393470764160156, 0.060189697265625, 0.06005452728271484, 0.060273662567138675, 0.05987228775024414, 0.0598691520690918, 0.05994393539428711, 0.06134374237060547, 0.05990707015991211, 0.059873279571533204, 0.05990399932861328, 0.05993983840942383, 0.059888641357421876, 0.06023680114746094, 0.0602716178894043, 0.0599101448059082, 0.05999513626098633, 0.06015283203125, 0.06060236740112305, 0.060761089324951174, 0.061472766876220705, 0.06002483367919922, 
0.05994496154785156, 0.05993983840942383, 0.06004838562011719, 0.06032793426513672, 0.06017331314086914, 0.06010265731811523, 0.06018867111206055, 0.059862014770507815, 0.06014054489135742, 0.06016307067871094, 0.05984460830688477, 0.059835391998291014, 0.05997158432006836, 0.06054604721069336, 0.06118195343017578, 0.060044288635253906, 0.059976703643798826, 0.06011904144287109, 0.06006886291503906, 0.06115737533569336, 0.061077503204345705, 0.0600709114074707, 0.06013542556762695, 0.060071937561035155, 0.060469249725341796, 0.06070579147338867, 0.06073241424560547, 0.060627967834472656, 0.06069964981079102, 0.060781566619873044, 0.06063820648193359, 0.060483585357666014, 0.060644351959228515, 0.06061568069458008, 0.060598270416259765, 0.06061670303344727, 0.06091571044921875, 0.06060851287841797, 0.06061670303344727, 0.060680191040039064, 0.06074060821533203, 0.060494846343994144, 0.06072627258300781, 0.06057376098632813, 0.06055929565429687, 0.060524543762207034, 0.06067507171630859, 0.06047334289550781, 0.06062899017333984, 0.06026649475097656, 0.06073241424560547, 0.06059724807739258, 0.060625919342041014, 0.060843006134033206, 0.0607457275390625, 0.06055936050415039, 0.06084403228759765, 0.060652542114257815, 0.06022246551513672, 0.0600186882019043, 0.06041292953491211, 0.060781566619873044, 0.060075008392333984, 0.05989990234375, 0.05995008087158203, 0.05981798553466797, 0.05982822418212891, 0.05976268768310547, 0.059843582153320314, 0.059860000610351564, 0.05980259323120117, 0.059870208740234375, 0.060273662567138675, 0.05978112030029297, 0.06053068923950195, 0.06115327835083008, 0.06079283142089844, 0.06023884963989258, 0.06014771270751953, 0.05986816024780273, 0.06019583892822265, 0.059947006225585936, 0.0598364143371582, 0.059822078704833984, 0.05989785766601562, 0.060158977508544924, 0.06016614532470703, 0.0598364143371582, 0.059974655151367184, 0.06056755065917969, 0.06010572814941406, 0.06008729553222656, 0.05998694229125977, 0.05988556671142578, 0.05972684860229492, 0.059799552917480465, 0.0598548469543457, 0.060133438110351566, 0.059939777374267575, 0.059856895446777345, 0.05982720184326172, 0.05984767913818359, 0.06013747024536133, 0.05995315170288086, 0.05987123107910156, 0.05989990234375, 0.059888641357421876, 0.05985587310791016, 0.059848705291748044, 0.059862014770507815, 0.05972172927856445, 0.05974835205078125, 0.059821056365966796, 0.06006886291503906, 0.0599101448059082, 0.05980672073364258, 0.05987123107910156, 0.059921409606933596, 0.05993983840942383, 0.05975449752807617, 0.05988351821899414, 0.05975040054321289, 0.059924480438232425, 0.059842559814453126, 0.060609535217285154, 0.059937793731689455, 0.05993369674682617, 0.06001561737060547, 0.059812862396240236, 0.05984972763061523, 0.06010367965698242, 0.0598364143371582, 0.05977395248413086, 0.05990911865234375, 0.05983846282958984, 0.05980057525634765, 0.06118297576904297, 0.06014361572265625, 0.05995929718017578, 0.060114944458007816, 0.05996441650390625, 0.059889663696289064, 0.059919361114501954, 0.05982003021240234, 0.059851776123046874, 0.05976473617553711, 0.05985587310791016, 0.05984153747558594, 0.05988351821899414, 0.05986099243164063, 0.059881473541259764, 0.059786239624023435, 0.059906047821044923, 0.05990911865234375, 0.05992038345336914, 0.05979340744018555, 0.05988556671142578, 0.059925537109375, 0.05999100875854492, 0.05978112030029297, 0.05998591995239258, 0.05979443359375, 0.059870208740234375, 0.06031769561767578, 0.05987942504882812, 0.0599101448059082, 0.06013542556762695, 
0.060715007781982425, 0.06001049423217773, 0.05982310485839844, 0.05992243194580078, 0.059850753784179686, 0.05990399932861328, 0.059837440490722656, 0.05974118423461914, 0.05987737655639649, 0.05992038345336914, 0.05999411010742187, 0.059850753784179686, 0.059843582153320314, 0.05989273452758789, 0.05975142288208008, 0.05992652893066406, 0.05981388854980469, 0.05986099243164063, 0.059808799743652344, 0.05991113662719726, 0.06052864074707031, 0.05992959976196289, 0.05987635040283203, 0.060009471893310545, 0.06052249526977539, 0.059935745239257814, 0.05972787094116211, 0.05978316879272461, 0.059802623748779295, 0.05980364990234375, 0.061104129791259766, 0.06043852615356445, 0.059873279571533204, 0.06005145645141602, 0.059821056365966796, 0.05982617568969727, 0.05999718475341797, 0.05997369766235352, 0.05978518295288086, 0.05991420745849609, 0.05977395248413086, 0.060009471893310545, 0.05976063919067383, 0.05990092849731445, 0.05994598388671875, 0.060012542724609375, 0.05976473617553711, 0.059774974822998046, 0.05977804946899414, 0.059878398895263675, 0.059862014770507815, 0.059842559814453126, 0.0598394889831543, 0.05990911865234375, 0.05978828811645508, 0.05979852676391602, 0.05988351821899414, 0.060071937561035155, 0.05996134567260742, 0.05990195083618164, 0.05986099243164063, 0.05993881607055664, 0.05988761520385742, 0.05991424179077148, 0.05979545593261719, 0.0599818229675293, 0.05998796844482422, 0.0601077766418457, 0.06005964660644531, 0.060505088806152345, 0.06091571044921875, 0.06045491027832031, 0.060440574645996094, 0.0603955192565918, 0.06066995239257812, 0.06052864074707031, 0.06080614471435547, 0.06065971374511719, 0.05998387145996094, 0.060104705810546874, 0.059805694580078124, 0.059924480438232425, 0.059858943939208986, 0.060611583709716796, 0.059829246520996096, 0.060142593383789064, 0.06009548950195313, 0.0600186882019043, 0.05980979156494141, 0.05986713409423828, 0.059772926330566405, 0.0607303695678711, 0.060862464904785155, 0.06065971374511719, 0.06044160079956055, 0.0599552001953125, 0.05997772979736328, 0.060025856018066405, 0.05995315170288086, 0.059862014770507815, 0.05968281555175781, 0.05995622253417969, 0.05993881607055664, 0.05989888000488281, 0.05978112030029297, 0.06014156723022461, 0.06051123046875, 0.06061056137084961, 0.05997158432006836, 0.05996953582763672, 0.05981798553466797, 0.05992038345336914, 0.059905025482177736, 0.05986918258666992, 0.05994496154785156, 0.05995008087158203, 0.05983334350585937, 0.05990092849731445, 0.05989888000488281, 0.0611962890625, 0.060281856536865235, 0.05992959976196289, 0.059840511322021485, 0.05989580917358398, 0.05993471908569336, 0.06002380752563476, 0.05978828811645508, 0.059862014770507815, 0.059843582153320314, 0.05988761520385742, 0.05982720184326172, 0.05998284912109375, 0.05987225723266602, 0.059881473541259764, 0.05969715118408203, 0.059851776123046874, 0.06010879898071289, 0.0599818229675293, 0.05978521728515625, 0.059840511322021485, 0.059786239624023435, 0.059851776123046874, 0.05972377777099609, 0.05992345428466797, 0.05974937438964844, 0.05981081771850586, 0.060478462219238284, 0.0606300163269043, 0.06004019165039062, 0.059936767578125, 0.05979238510131836, 0.05983846282958984, 0.060047359466552735, 0.06108979034423828, 0.06051327896118164, 0.05982515335083008, 0.059862014770507815, 0.0597760009765625, 0.05980364990234375, 0.060385280609130856, 0.05995929718017578, 0.05996646499633789, 0.059853824615478515, 0.060007423400878904, 0.05990911865234375, 0.060262401580810546, 0.059963390350341796, 
0.05983334350585937, 0.05974630355834961, 0.06008115386962891, 0.059921409606933596, 0.059843582153320314, 0.059824127197265625, 0.05990092849731445, 0.059805694580078124, 0.060111873626708986, 0.0598394889831543, 0.06000435256958008, 0.059888702392578125, 0.059915199279785156, 0.06004838562011719, 0.05997158432006836, 0.05996851348876953, 0.05996236801147461, 0.06097715377807617, 0.06085836791992188, 0.06089932632446289, 0.06029414367675781, 0.06023168182373047, 0.06000435256958008, 0.05979238510131836, 0.05989068984985352, 0.059843582153320314, 0.059891712188720705, 0.05997260665893555, 0.05993267059326172, 0.05995315170288086, 0.06009657669067383, 0.05989779281616211, 0.05998591995239258, 0.059862014770507815, 0.05990399932861328, 0.059786239624023435, 0.05982617568969727, 0.059881473541259764, 0.059834369659423826, 0.05974835205078125, 0.05994188690185547, 0.0598394889831543, 0.06061151885986328, 0.05991424179077148, 0.05995315170288086, 0.059891712188720705, 0.05984460830688477, 0.06043648147583008, 0.06042316818237305, 0.05983027267456055, 0.05989068984985352, 0.059865089416503904, 0.0602501106262207, 0.061158401489257816, 0.06076006317138672, 0.059902976989746094, 0.060219390869140625, 0.05995622253417969, 0.05992041778564453, 0.05985788726806641, 0.05996543884277344, 0.05991116714477539, 0.05990092849731445, 0.059894783020019535, 0.05982310485839844, 0.05981798553466797, 0.059954177856445315, 0.05973811340332031, 0.059848705291748044, 0.060257278442382815, 0.0601712646484375, 0.05994393539428711, 0.05984972763061523, 0.05992243194580078, 0.06034329605102539, 0.060091392517089844, 0.06032896041870117, 0.05998284912109375, 0.060527614593505856, 0.05996646499633789, 0.06015180969238281, 0.06005350494384765, 0.059865089416503904, 0.05987737655639649, 0.05989785766601562, 0.05978009414672852, 0.06050406265258789, 0.059822078704833984, 0.05988556671142578, 0.05997260665893555, 0.05992345428466797, 0.059990016937255856, 0.05998080062866211, 0.05972377777099609, 0.0599552001953125, 0.05991424179077148, 0.0599920654296875, 0.06006988906860351, 0.05989785766601562, 0.05977804946899414, 0.059894783020019535, 0.05979443359375, 0.059862014770507815, 0.05992243194580078, 0.060349441528320315, 0.06100582504272461, 0.060409854888916016, 0.05998387145996094, 0.061036544799804686, 0.06080819320678711, 0.06008115386962891, 0.059843582153320314, 0.059829246520996096, 0.0599101448059082, 0.06065151977539063, 0.060020736694335934, 0.059957248687744144, 0.05982617568969727, 0.06040576171875, 0.060028926849365234, 0.059982879638671875, 0.05986300659179687, 0.05998489761352539, 0.05991219329833984, 0.05996851348876953, 0.059789310455322264, 0.05989273452758789, 0.05983129501342774, 0.06009446334838867, 0.059851776123046874, 0.06032691192626953, 0.06002175903320312, 0.060129280090332034, 0.06018048095703125, 0.05992243194580078, 0.06022143936157227, 0.06075801467895508, 0.06046310424804688, 0.060625919342041014, 0.06040883255004883, 0.05994803237915039, 0.05993267059326172, 0.05991219329833984, 0.05986099243164063, 0.05991116714477539, 0.05992345428466797, 0.05997260665893555, 0.06041190338134766, 0.06017433547973633, 0.05991526412963867, 0.059845630645751956, 0.05997568130493164, 0.060014591217041016, 0.05998489761352539, 0.059916385650634764, 0.059955104827880856, 0.05987635040283203, 0.05991526412963867, 0.05992959976196289, 0.05973299026489258, 0.05988556671142578, 0.059862014770507815, 0.059791358947753906, 0.05977804946899414, 0.060572673797607425, 0.06047436904907227, 0.06066483306884766, 
0.06008422470092773]",tokens/s,16.640210962495985,,,1,64,1,,, -4bit-awq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,EleutherAI/gpt-neo-1.3B,EleutherAI/gpt-neo-1.3B,cuda,0,42,,,True,,,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,gpt_neo,MB,1513.377792,1766.326272,0.0,1136.656384,1111.384576,s,1,8.530623046875,8.530623046875,0.0,8.530623046875,8.530623046875,8.530623046875,8.530623046875,[8.530623046875],,kWh,2.0174561512496643e-05,1.1040216910879692e-05,3.261752609401425e-05,6.383230451739059e-05,,MB,1623.691264,1839.726592,0.0,1193.279488,1029.128704,s,10,2.338778732299805,0.2338778732299805,0.00019466772475317827,0.2339172592163086,0.23402420806884766,0.23411501693725587,0.23418766403198243,"[0.2335544891357422, 0.2340040283203125, 0.23352076721191406, 0.23381929016113281, 0.2339233856201172, 0.23420582580566407, 0.23390538024902344, 0.23397244262695313, 0.23396199035644533, 0.2339111328125]",tokens/s,1094.5883698380737,kWh,2.758925741214609e-06,1.511736174076046e-06,1.58964209858609e-05,2.0167082901151552e-05,tokens/kWh,12693952.876317192,MB,1628.766208,1839.726592,0.0,1193.279488,1081.667584,s,10,8.335770446777344,0.8335770446777344,0.010198291489102881,0.8316475219726562,0.8445331298828125,0.8487540222167969,0.8521307360839844,"[0.8435951538085937, 0.839307373046875, 0.82737841796875, 0.8317738037109375, 0.8193349609375, 0.84191552734375, 0.831521240234375, 0.8277444458007812, 0.820224609375, 0.8529749145507812]",tokens/s,75.57789697094664,kWh,9.840141683091007e-06,5.392038878074194e-06,2.1641636822337066e-05,3.687381738350227e-05,tokens/kWh,1708529.3704412295,,s,630,8.331810804367068,0.013225096514868358,0.0002843009474810708,0.013079551696777344,0.013571276473999022,0.013637921285629273,0.014051594123840336,"[0.01268838405609131, 0.012993535995483398, 0.012966912269592285, 0.012928000450134277, 0.01328435230255127, 0.01376255989074707, 0.013520895957946777, 0.013470720291137696, 0.013429759979248047, 0.013389823913574218, 0.013429759979248047, 0.013420543670654296, 0.013560832023620606, 0.013511679649353027, 0.013414400100708008, 0.01344102382659912, 0.013501440048217773, 0.013624320030212403, 0.013608960151672364, 0.013614080429077148, 0.01349120044708252, 0.013488127708435058, 0.013517824172973633, 0.013428735733032226, 0.013557760238647461, 0.01347481632232666, 0.013515775680541992, 0.013417471885681152, 0.013472767829895019, 0.013521920204162598, 0.013541376113891602, 0.013446144104003906, 0.013600768089294434, 0.013479935646057128, 0.013495295524597169, 0.013642751693725585, 0.013705216407775878, 0.013624320030212403, 0.013624320030212403, 0.01356492805480957, 0.013523967742919921, 0.013611007690429687, 0.013521920204162598, 0.013586432456970214, 0.0134717435836792, 0.013475839614868163, 0.01349120044708252, 0.013548607826232911, 0.013091775894165039, 0.0131778564453125, 0.013057024002075195, 0.01306112003326416, 0.013195263862609862, 0.012932095527648926, 0.013007871627807617, 0.013011967658996582, 0.013092864036560058, 0.013128704071044921, 
0.01307545566558838, 0.013011967658996582, 0.013083647727966309, 0.013419520378112794, 0.013398015975952148, 0.013741120338439942, 0.013297599792480469, 0.013012991905212403, 0.013100031852722169, 0.012942336082458495, 0.012976127624511719, 0.013026304244995117, 0.013044735908508302, 0.012926976203918457, 0.013085696220397949, 0.01345740795135498, 0.013436927795410156, 0.013071359634399414, 0.013022208213806152, 0.01306726360321045, 0.013030400276184082, 0.012999679565429688, 0.013048831939697265, 0.013112319946289062, 0.012948479652404785, 0.012955648422241211, 0.013206527709960938, 0.013421567916870117, 0.012975104331970215, 0.012941311836242676, 0.013103103637695313, 0.012988415718078614, 0.013100031852722169, 0.013026304244995117, 0.013040639877319337, 0.013561856269836426, 0.01349120044708252, 0.013512703895568847, 0.0140830717086792, 0.013900799751281738, 0.013437952041625977, 0.013647871971130371, 0.01346560001373291, 0.013412351608276368, 0.013455360412597657, 0.01347481632232666, 0.013486080169677735, 0.013505536079406738, 0.013404159545898438, 0.013455360412597657, 0.013466624259948731, 0.013404159545898438, 0.013531135559082032, 0.013385727882385253, 0.013420543670654296, 0.013488127708435058, 0.013427712440490723, 0.01354751968383789, 0.013446144104003906, 0.013421567916870117, 0.013438976287841797, 0.013516799926757812, 0.013411328315734864, 0.013453311920166015, 0.013425663948059082, 0.013470720291137696, 0.013670399665832519, 0.013592576026916504, 0.013621248245239258, 0.013222911834716796, 0.013004799842834473, 0.013010944366455078, 0.013095935821533204, 0.013049856185913086, 0.01295462417602539, 0.01304576015472412, 0.012982272148132324, 0.013009920120239257, 0.013253631591796875, 0.013003775596618652, 0.013054976463317871, 0.013001728057861327, 0.013025279998779296, 0.012973055839538575, 0.012980223655700684, 0.012975104331970215, 0.013065216064453124, 0.013106176376342774, 0.012962847709655763, 0.012997599601745605, 0.012996607780456543, 0.01297715187072754, 0.013112319946289062, 0.012990464210510254, 0.012991488456726074, 0.013458432197570801, 0.014120960235595703, 0.013587455749511718, 0.01354751968383789, 0.013590592384338379, 0.013482975959777831, 0.013491168022155761, 0.013495295524597169, 0.013519871711730956, 0.013431808471679688, 0.013036543846130372, 0.013040639877319337, 0.01305395221710205, 0.013038592338562012, 0.013015040397644043, 0.013159423828125, 0.0130764799118042, 0.012963839530944824, 0.013020159721374512, 0.013017087936401368, 0.01305190372467041, 0.013035519599914551, 0.013007871627807617, 0.013000703811645508, 0.013073408126831054, 0.012992511749267579, 0.013024255752563477, 0.013050880432128906, 0.013031423568725586, 0.013008895874023438, 0.013090815544128418, 0.012990464210510254, 0.013002752304077148, 0.013019136428833008, 0.012982272148132324, 0.013024255752563477, 0.013376511573791505, 0.013181952476501465, 0.012942336082458495, 0.013001728057861327, 0.013014016151428222, 0.013124608039855956, 0.013042688369750977, 0.012967935562133789, 0.013018112182617187, 0.012995583534240723, 0.012995583534240723, 0.012997632026672363, 0.01305292797088623, 0.013049856185913086, 0.013027327537536621, 0.01297920036315918, 0.01298534393310547, 0.012990464210510254, 0.012963839530944824, 0.012969984054565429, 0.012976127624511719, 0.012894207954406739, 0.012951552391052246, 0.014414848327636719, 0.01396735954284668, 0.013974528312683105, 0.013479935646057128, 0.013544447898864746, 0.013616127967834473, 0.013603839874267578, 0.013640704154968262, 
0.013538304328918458, 0.013503487586975099, 0.013527039527893067, 0.013489151954650879, 0.013466624259948731, 0.013592576026916504, 0.013631487846374512, 0.013209600448608399, 0.01305395221710205, 0.012975104331970215, 0.01297715187072754, 0.013018112182617187, 0.013079551696777344, 0.013022208213806152, 0.012959744453430176, 0.012995583534240723, 0.01306112003326416, 0.013504511833190918, 0.013587455749511718, 0.01307545566558838, 0.013023232460021973, 0.013114368438720703, 0.013035519599914551, 0.013027327537536621, 0.013001728057861327, 0.013002752304077148, 0.013027327537536621, 0.012999679565429688, 0.012964863777160645, 0.013150208473205567, 0.013049856185913086, 0.013017087936401368, 0.012791808128356934, 0.01286143970489502, 0.013087743759155274, 0.012994624137878419, 0.013056960105895996, 0.013023232460021973, 0.013064191818237305, 0.013035519599914551, 0.013020159721374512, 0.013007871627807617, 0.01307545566558838, 0.013012991905212403, 0.013046784400939941, 0.013001728057861327, 0.012967935562133789, 0.01307545566558838, 0.012991488456726074, 0.013010944366455078, 0.013048831939697265, 0.013007871627807617, 0.013079551696777344, 0.013085696220397949, 0.013001728057861327, 0.012965888023376464, 0.013050880432128906, 0.013055999755859376, 0.013020159721374512, 0.013015040397644043, 0.013003775596618652, 0.012999679565429688, 0.013134847640991211, 0.012981247901916505, 0.013074432373046875, 0.01307750415802002, 0.01298739242553711, 0.012822527885437012, 0.01285529613494873, 0.012752896308898925, 0.012818431854248047, 0.013026304244995117, 0.013034496307373047, 0.01296895980834961, 0.013031423568725586, 0.01297920036315918, 0.012975104331970215, 0.012992511749267579, 0.012992511749267579, 0.012940287590026855, 0.012935168266296386, 0.012713983535766601, 0.012705792427062988, 0.012720128059387208, 0.012930047988891602, 0.012966912269592285, 0.012947456359863281, 0.01284812831878662, 0.012914688110351562, 0.01297920036315918, 0.012997632026672363, 0.012931072235107421, 0.013090815544128418, 0.012956671714782715, 0.014233599662780762, 0.01345638370513916, 0.013534208297729493, 0.0134901762008667, 0.013560832023620606, 0.013512703895568847, 0.013596672058105469, 0.013528063774108886, 0.013659135818481445, 0.013496319770812988, 0.01344921588897705, 0.013727744102478028, 0.013570048332214356, 0.0134901762008667, 0.013674495697021484, 0.013634559631347656, 0.01346560001373291, 0.01435750389099121, 0.013913087844848633, 0.013460479736328124, 0.013516799926757812, 0.01325875186920166, 0.013212672233581543, 0.013240320205688477, 0.013107199668884278, 0.013115391731262208, 0.01337241554260254, 0.013480959892272949, 0.013631487846374512, 0.013522944450378417, 0.013511679649353027, 0.013543423652648925, 0.013516799926757812, 0.013454336166381836, 0.013518848419189454, 0.01345740795135498, 0.013409279823303222, 0.013064191818237305, 0.013038592338562012, 0.013048831939697265, 0.013225983619689942, 0.013644800186157227, 0.014812159538269042, 0.013722623825073242, 0.013238271713256837, 0.013100031852722169, 0.012955648422241211, 0.012994560241699218, 0.013012991905212403, 0.012990464210510254, 0.013019136428833008, 0.012923904418945312, 0.01296076774597168, 0.013036543846130372, 0.012988415718078614, 0.013130751609802246, 0.012998656272888183, 0.013128704071044921, 0.013095935821533204, 0.012992511749267579, 0.012990464210510254, 0.013023232460021973, 0.012975104331970215, 0.013006848335266113, 0.013517824172973633, 0.013225983619689942, 0.013203455924987792, 0.013004799842834473, 
0.012951552391052246, 0.013065216064453124, 0.01306931209564209, 0.013090815544128418, 0.012989439964294434, 0.013074432373046875, 0.012959744453430176, 0.013087743759155274, 0.013020159721374512, 0.013017087936401368, 0.01295462417602539, 0.013039615631103516, 0.013000703811645508, 0.013055999755859376, 0.013055999755859376, 0.012969984054565429, 0.013029376029968261, 0.012911616325378418, 0.012906496047973632, 0.013003775596618652, 0.012969984054565429, 0.013011967658996582, 0.012992511749267579, 0.013007871627807617, 0.012964863777160645, 0.01297715187072754, 0.01306009578704834, 0.013032511711120605, 0.012970944404602051, 0.012965888023376464, 0.012981247901916505, 0.012931072235107421, 0.01315225601196289, 0.013513728141784668, 0.013575167655944824, 0.01345638370513916, 0.013507583618164062, 0.013409279823303222, 0.01358028793334961, 0.013419520378112794, 0.013433856010437012, 0.013494272232055664, 0.013212672233581543, 0.01307852840423584, 0.012986368179321289, 0.012963839530944824, 0.013137920379638672, 0.013557760238647461, 0.013501440048217773, 0.013524991989135742, 0.013487104415893555, 0.013411328315734864, 0.01344921588897705, 0.013462528228759766, 0.01345740795135498, 0.013407232284545899, 0.013617152214050294, 0.013678591728210449, 0.013587455749511718, 0.013294591903686523, 0.013271039962768554, 0.01308672046661377, 0.013029376029968261, 0.013162495613098145, 0.013014016151428222, 0.013203455924987792, 0.013011967658996582, 0.01305907154083252, 0.012957695960998536, 0.013022208213806152, 0.013509632110595703, 0.013496352195739745, 0.013479904174804687, 0.013501440048217773, 0.013470720291137696, 0.013092864036560058, 0.013094911575317383, 0.013011967658996582, 0.013020159721374512, 0.013010944366455078, 0.013016063690185547, 0.012898303985595704, 0.013167615890502929, 0.013065216064453124, 0.013041664123535156, 0.013039615631103516, 0.013004799842834473, 0.012989439964294434, 0.013027327537536621, 0.013096960067749023, 0.01298739242553711, 0.013008895874023438, 0.013156352043151855, 0.013023232460021973, 0.013042688369750977, 0.013036543846130372, 0.01306214427947998, 0.013033472061157226, 0.012990464210510254, 0.01316044807434082, 0.013210623741149903, 0.012899328231811523, 0.012996607780456543, 0.013039615631103516, 0.013035519599914551, 0.013122559547424317, 0.013020159721374512, 0.01298534393310547, 0.013101056098937988, 0.013005824089050292, 0.01296895980834961, 0.013011967658996582, 0.012978176116943359, 0.013009920120239257, 0.012966912269592285, 0.013317119598388672, 0.013641728401184081, 0.013525055885314942, 0.013526975631713867, 0.013496319770812988, 0.013378560066223144, 0.013480959892272949, 0.013500415802001953, 0.013224960327148438, 0.013173760414123535, 0.013036543846130372, 0.012982272148132324, 0.013010944366455078, 0.01307545566558838, 0.012983296394348144, 0.012972031593322754, 0.012988415718078614, 0.01297100830078125, 0.012990464210510254, 0.013114368438720703, 0.013039615631103516, 0.013122559547424317, 0.012959744453430176, 0.012942336082458495, 0.012919808387756348, 0.013007871627807617, 0.013009920120239257, 0.0129617919921875, 0.012949503898620606, 0.013016063690185547, 0.012967935562133789, 0.012990464210510254, 0.012940287590026855, 0.013001728057861327, 0.013066240310668945, 0.01308672046661377, 0.012956671714782715, 0.013017087936401368, 0.01297100830078125, 0.012989439964294434, 0.012976127624511719, 0.013025279998779296, 0.012958720207214355, 0.013009920120239257, 0.012928000450134277, 0.012890111923217774, 0.012992511749267579, 
0.01308672046661377, 0.012973055839538575, 0.013159423828125, 0.012953599929809571, 0.012955648422241211, 0.013120512008666992, 0.01305190372467041, 0.01298739242553711, 0.013015040397644043, 0.01297100830078125, 0.012984319686889649, 0.012957695960998536, 0.012965888023376464, 0.012992511749267579, 0.013023232460021973, 0.012956671714782715, 0.012965888023376464, 0.01294643211364746, 0.01298739242553711, 0.013031423568725586, 0.013079551696777344, 0.012993535995483398, 0.012992511749267579, 0.013902848243713378, 0.01412611198425293, 0.013640671730041505, 0.013552639961242676, 0.013796352386474609, 0.013565952301025391, 0.013686783790588379, 0.013254655838012695, 0.013137920379638672, 0.012969984054565429, 0.01353932762145996, 0.013590527534484862, 0.013493247985839844, 0.013519871711730956, 0.01347481632232666, 0.013544447898864746, 0.013541376113891602, 0.013528063774108886, 0.013495295524597169, 0.013557760238647461, 0.013500415802001953, 0.013575167655944824, 0.013501440048217773, 0.013511679649353027, 0.013565952301025391, 0.01346457576751709, 0.013566975593566894, 0.013570048332214356, 0.01348300838470459, 0.0135731201171875, 0.013496319770812988, 0.013424639701843261, 0.01348300838470459, 0.01346560001373291, 0.013523967742919921, 0.013571071624755859, 0.013678591728210449, 0.013536255836486816, 0.013470720291137696, 0.013433856010437012, 0.013607935905456543, 0.013505536079406738, 0.013540351867675781, 0.013510656356811524, 0.013567999839782715, 0.013510656356811524, 0.013708288192749024, 0.013842432022094727, 0.01353932762145996, 0.013587519645690918, 0.013593536376953125, 0.013615103721618652, 0.013537280082702637, 0.013431808471679688, 0.013453311920166015, 0.013480959892272949, 0.01345638370513916, 0.013466624259948731, 0.013493247985839844, 0.013467647552490235, 0.013546496391296388, 0.013384703636169434, 0.013428735733032226]",tokens/s,75.61381490681347,,,1,64,1,,, -4bit-awq-exllama-v1-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,tiiuae/falcon-7b,tiiuae/falcon-7b,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File 
""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - self.load_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 559, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3704, in from_pretrained - config = cls._autoset_attn_implementation( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1481, in _autoset_attn_implementation - cls._check_and_enable_flash_attn_2( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1572, in _check_and_enable_flash_attn_2 - raise ValueError( -ValueError: FalconForCausalLM does not support Flash Attention 2.0 yet. Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmp5sj3r5_q/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,1,64,1,,, -4bit-awq-exllama-v1-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,huggyllama/llama-65b,huggyllama/llama-65b,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - self.load_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3904, in 
from_pretrained - dispatch_model(model, **device_map_kwargs) - File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 489, in dispatch_model - model.to(device) - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 2796, in to - return super().to(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1173, in to - return self._apply(convert) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 779, in _apply - module._apply(fn) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 779, in _apply - module._apply(fn) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 779, in _apply - module._apply(fn) - [Previous line repeated 2 more times] - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 853, in _apply - self._buffers[key] = fn(buf) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1159, in convert - return t.to( -torch.cuda.OutOfMemoryError: CUDA out of memory. Tried to allocate 86.00 MiB. GPU - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,1,64,1,,, -4bit-awq-exllama-v1-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,1,1,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 304, in hf_raise_for_status - response.raise_for_status() - File ""/usr/local/lib/python3.10/dist-packages/requests/models.py"", line 1024, in raise_for_status - raise HTTPError(http_error_msg, response=self) -requests.exceptions.HTTPError: 404 Client Error: Not Found for url: https://huggingface.co/1/resolve/main/config.json - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1221, in hf_hub_download - return _hf_hub_download_to_cache_dir( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1325, in 
_hf_hub_download_to_cache_dir - _raise_on_head_call_error(head_call_error, force_download, local_files_only) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1823, in _raise_on_head_call_error - raise head_call_error - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1722, in _get_metadata_or_catch_error - metadata = get_hf_file_metadata(url=url, proxies=proxies, timeout=etag_timeout, headers=headers) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1645, in get_hf_file_metadata - r = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 372, in _request_wrapper - response = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 396, in _request_wrapper - hf_raise_for_status(response) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status - raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-669495df-2cbdc8593eea08c73e900421;367ac5f9-0f91-4fe6-bec2-088720c44ad8) - -Repository Not Found for url: https://huggingface.co/1/resolve/main/config.json. -Please make sure you specified the correct `repo_id` and `repo_type`. -If you are trying to access a private or gated repo, make sure you are authenticated. - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 425, in cached_file - raise EnvironmentError( -OSError: 1 is not a local folder and is not a valid model identifier listed on 'https://huggingface.co/models' -If this is a private repository, make sure to pass a token having permission to this repo either by logging in with `huggingface-cli login` or by passing `token=` - 
-",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,1,64,1,,, -4bit-awq-exllama-v1-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,internlm/internlm-20b,internlm/internlm-20b,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - self.load_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 559, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3907, in from_pretrained - hf_quantizer.postprocess_model(model) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/base.py"", line 195, in postprocess_model - return self._process_model_after_weight_loading(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/quantizer_awq.py"", line 107, in _process_model_after_weight_loading - model = post_init_awq_exllama_modules(model, self.quantization_config.exllama_config) - File ""/usr/local/lib/python3.10/dist-packages/transformers/integrations/awq.py"", line 462, in post_init_awq_exllama_modules - model = exllama_post_init(model) - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllama.py"", line 144, in exllama_post_init - submodule.post_init() - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllama.py"", line 77, in post_init - self.q4 = exl_ext.make_q4( -NameError: name 'exl_ext' is not defined - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,1,64,1,,, 
-4bit-awq-exllama-v1-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,internlm/internlm2-20b,internlm/internlm2-20b,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - self.load_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 559, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3907, in from_pretrained - hf_quantizer.postprocess_model(model) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/base.py"", line 195, in postprocess_model - return self._process_model_after_weight_loading(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/quantizer_awq.py"", line 107, in _process_model_after_weight_loading - model = post_init_awq_exllama_modules(model, self.quantization_config.exllama_config) - File ""/usr/local/lib/python3.10/dist-packages/transformers/integrations/awq.py"", line 462, in post_init_awq_exllama_modules - model = exllama_post_init(model) - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllama.py"", line 144, in exllama_post_init - submodule.post_init() - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllama.py"", line 77, in post_init - self.q4 = exl_ext.make_q4( -NameError: name 'exl_ext' is not defined - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,1,64,1,,, 
-4bit-awq-exllama-v1-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,facebook/opt-30b,facebook/opt-30b,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - self.load_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3907, in from_pretrained - hf_quantizer.postprocess_model(model) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/base.py"", line 195, in postprocess_model - return self._process_model_after_weight_loading(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/quantizer_awq.py"", line 107, in _process_model_after_weight_loading - model = post_init_awq_exllama_modules(model, self.quantization_config.exllama_config) - File ""/usr/local/lib/python3.10/dist-packages/transformers/integrations/awq.py"", line 462, in post_init_awq_exllama_modules - model = exllama_post_init(model) - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllama.py"", line 144, in exllama_post_init - submodule.post_init() - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllama.py"", line 77, in post_init - self.q4 = exl_ext.make_q4( -NameError: name 'exl_ext' is not defined - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,1,64,1,,, 
-4bit-awq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,Qwen/Qwen1.5-MoE-A2.7B,Qwen/Qwen1.5-MoE-A2.7B,cuda,0,42,,,True,,,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run - report = scenario.run(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run - self.run_model_loading_tracking(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking - backend.load() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load - self.load_transformers_model() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model - self.load_transformers_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights - self.load_transformers_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained - self.pretrained_model = self.automodel_loader.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4037, in from_pretrained - hf_quantizer.postprocess_model(model) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/base.py"", line 195, in postprocess_model - return self._process_model_after_weight_loading(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/quantizer_awq.py"", line 107, in _process_model_after_weight_loading - model = post_init_awq_exllama_modules(model, self.quantization_config.exllama_config) - File ""/usr/local/lib/python3.10/dist-packages/transformers/integrations/awq.py"", line 462, in post_init_awq_exllama_modules - model = exllama_post_init(model) - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllama.py"", line 146, in exllama_post_init - submodule.post_init() - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllama.py"", line 79, in post_init - self.q4 = exl_ext.make_q4( -RuntimeError: scales and qweight have 
incompatible shapes - -",qwen2_moe,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,1,64,1,,, -4bit-awq-exllama-v1-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,mistralai/Mixtral-8x7B-v0.1,mistralai/Mixtral-8x7B-v0.1,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - self.load_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3904, in from_pretrained - dispatch_model(model, **device_map_kwargs) - File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 489, in dispatch_model - model.to(device) - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 2796, in to - return super().to(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1173, in to - return self._apply(convert) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 779, in _apply - module._apply(fn) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 779, in _apply - module._apply(fn) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 779, in _apply - module._apply(fn) - [Previous line repeated 2 more times] - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 853, in _apply - self._buffers[key] = fn(buf) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1159, in convert - return t.to( -torch.cuda.OutOfMemoryError: CUDA out of memory. Tried to allocate 20.00 MiB. 
GPU - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,1,64,1,,, -4bit-awq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,EleutherAI/pythia-410m,EleutherAI/pythia-410m,cuda,0,42,,,True,,,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,gpt_neox,MB,1065.71776,1200.095232,0.0,570.425344,525.840896,s,1,7.75338818359375,7.75338818359375,0.0,7.75338818359375,7.75338818359375,7.75338818359375,7.75338818359375,[7.75338818359375],,kWh,9.693106088890445e-06,5.296258769312481e-06,1.2844454719995735e-05,2.783381957819866e-05,,MB,1455.747072,1244.135424,0.0,597.68832,584.940544,s,10,0.6401412467956543,0.06401412467956544,2.214395008882896e-05,0.0640129280090332,0.06404741439819336,0.06405069999694823,0.06405332847595215,"[0.06400358581542968, 0.06400911712646484, 0.06400911712646484, 0.06401929473876954, 0.06404668426513672, 0.06401673889160156, 0.06398252868652343, 0.06405398559570312, 0.0639818229675293, 0.06401837158203125]",tokens/s,3999.1174023148083,kWh,7.561434521452193e-07,4.1428604276022867e-07,4.258388049311805e-06,5.428817544217253e-06,tokens/kWh,47155756.83192555,MB,1490.853888,1258.815488,0.0,612.368384,597.290496,s,10,11.129281372070313,1.1129281372070312,0.009755869218219944,1.1120055541992189,1.1196916015624998,1.1276762451171876,1.1340639599609377,"[1.1086903076171875, 1.09870751953125, 1.1062462158203126, 1.117917236328125, 1.11532080078125, 1.11746435546875, 1.135660888671875, 1.1172476806640625, 1.1075418701171875, 1.1044844970703125]",tokens/s,56.60742854260364,kWh,1.312961498785518e-05,7.194650786322663e-06,2.18752439686932e-05,4.2199509742871035e-05,tokens/kWh,1492908.3390747895,,s,630,11.124627464294438,0.01765813883221339,0.00036550659982543296,0.01748684883117676,0.018101759719848634,0.018248806285858156,0.01888953313827515,"[0.01718272018432617, 0.01738137626647949, 0.018149375915527344, 0.01805516815185547, 0.01801318359375, 0.017983488082885742, 0.017987583160400392, 0.017961984634399415, 0.01803468894958496, 0.01804800033569336, 0.017758207321166994, 0.017480703353881837, 0.01742438316345215, 0.017373184204101562, 0.017321983337402345, 0.017449983596801756, 0.01757900810241699, 0.017796096801757814, 0.01802239990234375, 0.018084863662719726, 0.01807161521911621, 0.01735980796813965, 0.017359872817993165, 0.01739366340637207, 0.017352703094482422, 0.017376256942749024, 0.017467391967773437, 0.017302528381347656, 0.017217536926269532, 0.017391616821289063, 0.01741619110107422, 0.017345535278320313, 0.01741312026977539, 0.017345535278320313, 0.01746124839782715, 0.017399808883666993, 0.017373184204101562, 0.017449983596801756, 0.01739673614501953, 0.01741312026977539, 0.01743667221069336, 0.01755036735534668, 0.018182111740112306, 0.018168832778930662, 0.018314239501953124, 0.018324480056762696, 0.017984512329101563, 0.017434623718261717, 0.017487871170043946, 0.01747865676879883, 0.017434623718261717, 0.01745715141296387, 0.017270816802978515, 0.017353696823120116, 0.017509376525878906, 0.0174202880859375, 
0.017314815521240236, 0.017443840026855468, 0.017438720703125, 0.017491968154907226, 0.017509376525878906, 0.0174335994720459, 0.01739263916015625, 0.01741619110107422, 0.017497087478637697, 0.01741312026977539, 0.01741414451599121, 0.017246208190917968, 0.01745510482788086, 0.01742848014831543, 0.0174202880859375, 0.017488895416259767, 0.01742131233215332, 0.01739571189880371, 0.01745305633544922, 0.017376256942749024, 0.01745715141296387, 0.01742540740966797, 0.01744895935058594, 0.017484800338745117, 0.01743052864074707, 0.017391616821289063, 0.01746124839782715, 0.01742848014831543, 0.01740185546875, 0.017497152328491212, 0.017478591918945314, 0.017497087478637697, 0.017378303527832033, 0.0174202880859375, 0.01745408058166504, 0.017307647705078123, 0.0172359676361084, 0.017273855209350587, 0.01717558479309082, 0.017181663513183593, 0.017059839248657227, 0.01721036720275879, 0.017318912506103516, 0.01755340766906738, 0.017508352279663086, 0.01798249626159668, 0.017821664810180663, 0.017336320877075196, 0.017504255294799806, 0.017511423110961915, 0.017375232696533204, 0.017201152801513672, 0.01744179153442383, 0.01740185546875, 0.017367040634155274, 0.01743974494934082, 0.017707008361816406, 0.01763020706176758, 0.017506303787231444, 0.01745715141296387, 0.017438720703125, 0.017524736404418945, 0.017451007843017577, 0.0174335994720459, 0.017811456680297853, 0.017514495849609374, 0.01740595245361328, 0.017406976699829102, 0.01740902328491211, 0.017261568069458007, 0.017293312072753905, 0.01740083122253418, 0.017354751586914064, 0.01744179153442383, 0.017325056076049804, 0.017329151153564454, 0.017350656509399414, 0.017435647964477538, 0.01721139144897461, 0.017108991622924806, 0.01736396789550781, 0.017344512939453126, 0.017316864013671874, 0.017353727340698243, 0.017359872817993165, 0.01739673614501953, 0.01741312026977539, 0.01740083122253418, 0.017361919403076173, 0.017315839767456053, 0.01734758377075195, 0.017369087219238282, 0.017358848571777344, 0.017325056076049804, 0.017315839767456053, 0.017399808883666993, 0.01742233657836914, 0.017737728118896484, 0.018033664703369142, 0.018229248046875, 0.018522111892700196, 0.01864499282836914, 0.01818009567260742, 0.018082815170288084, 0.017986560821533205, 0.018018304824829103, 0.018075647354125975, 0.017978368759155275, 0.01799679946899414, 0.017906688690185548, 0.017588224411010742, 0.01743155288696289, 0.01740492820739746, 0.017502208709716797, 0.017475584030151366, 0.017500160217285156, 0.017434623718261717, 0.01762713623046875, 0.017524736404418945, 0.01738444709777832, 0.017467391967773437, 0.01743769645690918, 0.017353727340698243, 0.017332223892211913, 0.017460224151611328, 0.017532928466796875, 0.017530879974365234, 0.017487871170043946, 0.017489919662475584, 0.01745305633544922, 0.017476608276367187, 0.01748684883117676, 0.017597440719604493, 0.017055744171142577, 0.017374208450317383, 0.017321983337402345, 0.017408000946044923, 0.017386495590209963, 0.017398784637451172, 0.017426431655883787, 0.017442815780639647, 0.017357824325561523, 0.01737215995788574, 0.01743257522583008, 0.01741823959350586, 0.0174335994720459, 0.01745305633544922, 0.017562623977661132, 0.017426431655883787, 0.01742848014831543, 0.017753087997436523, 0.01826918411254883, 0.018062335968017578, 0.01801318359375, 0.01801523208618164, 0.018395135879516602, 0.0187064323425293, 0.018323455810546875, 0.01803468894958496, 0.017716224670410157, 0.017456127166748048, 0.017308671951293944, 0.017299455642700197, 0.017648639678955077, 0.01743667221069336, 
0.017353727340698243, 0.017346559524536134, 0.017339391708374022, 0.017346559524536134, 0.01789952087402344, 0.01840947151184082, 0.018330623626708984, 0.018083839416503905, 0.018136064529418947, 0.01804083251953125, 0.017969152450561524, 0.01761587142944336, 0.01739673614501953, 0.01740083122253418, 0.017359872817993165, 0.017391616821289063, 0.017735679626464843, 0.017862655639648437, 0.01865830421447754, 0.018395135879516602, 0.01804697608947754, 0.018085887908935547, 0.0183767032623291, 0.01836953544616699, 0.018030624389648437, 0.01793840026855469, 0.018249727249145507, 0.017649663925170898, 0.01744691276550293, 0.01741107177734375, 0.01763839912414551, 0.01725132751464844, 0.017464319229125978, 0.018214912414550782, 0.017803264617919923, 0.01726464080810547, 0.01741107177734375, 0.01761587142944336, 0.01904025650024414, 0.018373632431030275, 0.017562623977661132, 0.01740083122253418, 0.01740595245361328, 0.017521696090698244, 0.017727455139160155, 0.018112512588500978, 0.017664031982421877, 0.017482784271240233, 0.01750009536743164, 0.017456127166748048, 0.017296415328979492, 0.017574880599975588, 0.017494016647338868, 0.017499135971069335, 0.017505279541015627, 0.018184192657470705, 0.01799782371520996, 0.01820364761352539, 0.018082815170288084, 0.01819443130493164, 0.01803878402709961, 0.017663999557495116, 0.017529855728149413, 0.017468416213989257, 0.01746227264404297, 0.017456127166748048, 0.017548288345336914, 0.01748684883117676, 0.017477632522583008, 0.017406976699829102, 0.01738956832885742, 0.017440799713134766, 0.018018272399902342, 0.018112512588500978, 0.01800704002380371, 0.018061344146728515, 0.017983455657958985, 0.018129919052124025, 0.018085887908935547, 0.018061311721801757, 0.017904640197753906, 0.017622016906738282, 0.017333248138427734, 0.017993728637695314, 0.017934335708618163, 0.017501184463500977, 0.017373184204101562, 0.017330175399780275, 0.01756979179382324, 0.01757798385620117, 0.01748684883117676, 0.01742848014831543, 0.017300479888916014, 0.017374208450317383, 0.017098751068115235, 0.017362943649291994, 0.0173885440826416, 0.01742336082458496, 0.01747865676879883, 0.017707008361816406, 0.018115583419799804, 0.0175994873046875, 0.01742131233215332, 0.01797427177429199, 0.018892799377441406, 0.018149375915527344, 0.01803264045715332, 0.017926143646240233, 0.01799782371520996, 0.01801523208618164, 0.01796403121948242, 0.0180316162109375, 0.01780019187927246, 0.01757798385620117, 0.018009088516235353, 0.017491968154907226, 0.019382272720336914, 0.018525184631347655, 0.01808076858520508, 0.017755136489868165, 0.017451007843017577, 0.01745305633544922, 0.01743052864074707, 0.017368064880371094, 0.01742848014831543, 0.018298879623413086, 0.018126848220825196, 0.017999872207641602, 0.01798860740661621, 0.01800499153137207, 0.017947647094726564, 0.017597440719604493, 0.01741926383972168, 0.01748684883117676, 0.017483776092529296, 0.017552383422851564, 0.01807257652282715, 0.01805516815185547, 0.017914880752563478, 0.017491968154907226, 0.01740902328491211, 0.017466367721557616, 0.017466367721557616, 0.017538047790527343, 0.017333248138427734, 0.01739263916015625, 0.017468416213989257, 0.017589248657226563, 0.017516544342041016, 0.017467391967773437, 0.017477632522583008, 0.017537023544311522, 0.01745305633544922, 0.017489919662475584, 0.017492992401123047, 0.017503231048583985, 0.0175994873046875, 0.019557376861572266, 0.01961676788330078, 0.018440191268920898, 0.01803878402709961, 0.01804902458190918, 0.017923072814941408, 0.017885183334350584, 
0.01861631965637207, 0.018000896453857423, 0.017500160217285156, 0.01745510482788086, 0.017456127166748048, 0.017332223892211913, 0.017456127166748048, 0.017361919403076173, 0.01822003173828125, 0.01804083251953125, 0.018052095413208007, 0.018101247787475586, 0.017999872207641602, 0.01801215934753418, 0.01801318359375, 0.018001920700073244, 0.018358272552490236, 0.018257919311523436, 0.01800396728515625, 0.018096128463745118, 0.018083839416503905, 0.017910783767700195, 0.01804595184326172, 0.01801625633239746, 0.01796403121948242, 0.018143232345581056, 0.019340288162231444, 0.018226175308227538, 0.018052095413208007, 0.018082815170288084, 0.018118656158447266, 0.018076671600341796, 0.018044927597045898, 0.018123775482177733, 0.0182476806640625, 0.01808076858520508, 0.017516544342041016, 0.017498111724853514, 0.017554431915283202, 0.017469440460205078, 0.01736195182800293, 0.017413087844848633, 0.01741823959350586, 0.017726463317871095, 0.017920000076293945, 0.01743052864074707, 0.01739571189880371, 0.01792515182495117, 0.019176416397094727, 0.018290687561035156, 0.01840742492675781, 0.01798963165283203, 0.018153472900390624, 0.01801318359375, 0.018078720092773438, 0.01802956771850586, 0.017250335693359375, 0.01804591941833496, 0.017992704391479493, 0.01766912078857422, 0.017510400772094727, 0.01741823959350586, 0.017710079193115236, 0.018008064270019532, 0.0176312313079834, 0.01767628860473633, 0.01799577522277832, 0.01802239990234375, 0.01804697608947754, 0.018119680404663087, 0.01801215934753418, 0.01785958480834961, 0.01797222328186035, 0.017967103958129883, 0.018107391357421874, 0.017736703872680663, 0.01797635269165039, 0.017999839782714844, 0.01762918472290039, 0.01744179153442383, 0.017352703094482422, 0.017929216384887696, 0.017489919662475584, 0.01795686340332031, 0.018124799728393554, 0.017969152450561524, 0.018086912155151368, 0.018164735794067383, 0.01801420783996582, 0.018010112762451173, 0.01790771293640137, 0.017408000946044923, 0.017447935104370118, 0.0174202880859375, 0.01738035202026367, 0.017386495590209963, 0.017438720703125, 0.017480703353881837, 0.01747865676879883, 0.01742131233215332, 0.01739366340637207, 0.017483776092529296, 0.018066432952880858, 0.018143232345581056, 0.01798860740661621, 0.018076671600341796, 0.01802649688720703, 0.01777667236328125, 0.017490911483764648, 0.01786675262451172, 0.01744895935058594, 0.017390592575073242, 0.017500160217285156, 0.01743155288696289, 0.017335296630859375, 0.01741721534729004, 0.01743667221069336, 0.017458175659179686, 0.01739776039123535, 0.018881536483764647, 0.017544191360473634, 0.01743667221069336, 0.018008064270019532, 0.01798860740661621, 0.017686559677124025, 0.018015199661254883, 0.018033664703369142, 0.017944576263427735, 0.018050048828125, 0.01784115219116211, 0.01744486427307129, 0.017299455642700197, 0.01738035202026367, 0.01741926383972168, 0.01741619110107422, 0.01741414451599121, 0.017447935104370118, 0.017360895156860352, 0.017336320877075196, 0.017715200424194336, 0.01801318359375, 0.01801215934753418, 0.017829887390136717, 0.01741414451599121, 0.017332223892211913, 0.017502208709716797, 0.017426431655883787, 0.017383424758911133, 0.01740185546875, 0.01740492820739746, 0.017350656509399414, 0.017376256942749024, 0.01749504089355469, 0.017293312072753905, 0.01743667221069336, 0.017458175659179686, 0.017554431915283202, 0.018183168411254884, 0.01760051155090332, 0.017442815780639647, 0.017475584030151366, 0.01740595245361328, 0.017435647964477538, 0.01742848014831543, 0.017474559783935546, 
0.01738035202026367, 0.01738751983642578, 0.017257471084594727, 0.017391616821289063, 0.01741823959350586, 0.01760665512084961, 0.017539072036743163, 0.017317888259887695, 0.01739468765258789, 0.017473535537719728, 0.01738956832885742, 0.01741312026977539, 0.017341440200805663, 0.017318912506103516, 0.017941503524780272, 0.018041856765747072, 0.01764556884765625, 0.017542144775390626, 0.01738956832885742, 0.017374208450317383, 0.01740287971496582, 0.017386495590209963, 0.01744691276550293, 0.01744895935058594, 0.01757798385620117, 0.01741926383972168, 0.017366016387939453, 0.017303552627563477, 0.01744179153442383, 0.01739785575866699, 0.01742223930358887, 0.017488895416259767, 0.01741619110107422, 0.017485824584960938, 0.017378303527832033, 0.017490943908691405, 0.017479679107666016, 0.01746124839782715, 0.017385471343994142, 0.01765068817138672, 0.018106367111206053, 0.017795072555541993, 0.01755340766906738, 0.01739468765258789, 0.017383424758911133, 0.017905664443969727, 0.01764556884765625, 0.017954816818237306, 0.017802240371704102, 0.017395744323730467, 0.01734550476074219, 0.01740595245361328, 0.017352703094482422, 0.017474559783935546, 0.01738956832885742, 0.01740390396118164, 0.017287168502807617, 0.017367040634155274, 0.017386495590209963, 0.017386495590209963, 0.0173885440826416, 0.017352703094482422, 0.017459199905395507, 0.017336320877075196, 0.017314847946166993, 0.01790153694152832, 0.01799884796142578, 0.018009088516235353, 0.018156543731689453, 0.01803878402709961, 0.017770496368408203, 0.017675264358520508, 0.01744076728820801, 0.017494016647338868, 0.017508352279663086, 0.01744076728820801, 0.017466367721557616, 0.01744486427307129, 0.017459199905395507, 0.01748684883117676]",tokens/s,56.631109852626146,,,1,64,1,,, -4bit-awq-exllama-v1-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,databricks/dbrx-base,databricks/dbrx-base,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 304, in hf_raise_for_status - response.raise_for_status() - File ""/usr/local/lib/python3.10/dist-packages/requests/models.py"", line 1024, in raise_for_status - raise HTTPError(http_error_msg, response=self) -requests.exceptions.HTTPError: 403 Client Error: Forbidden for url: https://huggingface.co/databricks/dbrx-base/resolve/main/config.json - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File 
""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1722, in _get_metadata_or_catch_error - metadata = get_hf_file_metadata(url=url, proxies=proxies, timeout=etag_timeout, headers=headers) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1645, in get_hf_file_metadata - r = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 372, in _request_wrapper - response = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 396, in _request_wrapper - hf_raise_for_status(response) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 367, in hf_raise_for_status - raise HfHubHTTPError(message, response=response) from e -huggingface_hub.utils._errors.HfHubHTTPError: (Request ID: Root=1-66947b61-3dd454f06d4669706371a771;9097486f-9799-43ca-9c32-0c49eeb86a6c) - -403 Forbidden: Please enable access to public gated repositories in your fine-grained token settings to view this repository.. -Cannot access content at: https://huggingface.co/databricks/dbrx-base/resolve/main/config.json. -If you are trying to create or update content,make sure you have a token with the `write` role. - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1221, in hf_hub_download - return _hf_hub_download_to_cache_dir( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1325, in _hf_hub_download_to_cache_dir - _raise_on_head_call_error(head_call_error, force_download, local_files_only) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1826, in _raise_on_head_call_error - raise LocalEntryNotFoundError( -huggingface_hub.utils._errors.LocalEntryNotFoundError: An error happened while trying to locate the file on the Hub and we cannot find the requested files in the local cache. Please check your connection and try again or make sure your Internet connection is on. 
- -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 445, in cached_file - raise EnvironmentError( -OSError: We couldn't connect to 'https://huggingface.co' to load this file, couldn't find it in the cached files and it looks like databricks/dbrx-base is not the path to a directory containing a file named config.json. -Checkout your internet connection or see how to run the library in offline mode at 'https://huggingface.co/docs/transformers/installation#offline-mode'. 
- -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,1,64,1,,, -4bit-awq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,Qwen/Qwen1.5-7B,Qwen/Qwen1.5-7B,cuda,0,42,,,True,,,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,qwen2,MB,4918.145024,8141.668352,0.0,7511.998464,6895.682048,s,1,11.0723310546875,11.0723310546875,0.0,11.0723310546875,11.0723310546875,11.0723310546875,11.0723310546875,[11.0723310546875],,kWh,4.8151171945836625e-05,2.6374611308649627e-05,8.382812261797934e-05,0.0001583539058724656,,MB,2722.902016,8160.54272,0.0,7514.095616,6822.141952,s,10,12.405833374023437,1.2405833374023438,0.0005011143962875515,1.2406719360351564,1.2412067749023437,1.2412671447753907,1.241315440673828,"[1.2413275146484375, 1.240769287109375, 1.240200927734375, 1.240961181640625, 1.240897705078125, 1.2405745849609375, 1.2401065673828124, 1.241193359375, 1.2398531494140625, 1.2399490966796876]",tokens/s,206.35453683912775,kWh,1.4653978406459678e-05,8.030039884948445e-06,8.626043011940032e-05,0.00010894444841080845,tokens/kWh,2349821.434082382,MB,2727.165952,8162.639872,0.0,7516.192768,6822.144512,s,10,19.54996875,1.9549968750000002,0.007867678298945054,1.955849365234375,1.9627544555664063,1.965955047607422,1.9685155212402343,"[1.9575081787109374, 1.955779541015625, 1.955919189453125, 1.9463756103515626, 1.9691556396484375, 1.955599853515625, 1.94757373046875, 1.962043212890625, 1.9404801025390626, 1.95953369140625]",tokens/s,32.22511544935334,kWh,2.3154480176803842e-05,1.2687765777620784e-05,6.804083221040315e-05,0.0001038830781648278,tokens/kWh,606451.0323812317,,s,630,19.54795221328737,0.03102849557664659,0.0005466639212431549,0.030816767692565916,0.031776972198486325,0.03202252960205078,0.03278013492584229,"[0.031936511993408204, 0.03159859275817871, 0.031152128219604492, 0.03149516868591309, 0.03157811164855957, 0.0314019832611084, 0.03103436851501465, 0.030688255310058594, 0.030638080596923828, 0.03075584030151367, 0.031071231842041015, 0.03060736083984375, 0.030688255310058594, 0.030390272140502928, 0.030723072052001952, 0.03146240043640137, 0.031559680938720705, 0.030826496124267577, 0.030727167129516602, 0.030630943298339843, 0.03191190338134765, 0.031615999221801756, 0.031492095947265625, 0.031627264022827145, 0.030649343490600587, 0.030915584564208985, 0.03176652717590332, 0.03292364883422851, 0.031886335372924804, 0.03169177627563476, 0.03149516868591309, 0.03096063995361328, 0.031611904144287106, 0.03100467109680176, 0.03077631950378418, 0.031228927612304686, 0.03167436790466309, 0.03073843193054199, 0.030847999572753908, 0.03079475212097168, 0.03076710319519043, 0.030757888793945313, 0.03115827178955078, 0.030881792068481444, 0.030880767822265624, 0.030819328308105468, 0.030661632537841797, 0.030525440216064452, 0.03074355125427246, 0.03079475212097168, 0.030741504669189453, 0.030665727615356447, 0.03081113624572754, 0.031290367126464845, 0.030993408203125, 0.03081523132324219, 0.030709760665893555, 0.030697471618652345, 
0.030696447372436524, 0.03057766342163086, 0.030649343490600587, 0.03079680061340332, 0.03129548835754394, 0.031659008026123044, 0.03077939224243164, 0.030612480163574218, 0.030721023559570314, 0.03157811164855957, 0.030670848846435547, 0.030833663940429686, 0.031422464370727536, 0.03039641571044922, 0.03079270362854004, 0.030632959365844727, 0.03078758430480957, 0.03073843193054199, 0.030840831756591795, 0.031205375671386718, 0.03077836799621582, 0.03074764823913574, 0.030700544357299804, 0.03078860855102539, 0.03082035255432129, 0.03072204780578613, 0.030651391983032225, 0.030713855743408205, 0.030741504669189453, 0.030725120544433594, 0.031076383590698243, 0.033357791900634766, 0.03282022476196289, 0.03193548774719238, 0.03218841552734375, 0.03186073684692383, 0.03097395133972168, 0.030725183486938475, 0.030781375885009767, 0.030644224166870116, 0.030702592849731446, 0.03180748748779297, 0.030757888793945313, 0.03081625556945801, 0.030529535293579102, 0.03075481605529785, 0.030841856002807616, 0.030845951080322266, 0.03081523132324219, 0.030785535812377928, 0.030693376541137695, 0.030857215881347655, 0.030884864807128907, 0.031119424819946288, 0.031773632049560546, 0.03078656005859375, 0.030900224685668946, 0.030680063247680665, 0.030649343490600587, 0.030870527267456056, 0.03085312080383301, 0.030737407684326173, 0.031032320022583007, 0.03173075294494629, 0.03130361557006836, 0.03155148887634277, 0.031322111129760744, 0.03174399948120117, 0.031286272048950195, 0.030878719329833985, 0.03078656005859375, 0.030831615447998048, 0.03080703926086426, 0.03078348731994629, 0.030840831756591795, 0.030921728134155273, 0.030931968688964844, 0.030710784912109376, 0.030496768951416016, 0.030851072311401367, 0.030980096817016602, 0.03077222442626953, 0.03061759948730469, 0.030547967910766603, 0.03078758430480957, 0.030742528915405274, 0.03077939224243164, 0.03176652717590332, 0.031025152206420898, 0.03156684875488281, 0.03164262390136719, 0.031529983520507815, 0.03151769638061523, 0.03077734375, 0.031615999221801756, 0.030954496383666992, 0.0307640323638916, 0.030835712432861328, 0.030852096557617188, 0.030641151428222657, 0.030760959625244142, 0.03075993537902832, 0.03083776092529297, 0.030734336853027344, 0.03159859275817871, 0.03156172752380371, 0.031543296813964845, 0.03142860794067383, 0.03186380767822266, 0.031680511474609374, 0.031529983520507815, 0.030742528915405274, 0.030678016662597656, 0.030908416748046875, 0.03079167938232422, 0.03054080009460449, 0.03076300811767578, 0.03074355125427246, 0.030719999313354493, 0.031494144439697266, 0.031768575668334964, 0.031939584732055666, 0.031055871963500976, 0.030702592849731446, 0.030712831497192384, 0.03075174331665039, 0.031244287490844725, 0.03138252830505371, 0.031100927352905275, 0.030895103454589845, 0.03164262390136719, 0.03120025634765625, 0.0307957763671875, 0.030930944442749023, 0.03037081527709961, 0.030725120544433594, 0.030515199661254884, 0.0303503360748291, 0.03036057662963867, 0.030712831497192384, 0.030640127182006836, 0.03144396781921387, 0.03159449577331543, 0.030858240127563476, 0.03095347213745117, 0.03099545669555664, 0.031070207595825194, 0.030880767822265624, 0.030681087493896485, 0.030857215881347655, 0.030895103454589845, 0.030814207077026368, 0.030748672485351562, 0.030943231582641603, 0.03172966384887695, 0.031029247283935548, 0.030635007858276365, 0.03075379180908203, 0.0307957763671875, 0.03078451156616211, 0.03075993537902832, 0.030999551773071288, 0.030834688186645507, 0.03077734375, 0.0312412166595459, 
0.031037439346313478, 0.030691328048706053, 0.030840831756591795, 0.03078144073486328, 0.030340095520019532, 0.03078041648864746, 0.03075071907043457, 0.030769151687622072, 0.030658624649047853, 0.031665088653564454, 0.03124019241333008, 0.030723072052001952, 0.03098111915588379, 0.030801919937133788, 0.030918655395507814, 0.03139788818359375, 0.0316180477142334, 0.03091967964172363, 0.03077734375, 0.030895103454589845, 0.0307589111328125, 0.030999551773071288, 0.031268863677978515, 0.03113369560241699, 0.030612480163574218, 0.03079475212097168, 0.03075584030151367, 0.03078144073486328, 0.030810111999511718, 0.03125657653808594, 0.030884864807128907, 0.030801919937133788, 0.030757888793945313, 0.03081113624572754, 0.030737407684326173, 0.030838783264160157, 0.030785535812377928, 0.03073843193054199, 0.03081216049194336, 0.031996959686279296, 0.03448214340209961, 0.03219558334350586, 0.031441919326782225, 0.03180953598022461, 0.031212543487548827, 0.030724096298217773, 0.03187513542175293, 0.03161081504821777, 0.031677440643310545, 0.03237887954711914, 0.0317071361541748, 0.031529983520507815, 0.031659008026123044, 0.03119615936279297, 0.03077836799621582, 0.03162623977661133, 0.030870527267456056, 0.030689279556274415, 0.03082342338562012, 0.03163852882385254, 0.030664735794067383, 0.030763999938964844, 0.03122380828857422, 0.03074764823913574, 0.030628864288330077, 0.031101951599121092, 0.03080703926086426, 0.030934015274047853, 0.032655361175537106, 0.03114905548095703, 0.0307589111328125, 0.030686208724975586, 0.030785535812377928, 0.03057254409790039, 0.030879743576049806, 0.030915584564208985, 0.03077836799621582, 0.03075379180908203, 0.03127193641662598, 0.031528959274291994, 0.03157708740234375, 0.03153305625915527, 0.03227238464355469, 0.032020481109619144, 0.0318023681640625, 0.03172863960266113, 0.03168460845947266, 0.030852096557617188, 0.03102003288269043, 0.030765056610107422, 0.030888959884643553, 0.030858240127563476, 0.03121049690246582, 0.03095244789123535, 0.03078041648864746, 0.030714879989624022, 0.030874624252319335, 0.030661632537841797, 0.030827520370483398, 0.031851520538330076, 0.030874624252319335, 0.03083263969421387, 0.030911487579345705, 0.030907392501831055, 0.03075071907043457, 0.030728191375732423, 0.030932992935180665, 0.030863359451293947, 0.031064159393310548, 0.03220675277709961, 0.030869504928588868, 0.030904319763183592, 0.031497215270996096, 0.031546367645263675, 0.031267839431762694, 0.03177471923828125, 0.031764480590820314, 0.03179724884033203, 0.03139379119873047, 0.03081318473815918, 0.030292991638183595, 0.03058585548400879, 0.03122585678100586, 0.032292865753173826, 0.031036415100097657, 0.03166720008850098, 0.03174307250976562, 0.031013792037963867, 0.0315996150970459, 0.030938112258911132, 0.030737407684326173, 0.030882816314697265, 0.03077529525756836, 0.03077836799621582, 0.030726144790649414, 0.03196518325805664, 0.03168870353698731, 0.03077836799621582, 0.03080089569091797, 0.030774272918701173, 0.03063910484313965, 0.03080089569091797, 0.03152179145812988, 0.030900224685668946, 0.030638080596923828, 0.030885887145996094, 0.030631935119628906, 0.030742528915405274, 0.030678016662597656, 0.03076300811767578, 0.03062681579589844, 0.030696447372436524, 0.030692352294921874, 0.030744575500488282, 0.03057356834411621, 0.031188991546630858, 0.030838783264160157, 0.03059916877746582, 0.03056844711303711, 0.03038003158569336, 0.03079475212097168, 0.03072204780578613, 0.030742528915405274, 0.03075174331665039, 0.03159859275817871, 
0.031710208892822264, 0.03080601692199707, 0.03076300811767578, 0.030905344009399413, 0.031425535202026365, 0.03092787170410156, 0.030817279815673827, 0.030741504669189453, 0.030702592849731446, 0.03079583930969238, 0.03069228744506836, 0.030654464721679688, 0.030705663681030275, 0.030732288360595703, 0.030636064529418944, 0.030724063873291015, 0.030900224685668946, 0.031063039779663085, 0.03168972778320313, 0.03080396842956543, 0.0307906551361084, 0.031025152206420898, 0.030635007858276365, 0.030826559066772462, 0.030854080200195314, 0.030856191635131838, 0.030678016662597656, 0.030882816314697265, 0.03058483123779297, 0.030481407165527344, 0.03096780776977539, 0.031265792846679685, 0.030898176193237304, 0.030814207077026368, 0.031230976104736328, 0.030822399139404297, 0.03227033615112305, 0.03255807876586914, 0.03122380828857422, 0.031235071182250978, 0.03056947135925293, 0.03094937515258789, 0.030531583786010744, 0.030999584197998045, 0.03080291175842285, 0.030870527267456056, 0.030587903976440428, 0.030870527267456056, 0.03075379180908203, 0.031036415100097657, 0.03077734375, 0.03078246307373047, 0.03056342315673828, 0.03153817558288574, 0.030912511825561522, 0.030670848846435547, 0.030688255310058594, 0.030713855743408205, 0.030432256698608398, 0.030441471099853516, 0.030319616317749022, 0.030264320373535155, 0.03079475212097168, 0.03061350440979004, 0.030943231582641603, 0.030719999313354493, 0.030512128829956055, 0.031903743743896484, 0.031529983520507815, 0.03143680000305176, 0.03209523010253906, 0.03181670379638672, 0.03170816040039062, 0.031214591979980468, 0.030660608291625976, 0.030713855743408205, 0.03059507179260254, 0.030329856872558594, 0.033865726470947266, 0.03210444641113281, 0.032123905181884765, 0.03057663917541504, 0.030682111740112306, 0.030748672485351562, 0.030670848846435547, 0.03039232063293457, 0.03032268714904785, 0.030438400268554686, 0.030719999313354493, 0.030681087493896485, 0.030619647979736327, 0.03039743995666504, 0.030535680770874023, 0.031037439346313478, 0.03202252960205078, 0.031925247192382815, 0.032091136932373046, 0.0316866569519043, 0.03192422485351563, 0.03161395263671875, 0.031285247802734374, 0.0318525447845459, 0.030481407165527344, 0.03018239974975586, 0.03121049690246582, 0.03303936004638672, 0.03243622589111328, 0.031628288269042966, 0.031302656173706055, 0.03176755142211914, 0.03182489585876465, 0.03175526428222656, 0.03080294418334961, 0.030275583267211914, 0.030508031845092775, 0.030701568603515625, 0.031086591720581053, 0.030906368255615234, 0.032312320709228515, 0.03185971260070801, 0.03202252960205078, 0.03204710388183594, 0.031512575149536134, 0.03203583908081055, 0.03134566307067871, 0.030529535293579102, 0.031120384216308594, 0.030931968688964844, 0.030954496383666992, 0.030707712173461913, 0.030482431411743165, 0.03058380889892578, 0.030357503890991212, 0.030297088623046874, 0.03055820846557617, 0.03036057662963867, 0.030233600616455077, 0.030509056091308592, 0.030642175674438478, 0.030443519592285157, 0.030733312606811523, 0.030700544357299804, 0.03060121536254883, 0.030431232452392577, 0.030072832107543947, 0.03056844711303711, 0.030688255310058594, 0.030281728744506835, 0.03097702407836914, 0.03153305625915527, 0.031938560485839845, 0.03146854400634766, 0.031507455825805664, 0.03151769638061523, 0.031718399047851564, 0.030638080596923828, 0.03038412857055664, 0.030310400009155275, 0.030282751083374023, 0.03037593650817871, 0.03016908836364746, 0.030728191375732423, 0.03057766342163086, 0.030522367477416993, 
0.030673919677734376, 0.030324735641479493, 0.03017728042602539, 0.0304005126953125, 0.03016396713256836, 0.03081318473815918, 0.030850048065185546, 0.03078963279724121, 0.030637056350708007, 0.030661632537841797, 0.030517248153686522, 0.03039232063293457, 0.030442495346069336, 0.03021004867553711, 0.030665727615356447, 0.031057920455932617, 0.030615583419799804, 0.030702560424804688, 0.03077529525756836, 0.030709760665893555, 0.030476287841796876, 0.03268198394775391, 0.03333222579956055, 0.031373311996459964, 0.03121766471862793, 0.030713855743408205, 0.03079270362854004, 0.030699520111083983, 0.030682111740112306, 0.030636064529418944, 0.030838783264160157, 0.030916576385498048, 0.03073843193054199, 0.030690303802490236, 0.03081523132324219, 0.030732288360595703, 0.030687231063842774, 0.03054800033569336, 0.030766048431396485, 0.030447616577148437, 0.03036467170715332, 0.03057561683654785, 0.03039232063293457, 0.03135385513305664, 0.03210649490356445, 0.03158118438720703, 0.03095961570739746, 0.03055820846557617, 0.030697471618652345, 0.030748672485351562, 0.030284799575805665, 0.030855167388916017, 0.030872575759887694, 0.030453760147094725, 0.03191705513000488, 0.03197644805908203, 0.03264921569824219, 0.03204403305053711, 0.03173990440368652, 0.031932416915893554, 0.03184435272216797, 0.032031742095947266, 0.03203379058837891, 0.03190169525146484, 0.032054271697998044, 0.031925247192382815, 0.031453184127807614, 0.031033344268798828, 0.030805023193359374, 0.03058787155151367, 0.03074355125427246, 0.030756864547729492, 0.030717952728271485, 0.030719999313354493, 0.031121408462524414, 0.03079782485961914, 0.030783519744873047, 0.03029910469055176]",tokens/s,32.2284397427455,,,1,64,1,,, -4bit-awq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,Qwen/Qwen2-beta-14B,Qwen/Qwen2-beta-14B,cuda,0,42,,,True,,,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,,qwen2,MB,8199.094272,12374.769664,0.0,11737.759744,11171.24352,s,1,12.954294921875,12.954294921875,0.0,12.954294921875,12.954294921875,12.954294921875,12.954294921875,[12.954294921875],,kWh,7.216926597291275e-05,3.953256066920683e-05,0.00014525178286800555,0.00025695360951012513,,MB,3956.9408,12393.644032,0.0,11739.856896,10924.361728,s,10,24.236986083984373,2.4236986083984373,0.0005378017027911047,2.4235673828125,2.424571264648437,2.424573352050781,2.424575021972656,"[2.424111083984375, 2.4236689453125, 2.423406982421875, 2.423177490234375, 2.424575439453125, 2.42457080078125, 2.423823486328125, 2.4234658203125, 2.422966796875, 2.42321923828125]",tokens/s,105.62369393328281,kWh,2.8617388600000154e-05,1.568279053278671e-05,0.00017044660857939986,0.00021474678771218675,tokens/kWh,1192101.6501681164,MB,3960.99584,12395.741184,0.0,11741.954048,10924.364288,s,10,27.019614013671873,2.7019614013671878,0.01960468420770648,2.703920654296875,2.721824877929688,2.7284668823242186,2.733780485839844,"[2.73510888671875, 2.720348876953125, 2.706371337890625, 2.685182861328125, 2.674461181640625, 2.67181396484375, 2.69395556640625, 
2.71297802734375, 2.701469970703125, 2.71792333984375]",tokens/s,23.31639525572871,kWh,3.1655503149443965e-05,1.735010100053609e-05,0.0001103624216232016,0.0001593680257731816,tokens/kWh,395311.4164171419,,s,630,27.01753133392333,0.04288497037130688,0.0005705775376014383,0.043006464004516604,0.043363327026367186,0.04354160614013672,0.0443741997909546,"[0.04311654281616211, 0.043278335571289066, 0.04233216094970703, 0.043071487426757815, 0.043769855499267575, 0.043363327026367186, 0.048228351593017575, 0.04867686462402344, 0.04333158493041992, 0.04333055877685547, 0.04326604843139648, 0.043216896057128903, 0.04334694290161133, 0.04333567810058594, 0.042382335662841795, 0.043428863525390625, 0.042998783111572264, 0.043170814514160154, 0.04322099304199219, 0.04331417465209961, 0.04333977508544922, 0.043286529541015625, 0.04325580978393555, 0.043340801239013675, 0.04318003082275391, 0.043099136352539064, 0.043049983978271485, 0.04318515014648437, 0.04375142288208008, 0.04311552047729492, 0.04348108673095703, 0.04329062271118164, 0.04311552047729492, 0.043245567321777346, 0.04297011184692383, 0.04360908889770508, 0.04326707077026367, 0.04322099304199219, 0.04347084808349609, 0.043218944549560545, 0.04305920028686523, 0.04316876983642578, 0.043442176818847655, 0.043289600372314455, 0.04310630416870117, 0.043396095275878906, 0.04342169570922851, 0.04315750503540039, 0.0427960319519043, 0.04331315231323242, 0.04244582366943359, 0.043325439453125, 0.04355072021484375, 0.0436049919128418, 0.04314828872680664, 0.042359809875488284, 0.04299574279785156, 0.04356707382202148, 0.04343807983398437, 0.04347084808349609, 0.04325273513793945, 0.04365107345581055, 0.04425932693481445, 0.045699073791503904, 0.043461631774902344, 0.04335411071777344, 0.04338175964355469, 0.04375961685180664, 0.04335411071777344, 0.043225086212158204, 0.04371865463256836, 0.04326502227783203, 0.04327731323242188, 0.043278335571289066, 0.04324761581420898, 0.04332953643798828, 0.0433807373046875, 0.04324147033691406, 0.04299264144897461, 0.04318207931518555, 0.043409408569335936, 0.043358207702636715, 0.043199489593505856, 0.04234239959716797, 0.04249190521240234, 0.04315238571166992, 0.04358041763305664, 0.04319232177734375, 0.04322918319702149, 0.04324966430664062, 0.043393024444580076, 0.04351385498046875, 0.04381491088867188, 0.04384972763061523, 0.042627071380615236, 0.04273356628417969, 0.043410430908203124, 0.04253900909423828, 0.043363327026367186, 0.04311552047729492, 0.04318822479248047, 0.04317900848388672, 0.042916862487792966, 0.043066368103027344, 0.04354048156738281, 0.04326502227783203, 0.04311347198486328, 0.04334694290161133, 0.04335615921020508, 0.043087871551513675, 0.04315545654296875, 0.043154430389404294, 0.04208025741577148, 0.04303974533081055, 0.043440128326416014, 0.042447872161865234, 0.042584064483642575, 0.04227686309814453, 0.042240001678466796, 0.04222873687744141, 0.04221132659912109, 0.04389888000488281, 0.04326092910766602, 0.04311964797973633, 0.043117534637451174, 0.043109375, 0.04297830581665039, 0.0449617919921875, 0.04338790512084961, 0.042775550842285154, 0.04314931106567383, 0.04319641494750977, 0.042592254638671875, 0.04226867294311523, 0.04295372772216797, 0.043259902954101564, 0.04260147094726562, 0.043099136352539064, 0.04311449432373047, 0.04314828872680664, 0.043291648864746096, 0.04365414428710938, 0.04316569519042969, 0.04237619018554688, 0.042431488037109374, 0.04233011245727539, 0.04234444808959961, 0.04207001495361328, 0.04238848114013672, 0.04235673522949219, 
0.042280960083007815, 0.04295065689086914, 0.042881023406982424, 0.04239769744873047, 0.04333977508544922, 0.04320460891723633, 0.04295987319946289, 0.043123710632324216, 0.043112449645996094, 0.042774528503417966, 0.042349567413330076, 0.042559486389160156, 0.04313087844848633, 0.04326604843139648, 0.043081729888916016, 0.04290867233276367, 0.042916862487792966, 0.043101184844970705, 0.043184127807617184, 0.04343091201782227, 0.04315955352783203, 0.04318207931518555, 0.04300492858886719, 0.042943489074707034, 0.043099136352539064, 0.04312575912475586, 0.04350054550170898, 0.04305612945556641, 0.043055103302001956, 0.04313087844848633, 0.043041793823242185, 0.04309708786010742, 0.04307763290405273, 0.04307660675048828, 0.04332953643798828, 0.04305817413330078, 0.04244582366943359, 0.04256358337402344, 0.04236185455322266, 0.042600448608398435, 0.042433536529541016, 0.042401790618896484, 0.042246143341064454, 0.04226047897338867, 0.043374591827392575, 0.04303769683837891, 0.04228300857543945, 0.04240281677246094, 0.04237721633911133, 0.04224716949462891, 0.04288409423828125, 0.043049983978271485, 0.04306227111816406, 0.04307046508789063, 0.04307455825805664, 0.0429752311706543, 0.04326502227783203, 0.04310630416870117, 0.04317593765258789, 0.043053054809570314, 0.04262604904174805, 0.044028926849365234, 0.04354969787597656, 0.042947582244873043, 0.042417152404785156, 0.042369022369384765, 0.04239974212646484, 0.0422553596496582, 0.04247040176391602, 0.042256385803222656, 0.04245503997802735, 0.04232601547241211, 0.04264550399780274, 0.04253696060180664, 0.042355712890625, 0.04232191848754883, 0.04229529571533203, 0.04254412841796875, 0.04234035110473633, 0.04252569580078125, 0.04241408157348633, 0.042193920135498046, 0.04231782531738281, 0.04238438415527344, 0.04244377517700195, 0.04232089614868164, 0.042365951538085936, 0.04229939270019531, 0.04248064041137695, 0.042262527465820314, 0.04228915023803711, 0.04477132797241211, 0.043417598724365236, 0.04230451202392578, 0.042444801330566405, 0.0423004150390625, 0.04248166275024414, 0.04234649658203125, 0.04236185455322266, 0.04218675231933594, 0.04223897552490234, 0.04230963134765625, 0.04256563186645508, 0.042294273376464846, 0.042418174743652344, 0.0424192008972168, 0.042369022369384765, 0.04242227172851563, 0.04230758285522461, 0.043038719177246096, 0.0429486083984375, 0.042842113494873046, 0.04235366439819336, 0.042417152404785156, 0.042193920135498046, 0.042249214172363284, 0.04269875335693359, 0.042469375610351565, 0.042333183288574217, 0.0423004150390625, 0.042313728332519535, 0.04246015930175781, 0.04230758285522461, 0.04226559829711914, 0.04306739044189453, 0.04267724609375, 0.042365951538085936, 0.04245913696289062, 0.04235161590576172, 0.042401790618896484, 0.04235161590576172, 0.04229529571533203, 0.042393600463867184, 0.04289228820800781, 0.042619903564453124, 0.04239155197143555, 0.04246220779418945, 0.04271206283569336, 0.042466304779052735, 0.04230758285522461, 0.042600448608398435, 0.042403839111328126, 0.04242124938964844, 0.04229836654663086, 0.04230963134765625, 0.04231475067138672, 0.04230553436279297, 0.042425342559814457, 0.04235776138305664, 0.0425615348815918, 0.04231270217895508, 0.042261505126953126, 0.042369022369384765, 0.042395648956298826, 0.04220723342895508, 0.042175487518310545, 0.04228812789916992, 0.043254783630371094, 0.04324870300292969, 0.04222560119628906, 0.04216320037841797, 0.04257791900634766, 0.04217958450317383, 0.04217139053344727, 0.042218494415283206, 0.04257894515991211, 0.04224512100219727, 
0.04221747207641602, 0.04234035110473633, 0.04220415878295898, 0.042434558868408204, 0.04235059356689453, 0.04262604904174805, 0.042933246612548825, 0.04284108734130859, 0.042365951538085936, 0.042313728332519535, 0.042254337310791014, 0.04226969528198242, 0.04237107086181641, 0.042349567413330076, 0.042157054901123044, 0.04256972885131836, 0.0433172492980957, 0.043096065521240234, 0.04237004852294922, 0.04237209701538086, 0.04221440124511719, 0.04226969528198242, 0.0425533447265625, 0.04220927810668945, 0.042237953186035154, 0.04233932876586914, 0.042234878540039066, 0.042262527465820314, 0.042065921783447265, 0.04245196914672852, 0.04234035110473633, 0.04273459243774414, 0.04233932876586914, 0.04237721633911133, 0.04228300857543945, 0.042221569061279295, 0.04218982315063476, 0.04232499313354492, 0.042379264831542966, 0.0427407341003418, 0.04233830261230469, 0.04250726318359375, 0.042313728332519535, 0.04233216094970703, 0.042347583770751956, 0.042378177642822264, 0.042251262664794925, 0.04224204635620117, 0.04230656051635742, 0.04229324722290039, 0.042249214172363284, 0.04229836654663086, 0.04233011245727539, 0.04312985610961914, 0.04245094299316406, 0.0425687026977539, 0.04239462280273437, 0.042602497100830077, 0.042365951538085936, 0.04234137725830078, 0.04221542358398438, 0.042674175262451174, 0.04268032073974609, 0.04238950347900391, 0.04313600158691406, 0.04316876983642578, 0.043154430389404294, 0.04303257751464844, 0.04312268829345703, 0.04327116775512695, 0.043224063873291016, 0.04283903884887695, 0.04311859130859375, 0.043025409698486325, 0.042331134796142575, 0.042635265350341796, 0.04312063980102539, 0.043070526123046876, 0.04289734268188477, 0.04231270217895508, 0.04269363021850586, 0.04233523178100586, 0.04213248062133789, 0.04232806396484375, 0.04317900848388672, 0.04229632186889649, 0.04227686309814453, 0.042774528503417966, 0.0434442253112793, 0.0433172492980957, 0.04313907241821289, 0.04298342514038086, 0.043053054809570314, 0.04229529571533203, 0.042275840759277344, 0.04326399993896484, 0.04298342514038086, 0.043055103302001956, 0.04308377456665039, 0.04299059295654297, 0.04300595092773438, 0.04415283203125, 0.04354252624511719, 0.04230246353149414, 0.04304383850097656, 0.04229119873046875, 0.04271308898925781, 0.04312575912475586, 0.04251955032348633, 0.042249214172363284, 0.042218494415283206, 0.04248678588867188, 0.04310015869140625, 0.042971134185791016, 0.04218777465820313, 0.042237953186035154, 0.0420136947631836, 0.04219801712036133, 0.042218494415283206, 0.04225843048095703, 0.0421734390258789, 0.042213375091552735, 0.04237209701538086, 0.04304383850097656, 0.042676223754882815, 0.04355379104614258, 0.04306534576416016, 0.043117568969726565, 0.04222259140014648, 0.043128833770751954, 0.04235468673706055, 0.04234649658203125, 0.043099136352539064, 0.04291993713378906, 0.042947582244873043, 0.0429567985534668, 0.04481228637695313, 0.043463680267333986, 0.04365926361083984, 0.04313702392578125, 0.043104255676269534, 0.04310835266113281, 0.042395648956298826, 0.04255539321899414, 0.043052032470703126, 0.043224063873291016, 0.043172863006591795, 0.04310220718383789, 0.043235328674316405, 0.04308582305908203, 0.04293529510498047, 0.043099136352539064, 0.0430489616394043, 0.04307660675048828, 0.04298956680297852, 0.04312473678588867, 0.043003902435302735, 0.04326911926269531, 0.043222015380859374, 0.04285440063476562, 0.043146240234375, 0.04312268829345703, 0.0430643196105957, 0.042444801330566405, 0.04233830261230469, 0.042313728332519535, 0.04359372711181641, 
0.04313087844848633, 0.04310732650756836, 0.04309299087524414, 0.0429639663696289, 0.04311654281616211, 0.04308889770507812, 0.04305612945556641, 0.04317388916015625, 0.04319232177734375, 0.04318822479248047, 0.043020286560058595, 0.04315135955810547, 0.0431278076171875, 0.043049983978271485, 0.04336435317993164, 0.04272127914428711, 0.04343910217285156, 0.043448318481445314, 0.04312268829345703, 0.04307046508789063, 0.04256256103515625, 0.0421847038269043, 0.04225024032592774, 0.04228403091430664, 0.04224204635620117, 0.0422369270324707, 0.042275840759277344, 0.04232908630371094, 0.04224204635620117, 0.042893310546875, 0.04298854446411133, 0.04282777786254883, 0.04311449432373047, 0.043033599853515625, 0.04300595092773438, 0.042998783111572264, 0.04298035049438476, 0.043015167236328124, 0.04231782531738281, 0.04248371124267578, 0.04228812789916992, 0.042843135833740234, 0.04305920028686523, 0.04300697708129883, 0.04302336120605469, 0.04298649597167969, 0.04298854446411133, 0.04301824188232422, 0.0429752311706543, 0.04307763290405273, 0.04225228881835937, 0.042412033081054686, 0.042482688903808595, 0.042777599334716795, 0.04300288009643555, 0.04304793548583984, 0.043046913146972655, 0.043020286560058595, 0.04302444839477539, 0.04306732940673828, 0.04301926422119141, 0.04310323333740235, 0.043078720092773436, 0.04328134536743164, 0.04318003082275391, 0.04308582305908203, 0.04309811019897461, 0.04344627380371094, 0.04354355239868164, 0.04290867233276367, 0.043128833770751954, 0.04312985610961914, 0.0431646728515625, 0.043166721343994144, 0.043238399505615234, 0.04325068664550781, 0.04303974533081055, 0.04299673461914062, 0.042417152404785156, 0.04277350234985351, 0.04312575912475586, 0.043156478881835936, 0.043254783630371094, 0.042638336181640625, 0.04244172668457031, 0.042613761901855465, 0.04303155136108398, 0.04310220718383789, 0.04316159820556641, 0.04397772979736328, 0.043757568359375, 0.04319027328491211, 0.04329574584960937, 0.04317695999145508, 0.043215873718261716, 0.043044864654541014, 0.043053054809570314, 0.042759201049804685, 0.04320252990722656, 0.043015167236328124, 0.043121662139892575, 0.043033599853515625, 0.04314316940307617, 0.04283391952514649, 0.0431278076171875, 0.04323942565917969, 0.04337254333496094, 0.043146240234375, 0.04312678527832031, 0.043053054809570314, 0.04305920028686523, 0.04299468612670899, 0.04309503936767578, 0.04327529525756836, 0.0433397445678711, 0.044421119689941405, 0.04342784118652344, 0.043066368103027344, 0.04316569519042969, 0.04318207931518555, 0.043087871551513675, 0.043112449645996094, 0.04326707077026367, 0.0430909423828125, 0.0431278076171875, 0.043038719177246096, 0.04307763290405273, 0.04292300796508789, 0.043112449645996094, 0.04283596801757812, 0.04310835266113281, 0.04307763290405273, 0.04313600158691406, 0.04303769683837891, 0.04313907241821289, 0.04306639862060547, 0.043296737670898436, 0.04307251358032226, 0.043232257843017576, 0.043199489593505856, 0.043225086212158204, 0.04315852737426758, 0.043284481048583984, 0.043144256591796874, 0.04296799850463867, 0.04299468612670899]",tokens/s,23.318192628834638,,,1,64,1,,, 
-4bit-awq-exllama-v1-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,a,a,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 304, in hf_raise_for_status - response.raise_for_status() - File ""/usr/local/lib/python3.10/dist-packages/requests/models.py"", line 1024, in raise_for_status - raise HTTPError(http_error_msg, response=self) -requests.exceptions.HTTPError: 404 Client Error: Not Found for url: https://huggingface.co/a/resolve/main/config.json - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1221, in hf_hub_download - return _hf_hub_download_to_cache_dir( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1325, in _hf_hub_download_to_cache_dir - _raise_on_head_call_error(head_call_error, force_download, local_files_only) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1823, in _raise_on_head_call_error - raise head_call_error - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1722, in _get_metadata_or_catch_error - metadata = get_hf_file_metadata(url=url, proxies=proxies, timeout=etag_timeout, headers=headers) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1645, in get_hf_file_metadata - r = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 372, in _request_wrapper - response = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 396, in _request_wrapper - hf_raise_for_status(response) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status - raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 
Client Error. (Request ID: Root=1-6694918d-64cec4f566f3d39b311c3360;ba53cb0d-2431-423c-a60f-3d6c2c367cea) - -Repository Not Found for url: https://huggingface.co/a/resolve/main/config.json. -Please make sure you specified the correct `repo_id` and `repo_type`. -If you are trying to access a private or gated repo, make sure you are authenticated. - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 425, in cached_file - raise EnvironmentError( -OSError: a is not a local folder and is not a valid model identifier listed on 'https://huggingface.co/models' -If this is a private repository, make sure to pass a token having permission to this repo either by logging in with `huggingface-cli login` or by passing `token=` - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,1,64,1,,, -4bit-awq-exllama-v1-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,-,-,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", 
line 402, in cached_file - resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 106, in _inner_fn - validate_repo_id(arg_value) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 160, in validate_repo_id - raise HFValidationError( -huggingface_hub.errors.HFValidationError: Repo id must use alphanumeric chars or '-', '_', '.', '--' and '..' are forbidden, '-' and '.' cannot start or end the name, max length is 96: '-'. - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 466, in cached_file - raise EnvironmentError( -OSError: Incorrect path_or_model_id: '-'. Please provide either the path to a local folder or the repo_id of a model on the Hub. 
- -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,1,64,1,,, -4bit-awq-exllama-v1-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,facebook/opt-350m,facebook/opt-350m,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - self.load_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3907, in from_pretrained - hf_quantizer.postprocess_model(model) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/base.py"", line 195, in postprocess_model - return self._process_model_after_weight_loading(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/quantizer_awq.py"", line 107, in _process_model_after_weight_loading - model = post_init_awq_exllama_modules(model, self.quantization_config.exllama_config) - File ""/usr/local/lib/python3.10/dist-packages/transformers/integrations/awq.py"", line 462, in post_init_awq_exllama_modules - model = exllama_post_init(model) - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllama.py"", line 144, in exllama_post_init - submodule.post_init() - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllama.py"", line 77, in post_init - self.q4 = exl_ext.make_q4( -NameError: name 'exl_ext' is not defined - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,1,64,1,,, 
-4bit-awq-exllama-v1-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,m,m,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 304, in hf_raise_for_status - response.raise_for_status() - File ""/usr/local/lib/python3.10/dist-packages/requests/models.py"", line 1024, in raise_for_status - raise HTTPError(http_error_msg, response=self) -requests.exceptions.HTTPError: 404 Client Error: Not Found for url: https://huggingface.co/m/resolve/main/config.json - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1221, in hf_hub_download - return _hf_hub_download_to_cache_dir( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1325, in _hf_hub_download_to_cache_dir - _raise_on_head_call_error(head_call_error, force_download, local_files_only) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1823, in _raise_on_head_call_error - raise head_call_error - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1722, in _get_metadata_or_catch_error - metadata = get_hf_file_metadata(url=url, proxies=proxies, timeout=etag_timeout, headers=headers) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1645, in get_hf_file_metadata - r = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 372, in _request_wrapper - response = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 396, in _request_wrapper - hf_raise_for_status(response) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status - raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 
Client Error. (Request ID: Root=1-66948c6d-6b9ca4037afd9e181c826e56;07f6a07c-afc9-405b-96b7-10926cccde8d) - -Repository Not Found for url: https://huggingface.co/m/resolve/main/config.json. -Please make sure you specified the correct `repo_id` and `repo_type`. -If you are trying to access a private or gated repo, make sure you are authenticated. - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 425, in cached_file - raise EnvironmentError( -OSError: m is not a local folder and is not a valid model identifier listed on 'https://huggingface.co/models' -If this is a private repository, make sure to pass a token having permission to this repo either by logging in with `huggingface-cli login` or by passing `token=` - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,1,64,1,,, -4bit-awq-exllama-v1-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,stabilityai/stablelm-base-alpha-7b,stabilityai/stablelm-base-alpha-7b,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File 
""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - self.load_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3907, in from_pretrained - hf_quantizer.postprocess_model(model) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/base.py"", line 195, in postprocess_model - return self._process_model_after_weight_loading(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/quantizer_awq.py"", line 107, in _process_model_after_weight_loading - model = post_init_awq_exllama_modules(model, self.quantization_config.exllama_config) - File ""/usr/local/lib/python3.10/dist-packages/transformers/integrations/awq.py"", line 462, in post_init_awq_exllama_modules - model = exllama_post_init(model) - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllama.py"", line 144, in exllama_post_init - submodule.post_init() - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllama.py"", line 77, in post_init - self.q4 = exl_ext.make_q4( -NameError: name 'exl_ext' is not defined - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,1,64,1,,, -4bit-awq-exllama-v1-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,M,M,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 304, in hf_raise_for_status - response.raise_for_status() - File ""/usr/local/lib/python3.10/dist-packages/requests/models.py"", line 1024, in raise_for_status - raise HTTPError(http_error_msg, response=self) -requests.exceptions.HTTPError: 404 Client Error: Not Found for url: 
https://huggingface.co/M/resolve/main/config.json - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1221, in hf_hub_download - return _hf_hub_download_to_cache_dir( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1325, in _hf_hub_download_to_cache_dir - _raise_on_head_call_error(head_call_error, force_download, local_files_only) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1823, in _raise_on_head_call_error - raise head_call_error - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1722, in _get_metadata_or_catch_error - metadata = get_hf_file_metadata(url=url, proxies=proxies, timeout=etag_timeout, headers=headers) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1645, in get_hf_file_metadata - r = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 372, in _request_wrapper - response = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 396, in _request_wrapper - hf_raise_for_status(response) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status - raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-66948fe6-7c509360614f81c367915a07;37b8a00d-d972-4899-8226-d021077ad739) - -Repository Not Found for url: https://huggingface.co/M/resolve/main/config.json. -Please make sure you specified the correct `repo_id` and `repo_type`. -If you are trying to access a private or gated repo, make sure you are authenticated. 
- -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 425, in cached_file - raise EnvironmentError( -OSError: M is not a local folder and is not a valid model identifier listed on 'https://huggingface.co/models' -If this is a private repository, make sure to pass a token having permission to this repo either by logging in with `huggingface-cli login` or by passing `token=` - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,1,64,1,,, -4bit-awq-exllama-v1-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,8,8,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 304, in hf_raise_for_status - response.raise_for_status() - File ""/usr/local/lib/python3.10/dist-packages/requests/models.py"", line 1024, in raise_for_status - raise HTTPError(http_error_msg, response=self) -requests.exceptions.HTTPError: 404 Client Error: Not Found for url: https://huggingface.co/8/resolve/main/config.json - -The 
above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1221, in hf_hub_download - return _hf_hub_download_to_cache_dir( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1325, in _hf_hub_download_to_cache_dir - _raise_on_head_call_error(head_call_error, force_download, local_files_only) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1823, in _raise_on_head_call_error - raise head_call_error - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1722, in _get_metadata_or_catch_error - metadata = get_hf_file_metadata(url=url, proxies=proxies, timeout=etag_timeout, headers=headers) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1645, in get_hf_file_metadata - r = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 372, in _request_wrapper - response = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 396, in _request_wrapper - hf_raise_for_status(response) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status - raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-66949295-5e3b7acd50cc194f42a607fa;c240b588-f5a2-42c2-be4a-a16f526ecd6c) - -Repository Not Found for url: https://huggingface.co/8/resolve/main/config.json. -Please make sure you specified the correct `repo_id` and `repo_type`. -If you are trying to access a private or gated repo, make sure you are authenticated. 
- -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 425, in cached_file - raise EnvironmentError( -OSError: 8 is not a local folder and is not a valid model identifier listed on 'https://huggingface.co/models' -If this is a private repository, make sure to pass a token having permission to this repo either by logging in with `huggingface-cli login` or by passing `token=` - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,1,64,1,,, -4bit-awq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,EleutherAI/gpt-neox-20b,EleutherAI/gpt-neox-20b,cuda,0,42,,,True,,,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,gpt_neox,MB,11149.545472,14821.097472,0.0,14191.427584,13325.783552,s,1,13.8892587890625,13.8892587890625,0.0,13.8892587890625,13.8892587890625,13.8892587890625,13.8892587890625,[13.8892587890625],,kWh,8.397257186181012e-05,4.600824899498132e-05,0.00018259486829796945,0.00031257568915476087,,MB,2114.793472,14837.874688,0.0,14191.427584,12582.029312,s,10,36.300349121093745,3.6300349121093753,0.0001321088213150365,3.6300118408203126,3.6301526611328123,3.6302617553710936,3.6303490307617188,"[3.630370849609375, 3.6300625, 3.629924560546875, 3.62994970703125, 3.630008056640625, 3.629977294921875, 3.629866943359375, 3.630015625, 3.630045166015625, 
3.63012841796875]",tokens/s,70.52273771417836,kWh,4.2868873101041664e-05,2.3494089326469723e-05,0.0002681586867489982,0.00033452164917650964,tokens/kWh,765271.8460231019,MB,2131.27168,14837.874688,0.0,14191.427584,12935.96672,s,10,23.218867431640625,2.3218867431640624,0.023642694088096383,2.3171993408203124,2.340269873046875,2.3610004150390624,2.3775848486328126,"[2.290661376953125, 2.310260986328125, 2.30163232421875, 2.3221298828125, 2.322947021484375, 2.33083935546875, 2.312268798828125, 2.310733642578125, 2.3356630859375, 2.38173095703125]",tokens/s,27.1331063780265,kWh,2.7107192667361587e-05,1.4857385920337638e-05,0.00012828971374280186,0.000170254292330501,tokens/kWh,370034.72357515164,,s,630,23.205080074310285,0.036833460435413176,0.0011251348242302481,0.036968446731567385,0.038104267883300784,0.03834593257904053,0.03915692134857178,"[0.0366110725402832, 0.03686195373535156, 0.03834163284301758, 0.03805184173583984, 0.03812351989746094, 0.03785932922363281, 0.037749759674072264, 0.038778881072998046, 0.038160385131835936, 0.03797708892822266, 0.0357314567565918, 0.03562803268432617, 0.03542323303222656, 0.035544063568115236, 0.03563827133178711, 0.03563520050048828, 0.038796287536621094, 0.03807743835449219, 0.03774259185791016, 0.03542937469482422, 0.03548364639282227, 0.035694591522216795, 0.035593215942382815, 0.035514366149902346, 0.03557580947875977, 0.03556556701660156, 0.035416065216064455, 0.035585025787353515, 0.03552870559692383, 0.035560447692871096, 0.03553177642822265, 0.035745792388916016, 0.03576627349853516, 0.03557068634033203, 0.03559936141967773, 0.037766143798828124, 0.0395335693359375, 0.03795455932617187, 0.03574476623535156, 0.035645439147949216, 0.035664894104003905, 0.03570380783081055, 0.035661823272705076, 0.03585331344604492, 0.035730430603027344, 0.03570175933837891, 0.03572326278686523, 0.03565363311767578, 0.035697662353515625, 0.03574169540405273, 0.035810302734375, 0.037571582794189456, 0.03781017684936523, 0.035765247344970705, 0.035781631469726564, 0.03583795166015625, 0.03582259368896484, 0.03584511947631836, 0.03592396926879883, 0.03582566452026367, 0.035860481262207033, 0.03589324951171875, 0.03586355209350586, 0.036531200408935545, 0.03622604751586914, 0.0376258544921875, 0.037720062255859374, 0.03765862274169922, 0.03559526443481445, 0.037351425170898435, 0.03777433776855469, 0.03756032180786133, 0.03832524871826172, 0.037795841217041014, 0.038255615234375, 0.037746688842773435, 0.03780710220336914, 0.03781324768066406, 0.03834777450561523, 0.03813683319091797, 0.037736446380615234, 0.03766681671142578, 0.035560447692871096, 0.0354969596862793, 0.035716094970703126, 0.03550310516357422, 0.035573760986328126, 0.03554611206054688, 0.0355860481262207, 0.035552257537841796, 0.03556147384643555, 0.03570687866210937, 0.035520511627197264, 0.03553996658325195, 0.03563622283935547, 0.03558707046508789, 0.0386058235168457, 0.03790233612060547, 0.0378081283569336, 0.03610726547241211, 0.037874687194824216, 0.03814604949951172, 0.037716991424560545, 0.037760032653808596, 0.03563721466064453, 0.035675136566162106, 0.036124671936035156, 0.0359628791809082, 0.03568947219848633, 0.03572531127929687, 0.03568025588989258, 0.03571916961669922, 0.03577241516113281, 0.035659774780273434, 0.03575091171264649, 0.03580108642578125, 0.035732479095458985, 0.03579084777832031, 0.03582566452026367, 0.03704627227783203, 0.03761356735229492, 0.03772415924072266, 0.037735424041748046, 0.03754598236083984, 0.03589017486572266, 0.03583590316772461, 0.03662335968017578, 
0.03594956970214844, 0.03576319885253906, 0.03578879928588867, 0.03581849670410156, 0.03567718505859375, 0.03553279876708984, 0.03557785415649414, 0.03562700653076172, 0.035501056671142575, 0.0354785270690918, 0.03542118453979492, 0.036327423095703124, 0.03728076934814453, 0.03769753646850586, 0.03774156951904297, 0.03559833526611328, 0.03565465545654297, 0.03558911895751953, 0.03559936141967773, 0.035576831817626955, 0.035622913360595705, 0.03575091171264649, 0.036337665557861325, 0.03568742370605469, 0.035681278228759765, 0.035563518524169925, 0.03570483016967774, 0.03561369705200195, 0.035620864868164064, 0.03564134216308594, 0.03580313491821289, 0.03568742370605469, 0.03556249618530274, 0.03564646530151367, 0.03821875381469726, 0.0398131217956543, 0.03762176132202148, 0.03765862274169922, 0.0381030387878418, 0.03805491256713867, 0.038073345184326174, 0.03775385665893555, 0.03775692749023438, 0.03766988754272461, 0.03772723388671875, 0.03574272155761719, 0.035806209564208984, 0.03704934310913086, 0.03911475372314453, 0.03814604949951172, 0.0385689582824707, 0.037784576416015625, 0.037623809814453124, 0.037617664337158206, 0.037623809814453124, 0.03576115036010742, 0.03584307098388672, 0.03589734268188476, 0.03583692932128906, 0.03586457443237305, 0.035899391174316404, 0.03583488082885742, 0.036552703857421875, 0.037400577545166014, 0.03586969757080078, 0.03541196823120117, 0.0355788803100586, 0.03738828659057617, 0.035961856842041014, 0.035547134399414065, 0.035353599548339845, 0.03559628677368164, 0.03567001724243164, 0.03559628677368164, 0.035542015075683595, 0.03559731292724609, 0.035694591522216795, 0.0354252815246582, 0.03560550308227539, 0.03547340774536133, 0.03561164855957031, 0.03551334381103516, 0.03544473648071289, 0.03560857772827149, 0.03678310394287109, 0.038623233795166016, 0.0379607048034668, 0.03783987045288086, 0.037763072967529294, 0.03754905700683594, 0.03773747253417969, 0.037920768737792966, 0.03623833465576172, 0.03566694259643555, 0.03678822326660156, 0.03782860946655273, 0.0377446403503418, 0.035563518524169925, 0.03607961654663086, 0.037733375549316404, 0.03771187210083008, 0.03760639953613281, 0.03567721557617187, 0.03827606582641602, 0.037768192291259765, 0.037814273834228515, 0.037628929138183595, 0.03784499359130859, 0.03764019012451172, 0.03764121627807617, 0.03755724716186523, 0.03762483215332031, 0.037459968566894535, 0.037634048461914066, 0.03857100677490234, 0.03772415924072266, 0.037563392639160156, 0.03752345657348633, 0.03772313690185547, 0.036141056060791016, 0.03612979125976563, 0.03672883224487305, 0.037743614196777346, 0.03774566268920899, 0.03788390350341797, 0.036689918518066404, 0.03573964691162109, 0.03563008117675781, 0.0356014404296875, 0.0356341438293457, 0.0356577262878418, 0.03624652862548828, 0.0381317138671875, 0.036659198760986327, 0.03616259384155274, 0.0355552978515625, 0.03723468780517578, 0.037773311614990236, 0.03786751937866211, 0.037835777282714846, 0.03807231903076172, 0.03782963180541992, 0.037784576416015625, 0.035743743896484374, 0.0358809585571289, 0.035697662353515625, 0.03586867141723633, 0.035829761505126956, 0.035914752960205076, 0.035757057189941405, 0.03790643310546875, 0.037644287109375, 0.037779457092285154, 0.037733375549316404, 0.037759998321533206, 0.035573760986328126, 0.03559731292724609, 0.03565875244140625, 0.03603353500366211, 0.03792281723022461, 0.0363581428527832, 0.03829043197631836, 0.037921791076660154, 0.037667839050292966, 0.03795251083374023, 0.0377077751159668, 0.037754878997802735, 
0.037797889709472655, 0.03771187210083008, 0.037539840698242184, 0.03768627166748047, 0.03770163345336914, 0.037730304718017575, 0.03764633560180664, 0.035775489807128906, 0.03569664001464844, 0.03575091171264649, 0.03581644821166992, 0.035781631469726564, 0.035806209564208984, 0.0358656005859375, 0.03581644821166992, 0.03618099212646484, 0.03765248107910156, 0.036967422485351564, 0.03843174362182617, 0.0381736946105957, 0.037872638702392575, 0.03840204620361328, 0.03786240005493164, 0.037730304718017575, 0.037705726623535156, 0.037759998321533206, 0.03761151885986328, 0.03781222534179687, 0.03793100738525391, 0.037789695739746096, 0.03773440170288086, 0.03799859237670898, 0.03805286407470703, 0.03766886520385742, 0.03776204681396484, 0.038370304107666016, 0.038042625427246096, 0.037975040435791016, 0.037754878997802735, 0.03790233612060547, 0.037820415496826174, 0.03980799865722656, 0.03820646286010742, 0.03784089660644531, 0.037615615844726565, 0.03772723388671875, 0.035536895751953124, 0.035639297485351565, 0.03553279876708984, 0.035622913360595705, 0.035542015075683595, 0.03557068634033203, 0.03558911895751953, 0.035661823272705076, 0.03556966400146484, 0.035730430603027344, 0.035560447692871096, 0.03560755157470703, 0.035620864868164064, 0.03561983871459961, 0.03562700653076172, 0.03767910385131836, 0.03763302230834961, 0.03858534240722656, 0.03890073776245117, 0.03811840057373047, 0.03772723388671875, 0.03759513473510742, 0.03563520050048828, 0.035705856323242184, 0.035762176513671876, 0.035729408264160156, 0.035757057189941405, 0.03583488082885742, 0.03575296020507813, 0.03577958297729492, 0.03584307098388672, 0.03579084777832031, 0.035844097137451174, 0.03597619247436523, 0.037797889709472655, 0.03747635269165039, 0.03775692749023438, 0.037787647247314454, 0.03665510559082031, 0.03562905502319336, 0.035609600067138675, 0.03565158462524414, 0.0357283821105957, 0.03555327987670898, 0.03558195114135742, 0.03556966400146484, 0.03544063949584961, 0.03762176132202148, 0.03763916778564453, 0.03764223861694336, 0.037722110748291016, 0.035568641662597655, 0.035659774780273434, 0.03562496185302735, 0.03631206512451172, 0.03802214431762695, 0.03782656097412109, 0.03778662490844727, 0.03799244689941406, 0.038727680206298826, 0.0379422721862793, 0.03784703826904297, 0.03766067123413086, 0.03766886520385742, 0.037648384094238284, 0.037746688842773435, 0.035560447692871096, 0.035643390655517575, 0.03559219360351563, 0.03570995330810547, 0.03563827133178711, 0.03558707046508789, 0.03562188720703125, 0.03558399963378906, 0.03741183853149414, 0.03779993438720703, 0.03564031982421875, 0.03569049453735352, 0.03569049453735352, 0.035699710845947266, 0.035765247344970705, 0.03573555374145508, 0.03706880187988281, 0.03834368133544922, 0.03791257476806641, 0.03753779220581055, 0.03785318374633789, 0.03838054275512695, 0.037904384613037106, 0.03776409530639648, 0.037694465637207034, 0.037730304718017575, 0.03772313690185547, 0.03753472137451172, 0.03578572845458984, 0.035829761505126956, 0.03589734268188476, 0.035817470550537106, 0.03586355209350586, 0.03591167831420899, 0.0358287353515625, 0.03894169616699219, 0.03827199935913086, 0.03806208038330078, 0.037720062255859374, 0.03787369537353515, 0.03815318298339844, 0.037820415496826174, 0.03801702499389648, 0.03764940643310547, 0.037644287109375, 0.03762995147705078, 0.036013057708740234, 0.035484672546386715, 0.03552460861206055, 0.03563417434692383, 0.0354252815246582, 0.035573760986328126, 0.03537715148925781, 0.035550209045410154, 0.03727360153198242, 
0.037602302551269534, 0.037763072967529294, 0.03822182464599609, 0.037566463470458986, 0.03749478530883789, 0.037582847595214845, 0.035591167449951173, 0.036029441833496094, 0.03603251266479492, 0.035535873413085936, 0.03555635070800781, 0.03557068634033203, 0.035588096618652344, 0.035566593170166014, 0.03560243225097656, 0.03554508972167969, 0.035560447692871096, 0.03560243225097656, 0.035558399200439454, 0.035620864868164064, 0.03567308807373047, 0.03566592025756836, 0.035643390655517575, 0.035710975646972655, 0.035639297485351565, 0.036969470977783206, 0.03940147018432617, 0.0379607048034668, 0.035710975646972655, 0.03577139282226562, 0.035730430603027344, 0.035757057189941405, 0.035783679962158206, 0.03575296020507813, 0.03584921646118164, 0.03653734588623047, 0.03994112014770508, 0.03825254440307617, 0.037713920593261716, 0.037651454925537106, 0.03593523025512695, 0.037644287109375, 0.03775385665893555, 0.036620288848876956, 0.035697662353515625, 0.03570995330810547, 0.035945472717285154, 0.03575193786621094, 0.03552665710449219, 0.03564851379394531, 0.03648102569580078, 0.03563622283935547, 0.03561574554443359, 0.03552972793579102, 0.03556249618530274, 0.038578174591064454, 0.03778662490844727, 0.03792281723022461, 0.03784601593017578, 0.03770265579223633, 0.03780710220336914, 0.0377968635559082, 0.0378081283569336, 0.03583078384399414, 0.03590758514404297, 0.03580723190307617, 0.03581235122680664, 0.03566796875, 0.03584102249145508, 0.03778662490844727, 0.03775795364379883, 0.03784396743774414, 0.038645759582519534, 0.03855974578857422, 0.03797708892822266, 0.03764633560180664, 0.037634048461914066, 0.038225921630859375, 0.03840409469604492, 0.037648384094238284, 0.03785420989990235, 0.03778559875488281, 0.03806412887573242, 0.0376545295715332, 0.037855232238769534, 0.037684223175048825, 0.037157886505126955, 0.037716991424560545, 0.03768524932861328, 0.03778252792358398, 0.03824127960205078, 0.03778559875488281, 0.0378787841796875, 0.03757670211791992, 0.03791155242919922, 0.03581644821166992, 0.035757057189941405, 0.03732070541381836, 0.03585945510864258, 0.03576319885253906, 0.035934207916259765, 0.036634624481201174, 0.036013057708740234, 0.036416511535644534, 0.037735424041748046, 0.03825254440307617, 0.03874816131591797, 0.03774566268920899, 0.0378787841796875, 0.03839590454101562, 0.03889254379272461, 0.038043647766113284, 0.037935104370117184, 0.03791257476806641, 0.0377077751159668, 0.0377784309387207, 0.03784396743774414, 0.03567923355102539, 0.039174144744873046, 0.040417278289794925, 0.03818086242675781, 0.03783065414428711, 0.03789516830444336, 0.03791974258422852, 0.037884929656982425, 0.03760025787353516, 0.03853004837036133, 0.0381317138671875, 0.03786444854736328, 0.038174720764160154, 0.03818905639648437, 0.0380948486328125, 0.038365184783935545, 0.03796377563476563, 0.037850112915039064, 0.037700607299804685, 0.03786751937866211, 0.037855232238769534, 0.03786444854736328, 0.03789311981201172, 0.03770982360839844, 0.035768318176269534, 0.03729817581176758, 0.038269950866699216, 0.03814297485351562, 0.0382033920288086, 0.03811532974243164, 0.03772313690185547, 0.03641548919677735, 0.03788390350341797, 0.037804031372070314, 0.03774156951904297, 0.037817344665527344, 0.03647590255737305, 0.035691520690917966, 0.036316158294677735, 0.03720601654052735, 0.03783168029785156, 0.03772927856445313, 0.037787647247314454, 0.03792281723022461, 0.03785932922363281, 0.03798019027709961, 0.03593622589111328, 0.037318656921386716, 0.03786342239379883, 0.03802521514892578, 
0.03769241714477539, 0.037921791076660154]",tokens/s,27.149227582173072,,,1,64,1,,, -4bit-awq-exllama-v1-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,s,s,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 304, in hf_raise_for_status - response.raise_for_status() - File ""/usr/local/lib/python3.10/dist-packages/requests/models.py"", line 1024, in raise_for_status - raise HTTPError(http_error_msg, response=self) -requests.exceptions.HTTPError: 404 Client Error: Not Found for url: https://huggingface.co/s/resolve/main/config.json - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1221, in hf_hub_download - return _hf_hub_download_to_cache_dir( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1325, in _hf_hub_download_to_cache_dir - _raise_on_head_call_error(head_call_error, force_download, local_files_only) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1823, in _raise_on_head_call_error - raise head_call_error - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1722, in _get_metadata_or_catch_error - metadata = get_hf_file_metadata(url=url, proxies=proxies, timeout=etag_timeout, headers=headers) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1645, in get_hf_file_metadata - r = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 372, in _request_wrapper - response = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 396, in _request_wrapper - hf_raise_for_status(response) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status - raise 
RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-66948d0f-021b1999049b241c164628c2;256122c7-411d-40e4-84e2-1960a59d69d7) - -Repository Not Found for url: https://huggingface.co/s/resolve/main/config.json. -Please make sure you specified the correct `repo_id` and `repo_type`. -If you are trying to access a private or gated repo, make sure you are authenticated. - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 425, in cached_file - raise EnvironmentError( -OSError: s is not a local folder and is not a valid model identifier listed on 'https://huggingface.co/models' -If this is a private repository, make sure to pass a token having permission to this repo either by logging in with `huggingface-cli login` or by passing `token=` - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,1,64,1,,, -4bit-awq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,EleutherAI/pythia-12b,EleutherAI/pythia-12b,cuda,0,42,,,True,,,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA 
A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,gpt_neox,MB,6755.069952,9406.251008,0.0,8776.58112,8188.314112,s,1,15.4819404296875,15.4819404296875,0.0,15.4819404296875,15.4819404296875,15.4819404296875,15.4819404296875,[15.4819404296875],,kWh,5.334993862848529e-05,2.9224284052144118e-05,0.00010903619833996236,0.00019161042102059175,,MB,1709.334528,9423.028224,0.0,8776.58112,7654.291456,s,10,21.331978759765626,2.1331978759765624,5.814364165122625e-05,2.1331973876953123,2.133276489257813,2.1332803344726563,2.1332834106445313,"[2.133201904296875, 2.13315087890625, 2.133275634765625, 2.13318896484375, 2.13322802734375, 2.13308447265625, 2.133230224609375, 2.1331416015625, 2.13319287109375, 2.1332841796875]",tokens/s,120.00761996015258,kWh,2.5196868891665646e-05,1.3808508721025548e-05,0.00014347444811279585,0.00018247982572548705,tokens/kWh,1402894.8075888278,MB,1725.861888,9423.028224,0.0,8776.58112,7908.82816,s,10,16.742042724609373,1.6742042724609374,0.011036865682778877,1.6735211791992188,1.6870495361328124,1.68981083984375,1.6920198828125,"[1.6864359130859374, 1.6925721435546874, 1.6685333251953125, 1.6776661376953126, 1.68317626953125, 1.6583134765625, 1.6582708740234375, 1.6676629638671876, 1.669376220703125, 1.680035400390625]",tokens/s,37.62981676506856,kWh,1.967307581729138e-05,1.0782570622773106e-05,8.009723074440812e-05,0.00011055287718447261,tokens/kWh,569863.0520024899,,s,630,16.739581972122178,0.02657076503511459,0.0005944552200350991,0.026258432388305664,0.027262053871154785,0.02743644189834595,0.028566435890197753,"[0.027304960250854493, 0.02630860710144043, 0.026233856201171874, 0.02627993583679199, 0.026190847396850587, 0.026294271469116212, 0.026224639892578124, 0.02631475257873535, 0.026232831954956053, 0.02695680046081543, 0.027291648864746092, 0.027236352920532225, 0.02860339164733887, 0.028446720123291015, 0.027419647216796874, 0.02731622314453125, 0.027172864913940428, 0.027188224792480467, 0.027212799072265623, 0.027088895797729492, 0.026229759216308594, 0.026255359649658205, 0.026216447830200194, 0.026389503479003908, 0.026396671295166017, 0.026237951278686524, 0.026254335403442384, 0.026262527465820314, 0.026287103652954103, 0.02618880081176758, 0.026232831954956053, 0.02628915214538574, 0.026323968887329102, 0.026291200637817383, 0.02627993583679199, 0.02627276802062988, 0.026246143341064454, 0.026257408142089843, 0.02778726387023926, 0.028617727279663087, 0.02652262306213379, 0.026295295715332033, 0.026315776824951172, 0.027321344375610353, 0.027261951446533202, 0.027245567321777343, 0.027157503128051756, 0.027243520736694334, 0.027218944549560548, 0.02720358467102051, 0.027228160858154295, 0.027231231689453125, 0.027219968795776366, 0.027266048431396486, 0.02713804817199707, 0.027214847564697265, 0.026778623580932616, 0.026299392700195313, 0.026237951278686524, 0.026258432388305664, 0.026252288818359375, 0.026273792266845702, 0.026376192092895507, 0.027080703735351562, 0.02671820831298828, 0.027207679748535156, 0.026429439544677736, 0.02616422462463379, 0.026464256286621093, 0.027238399505615234, 0.027189247131347655, 0.02714112091064453, 0.026207231521606447, 0.02613657569885254, 0.02611712074279785, 0.02611404800415039, 0.026814464569091798, 0.027123712539672853, 0.027226112365722657, 0.027043840408325196, 0.02716262435913086, 0.027125759124755858, 0.027390975952148438, 0.027304960250854493, 0.027231231689453125, 0.027023359298706053, 0.02618880081176758, 0.026175487518310548, 0.02717695999145508, 0.02717695999145508, 0.02716262435913086, 
0.02710527992248535, 0.02709503936767578, 0.0267775993347168, 0.027214847564697265, 0.0271278076171875, 0.02628915214538574, 0.026291200637817383, 0.02617344093322754, 0.026437631607055666, 0.026187776565551758, 0.026162176132202147, 0.02616012763977051, 0.026177536010742186, 0.026219520568847656, 0.026605567932128905, 0.027157503128051756, 0.027172864913940428, 0.027181055068969725, 0.02712678337097168, 0.026184703826904295, 0.026951679229736326, 0.02715443229675293, 0.027082752227783204, 0.027130880355834962, 0.0271278076171875, 0.027192319869995117, 0.027684864044189454, 0.027886592864990234, 0.028277759552001954, 0.02760601615905762, 0.027244543075561522, 0.026262527465820314, 0.027080703735351562, 0.027053056716918947, 0.026219520568847656, 0.02711859130859375, 0.026256383895874022, 0.02616524887084961, 0.026673152923583986, 0.027196416854858397, 0.027091968536376954, 0.026987520217895508, 0.02710527992248535, 0.02707865524291992, 0.026281984329223632, 0.02612224006652832, 0.02608742332458496, 0.02728447914123535, 0.02829516792297363, 0.02753023910522461, 0.027365375518798828, 0.027232255935668945, 0.02732339286804199, 0.026566656112670898, 0.026177536010742186, 0.026166271209716797, 0.026174463272094727, 0.026204160690307617, 0.026187776565551758, 0.027230207443237304, 0.02780467224121094, 0.027100160598754884, 0.02647039985656738, 0.026234880447387695, 0.026184703826904295, 0.02614067268371582, 0.026166271209716797, 0.026206207275390626, 0.026166271209716797, 0.026199039459228517, 0.026222591400146485, 0.02617651176452637, 0.02617651176452637, 0.02613350486755371, 0.026215423583984376, 0.026628095626831053, 0.026315776824951172, 0.026231807708740236, 0.026211328506469726, 0.026182655334472657, 0.026192895889282225, 0.026225664138793944, 0.026174463272094727, 0.02614784049987793, 0.026178560256958007, 0.026177536010742186, 0.02614784049987793, 0.026170368194580077, 0.02618060874938965, 0.02613145637512207, 0.026230783462524415, 0.026232831954956053, 0.026217472076416014, 0.026194944381713867, 0.026202112197875976, 0.026163200378417968, 0.02615193557739258, 0.0261079044342041, 0.027052032470703126, 0.026183679580688478, 0.026392576217651367, 0.026787839889526367, 0.026109952926635743, 0.02615603256225586, 0.026137599945068358, 0.02615705680847168, 0.027044864654541017, 0.026263551712036134, 0.02617344093322754, 0.02618880081176758, 0.02615603256225586, 0.026101760864257813, 0.026287103652954103, 0.026416128158569335, 0.026261503219604493, 0.02617344093322754, 0.0261345272064209, 0.02613862419128418, 0.026858495712280273, 0.027181055068969725, 0.026720256805419923, 0.02721075248718262, 0.026475519180297852, 0.02614784049987793, 0.02649395179748535, 0.027077632904052733, 0.027166719436645507, 0.027043840408325196, 0.02710527992248535, 0.026266624450683593, 0.026780672073364258, 0.02713702392578125, 0.02740019226074219, 0.027288576126098633, 0.026224639892578124, 0.026177536010742186, 0.02740019226074219, 0.027222015380859374, 0.027166719436645507, 0.027100160598754884, 0.026203136444091796, 0.026202112197875976, 0.026875904083251953, 0.026277887344360353, 0.026310655593872072, 0.026266624450683593, 0.026203136444091796, 0.026217472076416014, 0.027191295623779296, 0.026463232040405273, 0.026249216079711913, 0.026294271469116212, 0.026351615905761717, 0.02627686309814453, 0.02675814437866211, 0.02715238380432129, 0.027281408309936524, 0.027261951446533202, 0.027122688293457032, 0.027258880615234377, 0.027239423751831054, 0.02758143997192383, 0.028572671890258788, 0.02855116844177246, 
0.02733875274658203, 0.02734284782409668, 0.027406335830688477, 0.02699776077270508, 0.026190847396850587, 0.026210304260253905, 0.02655129623413086, 0.026242048263549804, 0.026204160690307617, 0.02595327949523926, 0.026161151885986327, 0.02610380744934082, 0.02615705680847168, 0.026359807968139647, 0.026264575958251952, 0.026189823150634766, 0.026208255767822267, 0.026513408660888672, 0.02615603256225586, 0.026245119094848633, 0.02612735939025879, 0.026264575958251952, 0.026170368194580077, 0.0261345272064209, 0.02660147285461426, 0.026207231521606447, 0.02617651176452637, 0.02595020866394043, 0.025923583984375, 0.026178560256958007, 0.02771046447753906, 0.02897203254699707, 0.02752511978149414, 0.027239423751831054, 0.02709708786010742, 0.02706329536437988, 0.027142143249511717, 0.027209728240966798, 0.027592704772949218, 0.027380735397338866, 0.027217920303344727, 0.027108352661132814, 0.027036672592163087, 0.027189247131347655, 0.027272192001342774, 0.027190271377563476, 0.027143167495727538, 0.026464256286621093, 0.026175487518310548, 0.026418176651000977, 0.02637926483154297, 0.026235904693603516, 0.026149887084960938, 0.026223615646362306, 0.02617651176452637, 0.02637414360046387, 0.026294271469116212, 0.026186752319335937, 0.02711347198486328, 0.026403839111328126, 0.027053056716918947, 0.026239999771118162, 0.026181631088256836, 0.026205184936523438, 0.026078208923339844, 0.026189823150634766, 0.02628915214538574, 0.026211328506469726, 0.026166271209716797, 0.026187776565551758, 0.02613862419128418, 0.026557439804077147, 0.027159551620483398, 0.026207231521606447, 0.02614681625366211, 0.02612224006652832, 0.02614886474609375, 0.026310655593872072, 0.026161151885986327, 0.02614374351501465, 0.02613145637512207, 0.02615910339355469, 0.026153984069824218, 0.026214399337768556, 0.02612531280517578, 0.026226688385009765, 0.02613657569885254, 0.026178560256958007, 0.02614374351501465, 0.02611814308166504, 0.02616524887084961, 0.02615091133117676, 0.026171392440795898, 0.026233856201171874, 0.027403263092041014, 0.027207679748535156, 0.02617344093322754, 0.02614374351501465, 0.026175487518310548, 0.026163200378417968, 0.026194944381713867, 0.02614784049987793, 0.02615193557739258, 0.026195968627929687, 0.02611814308166504, 0.026182655334472657, 0.026201087951660155, 0.02689023971557617, 0.028572671890258788, 0.027015167236328123, 0.02632908821105957, 0.02617344093322754, 0.026366975784301756, 0.026274816513061523, 0.026187776565551758, 0.026217472076416014, 0.026183679580688478, 0.026174463272094727, 0.026189823150634766, 0.026238975524902345, 0.026179584503173828, 0.026266624450683593, 0.026145792007446288, 0.02713907241821289, 0.026224639892578124, 0.02616422462463379, 0.027208703994750977, 0.027208703994750977, 0.027183103561401366, 0.027057151794433593, 0.026300416946411134, 0.026201087951660155, 0.026101760864257813, 0.02618880081176758, 0.0261396484375, 0.02612838363647461, 0.026096639633178712, 0.02612224006652832, 0.02609766387939453, 0.02616422462463379, 0.02611199951171875, 0.02574131202697754, 0.025972736358642577, 0.02614271926879883, 0.026068992614746093, 0.026256383895874022, 0.026315776824951172, 0.026175487518310548, 0.0261212158203125, 0.025911296844482422, 0.02617344093322754, 0.02611097526550293, 0.02596147155761719, 0.026230783462524415, 0.026270719528198243, 0.026212352752685547, 0.026208255767822267, 0.026158079147338868, 0.02617651176452637, 0.02614271926879883, 0.026104831695556642, 0.02633113670349121, 0.026359807968139647, 0.02630348777770996, 
0.026084352493286132, 0.027229183197021483, 0.02718720054626465, 0.027077632904052733, 0.027299840927124022, 0.026874879837036132, 0.026204160690307617, 0.02613555145263672, 0.02614681625366211, 0.0261529598236084, 0.02614067268371582, 0.025796607971191408, 0.02616012763977051, 0.02614374351501465, 0.02613657569885254, 0.025754623413085938, 0.026852352142333984, 0.027214847564697265, 0.026434560775756837, 0.025806848526000976, 0.02576896095275879, 0.02614169692993164, 0.027228160858154295, 0.026307584762573243, 0.02613555145263672, 0.02710425567626953, 0.026351615905761717, 0.026108928680419922, 0.026711040496826172, 0.027297792434692384, 0.02713702392578125, 0.02714931106567383, 0.026772480010986328, 0.02772377586364746, 0.027518976211547853, 0.026198015213012696, 0.026401792526245117, 0.02727628707885742, 0.02718720054626465, 0.02706329536437988, 0.02614374351501465, 0.02614067268371582, 0.02614067268371582, 0.02612531280517578, 0.02611609649658203, 0.02611507225036621, 0.02611814308166504, 0.026102783203125, 0.02613862419128418, 0.026206207275390626, 0.02612428855895996, 0.026186752319335937, 0.026166271209716797, 0.026177536010742186, 0.025863168716430664, 0.026246143341064454, 0.026184703826904295, 0.026195968627929687, 0.026251264572143555, 0.0261345272064209, 0.026286079406738282, 0.026221567153930665, 0.026239999771118162, 0.026264575958251952, 0.026295295715332033, 0.02617344093322754, 0.026219520568847656, 0.026235904693603516, 0.026202112197875976, 0.026211328506469726, 0.026206207275390626, 0.026607616424560547, 0.028465152740478516, 0.02772787284851074, 0.027130880355834962, 0.026310655593872072, 0.026238975524902345, 0.026247167587280275, 0.026232831954956053, 0.026247167587280275, 0.026244096755981446, 0.026194944381713867, 0.026213375091552735, 0.02617241668701172, 0.026254335403442384, 0.027198463439941405, 0.026234880447387695, 0.026228736877441407, 0.026239999771118162, 0.026185728073120116, 0.026225664138793944, 0.026203136444091796, 0.02615603256225586, 0.026208255767822267, 0.02629631996154785, 0.026290176391601562, 0.02637004852294922, 0.02618880081176758, 0.026203136444091796, 0.026089471817016603, 0.026254335403442384, 0.02615500831604004, 0.026214399337768556, 0.0261212158203125, 0.026521600723266602, 0.026234880447387695, 0.026190847396850587, 0.026220544815063477, 0.026634239196777345, 0.02608332824707031, 0.02611199951171875, 0.02617241668701172, 0.02615705680847168, 0.026204160690307617, 0.026738687515258788, 0.027058176040649414, 0.02613862419128418, 0.02614374351501465, 0.02615705680847168, 0.026212352752685547, 0.027084800720214845, 0.027339775085449217, 0.026505216598510743, 0.0261529598236084, 0.02611712074279785, 0.026224639892578124, 0.026204160690307617, 0.02614374351501465, 0.026327039718627928, 0.02635468864440918, 0.02628505516052246, 0.02634444808959961, 0.02690559959411621, 0.026867712020874023, 0.02680012893676758, 0.027082752227783204, 0.027244543075561522, 0.026594303131103517, 0.027242496490478517, 0.02710937690734863, 0.026266624450683593, 0.0261396484375, 0.027262975692749023, 0.027503616333007814, 0.027143167495727538, 0.02717081642150879, 0.027227136611938478, 0.027431936264038087, 0.027648000717163085, 0.027242496490478517, 0.026020864486694335, 0.026847232818603517, 0.0269803524017334, 0.028242944717407226, 0.02737664031982422, 0.02715238380432129, 0.02666803169250488, 0.025993215560913087, 0.025997312545776367, 0.02612633514404297, 0.02613350486755371, 0.02595327949523926, 0.025800703048706054, 0.026002431869506838, 
0.026254335403442384, 0.025798656463623046, 0.026190847396850587, 0.025931776046752928, 0.02588057518005371, 0.02613555145263672, 0.025907199859619142, 0.026192895889282225, 0.02614271926879883, 0.026022911071777344, 0.026051584243774413, 0.026203136444091796, 0.026215423583984376, 0.026202112197875976, 0.026175487518310548, 0.025847808837890625, 0.02618880081176758, 0.026184703826904295, 0.026011648178100585, 0.02609971237182617, 0.026225664138793944, 0.026259456634521484, 0.026394624710083008, 0.0314521598815918, 0.029646848678588866, 0.02760601615905762, 0.027802623748779298, 0.027769855499267578, 0.027386880874633788, 0.027205631256103514, 0.02719436836242676, 0.027440128326416017, 0.02631884765625, 0.026933248519897462, 0.02735308837890625, 0.026838016510009766, 0.02631372833251953, 0.027046911239624022, 0.027251712799072264, 0.026281984329223632, 0.026227712631225586, 0.02728550338745117, 0.026573823928833007, 0.02634444808959961, 0.026263551712036134, 0.026284032821655274, 0.026258432388305664]",tokens/s,37.63534842442249,,,1,64,1,,, -4bit-awq-exllama-v1-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,2,2,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 304, in hf_raise_for_status - response.raise_for_status() - File ""/usr/local/lib/python3.10/dist-packages/requests/models.py"", line 1024, in raise_for_status - raise HTTPError(http_error_msg, response=self) -requests.exceptions.HTTPError: 404 Client Error: Not Found for url: https://huggingface.co/2/resolve/main/config.json - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1221, in hf_hub_download - return _hf_hub_download_to_cache_dir( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1325, in _hf_hub_download_to_cache_dir - _raise_on_head_call_error(head_call_error, force_download, local_files_only) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1823, in _raise_on_head_call_error - raise 
head_call_error - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1722, in _get_metadata_or_catch_error - metadata = get_hf_file_metadata(url=url, proxies=proxies, timeout=etag_timeout, headers=headers) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1645, in get_hf_file_metadata - r = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 372, in _request_wrapper - response = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 396, in _request_wrapper - hf_raise_for_status(response) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status - raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-669493a1-02192fe012f8ec46150875d6;6840c524-3d96-4f6d-8fe9-94973540ecb8) - -Repository Not Found for url: https://huggingface.co/2/resolve/main/config.json. -Please make sure you specified the correct `repo_id` and `repo_type`. -If you are trying to access a private or gated repo, make sure you are authenticated. - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 425, in cached_file - raise EnvironmentError( -OSError: 2 is not a local folder and is not a valid model identifier listed on 'https://huggingface.co/models' -If this is a private repository, make sure to pass a token having permission to this repo either by logging in with `huggingface-cli login` or by passing `token=` - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,1,64,1,,, 
-4bit-awq-exllama-v1-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,tiiuae/falcon-40b,tiiuae/falcon-40b,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - self.load_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 559, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3704, in from_pretrained - config = cls._autoset_attn_implementation( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1481, in _autoset_attn_implementation - cls._check_and_enable_flash_attn_2( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1572, in _check_and_enable_flash_attn_2 - raise ValueError( -ValueError: FalconForCausalLM does not support Flash Attention 2.0 yet. 
Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmp76n4ebgl/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,1,64,1,,, -4bit-awq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,EleutherAI/pythia-160m,EleutherAI/pythia-160m,cuda,0,42,,,True,,,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,gpt_neox,MB,914.90304,925.36832,0.0,295.698432,277.263872,s,1,7.50359326171875,7.50359326171875,0.0,7.50359326171875,7.50359326171875,7.50359326171875,7.50359326171875,[7.50359326171875],,kWh,6.269273883343532e-06,3.419627637982495e-06,8.051950885989978e-06,1.7740852407316006e-05,,MB,1432.584192,981.991424,0.0,335.54432,312.39168,s,17,0.34494895935058595,0.02029111525591682,4.2718583514160195e-05,0.020298112869262697,0.02034021759033203,0.020347238159179687,0.020352070922851562,"[0.020192800521850587, 0.020295936584472655, 0.020336544036865235, 0.02035327911376953, 0.02030259132385254, 0.020276832580566406, 0.020260927200317382, 0.02032598304748535, 0.020274272918701174, 0.02022159957885742, 0.020316959381103516, 0.020326528549194336, 0.020298112869262697, 0.020308576583862304, 0.02024831962585449, 0.020263967514038087, 0.020345727920532228]",tokens/s,12616.359267160107,kWh,2.393779015955399e-07,1.3116639439183236e-07,1.2500920931902346e-06,1.620636389177607e-06,tokens/kWh,157962638.4484106,MB,1467.875328,1007.157248,0.0,360.710144,312.39424,s,17,10.181630432128904,0.5989194371840533,0.010424345338125432,0.6040440063476562,0.60754462890625,0.6090422607421875,0.6104241748046875,"[0.6040440063476562, 0.6107696533203125, 0.6063119506835938, 0.6054072265625, 0.6068341064453125, 0.60091015625, 0.6013977661132812, 0.6060458984375, 0.57550048828125, 0.6086104125976562, 0.591460205078125, 0.6047490234375, 0.5884724731445312, 0.5765367431640624, 0.5957401733398437, 0.5920833129882812, 0.6067568359375]",tokens/s,105.18943966188151,kWh,6.831371599793002e-06,3.743262907820139e-06,1.1311895080576502e-05,2.1886529588189637e-05,tokens/kWh,2878482.8470017435,,s,1071,10.174313457489017,0.009499825823986008,0.0002648600666591048,0.00959488010406494,0.009693183898925782,0.009752592086791991,0.010232729625701905,"[0.008923135757446288, 0.00941055965423584, 0.009431039810180664, 0.009448448181152343, 0.00952835178375244, 0.009495519638061523, 0.00953446388244629, 0.009370623588562011, 0.00951193618774414, 0.009490431785583496, 0.009478143692016602, 0.009415679931640625, 0.009467904090881347, 0.009639936447143555, 0.009622528076171874, 0.009484288215637206, 0.009383935928344727, 0.009382911682128906, 0.009472000122070312, 0.009798656463623047, 0.009687040328979492, 0.009605119705200196, 0.009614336013793945, 0.009656352043151855, 0.009593824386596679, 0.00953654384613037, 0.00956003189086914, 0.00960102367401123, 0.009684032440185546, 0.009578432083129883, 0.00960102367401123, 
0.009629695892333985, 0.009668607711791993, 0.009586688041687011, 0.009595904350280762, 0.009671680450439453, 0.009646080017089843, 0.009680895805358887, 0.009706496238708496, 0.009572352409362793, 0.00971776008605957, 0.009688063621520996, 0.009668607711791993, 0.009743359565734864, 0.009620479583740234, 0.009680928230285645, 0.009669599533081054, 0.009661439895629884, 0.009654272079467773, 0.009575424194335937, 0.009569279670715332, 0.0096112642288208, 0.009675775527954102, 0.009585663795471192, 0.009621503829956055, 0.00960102367401123, 0.009621503829956055, 0.009708543777465821, 0.00959705638885498, 0.009531264305114745, 0.009637887954711915, 0.009668607711791993, 0.009737215995788574, 0.009377792358398437, 0.009681920051574706, 0.009933823585510254, 0.010165247917175293, 0.009812992095947265, 0.009784319877624511, 0.009676799774169922, 0.009702400207519531, 0.009607168197631836, 0.009585663795471192, 0.009630720138549804, 0.009621503829956055, 0.009597951889038087, 0.009573375701904297, 0.009552895545959473, 0.009669631958007812, 0.009607168197631836, 0.009648127555847168, 0.009552960395812988, 0.01114310359954834, 0.010584063529968261, 0.009977855682373048, 0.009724927902221679, 0.009700351715087891, 0.009577471733093262, 0.009677824020385742, 0.009630720138549804, 0.00961638355255127, 0.009607168197631836, 0.009552895545959473, 0.009679871559143067, 0.009629695892333985, 0.009640959739685059, 0.009683967590332031, 0.009598976135253906, 0.009655296325683594, 0.009658368110656738, 0.009600000381469726, 0.009639936447143555, 0.009652223587036133, 0.009690112113952636, 0.009645055770874024, 0.009658368110656738, 0.009607168197631836, 0.009584639549255371, 0.009639936447143555, 0.00954265594482422, 0.00960102367401123, 0.009541631698608399, 0.009646080017089843, 0.009643008232116699, 0.009686016082763671, 0.009587712287902832, 0.009591808319091797, 0.009645055770874024, 0.009730048179626465, 0.009673727989196777, 0.009632831573486328, 0.009604031562805176, 0.009638912200927734, 0.009774080276489258, 0.009478143692016602, 0.009485312461853027, 0.009298944473266601, 0.009615360260009765, 0.009602047920227052, 0.009569279670715332, 0.00961945629119873, 0.009565183639526367, 0.009539584159851074, 0.0095283203125, 0.009596927642822266, 0.009631744384765625, 0.009572352409362793, 0.009605119705200196, 0.009591808319091797, 0.009580544471740723, 0.009636863708496094, 0.00963276767730713, 0.00961638355255127, 0.009558015823364258, 0.009593855857849122, 0.00961843204498291, 0.009611295700073243, 0.009656288146972657, 0.009575424194335937, 0.009593855857849122, 0.00962662410736084, 0.00959488010406494, 0.009615360260009765, 0.009591808319091797, 0.00961843204498291, 0.009634816169738769, 0.009621503829956055, 0.009573375701904297, 0.009572352409362793, 0.009577471733093262, 0.00972697639465332, 0.009655296325683594, 0.009569279670715332, 0.009602047920227052, 0.0096112642288208, 0.009645055770874024, 0.009640959739685059, 0.00961740779876709, 0.009654272079467773, 0.00962662410736084, 0.009894911766052245, 0.010085375785827636, 0.009821184158325195, 0.009588735580444336, 0.009669631958007812, 0.009640959739685059, 0.009608192443847656, 0.009559040069580077, 0.009613311767578125, 0.009676799774169922, 0.00959488010406494, 0.009574399948120118, 0.009595904350280762, 0.009679871559143067, 0.009510911941528321, 0.009494527816772461, 0.009537535667419434, 0.009678879737854003, 0.00967471981048584, 0.00903987216949463, 0.009586688041687011, 0.009615360260009765, 0.010242048263549805, 
0.00985804843902588, 0.009553919792175293, 0.009605119705200196, 0.009662464141845703, 0.009641983985900878, 0.00960921573638916, 0.009541631698608399, 0.009623552322387695, 0.00963276767730713, 0.009607168197631836, 0.009589759826660157, 0.009545727729797364, 0.009546751976013184, 0.00963584041595459, 0.009583616256713867, 0.009516032218933105, 0.009545727729797364, 0.009649151802062989, 0.009613311767578125, 0.009613311767578125, 0.009547776222229003, 0.009593855857849122, 0.009568256378173828, 0.009597951889038087, 0.009529343605041504, 0.009575424194335937, 0.00960102367401123, 0.009567232131958007, 0.009607168197631836, 0.009552895545959473, 0.009579520225524902, 0.009645055770874024, 0.00960102367401123, 0.009669631958007812, 0.009573375701904297, 0.009637887954711915, 0.0096112642288208, 0.009637887954711915, 0.009614336013793945, 0.009575424194335937, 0.009586688041687011, 0.009600000381469726, 0.009517056465148926, 0.00961638355255127, 0.009568256378173828, 0.009649151802062989, 0.009620479583740234, 0.009607168197631836, 0.009561087608337402, 0.009572352409362793, 0.009647104263305664, 0.00960921573638916, 0.009582592010498046, 0.00959488010406494, 0.009547776222229003, 0.009688063621520996, 0.009607168197631836, 0.00962662410736084, 0.00963276767730713, 0.009267200469970703, 0.009655327796936035, 0.009556960105895997, 0.009582592010498046, 0.00963379192352295, 0.009585663795471192, 0.009580544471740723, 0.009448448181152343, 0.009647104263305664, 0.009666560173034668, 0.009689087867736817, 0.00961740779876709, 0.009535488128662109, 0.009640959739685059, 0.00974028778076172, 0.009672703742980958, 0.009621503829956055, 0.009531392097473144, 0.009670656204223632, 0.00960307216644287, 0.009636863708496094, 0.009917440414428711, 0.010112000465393066, 0.009902079582214356, 0.009608192443847656, 0.009651200294494629, 0.009521151542663574, 0.009572383880615234, 0.009627615928649903, 0.009590784072875976, 0.009634816169738769, 0.009586688041687011, 0.009558015823364258, 0.009660415649414063, 0.009638912200927734, 0.009641983985900878, 0.009604096412658691, 0.009643008232116699, 0.009677824020385742, 0.009644031524658203, 0.009613311767578125, 0.009605119705200196, 0.009644031524658203, 0.009674752235412597, 0.009688063621520996, 0.00961638355255127, 0.009549823760986328, 0.009684991836547852, 0.00963276767730713, 0.009628671646118164, 0.009576448440551758, 0.009533439636230469, 0.00962559986114502, 0.009613311767578125, 0.009597951889038087, 0.009586688041687011, 0.009572383880615234, 0.00967574405670166, 0.009597951889038087, 0.009523200035095216, 0.009563136100769042, 0.009559040069580077, 0.009660415649414063, 0.008938495635986327, 0.009505791664123535, 0.009577471733093262, 0.009581567764282227, 0.00961740779876709, 0.00955084800720215, 0.009589759826660157, 0.009631744384765625, 0.009636863708496094, 0.009560064315795898, 0.009573375701904297, 0.009608192443847656, 0.009612288475036621, 0.009583616256713867, 0.009598976135253906, 0.009551872253417968, 0.009607168197631836, 0.009606143951416016, 0.009547776222229003, 0.009516032218933105, 0.009565183639526367, 0.009615360260009765, 0.009607168197631836, 0.009564191818237304, 0.009593824386596679, 0.009672703742980958, 0.009469951629638672, 0.009494527816772461, 0.009383935928344727, 0.009382911682128906, 0.009681920051574706, 0.009472000122070312, 0.009376768112182618, 0.00955084800720215, 0.009531392097473144, 0.009492480278015136, 0.009439231872558594, 0.00939417552947998, 0.009464832305908203, 0.009547807693481446, 
0.009615327835083008, 0.009648127555847168, 0.009552895545959473, 0.009702400207519531, 0.009545727729797364, 0.009470975875854493, 0.009369600296020507, 0.00941055965423584, 0.00942796802520752, 0.009494527816772461, 0.009424896240234374, 0.0094269437789917, 0.009360383987426758, 0.00952012825012207, 0.009621503829956055, 0.009525247573852539, 0.009527296066284179, 0.009634816169738769, 0.009565183639526367, 0.00960921573638916, 0.009489407539367676, 0.009556991577148437, 0.009692159652709961, 0.00974233627319336, 0.009781248092651367, 0.009551872253417968, 0.00969321632385254, 0.009636832237243651, 0.009597951889038087, 0.00961945629119873, 0.009531392097473144, 0.009721856117248535, 0.009590784072875976, 0.009569279670715332, 0.00952012825012207, 0.009424896240234374, 0.009507840156555175, 0.009570303916931153, 0.009565183639526367, 0.009812992095947265, 0.009785344123840332, 0.009960448265075684, 0.009659392356872559, 0.009672703742980958, 0.009438207626342773, 0.009465855598449707, 0.00952012825012207, 0.009431039810180664, 0.009367551803588867, 0.00943616008758545, 0.009448448181152343, 0.009583616256713867, 0.009463808059692384, 0.009422847747802734, 0.009500672340393066, 0.009457663536071777, 0.009446399688720703, 0.009466879844665528, 0.009588735580444336, 0.009541631698608399, 0.009660415649414063, 0.009545727729797364, 0.009496576309204101, 0.009450495719909668, 0.00963584041595459, 0.009473024368286133, 0.009409536361694336, 0.009475104331970215, 0.009524191856384277, 0.00960102367401123, 0.00941260814666748, 0.009465855598449707, 0.009395199775695801, 0.009572352409362793, 0.009485312461853027, 0.009415679931640625, 0.009547776222229003, 0.009538559913635255, 0.009464832305908203, 0.009393152236938476, 0.009476096153259277, 0.00963379192352295, 0.009553919792175293, 0.00931123161315918, 0.009348095893859864, 0.009589759826660157, 0.00899788761138916, 0.009082880020141602, 0.009112671852111816, 0.009540512084960937, 0.009775103569030762, 0.009525247573852539, 0.009671680450439453, 0.00961740779876709, 0.009765888214111328, 0.00960307216644287, 0.009646080017089843, 0.0096245756149292, 0.009593855857849122, 0.00962764835357666, 0.009592831611633301, 0.009640959739685059, 0.009455615997314454, 0.009439231872558594, 0.009473024368286133, 0.009442303657531738, 0.009461759567260742, 0.009433088302612304, 0.00961945629119873, 0.009675775527954102, 0.00962764835357666, 0.009647104263305664, 0.009647104263305664, 0.009629695892333985, 0.00962559986114502, 0.009580544471740723, 0.009666560173034668, 0.009743359565734864, 0.009673727989196777, 0.009665535926818849, 0.00962662410736084, 0.009647104263305664, 0.009634816169738769, 0.009669631958007812, 0.009623552322387695, 0.009641983985900878, 0.009692159652709961, 0.009675775527954102, 0.009834495544433594, 0.01022873592376709, 0.00994918441772461, 0.009714688301086426, 0.009711615562438965, 0.009574399948120118, 0.00963276767730713, 0.009655296325683594, 0.009588735580444336, 0.009592831611633301, 0.009664511680603028, 0.00960102367401123, 0.009677824020385742, 0.00961945629119873, 0.00960921573638916, 0.009668607711791993, 0.00960307216644287, 0.009641983985900878, 0.009583616256713867, 0.009634816169738769, 0.009688063621520996, 0.008882176399230958, 0.009203712463378906, 0.009149439811706543, 0.009137151718139648, 0.009148415565490722, 0.009164799690246582, 0.009128959655761718, 0.00912384033203125, 0.00909727954864502, 0.009076671600341797, 0.009134079933166504, 0.009107456207275391, 0.009166848182678223, 0.009125887870788574, 
0.009125887870788574, 0.009160703659057617, 0.009159680366516113, 0.009078783988952637, 0.00913920021057129, 0.009142271995544434, 0.009135104179382325, 0.009146368026733399, 0.009153535842895508, 0.009160703659057617, 0.009083904266357423, 0.009088000297546387, 0.009150464057922364, 0.009104384422302245, 0.009088000297546387, 0.009111552238464356, 0.009151488304138184, 0.009135104179382325, 0.009115648269653321, 0.009164799690246582, 0.009124863624572753, 0.00912179183959961, 0.008978431701660156, 0.009175040245056153, 0.009157631874084473, 0.00910028839111328, 0.009243647575378417, 0.00908902359008789, 0.00910643196105957, 0.009133055686950683, 0.009143296241760255, 0.009105440139770507, 0.009118687629699708, 0.00913100814819336, 0.009152511596679687, 0.00912281608581543, 0.009134079933166504, 0.009142271995544434, 0.009094143867492676, 0.009127936363220214, 0.009148415565490722, 0.009160703659057617, 0.009127936363220214, 0.009184255599975585, 0.009151488304138184, 0.009133055686950683, 0.00913100814819336, 0.009163776397705077, 0.009149439811706543, 0.009288703918457031, 0.009661439895629884, 0.009591808319091797, 0.00962662410736084, 0.00960921573638916, 0.00961740779876709, 0.009613311767578125, 0.00963584041595459, 0.009658368110656738, 0.009636863708496094, 0.00960921573638916, 0.009666560173034668, 0.009599040031433106, 0.009629631996154785, 0.009626655578613282, 0.009652192115783692, 0.009651200294494629, 0.009596927642822266, 0.009614336013793945, 0.00960921573638916, 0.00963276767730713, 0.009677824020385742, 0.009762816429138184, 0.01030246353149414, 0.009768959999084472, 0.00972697639465332, 0.009662464141845703, 0.009694208145141601, 0.009659392356872559, 0.009668607711791993, 0.009697279930114745, 0.009649151802062989, 0.009654272079467773, 0.009669631958007812, 0.009669631958007812, 0.009640959739685059, 0.00963379192352295, 0.009672703742980958, 0.009659392356872559, 0.00972390365600586, 0.009612288475036621, 0.009739328384399414, 0.009687999725341796, 0.009658368110656738, 0.009719807624816895, 0.009659392356872559, 0.00963584041595459, 0.009637887954711915, 0.00963584041595459, 0.009687040328979492, 0.009517056465148926, 0.009689087867736817, 0.009649151802062989, 0.00960921573638916, 0.00970137596130371, 0.009693183898925782, 0.009691136360168457, 0.009695232391357422, 0.009606143951416016, 0.009698304176330566, 0.009477120399475097, 0.00953446388244629, 0.009525247573852539, 0.00899891185760498, 0.00917404842376709, 0.009149408340454102, 0.009165823936462402, 0.009187328338623046, 0.009124863624572753, 0.009163776397705077, 0.009186304092407227, 0.009157631874084473, 0.00912384033203125, 0.009119744300842286, 0.009200639724731445, 0.009047039985656738, 0.009150464057922364, 0.009161727905273438, 0.009146368026733399, 0.009135104179382325, 0.009162752151489258, 0.009275391578674316, 0.009111552238464356, 0.00914739227294922, 0.009248767852783203, 0.009148415565490722, 0.009181183815002441, 0.009145343780517578, 0.009175040245056153, 0.009126912117004395, 0.009102335929870605, 0.009158656120300293, 0.009163776397705077, 0.00913100814819336, 0.009163776397705077, 0.009182208061218262, 0.00914739227294922, 0.009027584075927735, 0.009051136016845703, 0.00899891185760498, 0.009042943954467773, 0.009132032394409179, 0.009296895980834961, 0.009283583641052246, 0.010942463874816894, 0.010817536354064941, 0.010008576393127442, 0.00970751953125, 0.009837568283081055, 0.009672703742980958, 0.00970956802368164, 0.009684991836547852, 0.009720831871032716, 0.009699328422546387, 
0.009708543777465821, 0.009676799774169922, 0.009676799774169922, 0.009754655838012695, 0.009659359931945801, 0.009640959739685059, 0.009772031784057618, 0.009649151802062989, 0.009702400207519531, 0.009653247833251954, 0.009686016082763671, 0.009650176048278808, 0.008846336364746094, 0.009524224281311035, 0.00960921573638916, 0.009329664230346679, 0.00918835163116455, 0.009128959655761718, 0.009152511596679687, 0.009158656120300293, 0.009156607627868652, 0.009785344123840332, 0.009764863967895507, 0.009662464141845703, 0.009653247833251954, 0.009645055770874024, 0.009634816169738769, 0.00961740779876709, 0.009727999687194825, 0.009693183898925782, 0.00971571159362793, 0.00973209571838379, 0.009629695892333985, 0.009646080017089843, 0.009659392356872559, 0.009606143951416016, 0.009643008232116699, 0.009646080017089843, 0.00953446388244629, 0.009669631958007812, 0.009604096412658691, 0.009673727989196777, 0.009660415649414063, 0.009654272079467773, 0.00961740779876709, 0.009620479583740234, 0.009652223587036133, 0.009604096412658691, 0.009669631958007812, 0.009645055770874024, 0.00962764835357666, 0.009663488388061523, 0.00960307216644287, 0.009653247833251954, 0.00959488010406494, 0.009665535926818849, 0.009664511680603028, 0.009682944297790527, 0.009702400207519531, 0.009595904350280762, 0.009681920051574706, 0.009669631958007812, 0.00961023998260498, 0.009678848266601562, 0.009533439636230469, 0.009640959739685059, 0.009661439895629884, 0.009622528076171874, 0.009637887954711915, 0.009651200294494629, 0.009679903984069824, 0.009607135772705078, 0.009644031524658203, 0.009655296325683594, 0.009724927902221679, 0.008840191841125488, 0.009184255599975585, 0.009174015998840332, 0.009198592185974122, 0.009160703659057617, 0.009142271995544434, 0.009148415565490722, 0.009165823936462402, 0.009154560089111329, 0.009134079933166504, 0.009145343780517578, 0.009116671562194823, 0.009104384422302245, 0.009149439811706543, 0.009125887870788574, 0.009138175964355469, 0.009134079933166504, 0.009129983901977539, 0.009136128425598144, 0.00912281608581543, 0.009155584335327148, 0.009202688217163087, 0.009135104179382325, 0.009158656120300293, 0.00913920021057129, 0.009138175964355469, 0.009112575531005859, 0.00912179183959961, 0.009151488304138184, 0.009115648269653321, 0.009096192359924317, 0.009140224456787109, 0.009104384422302245, 0.009150464057922364, 0.009109503746032714, 0.009189375877380371, 0.009134079933166504, 0.009135104179382325, 0.009070591926574707, 0.00900710391998291, 0.00900921630859375, 0.008881088256835937, 0.00912281608581543, 0.009203712463378906, 0.009037823677062988, 0.009160703659057617, 0.011279359817504882, 0.010522624015808106, 0.010570752143859862, 0.009845760345458985, 0.009711615562438965, 0.00973516845703125, 0.009631808280944825, 0.009691072463989257, 0.009630720138549804, 0.009590784072875976, 0.009676799774169922, 0.00999014377593994, 0.009750528335571289, 0.009698304176330566, 0.00970956802368164, 0.009675775527954102, 0.009692159652709961, 0.008932352066040039, 0.009159680366516113, 0.008979455947875976, 0.009158656120300293, 0.009118720054626465, 0.00910028839111328, 0.009157631874084473, 0.009136128425598144, 0.009129983901977539, 0.009143296241760255, 0.009151488304138184, 0.009158656120300293, 0.009186304092407227, 0.009442303657531738, 0.009165823936462402, 0.009137151718139648, 0.009160703659057617, 0.009144319534301757, 0.009111552238464356, 0.009154560089111329, 0.00918015956878662, 0.009174015998840332, 0.009125887870788574, 0.009172991752624511, 
0.009162752151489258, 0.009142271995544434, 0.00913100814819336, 0.009195520401000976, 0.00913920021057129, 0.009033727645874023, 0.009143296241760255, 0.00912281608581543, 0.00913100814819336, 0.009146368026733399, 0.009164799690246582, 0.009129983901977539, 0.009163776397705077, 0.009134143829345702, 0.009167807579040527, 0.009141247749328613, 0.00912281608581543, 0.009161727905273438, 0.009118720054626465, 0.009134079933166504, 0.009125887870788574, 0.009171968460083007, 0.00912384033203125, 0.009101311683654785, 0.009134079933166504, 0.009142271995544434, 0.009109503746032714, 0.009111552238464356, 0.009532416343688965, 0.009456640243530273, 0.009001983642578124, 0.00900710391998291, 0.009068703651428223, 0.009058143615722656, 0.009116703987121583, 0.009167840003967285, 0.00909823989868164, 0.009111552238464356, 0.009217023849487305, 0.008868864059448242, 0.0091146240234375, 0.009142271995544434, 0.009142271995544434, 0.009133055686950683, 0.009357312202453612, 0.009200639724731445, 0.009115648269653321, 0.009168895721435547, 0.009468928337097168, 0.009159680366516113, 0.00912492847442627, 0.009188287734985352, 0.009161727905273438, 0.009134079933166504, 0.009175040245056153, 0.009192447662353515, 0.009101311683654785, 0.00912179183959961, 0.009054207801818847, 0.009159680366516113, 0.009124863624572753, 0.009167872428894042, 0.009185279846191406, 0.009169919967651367, 0.009099264144897461, 0.009307168006896972, 0.009563103675842285, 0.009773056030273437, 0.009228287696838379, 0.009337856292724609, 0.01032703971862793, 0.009805824279785156, 0.009801728248596191, 0.009658368110656738, 0.009638912200927734, 0.009675775527954102, 0.009621503829956055, 0.009657343864440919, 0.009620479583740234, 0.009660415649414063, 0.009640959739685059, 0.009645055770874024, 0.009686016082763671, 0.009692159652709961, 0.009583616256713867, 0.009613311767578125, 0.009584639549255371, 0.009648127555847168, 0.009654272079467773, 0.009694208145141601, 0.00961843204498291, 0.009630720138549804, 0.00961945629119873, 0.009637887954711915, 0.009703424453735352, 0.009896960258483887, 0.00982630443572998, 0.009630720138549804, 0.009650176048278808, 0.009623552322387695, 0.009643008232116699, 0.009676799774169922, 0.008888319969177246, 0.009193471908569336, 0.009176063537597656, 0.009136128425598144, 0.009132032394409179, 0.009167872428894042, 0.009071616172790528, 0.009072640419006347, 0.00912384033203125, 0.009137151718139648, 0.009109503746032714, 0.009147456169128418, 0.00914527988433838, 0.00912384033203125, 0.009117695808410644, 0.009088000297546387, 0.009153535842895508, 0.009102335929870605, 0.00908902359008789, 0.0091146240234375, 0.009151488304138184, 0.009110527992248535, 0.009095168113708496, 0.009126912117004395, 0.009124863624572753, 0.009137151718139648, 0.00910848045349121, 0.009164799690246582, 0.009141247749328613, 0.009119775772094727, 0.009166815757751464, 0.009128959655761718, 0.009736191749572755, 0.009774080276489258, 0.009563136100769042, 0.009653247833251954, 0.00960307216644287, 0.009634816169738769, 0.009653247833251954, 0.009633855819702148, 0.009575360298156739, 0.009622528076171874, 0.009588735580444336, 0.009662464141845703, 0.009623552322387695, 0.009695232391357422, 0.009671711921691894, 0.009611231803894043, 0.009647104263305664, 0.00961740779876709, 0.009712639808654786, 0.0096245756149292, 0.009641983985900878, 0.009623552322387695, 0.009659392356872559, 0.00970137596130371, 0.009608192443847656, 0.009702400207519531, 0.009665535926818849, 0.009654272079467773, 
0.009498623847961426, 0.010301440238952637, 0.009821184158325195, 0.008895487785339355, 0.009555968284606933, 0.009655296325683594, 0.009595904350280762, 0.009661439895629884, 0.010002431869506835, 0.010071040153503418, 0.009658368110656738, 0.009686047554016114, 0.009646047592163086, 0.009637887954711915, 0.00970956802368164, 0.009603103637695313, 0.009625568389892578, 0.009654272079467773, 0.009643008232116699, 0.00962662410736084, 0.009640959739685059, 0.009520159721374512, 0.009432031631469726, 0.009495552062988282, 0.009401344299316406, 0.0096245756149292, 0.009653247833251954, 0.009596927642822266, 0.009583616256713867, 0.009585663795471192, 0.00961638355255127, 0.009681920051574706, 0.009690112113952636, 0.009750528335571289, 0.009896960258483887, 0.009630720138549804, 0.009651200294494629, 0.00961945629119873, 0.009729023933410644, 0.009602047920227052, 0.009680895805358887, 0.009607168197631836, 0.0096112642288208, 0.009656319618225098, 0.00959488010406494, 0.009636863708496094, 0.009657343864440919, 0.009623552322387695, 0.009607168197631836, 0.00960921573638916, 0.009531455993652344, 0.009595840454101563, 0.009580544471740723, 0.009620479583740234, 0.009558015823364258, 0.009640959739685059, 0.009581567764282227, 0.00960102367401123, 0.00961023998260498, 0.009588768005371093, 0.009687007904052735, 0.00963584041595459, 0.009568256378173828, 0.009655296325683594, 0.009615360260009765, 0.009636863708496094]",tokens/s,105.26508785825428,,,1,64,1,,, -4bit-awq-exllama-v1-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,meta-llama/Meta-Llama-3-70B,meta-llama/Meta-Llama-3-70B,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - self.load_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return 
model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3904, in from_pretrained - dispatch_model(model, **device_map_kwargs) - File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 489, in dispatch_model - model.to(device) - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 2796, in to - return super().to(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1173, in to - return self._apply(convert) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 779, in _apply - module._apply(fn) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 779, in _apply - module._apply(fn) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 779, in _apply - module._apply(fn) - [Previous line repeated 2 more times] - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 853, in _apply - self._buffers[key] = fn(buf) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1159, in convert - return t.to( -torch.cuda.OutOfMemoryError: CUDA out of memory. Tried to allocate 20.00 MiB. GPU - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,1,64,1,,, -4bit-awq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,Qwen/Qwen1.5-14B,Qwen/Qwen1.5-14B,cuda,0,42,,,True,,,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,qwen2,MB,8198.311936,12367.429632,0.0,11737.759744,11171.24352,s,1,12.9870703125,12.9870703125,0.0,12.9870703125,12.9870703125,12.9870703125,12.9870703125,[12.9870703125],,kWh,7.161473575971892e-05,3.9235098655533856e-05,0.00013646149805801588,0.0002473113324732687,,MB,3956.211712,12386.304,0.0,11739.856896,10924.361728,s,10,24.25015478515625,2.425015478515625,0.0005157407942711411,2.4251220703125,2.4255887451171874,2.4256837768554687,2.4257598022460938,"[2.42539306640625, 2.42503857421875, 2.425567626953125, 2.42520556640625, 2.42577880859375, 2.42536572265625, 2.4246494140625, 2.424083251953125, 2.424600830078125, 2.424471923828125]",tokens/s,105.56633649064378,kWh,2.8643975177428985e-05,1.5697789869738594e-05,0.0001643557981511945,0.0002086975631983621,tokens/kWh,1226655.4341924828,MB,3960.266752,12388.401152,0.0,11741.954048,10924.364288,s,10,26.665796142578124,2.666579614257812,0.005653574029655948,2.6669134521484374,2.674668994140625,2.6757436767578127,2.6766034228515623,"[2.66777099609375, 2.67443017578125, 2.657687255859375, 2.676818359375, 2.667739990234375, 2.665291015625, 2.659474853515625, 2.668023193359375, 2.662473388671875, 
2.6660869140625]",tokens/s,23.62577125511205,kWh,3.145611324958812e-05,1.724070253572834e-05,0.00010563280672840625,0.0001543296225137227,tokens/kWh,408217.1586624477,,s,630,26.6637148170471,0.042323356852455744,0.0002950411667332122,0.042223617553710936,0.04275210151672364,0.04291845092773437,0.04333242240905762,"[0.04254924774169922, 0.04231270217895508, 0.04221440124511719, 0.04226559829711914, 0.04243046569824219, 0.042218494415283206, 0.0423454704284668, 0.04313907241821289, 0.04232601547241211, 0.042259456634521485, 0.04207923126220703, 0.04215500640869141, 0.042684417724609375, 0.04231167984008789, 0.04214988708496094, 0.04225331115722656, 0.04224204635620117, 0.04224204635620117, 0.04227276611328125, 0.04221440124511719, 0.042136577606201174, 0.042261505126953126, 0.04257894515991211, 0.04226047897338867, 0.04217139053344727, 0.04229017639160156, 0.04222771072387695, 0.042208255767822264, 0.04203007888793946, 0.04213145446777344, 0.04262911987304688, 0.04304076766967774, 0.04259635162353516, 0.042916862487792966, 0.04220006561279297, 0.042343425750732425, 0.04239257431030274, 0.04313600158691406, 0.042537982940673826, 0.04223590469360351, 0.04237619018554688, 0.04215193557739258, 0.04266495895385742, 0.04224512100219727, 0.042261505126953126, 0.0421734390258789, 0.04212531280517578, 0.042162174224853514, 0.042157054901123044, 0.04221952056884765, 0.04188671875, 0.042210304260253906, 0.04214476776123047, 0.04221747207641602, 0.04246527862548828, 0.04244275283813476, 0.04250624084472656, 0.04209868621826172, 0.04219289779663086, 0.042425342559814457, 0.0424161262512207, 0.04282470321655273, 0.04220415878295898, 0.04290764617919922, 0.042782718658447266, 0.042933246612548825, 0.04294041442871094, 0.04290662384033203, 0.04283699035644531, 0.04274790573120117, 0.04281139373779297, 0.04247654342651367, 0.04285542297363281, 0.042548225402832034, 0.04299161529541016, 0.0424796142578125, 0.04462694549560547, 0.0428851203918457, 0.04228812789916992, 0.042000385284423826, 0.04201062393188477, 0.04207513427734375, 0.04220620727539062, 0.04228505706787109, 0.04218572616577149, 0.04222054290771484, 0.04217958450317383, 0.04211711883544922, 0.04217139053344727, 0.04220620727539062, 0.04293119812011719, 0.04316876983642578, 0.04229324722290039, 0.04252979278564453, 0.04227686309814453, 0.0425164794921875, 0.042180606842041016, 0.04205363082885742, 0.04218163299560547, 0.042277889251708986, 0.04256665420532227, 0.04293632125854492, 0.04300595092773438, 0.042105857849121096, 0.0421580810546875, 0.04209868621826172, 0.04275199890136719, 0.042229759216308595, 0.0422031364440918, 0.04207206344604492, 0.04229939270019531, 0.042024959564208986, 0.042180606842041016, 0.04213248062133789, 0.0421396484375, 0.042024959564208986, 0.04277964782714844, 0.04282777786254883, 0.042237953186035154, 0.04204646301269531, 0.042159103393554685, 0.04230451202392578, 0.04214169692993164, 0.042120193481445314, 0.042105857849121096, 0.04246015930175781, 0.04261785507202148, 0.04216934585571289, 0.042044414520263675, 0.04228403091430664, 0.04211404800415039, 0.04200755310058594, 0.042087425231933595, 0.04206387329101562, 0.04218777465820313, 0.04224409484863281, 0.042120193481445314, 0.042077182769775394, 0.042071041107177735, 0.0419788818359375, 0.04208127975463867, 0.04261273574829102, 0.04233420944213867, 0.042180606842041016, 0.04216012954711914, 0.042194942474365234, 0.04203007888793946, 0.04195840072631836, 0.041913345336914064, 0.04215398406982422, 0.042105857849121096, 0.04213555145263672, 0.04222873687744141, 
0.04218777465820313, 0.04216729736328125, 0.04210176086425781, 0.04216012954711914, 0.04218675231933594, 0.04214886474609375, 0.04214169692993164, 0.042028030395507815, 0.04218675231933594, 0.04228300857543945, 0.042103809356689455, 0.04215091323852539, 0.04225843048095703, 0.04243865585327149, 0.042823680877685545, 0.04229119873046875, 0.04240281677246094, 0.042365951538085936, 0.04221132659912109, 0.04206489562988281, 0.04216934585571289, 0.04208230209350586, 0.042177536010742187, 0.04219289779663086, 0.04212940979003906, 0.04209356689453125, 0.04209664154052734, 0.04210790252685547, 0.04218163299560547, 0.0423454704284668, 0.042205184936523435, 0.042109951019287106, 0.042105857849121096, 0.04209561538696289, 0.042261505126953126, 0.042264575958251956, 0.042674175262451174, 0.04290969467163086, 0.042705921173095705, 0.0427694091796875, 0.04271615982055664, 0.04289843368530273, 0.04265574264526367, 0.04291891098022461, 0.04271923065185547, 0.042684417724609375, 0.043584510803222655, 0.04238438415527344, 0.0422737922668457, 0.04222771072387695, 0.04212940979003906, 0.04222771072387695, 0.04223590469360351, 0.04272537612915039, 0.04223590469360351, 0.042365951538085936, 0.04211814498901367, 0.042103809356689455, 0.042180606842041016, 0.04292607879638672, 0.04255846405029297, 0.04209664154052734, 0.042188800811767575, 0.04194303894042969, 0.041885696411132815, 0.04211711883544922, 0.042031105041503904, 0.04255641555786133, 0.04208844757080078, 0.04218572616577149, 0.04229529571533203, 0.04233523178100586, 0.04220723342895508, 0.04210176086425781, 0.04205875015258789, 0.04251136016845703, 0.042240001678466796, 0.04234239959716797, 0.04228505706787109, 0.04236492919921875, 0.04191641616821289, 0.042241024017333983, 0.04211814498901367, 0.04261785507202148, 0.04276326370239258, 0.043099136352539064, 0.04294144058227539, 0.042782718658447266, 0.04270796966552735, 0.04273971176147461, 0.04268851089477539, 0.04291788864135742, 0.042676223754882815, 0.042551296234130856, 0.042787841796875, 0.04288000106811524, 0.04267724609375, 0.04276633453369141, 0.042967041015625, 0.04323942565917969, 0.04219801712036133, 0.04214476776123047, 0.04211404800415039, 0.0420843505859375, 0.042403839111328126, 0.04213759994506836, 0.04206796646118164, 0.0420843505859375, 0.042103809356689455, 0.04260966491699219, 0.04220006561279297, 0.042157054901123044, 0.042812416076660156, 0.042261505126953126, 0.04259635162353516, 0.04217446517944336, 0.042193920135498046, 0.04244889450073242, 0.042521598815917966, 0.0420945930480957, 0.04256563186645508, 0.04319232177734375, 0.042700801849365234, 0.042799102783203126, 0.04223590469360351, 0.04281856155395508, 0.04224716949462891, 0.042180606842041016, 0.04221440124511719, 0.04216115188598633, 0.04220006561279297, 0.042210304260253906, 0.04230144119262695, 0.04213452911376953, 0.04213452911376953, 0.04258099365234375, 0.042708992004394535, 0.04224204635620117, 0.04214374542236328, 0.042145790100097655, 0.04272742462158203, 0.042188800811767575, 0.042649600982666014, 0.04232908630371094, 0.04210073471069336, 0.042223617553710936, 0.04279296112060547, 0.04229017639160156, 0.042297344207763675, 0.042164222717285156, 0.042164222717285156, 0.042417152404785156, 0.042180606842041016, 0.04212838363647461, 0.042297344207763675, 0.04239155197143555, 0.042157054901123044, 0.0421580810546875, 0.04240486526489258, 0.042469375610351565, 0.04224716949462891, 0.0421847038269043, 0.042359809875488284, 0.04211404800415039, 0.04206796646118164, 0.04220723342895508, 0.04197683334350586, 
0.04216831970214844, 0.043020286560058595, 0.043363327026367186, 0.04218572616577149, 0.04213862228393555, 0.042157054901123044, 0.04226662445068359, 0.04216934585571289, 0.04208332824707031, 0.0421734390258789, 0.042223617553710936, 0.04220927810668945, 0.04219596862792969, 0.043012096405029294, 0.04330188751220703, 0.04261785507202148, 0.04297420883178711, 0.04217139053344727, 0.04217036819458008, 0.04204851150512695, 0.04213248062133789, 0.042176513671875, 0.04216115188598633, 0.04213452911376953, 0.04214169692993164, 0.04239974212646484, 0.04221747207641602, 0.04258099365234375, 0.04211814498901367, 0.04230246353149414, 0.04236492919921875, 0.042178558349609374, 0.04214169692993164, 0.04208844757080078, 0.042065921783447265, 0.04212428665161133, 0.042087425231933595, 0.04286975860595703, 0.04270694351196289, 0.042275840759277344, 0.04275814437866211, 0.0422737922668457, 0.042275840759277344, 0.042234878540039066, 0.042176513671875, 0.04220620727539062, 0.04225843048095703, 0.042234878540039066, 0.042172416687011716, 0.04208947372436524, 0.042194942474365234, 0.042152961730957034, 0.042142719268798826, 0.042278911590576174, 0.042237953186035154, 0.04219084930419922, 0.04228505706787109, 0.04226867294311523, 0.042472446441650394, 0.042254337310791014, 0.042241024017333983, 0.042261505126953126, 0.042147838592529296, 0.04213043212890625, 0.04211814498901367, 0.04228505706787109, 0.0424192008972168, 0.04247040176391602, 0.04219084930419922, 0.0423004150390625, 0.0421212158203125, 0.04214169692993164, 0.04217446517944336, 0.042196990966796875, 0.04219903945922852, 0.043344894409179685, 0.04274790573120117, 0.04214169692993164, 0.04208025741577148, 0.04213862228393555, 0.042654720306396485, 0.042310657501220705, 0.042103809356689455, 0.04211711883544922, 0.042176513671875, 0.04218982315063476, 0.04213862228393555, 0.042156032562255856, 0.04207820892333984, 0.042109951019287106, 0.042157054901123044, 0.04213248062133789, 0.04216934585571289, 0.042142719268798826, 0.042065921783447265, 0.042087425231933595, 0.042057727813720705, 0.04209766387939453, 0.04209664154052734, 0.042076160430908206, 0.04219903945922852, 0.04221952056884765, 0.042090496063232424, 0.042044414520263675, 0.04221235275268555, 0.04248166275024414, 0.042237953186035154, 0.04209868621826172, 0.04211199951171875, 0.04212223815917969, 0.042145790100097655, 0.04214169692993164, 0.04217958450317383, 0.042221569061279295, 0.042208255767822264, 0.042123264312744144, 0.042103809356689455, 0.04230656051635742, 0.04215500640869141, 0.04212736129760742, 0.04204646301269531, 0.04287897491455078, 0.04221132659912109, 0.042665985107421874, 0.043924480438232424, 0.04299059295654297, 0.04225331115722656, 0.04213350296020508, 0.04232396697998047, 0.04213452911376953, 0.04211814498901367, 0.04214067077636719, 0.04218368148803711, 0.042087425231933595, 0.042176513671875, 0.042415103912353515, 0.04253081512451172, 0.042243072509765625, 0.042175487518310545, 0.0420945930480957, 0.042178558349609374, 0.042105857849121096, 0.04219084930419922, 0.04220006561279297, 0.04234137725830078, 0.042945537567138675, 0.04229324722290039, 0.04218982315063476, 0.042327041625976565, 0.04220415878295898, 0.04218368148803711, 0.04275302505493164, 0.04230144119262695, 0.04252262496948242, 0.04226764678955078, 0.04228300857543945, 0.04230348968505859, 0.042208255767822264, 0.04220415878295898, 0.04214681625366211, 0.042270721435546874, 0.042396671295166014, 0.04224204635620117, 0.042087425231933595, 0.04218368148803711, 0.04227174377441406, 0.042249214172363284, 
0.042202110290527346, 0.042333183288574217, 0.04243558502197266, 0.04243558502197266, 0.04213350296020508, 0.04232601547241211, 0.04216831970214844, 0.04230758285522461, 0.042065921783447265, 0.04229939270019531, 0.04229324722290039, 0.04232191848754883, 0.04314419174194336, 0.04335923385620117, 0.042162174224853514, 0.042224639892578124, 0.042076160430908206, 0.04253081512451172, 0.042510337829589843, 0.042537982940673826, 0.04232601547241211, 0.04234137725830078, 0.04210176086425781, 0.042157054901123044, 0.04221747207641602, 0.0421212158203125, 0.042175487518310545, 0.04221542358398438, 0.042074111938476565, 0.042213375091552735, 0.042859519958496094, 0.04232601547241211, 0.042249214172363284, 0.0421580810546875, 0.042156032562255856, 0.042229759216308595, 0.042234878540039066, 0.04216524887084961, 0.04216320037841797, 0.04219596862792969, 0.04221952056884765, 0.0421212158203125, 0.04230553436279297, 0.04264755249023437, 0.04215193557739258, 0.04211711883544922, 0.04216320037841797, 0.04214374542236328, 0.04216524887084961, 0.04214169692993164, 0.04218163299560547, 0.04210176086425781, 0.042159103393554685, 0.04262911987304688, 0.04257382583618164, 0.04247859191894531, 0.04229939270019531, 0.0422369270324707, 0.042126335144042966, 0.04218777465820313, 0.04225024032592774, 0.042246143341064454, 0.04212838363647461, 0.042074111938476565, 0.042554367065429685, 0.04218982315063476, 0.04212531280517578, 0.04248883056640625, 0.04239769744873047, 0.04275711822509766, 0.04212428665161133, 0.04209561538696289, 0.0421847038269043, 0.04207001495361328, 0.042074111938476565, 0.04206694412231445, 0.042246143341064454, 0.04223078536987305, 0.042275840759277344, 0.0422737922668457, 0.043840511322021485, 0.0429752311706543, 0.04255641555786133, 0.042618881225585936, 0.0422369270324707, 0.04228607940673828, 0.04229529571533203, 0.042336254119873046, 0.04220620727539062, 0.04215398406982422, 0.04257177734375, 0.04248371124267578, 0.042090496063232424, 0.042278911590576174, 0.042224639892578124, 0.04220415878295898, 0.042254337310791014, 0.042152961730957034, 0.04220415878295898, 0.04221747207641602, 0.042237953186035154, 0.04236492919921875, 0.04222054290771484, 0.04232396697998047, 0.04228915023803711, 0.04226047897338867, 0.042352638244628905, 0.04270182418823242, 0.04252262496948242, 0.04236083221435547, 0.042175487518310545, 0.04229119873046875, 0.04230451202392578, 0.04222259140014648, 0.04224512100219727, 0.04237311935424805, 0.04229119873046875, 0.042254337310791014, 0.04220006561279297, 0.042248191833496096, 0.04226355361938477, 0.0422369270324707, 0.04223590469360351, 0.04223590469360351, 0.0422553596496582, 0.04222771072387695, 0.04222566223144531, 0.042256385803222656, 0.04228915023803711, 0.04237004852294922, 0.04226969528198242, 0.04224512100219727, 0.04255641555786133, 0.04228710556030273, 0.04213555145263672, 0.042251262664794925, 0.0421396484375, 0.04215500640869141, 0.04212531280517578, 0.04227276611328125, 0.04197580718994141, 0.04235161590576172, 0.04209971237182617]",tokens/s,23.627615443787196,,,1,64,1,,, 
-4bit-awq-exllama-v1-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,mistralai/Mistral-7B-v0.1,mistralai/Mistral-7B-v0.1,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - self.load_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3907, in from_pretrained - hf_quantizer.postprocess_model(model) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/base.py"", line 195, in postprocess_model - return self._process_model_after_weight_loading(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/quantizer_awq.py"", line 107, in _process_model_after_weight_loading - model = post_init_awq_exllama_modules(model, self.quantization_config.exllama_config) - File ""/usr/local/lib/python3.10/dist-packages/transformers/integrations/awq.py"", line 462, in post_init_awq_exllama_modules - model = exllama_post_init(model) - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllama.py"", line 144, in exllama_post_init - submodule.post_init() - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllama.py"", line 77, in post_init - self.q4 = exl_ext.make_q4( -NameError: name 'exl_ext' is not defined - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,1,64,1,,, 
-4bit-awq-exllama-v1-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,google/gemma-7b,google/gemma-7b,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 304, in hf_raise_for_status - response.raise_for_status() - File ""/usr/local/lib/python3.10/dist-packages/requests/models.py"", line 1024, in raise_for_status - raise HTTPError(http_error_msg, response=self) -requests.exceptions.HTTPError: 403 Client Error: Forbidden for url: https://huggingface.co/google/gemma-7b/resolve/main/config.json - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1722, in _get_metadata_or_catch_error - metadata = get_hf_file_metadata(url=url, proxies=proxies, timeout=etag_timeout, headers=headers) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1645, in get_hf_file_metadata - r = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 372, in _request_wrapper - response = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 396, in _request_wrapper - hf_raise_for_status(response) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 367, in hf_raise_for_status - raise HfHubHTTPError(message, response=response) from e -huggingface_hub.utils._errors.HfHubHTTPError: (Request ID: Root=1-66948199-501abf544af13d907f908b6a;6bc76075-90c5-4095-be9e-73b444905df8) - -403 Forbidden: Please enable access to public gated repositories in your fine-grained token settings to view this repository.. -Cannot access content at: https://huggingface.co/google/gemma-7b/resolve/main/config.json. -If you are trying to create or update content,make sure you have a token with the `write` role. 
- -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1221, in hf_hub_download - return _hf_hub_download_to_cache_dir( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1325, in _hf_hub_download_to_cache_dir - _raise_on_head_call_error(head_call_error, force_download, local_files_only) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1826, in _raise_on_head_call_error - raise LocalEntryNotFoundError( -huggingface_hub.utils._errors.LocalEntryNotFoundError: An error happened while trying to locate the file on the Hub and we cannot find the requested files in the local cache. Please check your connection and try again or make sure your Internet connection is on. - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 445, in cached_file - raise EnvironmentError( -OSError: We couldn't connect to 'https://huggingface.co' to load this file, couldn't find it in the cached files and it looks like google/gemma-7b is not the path to a directory containing a file named config.json. -Checkout your internet connection or see how to run the library in offline mode at 'https://huggingface.co/docs/transformers/installation#offline-mode'. 
- -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,1,64,1,,, -4bit-awq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,Qwen/Qwen1.5-1.8B,Qwen/Qwen1.5-1.8B,cuda,0,42,,,True,,,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,qwen2,MB,1870.749696,3095.92064,0.0,2466.250752,2401.696256,s,1,8.9144638671875,8.9144638671875,0.0,8.9144638671875,8.9144638671875,8.9144638671875,8.9144638671875,[8.9144638671875],,kWh,2.439852480835422e-05,1.3355261471301824e-05,3.589447316010297e-05,7.364825943975902e-05,,MB,1838.36672,3326.60736,0.0,2680.160256,2582.175744,s,10,2.4070564727783204,0.24070564727783203,0.00015680042859667434,0.24063260650634766,0.24093948211669922,0.2410077278137207,0.2410623243713379,"[0.2410759735107422, 0.24073631286621094, 0.2406330871582031, 0.2405760955810547, 0.24063212585449217, 0.24062226867675782, 0.2406317138671875, 0.24092431640625, 0.24064877319335937, 0.2405758056640625]",tokens/s,1063.539650586239,kWh,2.8456295682532366e-06,1.5588489741031716e-06,1.661777123071605e-05,2.1022249773072457e-05,tokens/kWh,12177573.892586518,MB,1845.00224,3326.60736,0.0,2680.160256,2582.178304,s,10,14.569903686523437,1.4569903686523438,0.013268081306929561,1.453865478515625,1.4788658325195314,1.4822155090332032,1.4848952502441406,"[1.4555330810546876, 1.4552801513671876, 1.45090380859375, 1.4432294921875, 1.4464150390625, 1.4524508056640626, 1.4781214599609376, 1.485565185546875, 1.445437255859375, 1.4569674072265626]",tokens/s,43.23981911992487,kWh,1.7753591316193893e-05,9.729573415806873e-06,3.5104520147082105e-05,6.258768487908289e-05,tokens/kWh,1006587.7995281928,,s,630,14.568161260604866,0.023124065493023586,0.00045591826174580806,0.02289459228515625,0.02374359073638916,0.023828582191467284,0.024747509479522714,"[0.02304614448547363, 0.022943744659423827, 0.022846464157104493, 0.022832128524780275, 0.022814720153808594, 0.022808576583862306, 0.022820863723754883, 0.022804479598999023, 0.022785024642944338, 0.02282803153991699, 0.022863872528076173, 0.02289356803894043, 0.02271334457397461, 0.022812671661376953, 0.02290483283996582, 0.022841344833374022, 0.022968320846557616, 0.022790143966674805, 0.022866943359375, 0.022797311782836914, 0.022767616271972657, 0.02289971160888672, 0.022764543533325195, 0.022363136291503907, 0.02345471954345703, 0.023772159576416017, 0.02372403144836426, 0.023734272003173826, 0.023607295989990236, 0.023644159317016602, 0.023714815139770508, 0.023586816787719726, 0.02353971290588379, 0.023600128173828124, 0.0228351993560791, 0.02291814422607422, 0.022887424468994142, 0.022980607986450196, 0.022962175369262695, 0.022978559494018554, 0.02283622360229492, 0.02287001609802246, 0.02289971160888672, 0.022722560882568358, 0.022563840866088865, 0.022846464157104493, 0.02285670471191406, 0.02289664077758789, 0.02284441566467285, 0.02291814422607422, 0.022839296340942384, 0.02286489677429199, 0.022980607986450196, 0.023994367599487306, 0.02409267234802246, 0.02370560073852539, 0.023682048797607422, 
0.02368819236755371, 0.02370457649230957, 0.023605247497558594, 0.023630847930908205, 0.02369843292236328, 0.02290380859375, 0.022932479858398438, 0.022803455352783202, 0.02284339141845703, 0.022788095474243163, 0.022833152770996092, 0.02285158348083496, 0.022799360275268556, 0.022895616531372072, 0.02287001609802246, 0.023538688659667968, 0.023657535552978514, 0.023381952285766602, 0.02284851264953613, 0.02282598304748535, 0.0227061767578125, 0.02284339141845703, 0.02285977554321289, 0.022816768646240236, 0.023031808853149413, 0.023615488052368162, 0.023586816787719726, 0.023558143615722657, 0.023621631622314454, 0.023612415313720703, 0.023908351898193358, 0.023161855697631836, 0.022777856826782225, 0.022819839477539062, 0.02281881523132324, 0.022797311782836914, 0.02287820816040039, 0.022838272094726563, 0.022845439910888672, 0.022887424468994142, 0.022833152770996092, 0.022838272094726563, 0.022625280380249024, 0.02290380859375, 0.022928384780883788, 0.022800384521484376, 0.02282598304748535, 0.02286489677429199, 0.022905855178833007, 0.022883359909057616, 0.022859743118286133, 0.02287308883666992, 0.02404249572753906, 0.025049087524414062, 0.024052736282348632, 0.0237260799407959, 0.023780351638793946, 0.02374963188171387, 0.023654399871826173, 0.023649280548095702, 0.022939647674560547, 0.022795263290405272, 0.02285055923461914, 0.022766592025756836, 0.022715391159057616, 0.02284339141845703, 0.022866943359375, 0.022771711349487304, 0.02285055923461914, 0.022947839736938477, 0.02291097640991211, 0.022803455352783202, 0.022793216705322264, 0.022829055786132812, 0.02287615966796875, 0.02268876838684082, 0.023593984603881835, 0.023488512039184572, 0.02371993637084961, 0.023666688919067383, 0.023756799697875978, 0.023742464065551756, 0.0236759033203125, 0.02367897605895996, 0.023160831451416015, 0.022895616531372072, 0.022822912216186524, 0.022898687362670898, 0.02286492729187012, 0.02293244743347168, 0.022932479858398438, 0.02289356803894043, 0.0228351993560791, 0.022915071487426757, 0.02330624008178711, 0.02370867156982422, 0.023645183563232423, 0.023508991241455078, 0.02330009651184082, 0.02284441566467285, 0.02290483283996582, 0.022798336029052735, 0.022905855178833007, 0.022862848281860353, 0.02284339141845703, 0.02275225639343262, 0.022809600830078124, 0.023383039474487305, 0.022929407119750975, 0.022853631973266602, 0.022817792892456053, 0.02288844871520996, 0.022895616531372072, 0.022820863723754883, 0.02289971160888672, 0.02284441566467285, 0.022806528091430665, 0.022978559494018554, 0.02286079978942871, 0.0231014404296875, 0.023149568557739256, 0.02287820816040039, 0.022830080032348633, 0.02280243110656738, 0.022817792892456053, 0.02271129608154297, 0.022846464157104493, 0.022813695907592774, 0.022921215057373046, 0.022830080032348633, 0.022758399963378906, 0.022771711349487304, 0.02266214370727539, 0.022689792633056642, 0.02282700729370117, 0.02291097640991211, 0.022895679473876954, 0.02282182312011719, 0.02281062316894531, 0.02289459228515625, 0.022889471054077147, 0.022687744140625, 0.02286591911315918, 0.02287513542175293, 0.02282803153991699, 0.022801408767700194, 0.022936576843261718, 0.022905855178833007, 0.022982656478881838, 0.022853631973266602, 0.022573055267333983, 0.02281881523132324, 0.02269593620300293, 0.022905855178833007, 0.02284339141845703, 0.022786048889160155, 0.02283622360229492, 0.022770687103271483, 0.0228853759765625, 0.022962175369262695, 0.02286079978942871, 0.022816768646240236, 0.02287513542175293, 0.02271334457397461, 0.022812671661376953, 
0.022770687103271483, 0.022819839477539062, 0.022801408767700194, 0.022796287536621093, 0.022976512908935546, 0.02284339141845703, 0.02292736053466797, 0.022849536895751952, 0.02290380859375, 0.022770719528198244, 0.022889440536499023, 0.022915071487426757, 0.022949920654296876, 0.022820831298828125, 0.023282688140869142, 0.022977535247802734, 0.023860223770141603, 0.023750688552856444, 0.023720928192138672, 0.02364313507080078, 0.022759424209594727, 0.02290176010131836, 0.02291814422607422, 0.022882303237915038, 0.022846464157104493, 0.02285977554321289, 0.02283417510986328, 0.022790143966674805, 0.02288844871520996, 0.022814720153808594, 0.022994943618774414, 0.022756351470947265, 0.022863872528076173, 0.02288025665283203, 0.02287615966796875, 0.02284339141845703, 0.023533567428588868, 0.022845439910888672, 0.022832128524780275, 0.02287001609802246, 0.0228351993560791, 0.022799360275268556, 0.022725631713867187, 0.022791168212890626, 0.022797311782836914, 0.02286899185180664, 0.022821887969970703, 0.022832128524780275, 0.02289254379272461, 0.02284851264953613, 0.022786048889160155, 0.022853631973266602, 0.022951936721801756, 0.0226693115234375, 0.0228351993560791, 0.022831167221069336, 0.02279622459411621, 0.022795263290405272, 0.02327654457092285, 0.0237076473236084, 0.023665664672851562, 0.02367180824279785, 0.023619583129882812, 0.023589887619018556, 0.02355200004577637, 0.023653375625610353, 0.02283113670349121, 0.022813663482666016, 0.02283417510986328, 0.02289459228515625, 0.022980607986450196, 0.02284441566467285, 0.02288844871520996, 0.022887424468994142, 0.02287308883666992, 0.022820863723754883, 0.02288025665283203, 0.022951936721801756, 0.022971391677856445, 0.022776832580566408, 0.02284339141845703, 0.02283417510986328, 0.023006208419799806, 0.022838272094726563, 0.022905855178833007, 0.022976512908935546, 0.022714368820190428, 0.022804479598999023, 0.02286796760559082, 0.02285670471191406, 0.02286489677429199, 0.02287308883666992, 0.02284236717224121, 0.022953983306884765, 0.02292633628845215, 0.02285772705078125, 0.0229171199798584, 0.0227061767578125, 0.02289664077758789, 0.022821887969970703, 0.02285158348083496, 0.022812671661376953, 0.02284339141845703, 0.02286899185180664, 0.02285875129699707, 0.022872064590454103, 0.02289151954650879, 0.023010303497314453, 0.02274406433105469, 0.0227061767578125, 0.023037952423095705, 0.023250944137573244, 0.02390118408203125, 0.02370867156982422, 0.02368409538269043, 0.02368511962890625, 0.02364313507080078, 0.022985727310180663, 0.02290892791748047, 0.022740991592407226, 0.02288128089904785, 0.022977535247802734, 0.022951936721801756, 0.02287820816040039, 0.02292531204223633, 0.02307276725769043, 0.023005184173583985, 0.02289459228515625, 0.02290380859375, 0.022962175369262695, 0.02269900894165039, 0.022830080032348633, 0.022861824035644532, 0.02386227226257324, 0.023348224639892577, 0.022897663116455077, 0.02284851264953613, 0.02288844871520996, 0.022879232406616212, 0.022801408767700194, 0.022510623931884764, 0.022507488250732424, 0.02272972869873047, 0.02282803153991699, 0.022831104278564454, 0.022862848281860353, 0.02345369529724121, 0.023755775451660157, 0.02373734474182129, 0.023771135330200196, 0.023157760620117186, 0.02290073585510254, 0.023073856353759765, 0.024096704483032225, 0.023778303146362305, 0.02282700729370117, 0.02294272041320801, 0.02283417510986328, 0.022763519287109374, 0.022830080032348633, 0.02284339141845703, 0.02305740737915039, 0.02282598304748535, 0.02309017562866211, 0.023661567687988282, 
0.023645183563232423, 0.023635967254638672, 0.023792640686035156, 0.0235284481048584, 0.023658496856689453, 0.023661567687988282, 0.02368307113647461, 0.023702527999877928, 0.022922239303588866, 0.02272051239013672, 0.022831104278564454, 0.02349567985534668, 0.02367487907409668, 0.023624704360961913, 0.023638015747070314, 0.0236759033203125, 0.023630847930908205, 0.023736320495605468, 0.023742464065551756, 0.023799808502197265, 0.02369536018371582, 0.024000511169433594, 0.023673856735229492, 0.023589887619018556, 0.02286079978942871, 0.023302143096923827, 0.023629823684692384, 0.02284339141845703, 0.022862848281860353, 0.022980607986450196, 0.02351923179626465, 0.023600128173828124, 0.023112703323364257, 0.02288640022277832, 0.023579647064208984, 0.02365132713317871, 0.02392166328430176, 0.02429439926147461, 0.024011775970458983, 0.023756799697875978, 0.023623680114746092, 0.023686143875122072, 0.023738367080688477, 0.0236943359375, 0.02368921661376953, 0.023801855087280274, 0.023739391326904297, 0.022799360275268556, 0.022953983306884765, 0.023199743270874023, 0.02371788787841797, 0.02384486389160156, 0.024210432052612304, 0.023823360443115234, 0.023219200134277345, 0.023619583129882812, 0.023638015747070314, 0.023972864151000976, 0.02368511962890625, 0.023774208068847655, 0.023748607635498048, 0.02374553680419922, 0.023908351898193358, 0.023795711517333985, 0.02326016044616699, 0.023774208068847655, 0.02484121513366699, 0.023769088745117187, 0.023746559143066406, 0.02329804801940918, 0.023009279251098632, 0.02371174430847168, 0.023762943267822266, 0.023550975799560548, 0.02369024085998535, 0.023654399871826173, 0.022853631973266602, 0.022931455612182617, 0.022824960708618162, 0.022816768646240236, 0.02288332748413086, 0.022862848281860353, 0.022871040344238282, 0.02305433654785156, 0.022749183654785156, 0.022803455352783202, 0.022846464157104493, 0.022786048889160155, 0.022853631973266602, 0.022784000396728517, 0.02285670471191406, 0.022791168212890626, 0.02281881523132324, 0.022855680465698244, 0.026423328399658202, 0.024852447509765625, 0.023925760269165038, 0.023776256561279296, 0.02371788787841797, 0.0241213436126709, 0.024025087356567384, 0.023827455520629884, 0.023767040252685546, 0.02369740867614746, 0.023743488311767577, 0.023563264846801758, 0.024812543869018554, 0.025108480453491212, 0.02391449546813965, 0.023810047149658203, 0.0236943359375, 0.023757823944091795, 0.023815168380737304, 0.023710720062255858, 0.023748607635498048, 0.023593984603881835, 0.023564287185668945, 0.023015424728393553, 0.022854656219482423, 0.022812671661376953, 0.022841344833374022, 0.022751232147216797, 0.022619136810302733, 0.022764543533325195, 0.022786048889160155, 0.022821887969970703, 0.02282803153991699, 0.022657024383544923, 0.02301644706726074, 0.023391231536865235, 0.02290073585510254, 0.022794240951538085, 0.02288844871520996, 0.023034879684448242, 0.022961151123046874, 0.022863872528076173, 0.02283417510986328, 0.022756351470947265, 0.02267136001586914, 0.02284339141845703, 0.02285158348083496, 0.02285055923461914, 0.022837247848510742, 0.0228853759765625, 0.02287513542175293, 0.022895616531372072, 0.022992895126342772, 0.02286489677429199, 0.022839296340942384, 0.02274508857727051, 0.022895616531372072, 0.02287615966796875, 0.02287001609802246, 0.022855680465698244, 0.022887424468994142, 0.023009279251098632, 0.022994943618774414, 0.02291097640991211, 0.0228853759765625, 0.023710720062255858, 0.023564287185668945, 0.023756799697875978, 0.023744512557983398, 0.023829504013061522, 
0.022992895126342772, 0.0230328311920166, 0.022975488662719725, 0.022916095733642578, 0.02289664077758789, 0.022968320846557616, 0.022948863983154297, 0.022776832580566408, 0.023059455871582032, 0.022895616531372072, 0.02281062316894531, 0.022794240951538085, 0.02255564880371094, 0.022811647415161132, 0.02284441566467285, 0.022847488403320314, 0.02310348892211914, 0.02284441566467285, 0.02273587226867676, 0.02289356803894043, 0.022953983306884765, 0.02264678382873535, 0.022632448196411133, 0.023052288055419923, 0.022821887969970703, 0.02295091247558594, 0.02289459228515625, 0.02291097640991211, 0.02512998390197754, 0.023638015747070314, 0.0236810245513916, 0.02368716812133789, 0.023665664672851562, 0.023605247497558594, 0.023614463806152345, 0.02287615966796875, 0.02290892791748047, 0.022882303237915038, 0.022986751556396484, 0.022841344833374022, 0.02275225639343262, 0.022982656478881838, 0.02286591911315918, 0.022955007553100586, 0.02288640022277832, 0.022861824035644532, 0.02270515251159668, 0.022853631973266602, 0.022575103759765625, 0.022776832580566408, 0.023044095993041993, 0.022700031280517577, 0.022854656219482423, 0.02286899185180664, 0.023391231536865235, 0.023169023513793945, 0.024588287353515623, 0.023791616439819335, 0.02384486389160156, 0.023924736022949217, 0.02370969581604004, 0.023635967254638672, 0.02368000030517578, 0.023361536026000978, 0.02288435173034668, 0.022815744400024415, 0.022846464157104493, 0.022899744033813476, 0.02311881637573242, 0.023037952423095705, 0.02284441566467285, 0.02364825630187988, 0.022839296340942384, 0.02286591911315918, 0.022855680465698244, 0.02285055923461914, 0.022861824035644532, 0.02284441566467285, 0.022838272094726563]",tokens/s,43.24499082143211,,,1,64,1,,, -4bit-awq-exllama-v1-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,meta-llama/Meta-Llama-3-8B,meta-llama/Meta-Llama-3-8B,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - self.load_model_from_pretrained() - File 
""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3907, in from_pretrained - hf_quantizer.postprocess_model(model) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/base.py"", line 195, in postprocess_model - return self._process_model_after_weight_loading(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/quantizer_awq.py"", line 107, in _process_model_after_weight_loading - model = post_init_awq_exllama_modules(model, self.quantization_config.exllama_config) - File ""/usr/local/lib/python3.10/dist-packages/transformers/integrations/awq.py"", line 462, in post_init_awq_exllama_modules - model = exllama_post_init(model) - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllama.py"", line 144, in exllama_post_init - submodule.post_init() - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllama.py"", line 77, in post_init - self.q4 = exl_ext.make_q4( -NameError: name 'exl_ext' is not defined - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,1,64,1,,, -4bit-awq-exllama-v1-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,microsoft/phi-1_5,microsoft/phi-1_5,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - self.load_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File 
""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3907, in from_pretrained - hf_quantizer.postprocess_model(model) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/base.py"", line 195, in postprocess_model - return self._process_model_after_weight_loading(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/quantizer_awq.py"", line 107, in _process_model_after_weight_loading - model = post_init_awq_exllama_modules(model, self.quantization_config.exllama_config) - File ""/usr/local/lib/python3.10/dist-packages/transformers/integrations/awq.py"", line 462, in post_init_awq_exllama_modules - model = exllama_post_init(model) - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllama.py"", line 144, in exllama_post_init - submodule.post_init() - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllama.py"", line 77, in post_init - self.q4 = exl_ext.make_q4( -NameError: name 'exl_ext' is not defined - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,1,64,1,,, -4bit-awq-exllama-v1-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,togethercomputer/RedPajama-INCITE-Base-3B-v1,togethercomputer/RedPajama-INCITE-Base-3B-v1,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - self.load_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3907, in from_pretrained - hf_quantizer.postprocess_model(model) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/base.py"", line 195, in postprocess_model - return 
self._process_model_after_weight_loading(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/quantizer_awq.py"", line 107, in _process_model_after_weight_loading - model = post_init_awq_exllama_modules(model, self.quantization_config.exllama_config) - File ""/usr/local/lib/python3.10/dist-packages/transformers/integrations/awq.py"", line 462, in post_init_awq_exllama_modules - model = exllama_post_init(model) - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllama.py"", line 144, in exllama_post_init - submodule.post_init() - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllama.py"", line 77, in post_init - self.q4 = exl_ext.make_q4( -NameError: name 'exl_ext' is not defined - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,1,64,1,,, -4bit-awq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,Qwen/Qwen2-beta-72B,Qwen/Qwen2-beta-72B,cuda,0,42,,,True,,,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run - report = scenario.run(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run - self.run_model_loading_tracking(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking - backend.load() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load - self.load_transformers_model() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model - self.load_transformers_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights - self.load_transformers_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained - self.pretrained_model = self.automodel_loader.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4034, in from_pretrained - dispatch_model(model, **device_map_kwargs) - File 
""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 494, in dispatch_model - model.to(device) - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 2905, in to - return super().to(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1174, in to - return self._apply(convert) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply - module._apply(fn) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply - module._apply(fn) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply - module._apply(fn) - [Previous line repeated 2 more times] - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 854, in _apply - self._buffers[key] = fn(buf) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1160, in convert - return t.to( -torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 96.00 MiB. GPU 0 has a total capacity of 22.18 GiB of which 68.50 MiB is free. Process 128719 has 22.11 GiB memory in use. Of the allocated memory 21.86 GiB is allocated by PyTorch, and 10.74 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) - -",qwen2,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,1,64,1,,, -4bit-awq-exllama-v1-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,meta-llama/Llama-2-70b-hf,meta-llama/Llama-2-70b-hf,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - self.load_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - 
self.pretrained_model = self.automodel_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3904, in from_pretrained - dispatch_model(model, **device_map_kwargs) - File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 489, in dispatch_model - model.to(device) - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 2796, in to - return super().to(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1173, in to - return self._apply(convert) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 779, in _apply - module._apply(fn) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 779, in _apply - module._apply(fn) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 779, in _apply - module._apply(fn) - [Previous line repeated 2 more times] - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 853, in _apply - self._buffers[key] = fn(buf) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1159, in convert - return t.to( -torch.cuda.OutOfMemoryError: CUDA out of memory. Tried to allocate 112.00 MiB. GPU - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,1,64,1,,, -4bit-awq-exllama-v1-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,meta-llama/Llama-2-7b-hf,meta-llama/Llama-2-7b-hf,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - self.load_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3907, in from_pretrained - hf_quantizer.postprocess_model(model) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/base.py"", line 195, in postprocess_model - return self._process_model_after_weight_loading(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/quantizer_awq.py"", line 107, in _process_model_after_weight_loading - model = post_init_awq_exllama_modules(model, self.quantization_config.exllama_config) - File ""/usr/local/lib/python3.10/dist-packages/transformers/integrations/awq.py"", line 462, in post_init_awq_exllama_modules - model = exllama_post_init(model) - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllama.py"", line 144, in exllama_post_init - submodule.post_init() - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllama.py"", line 77, in post_init - self.q4 = exl_ext.make_q4( -NameError: name 'exl_ext' is not defined - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,1,64,1,,, -4bit-awq-exllama-v1-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,r,r,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 304, in hf_raise_for_status - response.raise_for_status() - File ""/usr/local/lib/python3.10/dist-packages/requests/models.py"", line 1024, in raise_for_status - raise HTTPError(http_error_msg, response=self) -requests.exceptions.HTTPError: 404 Client Error: Not Found for url: https://huggingface.co/r/resolve/main/config.json - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1221, in hf_hub_download - return _hf_hub_download_to_cache_dir( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1325, in 
_hf_hub_download_to_cache_dir - _raise_on_head_call_error(head_call_error, force_download, local_files_only) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1823, in _raise_on_head_call_error - raise head_call_error - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1722, in _get_metadata_or_catch_error - metadata = get_hf_file_metadata(url=url, proxies=proxies, timeout=etag_timeout, headers=headers) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1645, in get_hf_file_metadata - r = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 372, in _request_wrapper - response = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 396, in _request_wrapper - hf_raise_for_status(response) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status - raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-66949139-133786530bb9da5b413c96aa;ac686b55-2c86-4f0e-88a6-9bbd00c42b3a) - -Repository Not Found for url: https://huggingface.co/r/resolve/main/config.json. -Please make sure you specified the correct `repo_id` and `repo_type`. -If you are trying to access a private or gated repo, make sure you are authenticated. - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 425, in cached_file - raise EnvironmentError( -OSError: r is not a local folder and is not a valid model identifier listed on 'https://huggingface.co/models' -If this is a private repository, make sure to pass a token having permission to this repo either by logging in with `huggingface-cli login` or by passing `token=` - 
-",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,1,64,1,,, -4bit-awq-exllama-v1-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,google/recurrentgemma-7b,google/recurrentgemma-7b,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 304, in hf_raise_for_status - response.raise_for_status() - File ""/usr/local/lib/python3.10/dist-packages/requests/models.py"", line 1024, in raise_for_status - raise HTTPError(http_error_msg, response=self) -requests.exceptions.HTTPError: 404 Client Error: Not Found for url: https://huggingface.co/google/recurrentgemma-7b/resolve/main/config.json - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1221, in hf_hub_download - return _hf_hub_download_to_cache_dir( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1325, in _hf_hub_download_to_cache_dir - _raise_on_head_call_error(head_call_error, force_download, local_files_only) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1823, in _raise_on_head_call_error - raise head_call_error - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1722, in _get_metadata_or_catch_error - metadata = get_hf_file_metadata(url=url, proxies=proxies, timeout=etag_timeout, headers=headers) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1645, in get_hf_file_metadata - r = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 372, in _request_wrapper - response = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 396, in _request_wrapper - hf_raise_for_status(response) - File 
""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status - raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-6694827a-76afe2bd12238ae5791ab1e5;c14281ad-9f6d-4207-a978-fe4cbe3250f2) - -Repository Not Found for url: https://huggingface.co/google/recurrentgemma-7b/resolve/main/config.json. -Please make sure you specified the correct `repo_id` and `repo_type`. -If you are trying to access a private or gated repo, make sure you are authenticated. - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 425, in cached_file - raise EnvironmentError( -OSError: google/recurrentgemma-7b is not a local folder and is not a valid model identifier listed on 'https://huggingface.co/models' -If this is a private repository, make sure to pass a token having permission to this repo either by logging in with `huggingface-cli login` or by passing `token=` - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,1,64,1,,, -4bit-awq-exllama-v1-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,huggyllama/llama-13b,huggyllama/llama-13b,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = 
launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - self.load_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3907, in from_pretrained - hf_quantizer.postprocess_model(model) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/base.py"", line 195, in postprocess_model - return self._process_model_after_weight_loading(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/quantizer_awq.py"", line 107, in _process_model_after_weight_loading - model = post_init_awq_exllama_modules(model, self.quantization_config.exllama_config) - File ""/usr/local/lib/python3.10/dist-packages/transformers/integrations/awq.py"", line 462, in post_init_awq_exllama_modules - model = exllama_post_init(model) - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllama.py"", line 144, in exllama_post_init - submodule.post_init() - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllama.py"", line 77, in post_init - self.q4 = exl_ext.make_q4( -NameError: name 'exl_ext' is not defined - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,1,64,1,,, -4bit-awq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,Deci/DeciCoder-1b,Deci/DeciCoder-1b,cuda,0,42,,,True,,,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target 
- report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run - report = scenario.run(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run - self.run_model_loading_tracking(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking - backend.load() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load - self.load_transformers_model() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model - self.load_transformers_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights - self.load_transformers_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained - self.pretrained_model = self.automodel_loader.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 559, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained - model = cls(config, *model_args, **model_kwargs) - File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 248, in __init__ - self.model = DeciCoderModel(config) - File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 215, in __init__ - self.layers = nn.ModuleList([DeciCoderDecoderLayer(config) for _ in range(config.num_hidden_layers)]) - File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 215, in - self.layers = nn.ModuleList([DeciCoderDecoderLayer(config) for _ in range(config.num_hidden_layers)]) - File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 181, in __init__ - self.self_attn = DeciCoderAttention(config=config) - File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 54, in __init__ - self._init_rope() - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1729, in __getattr__ - raise AttributeError(f""'{type(self).__name__}' object has no attribute '{name}'"") -AttributeError: 'DeciCoderAttention' object has no attribute '_init_rope' - -",llama,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,1,64,1,,, -4bit-awq-exllama-v1-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,google/recurrentgemma-2b,google/recurrentgemma-2b,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA 
A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 304, in hf_raise_for_status - response.raise_for_status() - File ""/usr/local/lib/python3.10/dist-packages/requests/models.py"", line 1024, in raise_for_status - raise HTTPError(http_error_msg, response=self) -requests.exceptions.HTTPError: 403 Client Error: Forbidden for url: https://huggingface.co/google/recurrentgemma-2b/resolve/main/config.json - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1722, in _get_metadata_or_catch_error - metadata = get_hf_file_metadata(url=url, proxies=proxies, timeout=etag_timeout, headers=headers) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1645, in get_hf_file_metadata - r = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 372, in _request_wrapper - response = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 396, in _request_wrapper - hf_raise_for_status(response) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 367, in hf_raise_for_status - raise HfHubHTTPError(message, response=response) from e -huggingface_hub.utils._errors.HfHubHTTPError: (Request ID: Root=1-669481ff-551622401da134c31e980f63;aacf66f4-10d4-468f-b4da-4ffaa2b351af) - -403 Forbidden: Please enable access to public gated repositories in your fine-grained token settings to view this repository.. -Cannot access content at: https://huggingface.co/google/recurrentgemma-2b/resolve/main/config.json. -If you are trying to create or update content,make sure you have a token with the `write` role. 
- -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1221, in hf_hub_download - return _hf_hub_download_to_cache_dir( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1325, in _hf_hub_download_to_cache_dir - _raise_on_head_call_error(head_call_error, force_download, local_files_only) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1826, in _raise_on_head_call_error - raise LocalEntryNotFoundError( -huggingface_hub.utils._errors.LocalEntryNotFoundError: An error happened while trying to locate the file on the Hub and we cannot find the requested files in the local cache. Please check your connection and try again or make sure your Internet connection is on. - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 445, in cached_file - raise EnvironmentError( -OSError: We couldn't connect to 'https://huggingface.co' to load this file, couldn't find it in the cached files and it looks like google/recurrentgemma-2b is not the path to a directory containing a file named config.json. -Checkout your internet connection or see how to run the library in offline mode at 'https://huggingface.co/docs/transformers/installation#offline-mode'. 
- -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,1,64,1,,, -4bit-awq-exllama-v1-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,v,v,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 304, in hf_raise_for_status - response.raise_for_status() - File ""/usr/local/lib/python3.10/dist-packages/requests/models.py"", line 1024, in raise_for_status - raise HTTPError(http_error_msg, response=self) -requests.exceptions.HTTPError: 404 Client Error: Not Found for url: https://huggingface.co/v/resolve/main/config.json - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1221, in hf_hub_download - return _hf_hub_download_to_cache_dir( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1325, in _hf_hub_download_to_cache_dir - _raise_on_head_call_error(head_call_error, force_download, local_files_only) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1823, in _raise_on_head_call_error - raise head_call_error - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1722, in _get_metadata_or_catch_error - metadata = get_hf_file_metadata(url=url, proxies=proxies, timeout=etag_timeout, headers=headers) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1645, in get_hf_file_metadata - r = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 372, in _request_wrapper - response = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 396, in _request_wrapper - hf_raise_for_status(response) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status - raise 
RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-669494b4-549dcc15366ce1242c5eefa3;f15389f7-3f94-4733-accd-4a3f23975402) - -Repository Not Found for url: https://huggingface.co/v/resolve/main/config.json. -Please make sure you specified the correct `repo_id` and `repo_type`. -If you are trying to access a private or gated repo, make sure you are authenticated. - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 425, in cached_file - raise EnvironmentError( -OSError: v is not a local folder and is not a valid model identifier listed on 'https://huggingface.co/models' -If this is a private repository, make sure to pass a token having permission to this repo either by logging in with `huggingface-cli login` or by passing `token=` - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,1,64,1,,, -4bit-awq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,EleutherAI/pythia-2.7b,EleutherAI/pythia-2.7b,cuda,0,42,,,True,,,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA 
A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,gpt_neox,MB,2218.307584,3142.057984,0.0,2512.388096,2240.694784,s,1,8.3489765625,8.3489765625,0.0,8.3489765625,8.3489765625,8.3489765625,8.3489765625,[8.3489765625],,kWh,1.8316104725010977e-05,1.0022696194942941e-05,2.942530131799792e-05,5.776410223795184e-05,,MB,2239.492096,3160.932352,0.0,2514.485248,2226.413568,s,10,4.890858673095702,0.48908586730957027,0.00013305800834442557,0.48910696411132815,0.4892443237304687,0.4892599792480469,0.4892725036621094,"[0.4890847473144531, 0.4891933288574219, 0.48894415283203124, 0.48892459106445313, 0.488921630859375, 0.4892408447265625, 0.48894949340820315, 0.4891291809082031, 0.489275634765625, 0.489195068359375]",tokens/s,523.4254700677398,kWh,5.784897377314352e-06,3.169088916858889e-06,3.3124656129334184e-05,4.207864242350742e-05,tokens/kWh,6083846.465944549,MB,2248.13056,3160.932352,0.0,2514.485248,2337.123328,s,10,14.387443359374998,1.4387443359374998,0.016877809769501786,1.441059814453125,1.4526045043945313,1.461165655517578,1.4680145764160157,"[1.469726806640625, 1.4472353515625, 1.404952392578125, 1.4507020263671875, 1.4193819580078124, 1.4321954345703125, 1.43756787109375, 1.446572509765625, 1.4445517578125, 1.4345572509765625]",tokens/s,43.78818281077616,kWh,1.6762135222407663e-05,9.1870103191645e-06,3.879381807205923e-05,6.474296361363139e-05,tokens/kWh,973078.7174953416,,s,630,14.384901111602819,0.022833176367623467,0.0005803254572608102,0.022979071617126465,0.023327714347839357,0.023616460037231446,0.02445863916397095,"[0.025496576309204103, 0.02491596794128418, 0.023605247497558594, 0.023166976928710937, 0.023192575454711914, 0.023149568557739256, 0.023183359146118163, 0.02304204750061035, 0.02311680030822754, 0.02301644706726074, 0.023085056304931642, 0.02329804801940918, 0.023119871139526366, 0.023166976928710937, 0.02306662368774414, 0.02351923179626465, 0.02392268753051758, 0.023111679077148437, 0.02329702377319336, 0.02311680030822754, 0.023299072265625, 0.02310758399963379, 0.023214080810546874, 0.02313216018676758, 0.023112703323364257, 0.023128063201904296, 0.023121919631958008, 0.023396352767944335, 0.023175167083740233, 0.023045120239257814, 0.023517183303833008, 0.024887296676635744, 0.023775232315063476, 0.023340032577514647, 0.023144447326660156, 0.023327743530273438, 0.02304204750061035, 0.023234560012817384, 0.02309119987487793, 0.02305843162536621, 0.02313113594055176, 0.023085056304931642, 0.02308095932006836, 0.022996992111206056, 0.023606271743774415, 0.026298368453979492, 0.023472127914428712, 0.022930431365966796, 0.023173120498657225, 0.022958080291748048, 0.02310348892211914, 0.02307379150390625, 0.02324684715270996, 0.022122495651245116, 0.02250752067565918, 0.02347110366821289, 0.02331443214416504, 0.022973440170288087, 0.023666688919067383, 0.023302143096923827, 0.02309529685974121, 0.023012351989746094, 0.023093248367309572, 0.02248192024230957, 0.022732799530029296, 0.023053312301635744, 0.022967296600341795, 0.023384063720703126, 0.023727104187011717, 0.02369126319885254, 0.023120895385742187, 0.02307276725769043, 0.023031808853149413, 0.023121919631958008, 0.02308608055114746, 0.022794240951538085, 0.023007232666015624, 0.023274496078491212, 0.023149568557739256, 0.022872064590454103, 0.023243776321411135, 0.023310335159301757, 0.02302566337585449, 0.022977535247802734, 0.023031808853149413, 0.022953983306884765, 0.02288844871520996, 0.022936576843261718, 0.022976512908935546, 0.022955007553100586, 0.02289459228515625, 0.023994367599487306, 
0.024461311340332033, 0.023545856475830077, 0.023182336807250976, 0.02306559944152832, 0.022953983306884765, 0.022944768905639647, 0.022967296600341795, 0.022979583740234375, 0.02302566337585449, 0.022948863983154297, 0.023257087707519532, 0.022993919372558593, 0.022982656478881838, 0.022952959060668944, 0.02304921531677246, 0.023014400482177736, 0.0230328311920166, 0.02290380859375, 0.023996416091918944, 0.02431795120239258, 0.023363584518432616, 0.022863872528076173, 0.022733823776245117, 0.022206464767456056, 0.022206464767456056, 0.022157312393188477, 0.022191104888916017, 0.0221265926361084, 0.022152191162109376, 0.02209689521789551, 0.02214297676086426, 0.02205183982849121, 0.022238208770751954, 0.022145023345947267, 0.022208511352539064, 0.02215116882324219, 0.022112255096435548, 0.022128639221191407, 0.022130687713623046, 0.02209791946411133, 0.022190080642700196, 0.02210918426513672, 0.022040576934814454, 0.02207334327697754, 0.022152191162109376, 0.02208563232421875, 0.02343731117248535, 0.023444480895996093, 0.023076864242553712, 0.022232063293457033, 0.022204416275024414, 0.022145023345947267, 0.02223308753967285, 0.022288383483886717, 0.02211327934265137, 0.023130111694335938, 0.02266009521484375, 0.02223308753967285, 0.02221670341491699, 0.022183935165405275, 0.022214656829833986, 0.022206464767456056, 0.022212608337402344, 0.022210559844970702, 0.02212761688232422, 0.02208358383178711, 0.022187007904052734, 0.022166528701782227, 0.022204416275024414, 0.022223871231079103, 0.022171648025512695, 0.022232063293457033, 0.02213478469848633, 0.02230681610107422, 0.0221214714050293, 0.02223411178588867, 0.022210559844970702, 0.02227916717529297, 0.02229555130004883, 0.02221670341491699, 0.02222489547729492, 0.022236160278320313, 0.022215679168701173, 0.02229964828491211, 0.022221824645996095, 0.022197248458862305, 0.02225049591064453, 0.02224947166442871, 0.022303743362426756, 0.0222740478515625, 0.022195199966430663, 0.022382591247558595, 0.022823936462402345, 0.022766592025756836, 0.02232729530334473, 0.022239231109619142, 0.022387712478637696, 0.02408143997192383, 0.02332771110534668, 0.022740991592407226, 0.023186431884765626, 0.02303385543823242, 0.023141376495361327, 0.023028736114501954, 0.023076864242553712, 0.023061504364013673, 0.0231014404296875, 0.023017471313476562, 0.023155712127685548, 0.023334911346435547, 0.023163904190063478, 0.022832128524780275, 0.022945791244506835, 0.023019519805908203, 0.02310655975341797, 0.023015424728393553, 0.023010303497314453, 0.02253209686279297, 0.022222848892211915, 0.022722560882568358, 0.023197696685791015, 0.022937599182128905, 0.02368921661376953, 0.023216127395629883, 0.023177215576171875, 0.022947839736938477, 0.022971391677856445, 0.022993919372558593, 0.023023616790771483, 0.023096319198608398, 0.023010303497314453, 0.022969343185424804, 0.022879232406616212, 0.022930431365966796, 0.02305740737915039, 0.02308198356628418, 0.02302463912963867, 0.023009279251098632, 0.023014400482177736, 0.022960128784179686, 0.023956480026245116, 0.023254016876220703, 0.023051263809204102, 0.022931455612182617, 0.02308403205871582, 0.022952959060668944, 0.023008256912231444, 0.022928384780883788, 0.02305536079406738, 0.02313932800292969, 0.023061504364013673, 0.022968320846557616, 0.02312704086303711, 0.023053375244140625, 0.022945728302001953, 0.023008256912231444, 0.02306559944152832, 0.022269952774047853, 0.022227968215942383, 0.022329343795776366, 0.023026687622070312, 0.022389759063720704, 0.022133760452270508, 0.022183935165405275, 
0.02208563232421875, 0.022183935165405275, 0.02209382438659668, 0.022166528701782227, 0.02225152015686035, 0.022173696517944336, 0.02214297676086426, 0.022183935165405275, 0.022149120330810547, 0.02215936088562012, 0.02193715286254883, 0.022189056396484375, 0.022163455963134765, 0.022025215148925782, 0.02185830307006836, 0.02208153533935547, 0.022205440521240235, 0.022194175720214843, 0.022156288146972656, 0.022304767608642577, 0.022862848281860353, 0.022992895126342772, 0.023226367950439454, 0.023141376495361327, 0.023686143875122072, 0.024441856384277344, 0.02376192092895508, 0.02324787139892578, 0.023188480377197264, 0.023244800567626952, 0.02304204750061035, 0.02294988822937012, 0.022550527572631835, 0.02302566337585449, 0.02294272041320801, 0.023019519805908203, 0.02301644706726074, 0.023198720932006835, 0.02303385543823242, 0.023863296508789062, 0.022435840606689454, 0.022063104629516602, 0.022123519897460937, 0.022222848892211915, 0.02186649513244629, 0.021976064682006836, 0.02207027244567871, 0.022339584350585938, 0.022305791854858398, 0.02226688003540039, 0.022207487106323243, 0.022227968215942383, 0.022271999359130858, 0.022358015060424806, 0.02269593620300293, 0.022288383483886717, 0.02227609634399414, 0.02208563232421875, 0.022196224212646484, 0.023061504364013673, 0.022329343795776366, 0.02206924819946289, 0.022108160018920898, 0.022012928009033202, 0.022150144577026368, 0.022054912567138672, 0.022108160018920898, 0.02205900764465332, 0.02208870315551758, 0.02210918426513672, 0.022383615493774413, 0.022376447677612304, 0.022958080291748048, 0.02290073585510254, 0.023026687622070312, 0.022970367431640625, 0.022974464416503908, 0.02287718391418457, 0.02307379150390625, 0.022986751556396484, 0.022813695907592774, 0.022932479858398438, 0.023052288055419923, 0.022966272354125978, 0.023068672180175782, 0.023015424728393553, 0.023060480117797853, 0.023035903930664063, 0.02311577606201172, 0.023359487533569336, 0.022948863983154297, 0.023206911087036132, 0.023180288314819338, 0.023021568298339845, 0.02305536079406738, 0.022992895126342772, 0.02309017562866211, 0.023010303497314453, 0.023021568298339845, 0.022994943618774414, 0.023060480117797853, 0.02225868797302246, 0.02208768081665039, 0.022141952514648438, 0.02222591972351074, 0.022362112045288086, 0.022565887451171874, 0.024005632400512695, 0.023657472610473632, 0.02327244758605957, 0.023163904190063478, 0.023061504364013673, 0.023076864242553712, 0.02289971160888672, 0.022815744400024415, 0.022222848892211915, 0.022211584091186523, 0.02221772766113281, 0.022295583724975587, 0.022168544769287108, 0.022219776153564453, 0.02226483154296875, 0.023045120239257814, 0.023163904190063478, 0.022938623428344726, 0.023006208419799806, 0.022898687362670898, 0.02309939193725586, 0.023011327743530274, 0.023015424728393553, 0.022805503845214844, 0.022952959060668944, 0.022808576583862306, 0.021934080123901366, 0.02246143913269043, 0.023028736114501954, 0.022160383224487306, 0.021948415756225585, 0.022169599533081053, 0.022222848892211915, 0.022152191162109376, 0.02270207977294922, 0.023121919631958008, 0.02309529685974121, 0.02308198356628418, 0.02308915138244629, 0.022529024124145508, 0.022190080642700196, 0.022435840606689454, 0.022633472442626954, 0.02249830436706543, 0.022565887451171874, 0.021983232498168945, 0.022211584091186523, 0.022138879776000975, 0.023152639389038086, 0.023341056823730468, 0.02351923179626465, 0.023355392456054686, 0.02391859245300293, 0.023610368728637695, 0.023170047760009766, 0.0228853759765625, 
0.02306252861022949, 0.023043071746826172, 0.022367231369018553, 0.0219238395690918, 0.02210508728027344, 0.022076416015625, 0.022995967864990235, 0.022946815490722656, 0.023015424728393553, 0.023030784606933592, 0.023026687622070312, 0.023029760360717775, 0.023137279510498047, 0.02312704086303711, 0.023020544052124024, 0.023021568298339845, 0.022982656478881838, 0.023022592544555662, 0.023036928176879884, 0.023173120498657225, 0.02302463912963867, 0.023111679077148437, 0.023759872436523437, 0.023226367950439454, 0.022996992111206056, 0.022955007553100586, 0.02285875129699707, 0.023002111434936523, 0.022863872528076173, 0.022982656478881838, 0.022977535247802734, 0.02348748779296875, 0.023363584518432616, 0.023028736114501954, 0.022957056045532227, 0.022943744659423827, 0.022982656478881838, 0.023023616790771483, 0.022978559494018554, 0.022940671920776368, 0.022977535247802734, 0.0231014404296875, 0.02305536079406738, 0.02308915138244629, 0.02306355285644531, 0.023104511260986327, 0.023022592544555662, 0.023150592803955077, 0.023167999267578124, 0.02310041618347168, 0.022967296600341795, 0.023145471572875977, 0.02274406433105469, 0.022255615234375, 0.022200319290161134, 0.022215679168701173, 0.022128639221191407, 0.022211584091186523, 0.022172672271728516, 0.02230271911621094, 0.023352319717407227, 0.023394304275512694, 0.022986751556396484, 0.023226367950439454, 0.023456768035888673, 0.023750656127929686, 0.023052288055419923, 0.02302566337585449, 0.023000064849853515, 0.023043071746826172, 0.022990848541259764, 0.02295091247558594, 0.022895616531372072, 0.02307481575012207, 0.022968320846557616, 0.022982656478881838, 0.022980607986450196, 0.022981632232666017, 0.02302566337585449, 0.0231147518157959, 0.022963199615478515, 0.023031808853149413, 0.02245734405517578, 0.022222848892211915, 0.02285772705078125, 0.02291302490234375, 0.022626304626464845, 0.022183935165405275, 0.02347417640686035, 0.023468032836914062, 0.02261299133300781, 0.0221214714050293, 0.022203392028808593, 0.0221265926361084, 0.02205286407470703, 0.022149120330810547, 0.022337535858154296, 0.022758399963378906, 0.024180736541748047, 0.023171072006225587, 0.02309939193725586, 0.022980607986450196, 0.0230645751953125, 0.023060480117797853, 0.02306662368774414, 0.02289254379272461, 0.023051263809204102, 0.023027711868286133, 0.02307379150390625, 0.022990848541259764, 0.02811801528930664, 0.023243776321411135, 0.02310348892211914, 0.02310246467590332, 0.02309939193725586, 0.02310041618347168, 0.023026687622070312, 0.023111679077148437, 0.02307788848876953, 0.023207935333251953, 0.02315673637390137, 0.02314035224914551, 0.02308198356628418, 0.02306662368774414, 0.023237632751464843, 0.023147520065307618, 0.023620607376098633, 0.023130111694335938, 0.023214080810546874, 0.023141376495361327, 0.02309119987487793, 0.023129087448120117, 0.02329292869567871, 0.02388275146484375, 0.022560768127441407, 0.02222489547729492, 0.02224742317199707, 0.021936128616333008, 0.022169599533081053, 0.022268928527832032, 0.022125568389892578, 0.022269952774047853, 0.022244352340698242, 0.022578176498413087, 0.02244607925415039, 0.022383615493774413, 0.022365184783935548, 0.02226688003540039, 0.02325299263000488, 0.023568384170532225, 0.02307583999633789, 0.02307174491882324, 0.022939647674560547, 0.02275225639343262, 0.022172672271728516, 0.022305791854858398, 0.02208870315551758, 0.023015424728393553, 0.022413312911987306, 0.02472550392150879, 0.02384486389160156, 0.02328166389465332, 0.022977535247802734, 0.022995967864990235, 
0.022948863983154297, 0.02290278434753418, 0.023047168731689452, 0.0230328311920166, 0.023009279251098632, 0.022968320846557616, 0.023230464935302734, 0.02292736053466797, 0.022898687362670898, 0.022982656478881838, 0.02305740737915039, 0.022458368301391602, 0.022168575286865236, 0.022220800399780274, 0.022179840087890625, 0.02220953559875488, 0.022223871231079103, 0.022185983657836913, 0.02206105613708496, 0.022158336639404298, 0.02211327934265137, 0.022128639221191407, 0.022173696517944336, 0.022106111526489256, 0.02211942481994629, 0.022137855529785155, 0.02207334327697754, 0.02246451187133789, 0.023026687622070312, 0.024452096939086915, 0.023611391067504883, 0.023212032318115236, 0.022552576065063477, 0.022193151473999022, 0.023014400482177736, 0.02305433654785156, 0.022915071487426757, 0.02304921531677246, 0.02351103973388672, 0.023480319976806642, 0.022796287536621093, 0.0230328311920166, 0.022750207901000977, 0.02224947166442871, 0.022137855529785155, 0.022214656829833986, 0.022366207122802736]",tokens/s,43.795921509105504,,,1,64,1,,, -4bit-awq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,Qwen/Qwen-7B,Qwen/Qwen-7B,cuda,0,42,,,True,,,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run - report = scenario.run(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run - self.run_model_loading_tracking(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking - backend.load() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load - self.load_transformers_model() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model - self.load_transformers_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights - self.load_transformers_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained - self.pretrained_model = self.automodel_loader.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 551, in from_pretrained - model_class = get_class_from_dynamic_module( 
- File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 502, in get_class_from_dynamic_module - final_module = get_cached_module_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 327, in get_cached_module_file - modules_needed = check_imports(resolved_module_file) - File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 182, in check_imports - raise ImportError( -ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. Run `pip install transformers_stream_generator` - -",qwen,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,1,64,1,,, -4bit-awq-exllama-v1-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,microsoft/rho-math-1b-v0.1,microsoft/rho-math-1b-v0.1,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - self.load_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3907, in from_pretrained - hf_quantizer.postprocess_model(model) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/base.py"", line 195, in postprocess_model - return self._process_model_after_weight_loading(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/quantizer_awq.py"", line 107, in _process_model_after_weight_loading - model = post_init_awq_exllama_modules(model, self.quantization_config.exllama_config) - File ""/usr/local/lib/python3.10/dist-packages/transformers/integrations/awq.py"", line 462, in 
post_init_awq_exllama_modules - model = exllama_post_init(model) - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllama.py"", line 144, in exllama_post_init - submodule.post_init() - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllama.py"", line 77, in post_init - self.q4 = exl_ext.make_q4( -NameError: name 'exl_ext' is not defined - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,1,64,1,,, -4bit-awq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,01-ai/Yi-6B,01-ai/Yi-6B,cuda,0,42,,,True,,,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,llama,MB,3569.410048,5404.884992,0.0,4775.215104,4427.072512,s,1,10.231529296875,10.231529296875,0.0,10.231529296875,10.231529296875,10.231529296875,10.231529296875,[10.231529296875],,kWh,4.121844880208335e-05,2.2550697260194595e-05,7.213450215199854e-05,0.0001359036482142765,,MB,1656.19712,5440.536576,0.0,4794.089472,4101.022208,s,10,10.499466796875,1.0499466796874999,0.00020095666874035456,1.04991455078125,1.0501102172851564,1.0502667175292968,1.0503919177246093,"[1.0504232177734374, 1.049901123046875, 1.0499102783203125, 1.0499464111328125, 1.0500535888671876, 1.050075439453125, 1.049821044921875, 1.04962109375, 1.0499188232421874, 1.0497957763671875]",tokens/s,243.82190539065695,kWh,1.2404548993680704e-05,6.797136897931818e-06,7.124300143880046e-05,9.0444687330413e-05,tokens/kWh,2830459.2293495303,MB,1690.836992,5451.022336,0.0,4802.47808,4101.024768,s,10,16.46152294921875,1.646152294921875,0.01800851373438333,1.6397034912109376,1.667281640625,1.680069287109375,1.690299404296875,"[1.69285693359375, 1.6380806884765624, 1.6374737548828124, 1.640567138671875, 1.66443994140625, 1.645124267578125, 1.63883984375, 1.64182763671875, 1.6271156005859375, 1.6351971435546875]",tokens/s,38.271064101629754,kWh,1.984523454930528e-05,1.0876962210572747e-05,5.17204024873992e-05,8.244259924727726e-05,tokens/kWh,764168.0463159418,,s,630,16.45949334907531,0.026126179919167176,0.000538887413017119,0.0259051513671875,0.02690355110168457,0.027369933223724364,0.027687310638427734,"[0.026411008834838868, 0.025993215560913087, 0.025956352233886718, 0.02592051124572754, 0.02588467216491699, 0.025875455856323244, 0.025812992095947264, 0.025866239547729493, 0.02592972755432129, 0.027650047302246093, 0.02591334342956543, 0.02592460823059082, 0.0261345272064209, 0.0259051513671875, 0.025894912719726562, 0.02597068786621094, 0.02568191909790039, 0.025645055770874024, 0.025886720657348632, 0.02595840072631836, 0.026747903823852538, 0.027641855239868163, 0.027378688812255858, 0.02774527931213379, 0.02740019226074219, 0.02590412712097168, 0.02593791961669922, 0.026043392181396483, 0.02587750434875488, 0.026841087341308592, 0.026840063095092775, 0.026863616943359377, 0.027083776473999024, 0.027594751358032226, 0.027639808654785155, 0.028063743591308594, 0.028079103469848633, 0.027615232467651366, 0.027394048690795897, 0.02734694480895996, 0.027427839279174804, 0.02714726448059082, 
0.027469823837280274, 0.027354112625122072, 0.027546623229980468, 0.027481088638305663, 0.027624448776245116, 0.027429887771606445, 0.02768076705932617, 0.027418624877929686, 0.027433984756469725, 0.027389951705932617, 0.027328512191772462, 0.02735923194885254, 0.027249664306640626, 0.02730905532836914, 0.027303936004638672, 0.027431936264038087, 0.027381759643554687, 0.02735308837890625, 0.027438079833984375, 0.027459583282470702, 0.02738380813598633, 0.026102848052978515, 0.02576582336425781, 0.025801727294921875, 0.025854976654052734, 0.02589286422729492, 0.025808895111083984, 0.025834495544433594, 0.02589798355102539, 0.025944063186645508, 0.026052608489990234, 0.02653388786315918, 0.026201087951660155, 0.025853952407836913, 0.025804800033569338, 0.025846784591674804, 0.025959423065185547, 0.02587648010253906, 0.025875455856323244, 0.025847871780395507, 0.0259931526184082, 0.02552217674255371, 0.025601024627685546, 0.025875455856323244, 0.02594508743286133, 0.026256383895874022, 0.025805824279785155, 0.02595020866394043, 0.025865215301513672, 0.02554265594482422, 0.026727424621582032, 0.025873407363891602, 0.025827327728271485, 0.025815040588378906, 0.025817087173461914, 0.0259102725982666, 0.02589286422729492, 0.025776128768920898, 0.025793535232543945, 0.025774080276489256, 0.025833471298217774, 0.025825279235839844, 0.025779199600219727, 0.026500095367431642, 0.02634956741333008, 0.027432960510253908, 0.026664960861206056, 0.02629734420776367, 0.02574131202697754, 0.02589388847351074, 0.026177536010742186, 0.026137599945068358, 0.025661439895629884, 0.02575564765930176, 0.02588467216491699, 0.025911296844482422, 0.025865215301513672, 0.025960447311401368, 0.02652774429321289, 0.026678272247314453, 0.026578943252563478, 0.026261503219604493, 0.02598297691345215, 0.025848831176757812, 0.026220544815063477, 0.02591231918334961, 0.025979904174804686, 0.02587238311767578, 0.02611609649658203, 0.025784320831298828, 0.025772031784057618, 0.027052032470703126, 0.026060800552368164, 0.0255150089263916, 0.026632192611694337, 0.027106304168701172, 0.025986047744750978, 0.025672704696655273, 0.02557542419433594, 0.02595327949523926, 0.02590003204345703, 0.025845760345458983, 0.025579519271850586, 0.025868288040161135, 0.025798656463623046, 0.025810943603515626, 0.025800703048706054, 0.02613043212890625, 0.025810943603515626, 0.02637004852294922, 0.026384384155273437, 0.025966623306274413, 0.025928672790527345, 0.02589593505859375, 0.02594713592529297, 0.026078208923339844, 0.026005504608154296, 0.025594879150390625, 0.026032127380371094, 0.025844736099243162, 0.02555187225341797, 0.025634815216064453, 0.025665536880493164, 0.02575257682800293, 0.02593791961669922, 0.02588572883605957, 0.026613727569580078, 0.027222015380859374, 0.02712883186340332, 0.02605161666870117, 0.02598294448852539, 0.025786367416381836, 0.026596351623535155, 0.026137599945068358, 0.02652672004699707, 0.025701375961303712, 0.025793535232543945, 0.02573619270324707, 0.02551910400390625, 0.0254597110748291, 0.025825279235839844, 0.02551807975769043, 0.025416704177856447, 0.025560064315795897, 0.02576486396789551, 0.025986047744750978, 0.026704896926879884, 0.02615603256225586, 0.026078208923339844, 0.02628812789916992, 0.02609868812561035, 0.02612428855895996, 0.02592972755432129, 0.025866239547729493, 0.025811967849731447, 0.025845760345458983, 0.025808895111083984, 0.025874431610107423, 0.02592767906188965, 0.025874431610107423, 0.025774080276489256, 0.025784320831298828, 0.025882623672485353, 0.02632089614868164, 
0.026010623931884767, 0.02589798355102539, 0.025891839981079103, 0.025845760345458983, 0.025841663360595703, 0.025653247833251954, 0.025873407363891602, 0.025860095977783205, 0.025811967849731447, 0.02591641616821289, 0.026002431869506838, 0.026200063705444337, 0.02671308708190918, 0.027034624099731445, 0.026856447219848634, 0.026608640670776368, 0.02594508743286133, 0.025986047744750978, 0.026224639892578124, 0.025865215301513672, 0.025800703048706054, 0.02591744041442871, 0.02637926483154297, 0.026658815383911134, 0.026900480270385742, 0.02675814437866211, 0.02608332824707031, 0.025799680709838867, 0.025853952407836913, 0.025854976654052734, 0.026080255508422853, 0.02592972755432129, 0.025948160171508788, 0.02588979148864746, 0.025820159912109376, 0.025869312286376952, 0.02590105628967285, 0.02615193557739258, 0.025980928421020507, 0.02595020866394043, 0.02590822410583496, 0.025817087173461914, 0.02587750434875488, 0.025976831436157227, 0.02587648010253906, 0.02590822410583496, 0.026267648696899414, 0.02589798355102539, 0.02589798355102539, 0.025969663619995118, 0.026006528854370117, 0.025606143951416017, 0.02591948890686035, 0.025862144470214843, 0.026479616165161132, 0.02630143928527832, 0.0259420166015625, 0.025875455856323244, 0.025963520050048827, 0.025964544296264647, 0.026227712631225586, 0.026809343338012694, 0.02676121520996094, 0.027123712539672853, 0.027081727981567383, 0.026902528762817384, 0.026628095626831053, 0.026630144119262695, 0.026647552490234375, 0.02655232048034668, 0.026658815383911134, 0.02649497604370117, 0.026795007705688476, 0.026999807357788085, 0.02690355110168457, 0.026661888122558593, 0.026632192611694337, 0.026419200897216798, 0.02627686309814453, 0.02667519950866699, 0.026747903823852538, 0.02609459114074707, 0.025812992095947264, 0.02597478485107422, 0.025988096237182616, 0.02594611167907715, 0.025907199859619142, 0.02592870330810547, 0.025903104782104492, 0.02587238311767578, 0.026065919876098635, 0.026614784240722656, 0.026755071640014647, 0.0267325439453125, 0.02670697593688965, 0.026397663116455077, 0.026395647048950196, 0.026267648696899414, 0.026600448608398438, 0.026657791137695314, 0.026798080444335938, 0.026763263702392577, 0.027213823318481444, 0.027044864654541017, 0.026673152923583986, 0.026496000289916992, 0.02668441581726074, 0.02669875144958496, 0.026648576736450196, 0.02695475196838379, 0.026220544815063477, 0.026561567306518555, 0.026248159408569335, 0.02670899200439453, 0.026500095367431642, 0.02633830451965332, 0.026018815994262694, 0.026425344467163086, 0.026042367935180662, 0.02655948829650879, 0.026369024276733398, 0.027611135482788086, 0.02858598327636719, 0.027012096405029298, 0.0267007999420166, 0.02609152030944824, 0.025849855422973633, 0.025793535232543945, 0.02609766387939453, 0.025810976028442383, 0.025866207122802735, 0.025827327728271485, 0.025767936706542968, 0.025652223587036133, 0.02593484878540039, 0.025886720657348632, 0.026412031173706055, 0.02590105628967285, 0.025841663360595703, 0.025470975875854493, 0.02609971237182617, 0.027234304428100587, 0.02576383972167969, 0.025858047485351563, 0.02589388847351074, 0.025789440155029295, 0.025761791229248047, 0.026446847915649413, 0.026910720825195314, 0.02588876724243164, 0.0259051513671875, 0.02593484878540039, 0.02590617561340332, 0.025852928161621092, 0.02551398468017578, 0.02595737648010254, 0.02571468734741211, 0.025591808319091795, 0.02591744041442871, 0.025875455856323244, 0.02575667190551758, 0.025686016082763673, 0.025833471298217774, 0.025852928161621092, 
0.025841663360595703, 0.025860095977783205, 0.02592563247680664, 0.026167295455932618, 0.026226688385009765, 0.02570751953125, 0.025482240676879882, 0.02570342445373535, 0.02637824058532715, 0.02594099235534668, 0.02591231918334961, 0.025661439895629884, 0.026209280014038085, 0.0263372802734375, 0.026414079666137694, 0.025789440155029295, 0.027114496231079102, 0.025992191314697266, 0.025840639114379883, 0.025852928161621092, 0.02873139190673828, 0.027291648864746092, 0.026194944381713867, 0.025931776046752928, 0.025845760345458983, 0.025790464401245116, 0.025843711853027345, 0.025899007797241212, 0.025833471298217774, 0.02578124809265137, 0.025771007537841797, 0.025844736099243162, 0.025845760345458983, 0.025520128250122072, 0.025470975875854493, 0.025586687088012695, 0.02614067268371582, 0.025859071731567384, 0.02587238311767578, 0.025829376220703124, 0.026631168365478516, 0.02592153549194336, 0.025964544296264647, 0.026039295196533203, 0.025605119705200196, 0.025790464401245116, 0.025847808837890625, 0.02690355110168457, 0.02611404800415039, 0.025883647918701173, 0.02592767906188965, 0.025979904174804686, 0.025776128768920898, 0.025656320571899413, 0.02593382453918457, 0.02590105628967285, 0.026488832473754883, 0.025832447052001953, 0.025939968109130858, 0.02590617561340332, 0.02590412712097168, 0.02593280029296875, 0.025776128768920898, 0.025523199081420898, 0.02616422462463379, 0.025971712112426756, 0.025804800033569338, 0.025812992095947264, 0.025801727294921875, 0.025841663360595703, 0.02574028778076172, 0.02768998336791992, 0.026413055419921876, 0.025878528594970703, 0.02593484878540039, 0.025868288040161135, 0.025832447052001953, 0.025761791229248047, 0.02550681686401367, 0.025382911682128906, 0.02536960029602051, 0.02652364730834961, 0.026031103134155274, 0.026606592178344726, 0.026457088470458984, 0.02568191909790039, 0.02569113540649414, 0.02588876724243164, 0.026057727813720705, 0.025862144470214843, 0.025975807189941406, 0.025506847381591796, 0.02638332748413086, 0.025634815216064453, 0.027151359558105468, 0.026959871292114256, 0.025814016342163085, 0.025854976654052734, 0.025948160171508788, 0.025887744903564453, 0.02591641616821289, 0.02576896095275879, 0.025810943603515626, 0.025867263793945314, 0.025849855422973633, 0.025869312286376952, 0.025967615127563477, 0.02589798355102539, 0.025829376220703124, 0.02569932746887207, 0.026455104827880858, 0.028938175201416016, 0.026214399337768556, 0.025547775268554687, 0.025753599166870117, 0.02587238311767578, 0.02594304084777832, 0.025861120223999022, 0.025867263793945314, 0.02570444869995117, 0.026214399337768556, 0.025787391662597657, 0.02653183937072754, 0.026015743255615235, 0.026842111587524413, 0.025832447052001953, 0.026610687255859376, 0.02658406448364258, 0.025838592529296874, 0.025701375961303712, 0.02576486396789551, 0.025511936187744142, 0.025427967071533202, 0.02676736068725586, 0.026809343338012694, 0.025825279235839844, 0.025753599166870117, 0.025812992095947264, 0.025931776046752928, 0.02572185516357422, 0.02566655921936035, 0.02572800064086914, 0.02573209571838379, 0.025734144210815428, 0.025777151107788086, 0.025831424713134765, 0.025818111419677735, 0.02578124809265137, 0.025793535232543945, 0.025796607971191408, 0.025849855422973633, 0.025825279235839844, 0.025811967849731447, 0.02588467216491699, 0.025762815475463868, 0.025483264923095703, 0.025804800033569338, 0.025766912460327147, 0.025887744903564453, 0.02551398468017578, 0.02594918441772461, 0.025931776046752928, 0.025608192443847655, 0.025837568283081053, 
0.025861120223999022, 0.025795583724975587, 0.025776128768920898, 0.02575974464416504, 0.025858047485351563, 0.025850879669189454, 0.026196992874145508, 0.025816064834594726, 0.025804800033569338, 0.026207231521606447, 0.026277887344360353, 0.025825279235839844, 0.025825279235839844, 0.025814016342163085, 0.025815040588378906, 0.025778175354003906, 0.025769983291625977, 0.025815040588378906, 0.025818111419677735, 0.02593791961669922, 0.02592051124572754, 0.0259102725982666, 0.025861120223999022, 0.025639936447143553, 0.02574745559692383, 0.025907199859619142, 0.025891839981079103, 0.025789440155029295, 0.02569625663757324, 0.025815040588378906, 0.025827327728271485, 0.025469951629638672, 0.025402368545532225, 0.026224639892578124, 0.026022911071777344, 0.025643072128295898, 0.025927616119384767, 0.02593791961669922, 0.025593856811523437, 0.025480192184448244, 0.02569113540649414, 0.025797632217407225, 0.02572083282470703, 0.026444799423217775, 0.025967615127563477, 0.025830400466918944, 0.02588057518005371, 0.025831424713134765, 0.02594304084777832, 0.02637004852294922, 0.026007551193237305, 0.025825279235839844, 0.025793535232543945, 0.02574028778076172, 0.025859071731567384, 0.025779199600219727, 0.025842687606811524, 0.02556211280822754, 0.02575155258178711, 0.025840639114379883, 0.025778175354003906, 0.025804800033569338, 0.025827327728271485, 0.025753599166870117, 0.02573209571838379, 0.025811967849731447, 0.02596249580383301, 0.025849855422973633, 0.025806848526000976, 0.025861120223999022, 0.025828351974487306, 0.025828351974487306, 0.025792512893676758, 0.02574131202697754, 0.025847808837890625, 0.026169343948364256, 0.025791488647460937, 0.02570854377746582, 0.02571878433227539, 0.02569219207763672, 0.025976800918579103, 0.02694655990600586, 0.026570751190185548, 0.025839616775512695, 0.026735616683959962, 0.027339775085449217, 0.02614169692993164, 0.02584275245666504, 0.02648569679260254, 0.0265164794921875, 0.025761791229248047, 0.02573721694946289, 0.026446847915649413, 0.026422271728515623, 0.02572697639465332, 0.02588467216491699]",tokens/s,38.275783260083934,,,1,64,1,,, -4bit-awq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,EleutherAI/gpt-neo-2.7B,EleutherAI/gpt-neo-2.7B,cuda,0,42,,,True,,,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,gpt_neo,MB,2215.71072,2726.821888,0.0,2097.152,1986.693632,s,1,9.2316484375,9.2316484375,0.0,9.2316484375,9.2316484375,9.2316484375,9.2316484375,[9.2316484375],,kWh,2.7951897403472182e-05,1.530339008094471e-05,4.207142254597507e-05,8.532671003039196e-05,,MB,2294.222848,2743.599104,0.0,2097.152,1859.01312,s,10,4.923785003662109,0.49237850036621095,0.00020069526631456413,0.4924298095703125,0.4924824279785156,0.49250030822753904,0.4925146124267578,"[0.49178607177734374, 0.49240176391601564, 0.49241778564453126, 0.49247845458984374, 0.4924723510742188, 0.4925181884765625, 0.4923990173339844, 0.49241668701171876, 0.4924418334960938, 
0.49245285034179687]",tokens/s,519.9252197437494,kWh,5.814491784027748e-06,3.1857393437264823e-06,3.2639986429428534e-05,4.164021755718276e-05,tokens/kWh,6147902.557147929,MB,2301.52192,2743.599104,0.0,2097.152,1942.90944,s,10,11.022335571289062,1.1022335571289061,0.0065516630037765694,1.1004860229492186,1.1099015747070313,1.112922100830078,1.1153385217285157,"[1.10150537109375, 1.0989345703125, 1.0994666748046875, 1.09771728515625, 1.1092303466796876, 1.1032130126953126, 1.093125, 1.10743798828125, 1.115942626953125, 1.0957626953125]",tokens/s,57.15667028329474,kWh,1.2872195022500317e-05,7.054517538204623e-06,3.063556419096949e-05,5.056227675167443e-05,tokens/kWh,1245988.1960895616,,s,630,11.019298805236822,0.017490950484502884,0.000341170046840201,0.01732096004486084,0.017873101043701174,0.017993831062316896,0.01835261907577515,"[0.01808076858520508, 0.01747865676879883, 0.017300479888916014, 0.01728102493286133, 0.017229824066162108, 0.017154048919677735, 0.01717043113708496, 0.017343488693237305, 0.017209344863891602, 0.01716633605957031, 0.017187839508056642, 0.01719910430908203, 0.017145856857299805, 0.01721446418762207, 0.0172677116394043, 0.017159168243408202, 0.017160192489624023, 0.01723084831237793, 0.017144832611083984, 0.017169408798217774, 0.01702707290649414, 0.01718272018432617, 0.017150976181030272, 0.0172042236328125, 0.017175552368164062, 0.017671167373657228, 0.017597440719604493, 0.018869247436523438, 0.018148351669311523, 0.01780735969543457, 0.01782374382019043, 0.01778278350830078, 0.017689599990844726, 0.017716224670410157, 0.017633279800415038, 0.017756160736083985, 0.017709056854248048, 0.017770496368408203, 0.017712127685546874, 0.017746944427490235, 0.017760255813598632, 0.017682432174682617, 0.017765375137329103, 0.017737728118896484, 0.01783500862121582, 0.017741823196411134, 0.01782579231262207, 0.01775923156738281, 0.017632255554199217, 0.017238016128540038, 0.01718681526184082, 0.01724006462097168, 0.01720524787902832, 0.017283071517944337, 0.01759129524230957, 0.01723391914367676, 0.017391616821289063, 0.017760255813598632, 0.017488895416259767, 0.01715814399719238, 0.017147903442382813, 0.017303552627563477, 0.01776639938354492, 0.017758207321166994, 0.01742540740966797, 0.017184768676757813, 0.017283071517944337, 0.01722163200378418, 0.01721651268005371, 0.01722265625, 0.01717452812194824, 0.017179647445678712, 0.01724928092956543, 0.01717862319946289, 0.01721855926513672, 0.01722777557373047, 0.017238016128540038, 0.01720627212524414, 0.017040384292602538, 0.01717350387573242, 0.01721139144897461, 0.01720319938659668, 0.017319936752319336, 0.01717452812194824, 0.01725951957702637, 0.017155071258544922, 0.01721855926513672, 0.01719910430908203, 0.017180671691894533, 0.01721651268005371, 0.01719193649291992, 0.017145856857299805, 0.017326080322265625, 0.017122304916381836, 0.018036735534667968, 0.017880064010620117, 0.01785651206970215, 0.017772544860839845, 0.017765375137329103, 0.017520639419555666, 0.01718681526184082, 0.017157119750976564, 0.01721855926513672, 0.01723494338989258, 0.017983488082885742, 0.0198922233581543, 0.01819443130493164, 0.017757183074951173, 0.017764352798461915, 0.01775103950500488, 0.01783193588256836, 0.017726463317871095, 0.017762304306030274, 0.017739776611328126, 0.017863679885864257, 0.0178155517578125, 0.017765375137329103, 0.017492992401123047, 0.017168384552001953, 0.01721446418762207, 0.017292287826538084, 0.01705369567871094, 0.017236991882324217, 0.01720627212524414, 0.01721036720275879, 0.01718988800048828, 
0.018122751235961913, 0.01740390396118164, 0.01721855926513672, 0.017209344863891602, 0.017286144256591796, 0.017177600860595704, 0.017252351760864256, 0.01718681526184082, 0.017346559524536134, 0.01724825668334961, 0.017092607498168946, 0.01721139144897461, 0.01724825668334961, 0.01722163200378418, 0.017177600860595704, 0.017133567810058595, 0.01724825668334961, 0.017236991882324217, 0.01723187255859375, 0.01726464080810547, 0.01720524787902832, 0.017160192489624023, 0.0172410888671875, 0.01725951957702637, 0.01780940818786621, 0.01779302406311035, 0.017482751846313475, 0.01720524787902832, 0.01721343994140625, 0.01725132751464844, 0.01722572708129883, 0.01719808006286621, 0.01719808006286621, 0.01717350387573242, 0.017184768676757813, 0.01723289680480957, 0.017147903442382813, 0.017228799819946287, 0.017257471084594727, 0.017153024673461914, 0.01717350387573242, 0.017271808624267578, 0.017169408798217774, 0.01724313545227051, 0.01784012794494629, 0.017868799209594728, 0.017909759521484374, 0.017716224670410157, 0.017712127685546874, 0.017708032608032227, 0.01781452751159668, 0.017829887390136717, 0.0178288631439209, 0.01860812759399414, 0.017863679885864257, 0.017994752883911135, 0.017737728118896484, 0.017937408447265626, 0.01773465538024902, 0.0178288631439209, 0.017744895935058593, 0.01775103950500488, 0.017738752365112305, 0.017335296630859375, 0.017343488693237305, 0.01723187255859375, 0.01724415969848633, 0.01723391914367676, 0.017054719924926756, 0.017052671432495118, 0.01721958351135254, 0.017321983337402345, 0.017134592056274413, 0.017134592056274413, 0.017116159439086915, 0.017135616302490234, 0.017095680236816405, 0.01718272018432617, 0.017107967376708985, 0.017110015869140623, 0.017141759872436522, 0.017488895416259767, 0.017699840545654297, 0.018147327423095702, 0.017539072036743163, 0.0172359676361084, 0.01717350387573242, 0.017153024673461914, 0.017152000427246093, 0.017152000427246093, 0.017114112854003907, 0.017116159439086915, 0.01721036720275879, 0.017171455383300782, 0.017120256423950195, 0.017134592056274413, 0.017144832611083984, 0.017113088607788086, 0.01699430465698242, 0.017180671691894533, 0.01715814399719238, 0.017159168243408202, 0.017145856857299805, 0.017238016128540038, 0.01781760025024414, 0.018378751754760742, 0.017836032867431642, 0.01785036849975586, 0.017743871688842772, 0.01784115219116211, 0.018000896453857423, 0.017894399642944335, 0.017727487564086913, 0.017707008361816406, 0.017781759262084963, 0.01773465538024902, 0.01781452751159668, 0.01717043113708496, 0.017289215087890625, 0.017663999557495116, 0.017724416732788087, 0.01785241508483887, 0.017881088256835938, 0.01810534477233887, 0.017895423889160156, 0.017770496368408203, 0.01765068817138672, 0.01722163200378418, 0.01720012855529785, 0.01717452812194824, 0.017184768676757813, 0.017364992141723632, 0.017146879196166993, 0.017382400512695313, 0.017364992141723632, 0.017338367462158204, 0.01725542449951172, 0.017283071517944337, 0.017184768676757813, 0.017675264358520508, 0.017757183074951173, 0.017705984115600586, 0.017680383682250975, 0.017699840545654297, 0.017787904739379884, 0.017787904739379884, 0.01774284744262695, 0.01783500862121582, 0.017730560302734375, 0.017724416732788087, 0.01781657600402832, 0.01782579231262207, 0.0178606071472168, 0.017876991271972655, 0.017694719314575197, 0.017686527252197267, 0.017699840545654297, 0.017725439071655275, 0.01721343994140625, 0.017265663146972657, 0.01718681526184082, 0.017297407150268555, 0.017671167373657228, 0.017758207321166994, 
0.017752063751220702, 0.017689599990844726, 0.017747968673706056, 0.017912832260131836, 0.017755136489868165, 0.018086912155151368, 0.017718271255493166, 0.017711103439331053, 0.0176680965423584, 0.01785036849975586, 0.017687551498413084, 0.0176629753112793, 0.017139711380004884, 0.01721958351135254, 0.01744076728820801, 0.017771520614624024, 0.017689599990844726, 0.017670143127441407, 0.017712127685546874, 0.01790771293640137, 0.01761689567565918, 0.01777459144592285, 0.0176363525390625, 0.017778688430786133, 0.01787494468688965, 0.018110464096069336, 0.01741107177734375, 0.017293312072753905, 0.017257471084594727, 0.01723494338989258, 0.017188863754272463, 0.01721958351135254, 0.017930240631103517, 0.018148351669311523, 0.018000896453857423, 0.017694719314575197, 0.017583103179931642, 0.01718272018432617, 0.017276927947998046, 0.01718272018432617, 0.01721958351135254, 0.017266687393188478, 0.017176576614379883, 0.017175552368164062, 0.01721139144897461, 0.01720729637145996, 0.017163263320922852, 0.017276927947998046, 0.017317888259887695, 0.01718169593811035, 0.017122304916381836, 0.017238016128540038, 0.01720832061767578, 0.017290239334106446, 0.017312768936157227, 0.017228799819946287, 0.017152000427246093, 0.017201152801513672, 0.017266687393188478, 0.017187839508056642, 0.01721446418762207, 0.017887231826782226, 0.01825382423400879, 0.01779302406311035, 0.017640447616577147, 0.017711103439331053, 0.017760255813598632, 0.017855487823486327, 0.01779199981689453, 0.01782374382019043, 0.017787904739379884, 0.017752063751220702, 0.01770086479187012, 0.017893375396728514, 0.017895423889160156, 0.017761280059814453, 0.01784217643737793, 0.018181119918823242, 0.01802137565612793, 0.017769472122192383, 0.017901567459106444, 0.017894399642944335, 0.017504255294799806, 0.01720319938659668, 0.017257471084594727, 0.01721855926513672, 0.017313791275024415, 0.0171909122467041, 0.018151424407958985, 0.0175861759185791, 0.017308671951293944, 0.01724006462097168, 0.017160192489624023, 0.017156095504760743, 0.017160192489624023, 0.017161216735839844, 0.01720524787902832, 0.017177600860595704, 0.017257471084594727, 0.01716531181335449, 0.017139711380004884, 0.01725951957702637, 0.017375232696533204, 0.017326080322265625, 0.017287168502807617, 0.017179647445678712, 0.01721651268005371, 0.017391616821289063, 0.017132543563842775, 0.01716633605957031, 0.017292287826538084, 0.017558528900146485, 0.01745408058166504, 0.0172728328704834, 0.01716633605957031, 0.017143808364868163, 0.017145856857299805, 0.017257471084594727, 0.017290239334106446, 0.017228799819946287, 0.017201152801513672, 0.017142784118652343, 0.01725542449951172, 0.017125375747680666, 0.017179647445678712, 0.017177600860595704, 0.017169408798217774, 0.01721651268005371, 0.017274879455566407, 0.01725951957702637, 0.01721958351135254, 0.01720217514038086, 0.017161216735839844, 0.01719398307800293, 0.017294336318969726, 0.017192960739135742, 0.01728102493286133, 0.017171455383300782, 0.017667072296142578, 0.017720319747924804, 0.018647039413452148, 0.01843507194519043, 0.01768550491333008, 0.017262592315673828, 0.017163263320922852, 0.017144832611083984, 0.017169408798217774, 0.017590272903442384, 0.017944576263427735, 0.017912832260131836, 0.017715200424194336, 0.01824870491027832, 0.017331199645996095, 0.017238016128540038, 0.0172677116394043, 0.017183744430541992, 0.017238016128540038, 0.01723289680480957, 0.01717452812194824, 0.017245183944702147, 0.017699840545654297, 0.01779097557067871, 0.01785139274597168, 0.017938432693481447, 
0.017723392486572266, 0.017713151931762695, 0.01777663993835449, 0.017698816299438477, 0.017949695587158202, 0.01801523208618164, 0.017913856506347657, 0.01784320068359375, 0.017307647705078123, 0.017252351760864256, 0.01721446418762207, 0.01720627212524414, 0.0174704647064209, 0.017910783767700195, 0.01781862449645996, 0.017726463317871095, 0.017747968673706056, 0.017663999557495116, 0.017763328552246094, 0.017765375137329103, 0.01785753631591797, 0.017722368240356445, 0.01775923156738281, 0.01773465538024902, 0.017934335708618163, 0.017992704391479493, 0.017872896194458008, 0.017720319747924804, 0.017778688430786133, 0.017744895935058593, 0.01810534477233887, 0.01719500732421875, 0.017342464447021484, 0.017183744430541992, 0.01719603157043457, 0.01721446418762207, 0.017133567810058595, 0.017133567810058595, 0.017188863754272463, 0.017164287567138673, 0.017295360565185547, 0.01718681526184082, 0.017701887130737306, 0.017935359954833984, 0.017726463317871095, 0.017547264099121093, 0.017168384552001953, 0.017753087997436523, 0.017731584548950196, 0.01720729637145996, 0.01760972785949707, 0.01741004753112793, 0.01723494338989258, 0.017252351760864256, 0.017201152801513672, 0.01734758377075195, 0.0172042236328125, 0.01719500732421875, 0.017145856857299805, 0.01778278350830078, 0.01782374382019043, 0.017763328552246094, 0.017723392486572266, 0.017648639678955077, 0.01718681526184082, 0.017148927688598634, 0.017119232177734374, 0.017180671691894533, 0.01724928092956543, 0.017836032867431642, 0.01780838394165039, 0.01814630317687988, 0.018191360473632814, 0.018874368667602538, 0.018288639068603514, 0.017872896194458008, 0.017963008880615236, 0.01781350326538086, 0.017743871688842772, 0.017778688430786133, 0.017733631134033204, 0.017769472122192383, 0.017750015258789064, 0.017721343994140625, 0.017839103698730468, 0.01786675262451172, 0.01778483200073242, 0.0177838077545166, 0.017765375137329103, 0.01765990447998047, 0.017846271514892577, 0.017750015258789064, 0.01775923156738281, 0.017902591705322265, 0.017731584548950196, 0.017726463317871095, 0.017689599990844726, 0.017879039764404296, 0.018069503784179687, 0.017934335708618163, 0.01777561569213867, 0.017829887390136717, 0.017705984115600586, 0.017590272903442384, 0.01781760025024414, 0.0177838077545166, 0.017870847702026366, 0.017754112243652344, 0.017780736923217775, 0.01777561569213867, 0.017770496368408203, 0.017795072555541993, 0.017847295761108398, 0.01781350326538086, 0.017449983596801756, 0.0172359676361084, 0.017278976440429687, 0.017146879196166993, 0.01725951957702637, 0.01714995193481445, 0.017139711380004884, 0.017154048919677735, 0.017340415954589843, 0.017141759872436522, 0.01718681526184082, 0.017163263320922852, 0.017280000686645508, 0.01722572708129883, 0.01719500732421875, 0.01717043113708496, 0.017146879196166993, 0.017155071258544922, 0.017105920791625977, 0.01718681526184082, 0.01719705581665039, 0.017266687393188478, 0.01718681526184082, 0.017128448486328125, 0.01720012855529785, 0.017184768676757813, 0.017164287567138673, 0.01716531181335449, 0.017260543823242186, 0.017155071258544922, 0.017159168243408202, 0.017307647705078123, 0.0172359676361084, 0.01722060775756836, 0.017229824066162108, 0.017134592056274413, 0.01723084831237793, 0.01719808006286621, 0.017133567810058595, 0.017161216735839844, 0.017126399993896483, 0.017306623458862306, 0.01717043113708496, 0.01764249610900879, 0.018096128463745118, 0.017768447875976562, 0.017731584548950196, 0.01781862449645996, 0.017722368240356445, 0.017777664184570312, 
0.017796096801757814, 0.017730560302734375, 0.017754112243652344, 0.017804288864135744, 0.01779302406311035, 0.017796096801757814, 0.01780633544921875, 0.017862655639648437, 0.017758207321166994, 0.017887231826782226, 0.01773465538024902, 0.017687551498413084]",tokens/s,57.1724218695838,,,1,64,1,,, -4bit-awq-exllama-v1-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,facebook/opt-66b,facebook/opt-66b,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - self.load_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3904, in from_pretrained - dispatch_model(model, **device_map_kwargs) - File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 489, in dispatch_model - model.to(device) - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 2796, in to - return super().to(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1173, in to - return self._apply(convert) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 779, in _apply - module._apply(fn) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 779, in _apply - module._apply(fn) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 779, in _apply - module._apply(fn) - [Previous line repeated 2 more times] - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 853, in _apply - self._buffers[key] = fn(buf) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1159, in convert - 
return t.to( -torch.cuda.OutOfMemoryError: CUDA out of memory. Tried to allocate 162.00 MiB. GPU - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,1,64,1,,, -4bit-awq-exllama-v1-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,0,0,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 304, in hf_raise_for_status - response.raise_for_status() - File ""/usr/local/lib/python3.10/dist-packages/requests/models.py"", line 1024, in raise_for_status - raise HTTPError(http_error_msg, response=self) -requests.exceptions.HTTPError: 404 Client Error: Not Found for url: https://huggingface.co/0/resolve/main/config.json - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1221, in hf_hub_download - return _hf_hub_download_to_cache_dir( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1325, in _hf_hub_download_to_cache_dir - _raise_on_head_call_error(head_call_error, force_download, local_files_only) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1823, in _raise_on_head_call_error - raise head_call_error - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1722, in _get_metadata_or_catch_error - metadata = get_hf_file_metadata(url=url, proxies=proxies, timeout=etag_timeout, headers=headers) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1645, in get_hf_file_metadata - r = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 372, in _request_wrapper - response = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 396, in _request_wrapper - hf_raise_for_status(response) - File 
""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status - raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-66949513-18f7095e1c0a0dd50cfbd6e8;08addee7-113b-40f5-9f9e-480d6dbc74ac) - -Repository Not Found for url: https://huggingface.co/0/resolve/main/config.json. -Please make sure you specified the correct `repo_id` and `repo_type`. -If you are trying to access a private or gated repo, make sure you are authenticated. - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 425, in cached_file - raise EnvironmentError( -OSError: 0 is not a local folder and is not a valid model identifier listed on 'https://huggingface.co/models' -If this is a private repository, make sure to pass a token having permission to this repo either by logging in with `huggingface-cli login` or by passing `token=` - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,1,64,1,,, -4bit-awq-exllama-v1-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,facebook/xglm-4.5B,facebook/xglm-4.5B,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File 
""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - self.load_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3704, in from_pretrained - config = cls._autoset_attn_implementation( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1481, in _autoset_attn_implementation - cls._check_and_enable_flash_attn_2( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1572, in _check_and_enable_flash_attn_2 - raise ValueError( -ValueError: XGLMForCausalLM does not support Flash Attention 2.0 yet. Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmpi4hzcsjj/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,1,64,1,,, -4bit-awq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,TencentARC/Mistral_Pro_8B_v0.1,TencentARC/Mistral_Pro_8B_v0.1,cuda,0,42,,,True,,,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,,mistral,MB,5167.054848,6469.189632,0.0,5832.179712,5645.103616,s,1,12.3639912109375,12.3639912109375,0.0,12.3639912109375,12.3639912109375,12.3639912109375,12.3639912109375,[12.3639912109375],,kWh,6.301908348889166e-05,3.452087347621541e-05,0.00011501620312398542,0.0002125561600890925,,MB,1790.95552,6527.909888,0.0,5874.122752,5159.561216,s,10,16.607302490234375,1.6607302490234375,0.0003797372094725765,1.6608844604492188,1.661083056640625,1.6612134643554686,1.6613177905273437,"[1.660956298828125, 1.66096533203125, 1.660470703125, 1.6610540771484374, 1.6601475830078125, 1.6613438720703124, 1.66020166015625, 1.66039404296875, 1.66082373046875, 
1.6609451904296875]",tokens/s,154.1490559050973,kWh,1.9617876000347614e-05,1.075025968901896e-05,0.00011440231374399978,0.00014477044943336635,tokens/kWh,1768316.6765178097,MB,1798.508544,6548.881408,0.0,5892.99712,5159.563776,s,10,27.999011962890624,2.7999011962890625,0.018943201246796302,2.80120556640625,2.8236138183593753,2.8251147705078123,2.8263155322265625,"[2.82661572265625, 2.7816953125, 2.7992587890625, 2.80315234375, 2.764563232421875, 2.79645068359375, 2.80720361328125, 2.8232802734375, 2.780130859375, 2.8166611328125]",tokens/s,22.50079398640889,kWh,3.3485224089652605e-05,1.8350392072256113e-05,8.854698750419833e-05,0.00014038260366610705,tokens/kWh,448773.55423498433,,s,630,27.996179382324218,0.04443837997194321,0.0008948467196508871,0.04475904083251953,0.045377843856811526,0.04561464290618897,0.04673517433166504,"[0.04403507232666016, 0.04501708984375, 0.045028350830078126, 0.04496486282348633, 0.04504064178466797, 0.04504780960083008, 0.044905471801757815, 0.04492287826538086, 0.04420915222167969, 0.045034496307373044, 0.04480307388305664, 0.045548545837402345, 0.04530483245849609, 0.044862464904785154, 0.04486860656738281, 0.04479180908203125, 0.04537753677368164, 0.04344934463500977, 0.04337152099609375, 0.04432588958740234, 0.045480960845947264, 0.04471807861328125, 0.044186622619628906, 0.045297664642333986, 0.044695552825927735, 0.04350057601928711, 0.04468323135375977, 0.04449280166625977, 0.044639232635498044, 0.04491059112548828, 0.044813312530517575, 0.04418252944946289, 0.04338585662841797, 0.044734462738037106, 0.04500172805786133, 0.044886016845703126, 0.04492083358764649, 0.044918785095214846, 0.045174785614013675, 0.044916736602783204, 0.04516352081298828, 0.04485836791992188, 0.045241344451904295, 0.045162494659423826, 0.044938240051269535, 0.044685310363769534, 0.04491775894165039, 0.04488806533813477, 0.04640256118774414, 0.047131649017333986, 0.04563353729248047, 0.04468838500976562, 0.04481433486938476, 0.045042686462402344, 0.04504780960083008, 0.04482457733154297, 0.04489420700073242, 0.044780544281005856, 0.044834815979003906, 0.0448092155456543, 0.045274112701416014, 0.044957695007324217, 0.044913665771484375, 0.044978111267089844, 0.04424806213378906, 0.04446822357177734, 0.044875774383544925, 0.04502732849121094, 0.045517822265625, 0.04498944091796875, 0.045090816497802735, 0.04516454315185547, 0.0455362548828125, 0.045176830291748044, 0.04388351821899414, 0.04338483047485352, 0.04300697708129883, 0.04343603134155274, 0.043494400024414064, 0.043619327545166016, 0.043431934356689454, 0.04347903823852539, 0.04337868881225586, 0.043420673370361325, 0.04338380813598633, 0.04338790512084961, 0.04343603134155274, 0.04335103988647461, 0.04338995361328125, 0.04330495834350586, 0.043815937042236325, 0.043377662658691404, 0.0434442253112793, 0.043399166107177735, 0.043273216247558595, 0.043445247650146485, 0.0432281608581543, 0.043491329193115234, 0.04319027328491211, 0.043440128326416014, 0.04332032012939453, 0.04352511978149414, 0.04331827163696289, 0.0432988166809082, 0.04344627380371094, 0.04325785446166992, 0.04507852935791016, 0.04561510467529297, 0.04479590225219727, 0.044984321594238284, 0.04497305679321289, 0.04493414306640625, 0.04393164825439453, 0.04368998336791992, 0.04454502487182617, 0.04491059112548828, 0.0447088623046875, 0.045004798889160154, 0.04529663848876953, 0.04470272064208984, 0.044153854370117186, 0.04526182556152344, 0.04477644729614258, 0.04545843124389649, 0.045515777587890625, 0.044955646514892575, 0.04396646499633789, 
0.04334182357788086, 0.04335001754760742, 0.04319232177734375, 0.04283084869384766, 0.043096065521240234, 0.04323328018188476, 0.043292671203613284, 0.043514881134033206, 0.043338752746582034, 0.043433982849121096, 0.04640256118774414, 0.04576870346069336, 0.04616806411743164, 0.045080577850341794, 0.044935169219970705, 0.04711423873901367, 0.045917182922363284, 0.04506316757202149, 0.044916736602783204, 0.04501708984375, 0.0451143684387207, 0.045107200622558595, 0.04503039932250977, 0.04585574340820313, 0.04364799880981445, 0.043379711151123046, 0.043276287078857424, 0.043325439453125, 0.04345446395874023, 0.04327219009399414, 0.04319027328491211, 0.043363327026367186, 0.04449894332885742, 0.04506828689575195, 0.04526694488525391, 0.045267967224121096, 0.04501094436645508, 0.045123584747314455, 0.04489625549316406, 0.045338623046875, 0.04509183883666992, 0.04495667266845703, 0.0450252799987793, 0.04533760070800781, 0.04549222564697265, 0.04511948776245117, 0.04496384048461914, 0.04538060760498047, 0.04457779312133789, 0.04352204895019531, 0.04360396957397461, 0.043815937042236325, 0.04350668716430664, 0.043921409606933595, 0.04501708984375, 0.04498636627197266, 0.045282302856445314, 0.04409958267211914, 0.043512832641601565, 0.043464702606201173, 0.04337561416625976, 0.04348620986938476, 0.04659097671508789, 0.046159870147705076, 0.04568473434448242, 0.04556083297729492, 0.04524851226806641, 0.04542156982421875, 0.045044734954833986, 0.04530790328979492, 0.045041664123535156, 0.04501401519775391, 0.045211647033691404, 0.045026302337646484, 0.04503244781494141, 0.045369342803955076, 0.045259777069091796, 0.04494233703613281, 0.044988414764404294, 0.04507648086547852, 0.04495872116088867, 0.04473241424560547, 0.04476518249511719, 0.043253761291503906, 0.04336025619506836, 0.04489932632446289, 0.045134849548339843, 0.045244415283203124, 0.045146110534667966, 0.04495462417602539, 0.04496076965332031, 0.045445121765136716, 0.04491775894165039, 0.043853824615478515, 0.04352204895019531, 0.0433623046875, 0.04362956619262695, 0.04350259017944336, 0.04338483047485352, 0.04338790512084961, 0.04503244781494141, 0.04490854263305664, 0.044862464904785154, 0.04522598266601562, 0.04472217559814453, 0.04500582504272461, 0.044930049896240234, 0.044930049896240234, 0.04497100830078125, 0.04479283142089844, 0.04317900848388672, 0.04338175964355469, 0.04355788803100586, 0.043561985015869144, 0.04377190399169922, 0.04341862487792969, 0.04333465576171875, 0.0432988166809082, 0.04332134246826172, 0.04327526473999024, 0.04343603134155274, 0.043440128326416014, 0.04343091201782227, 0.04328857421875, 0.04334899139404297, 0.04375961685180664, 0.04339507293701172, 0.04332748794555664, 0.0432281608581543, 0.04335411071777344, 0.04337254333496094, 0.04316876983642578, 0.043377662658691404, 0.04339712142944336, 0.04333465576171875, 0.04334284973144531, 0.04377190399169922, 0.04496281433105469, 0.04394905471801758, 0.04327116775512695, 0.04505702209472656, 0.045246463775634765, 0.04509900665283203, 0.044165119171142575, 0.04379443359375, 0.04369715118408203, 0.04354355239868164, 0.04361523056030273, 0.04354048156738281, 0.043515903472900394, 0.04346879959106445, 0.04344627380371094, 0.04351180648803711, 0.043410430908203124, 0.04330393600463867, 0.04347187042236328, 0.043496448516845705, 0.044631038665771484, 0.04434124755859375, 0.04343500900268555, 0.04333772659301758, 0.04353023910522461, 0.04552294540405273, 0.04465663909912109, 0.04362649536132813, 0.04389068984985352, 0.044951553344726565, 0.04521062469482422, 
0.04361830520629883, 0.043545600891113284, 0.04358246231079101, 0.043703296661376956, 0.043584510803222655, 0.04356710433959961, 0.04346777725219726, 0.04352102279663086, 0.04333670425415039, 0.04337868881225586, 0.04340019226074219, 0.04349542236328125, 0.043412479400634765, 0.04348928070068359, 0.04342784118652344, 0.04516864013671875, 0.04493209457397461, 0.045267967224121096, 0.04558643341064453, 0.045290496826171874, 0.04438016128540039, 0.0435865592956543, 0.043361278533935545, 0.04333977508544922, 0.04335411071777344, 0.04467814254760742, 0.04535910415649414, 0.044027904510498046, 0.044988414764404294, 0.04544716644287109, 0.04535398483276367, 0.04497510528564453, 0.0449536018371582, 0.044521472930908204, 0.043509761810302736, 0.04532326507568359, 0.04475289535522461, 0.0435968017578125, 0.04461363220214844, 0.04477644729614258, 0.04545024108886719, 0.044698623657226565, 0.04340326309204102, 0.04337356948852539, 0.043338752746582034, 0.046704639434814454, 0.04645273590087891, 0.04561407852172852, 0.044895233154296874, 0.04483583831787109, 0.045110271453857424, 0.04488191986083984, 0.04564377593994141, 0.04534272003173828, 0.04406784057617188, 0.04326092910766602, 0.043410430908203124, 0.04359987258911133, 0.04363776016235352, 0.04343091201782227, 0.04354150390625, 0.04326604843139648, 0.043312126159667966, 0.04331417465209961, 0.043407360076904294, 0.043483135223388675, 0.043412479400634765, 0.04334796905517578, 0.04342988967895508, 0.043410430908203124, 0.04462899017333984, 0.046876670837402344, 0.04568064117431641, 0.044897281646728515, 0.04489625549316406, 0.0454205436706543, 0.045119518280029296, 0.045071327209472656, 0.0451932144165039, 0.04338585662841797, 0.04401766586303711, 0.04359884643554687, 0.043466751098632815, 0.04394598388671875, 0.04335411071777344, 0.043684864044189455, 0.04353638458251953, 0.04419071960449219, 0.045454334259033204, 0.0439736328125, 0.04506009674072266, 0.045028350830078126, 0.04504678344726563, 0.04513587188720703, 0.04500172805786133, 0.04499660873413086, 0.044943359375, 0.044075008392333984, 0.04512870407104492, 0.04492697525024414, 0.0454574089050293, 0.04531097412109375, 0.044646400451660156, 0.043325439453125, 0.043428863525390625, 0.04488806533813477, 0.04498739242553711, 0.04500172805786133, 0.04551065444946289, 0.045049854278564457, 0.04546559906005859, 0.04496486282348633, 0.044821502685546875, 0.04526694488525391, 0.044832767486572264, 0.04500172805786133, 0.04415999984741211, 0.04353126525878906, 0.04333465576171875, 0.04337152099609375, 0.04335308837890625, 0.043417598724365236, 0.04335615921020508, 0.045402111053466795, 0.04502425765991211, 0.044711936950683595, 0.044385280609130856, 0.04369612884521484, 0.04330188751220703, 0.043117568969726565, 0.04333260726928711, 0.043437057495117185, 0.04395315170288086, 0.0451409912109375, 0.044818431854248046, 0.04484710311889648, 0.044805118560791016, 0.04494131088256836, 0.04505395126342773, 0.045480960845947264, 0.04530176162719726, 0.04478464126586914, 0.04487372970581055, 0.04479487991333008, 0.04499558258056641, 0.04658380889892578, 0.0438476791381836, 0.04328243255615234, 0.043363327026367186, 0.0433438720703125, 0.043344894409179685, 0.04464332962036133, 0.04518502426147461, 0.045071361541748046, 0.0452044792175293, 0.04507340621948242, 0.045208576202392575, 0.04569702529907226, 0.04509900665283203, 0.04476518249511719, 0.045077503204345705, 0.045297664642333986, 0.04550143814086914, 0.04510515213012695, 0.04471091079711914, 0.04476416015625, 0.044393470764160156, 0.04614451217651367, 
0.047094783782958984, 0.04597452926635742, 0.04538265609741211, 0.04533760070800781, 0.044900352478027344, 0.044918785095214846, 0.04494131088256836, 0.04534579086303711, 0.045333503723144535, 0.04511948776245117, 0.045230079650878906, 0.044665855407714845, 0.04357120132446289, 0.04341862487792969, 0.04436275100708008, 0.04403814315795898, 0.045428737640380856, 0.04575743865966797, 0.04388454437255859, 0.04357529449462891, 0.04499660873413086, 0.04514815902709961, 0.04499456024169922, 0.04515020751953125, 0.04529151916503906, 0.043491329193115234, 0.043551742553710936, 0.04674764633178711, 0.046456832885742184, 0.046666751861572264, 0.04516044616699219, 0.04489420700073242, 0.0452229118347168, 0.045350910186767575, 0.044990463256835936, 0.04394291305541992, 0.04342169570922851, 0.043535358428955076, 0.04353945541381836, 0.04398080062866211, 0.04407910537719727, 0.0440002555847168, 0.0442716178894043, 0.04359884643554687, 0.04335308837890625, 0.043617279052734374, 0.04660019302368164, 0.04550143814086914, 0.04466483306884766, 0.04532633590698242, 0.045055999755859374, 0.04506726455688476, 0.04494847869873047, 0.04440576171875, 0.04396646499633789, 0.04334182357788086, 0.04352716827392578, 0.04523929595947265, 0.045090816497802735, 0.04492800140380859, 0.04470783996582031, 0.04483891296386719, 0.04475392150878906, 0.04468838500976562, 0.045243392944335936, 0.044905471801757815, 0.04474265670776367, 0.04330393600463867, 0.04341964721679688, 0.0438364143371582, 0.04369715118408203, 0.0434411506652832, 0.04336435317993164, 0.04338483047485352, 0.043974655151367184, 0.043853824615478515, 0.04335615921020508, 0.04357324981689453, 0.04356915283203125, 0.043568126678466795, 0.043390975952148435, 0.04346777725219726, 0.0439736328125, 0.0433889274597168, 0.043407360076904294, 0.043684864044189455, 0.04382515335083008, 0.04367462539672851, 0.04309708786010742, 0.043463680267333986, 0.04351692962646484, 0.04337868881225586, 0.04542771148681641, 0.04530585479736328, 0.04482559967041016, 0.04482355117797852, 0.044988414764404294, 0.04483174514770508, 0.04331827163696289, 0.04337561416625976, 0.043524097442626954, 0.04354150390625, 0.043483135223388675, 0.04346060943603516, 0.04389888000488281, 0.04336435317993164, 0.043377662658691404, 0.04350054550170898, 0.04339712142944336, 0.043515903472900394, 0.04313087844848633, 0.04358246231079101, 0.043908096313476565, 0.04532428741455078, 0.04552601623535156, 0.045039615631103515, 0.04503039932250977, 0.04545228958129883, 0.04498329544067383, 0.04498739242553711, 0.045118465423583984, 0.044128257751464846, 0.04478464126586914, 0.0450437126159668, 0.04505497741699219, 0.04505702209472656, 0.04320975875854492, 0.0437350082397461, 0.04365311813354492, 0.04364492797851562, 0.043437057495117185, 0.04354150390625, 0.04339199829101562, 0.04330393600463867, 0.043407360076904294, 0.043652095794677735, 0.04478883361816406, 0.045137825012207033, 0.04519424057006836, 0.04530995178222656, 0.04518195343017578, 0.045049854278564457, 0.04521881484985352, 0.045178878784179685, 0.044974079132080076, 0.045878273010253906, 0.04543078231811523, 0.04530278396606445, 0.04505497741699219, 0.04528025436401367, 0.04784230422973633, 0.046929920196533206, 0.04568064117431641, 0.044886016845703126, 0.04510003280639648, 0.0450437126159668, 0.044957695007324217, 0.0452229118347168, 0.045369342803955076, 0.04516659164428711, 0.044880897521972656, 0.04535910415649414, 0.044902400970458986, 0.04485529708862305, 0.04493107223510742, 0.044867584228515625, 
0.0451932144165039]",tokens/s,22.503070558183353,,,1,64,1,,, -4bit-awq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,Salesforce/codegen-6B-nl,Salesforce/codegen-6B-nl,cuda,0,42,,,True,,,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run - report = scenario.run(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run - self.run_model_loading_tracking(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking - backend.load() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load - self.load_transformers_model() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model - self.load_transformers_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights - self.load_transformers_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained - self.pretrained_model = self.automodel_loader.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3826, in from_pretrained - config = cls._autoset_attn_implementation( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1556, in _autoset_attn_implementation - cls._check_and_enable_flash_attn_2( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1647, in _check_and_enable_flash_attn_2 - raise ValueError( -ValueError: CodeGenForCausalLM does not support Flash Attention 2.0 yet. 
Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmpicd25i1b/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new - -",codegen,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,1,64,1,,, -4bit-awq-exllama-v1-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,facebook/opt-125m,facebook/opt-125m,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - self.load_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3907, in from_pretrained - hf_quantizer.postprocess_model(model) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/base.py"", line 195, in postprocess_model - return self._process_model_after_weight_loading(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/quantizer_awq.py"", line 107, in _process_model_after_weight_loading - model = post_init_awq_exllama_modules(model, self.quantization_config.exllama_config) - File ""/usr/local/lib/python3.10/dist-packages/transformers/integrations/awq.py"", line 462, in post_init_awq_exllama_modules - model = exllama_post_init(model) - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllama.py"", line 144, in exllama_post_init - submodule.post_init() - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllama.py"", line 77, in post_init - self.q4 = exl_ext.make_q4( -NameError: name 'exl_ext' is not defined - 
-",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,1,64,1,,, -4bit-awq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,Qwen/Qwen1.5-110B,Qwen/Qwen1.5-110B,cuda,0,42,,,True,,,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run - report = scenario.run(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run - self.run_model_loading_tracking(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking - backend.load() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load - self.load_transformers_model() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model - self.load_transformers_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights - self.load_transformers_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained - self.pretrained_model = self.automodel_loader.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4034, in from_pretrained - dispatch_model(model, **device_map_kwargs) - File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 494, in dispatch_model - model.to(device) - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 2905, in to - return super().to(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1174, in to - return self._apply(convert) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply - module._apply(fn) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply - module._apply(fn) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply - module._apply(fn) - [Previous line repeated 2 more times] - File 
""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 854, in _apply - self._buffers[key] = fn(buf) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1160, in convert - return t.to( -torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 32.00 MiB. GPU 0 has a total capacity of 22.18 GiB of which 8.50 MiB is free. Process 91284 has 22.17 GiB memory in use. Of the allocated memory 21.91 GiB is allocated by PyTorch, and 17.87 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) - -",qwen2,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,1,64,1,,, -4bit-awq-exllama-v1-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,B,B,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 304, in hf_raise_for_status - response.raise_for_status() - File ""/usr/local/lib/python3.10/dist-packages/requests/models.py"", line 1024, in raise_for_status - raise HTTPError(http_error_msg, response=self) -requests.exceptions.HTTPError: 404 Client Error: Not Found for url: https://huggingface.co/B/resolve/main/config.json - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1221, in hf_hub_download - return _hf_hub_download_to_cache_dir( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1325, in _hf_hub_download_to_cache_dir - _raise_on_head_call_error(head_call_error, force_download, local_files_only) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1823, in _raise_on_head_call_error - raise head_call_error - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1722, in _get_metadata_or_catch_error - metadata = 
get_hf_file_metadata(url=url, proxies=proxies, timeout=etag_timeout, headers=headers) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1645, in get_hf_file_metadata - r = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 372, in _request_wrapper - response = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 396, in _request_wrapper - hf_raise_for_status(response) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status - raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-669493fd-013e08995fdf1f4d0ea581b1;01e8675b-edd7-4148-a6f3-5dd1e864ce33) - -Repository Not Found for url: https://huggingface.co/B/resolve/main/config.json. -Please make sure you specified the correct `repo_id` and `repo_type`. -If you are trying to access a private or gated repo, make sure you are authenticated. - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 425, in cached_file - raise EnvironmentError( -OSError: B is not a local folder and is not a valid model identifier listed on 'https://huggingface.co/models' -If this is a private repository, make sure to pass a token having permission to this repo either by logging in with `huggingface-cli login` or by passing `token=` - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,1,64,1,,, 
-4bit-awq-exllama-v1-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,t,t,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 304, in hf_raise_for_status - response.raise_for_status() - File ""/usr/local/lib/python3.10/dist-packages/requests/models.py"", line 1024, in raise_for_status - raise HTTPError(http_error_msg, response=self) -requests.exceptions.HTTPError: 404 Client Error: Not Found for url: https://huggingface.co/t/resolve/main/config.json - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1221, in hf_hub_download - return _hf_hub_download_to_cache_dir( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1325, in _hf_hub_download_to_cache_dir - _raise_on_head_call_error(head_call_error, force_download, local_files_only) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1823, in _raise_on_head_call_error - raise head_call_error - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1722, in _get_metadata_or_catch_error - metadata = get_hf_file_metadata(url=url, proxies=proxies, timeout=etag_timeout, headers=headers) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1645, in get_hf_file_metadata - r = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 372, in _request_wrapper - response = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 396, in _request_wrapper - hf_raise_for_status(response) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status - raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 
Client Error. (Request ID: Root=1-669490e8-1ae525a45983b25275f3212e;8282a1b1-aac1-4d8b-b2cc-5b778ad71580) - -Repository Not Found for url: https://huggingface.co/t/resolve/main/config.json. -Please make sure you specified the correct `repo_id` and `repo_type`. -If you are trying to access a private or gated repo, make sure you are authenticated. - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 425, in cached_file - raise EnvironmentError( -OSError: t is not a local folder and is not a valid model identifier listed on 'https://huggingface.co/models' -If this is a private repository, make sure to pass a token having permission to this repo either by logging in with `huggingface-cli login` or by passing `token=` - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,1,64,1,,, -4bit-awq-exllama-v1-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,stabilityai/stablelm-base-alpha-3b,stabilityai/stablelm-base-alpha-3b,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File 
""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - self.load_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3907, in from_pretrained - hf_quantizer.postprocess_model(model) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/base.py"", line 195, in postprocess_model - return self._process_model_after_weight_loading(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/quantizer_awq.py"", line 107, in _process_model_after_weight_loading - model = post_init_awq_exllama_modules(model, self.quantization_config.exllama_config) - File ""/usr/local/lib/python3.10/dist-packages/transformers/integrations/awq.py"", line 462, in post_init_awq_exllama_modules - model = exllama_post_init(model) - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllama.py"", line 144, in exllama_post_init - submodule.post_init() - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllama.py"", line 77, in post_init - self.q4 = exl_ext.make_q4( -NameError: name 'exl_ext' is not defined - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,1,64,1,,, -4bit-awq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,Salesforce/codegen-16B-nl,Salesforce/codegen-16B-nl,cuda,0,42,,,True,,,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run - report = scenario.run(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run - 
self.run_model_loading_tracking(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking - backend.load() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load - self.load_transformers_model() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model - self.load_transformers_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights - self.load_transformers_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained - self.pretrained_model = self.automodel_loader.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3826, in from_pretrained - config = cls._autoset_attn_implementation( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1556, in _autoset_attn_implementation - cls._check_and_enable_flash_attn_2( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1647, in _check_and_enable_flash_attn_2 - raise ValueError( -ValueError: CodeGenForCausalLM does not support Flash Attention 2.0 yet. Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmpyks8c7ez/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new - -",codegen,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,1,64,1,,, -4bit-awq-exllama-v1-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,facebook/opt-6.7b,facebook/opt-6.7b,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in 
load_model_with_no_weights - self.load_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3907, in from_pretrained - hf_quantizer.postprocess_model(model) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/base.py"", line 195, in postprocess_model - return self._process_model_after_weight_loading(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/quantizer_awq.py"", line 107, in _process_model_after_weight_loading - model = post_init_awq_exllama_modules(model, self.quantization_config.exllama_config) - File ""/usr/local/lib/python3.10/dist-packages/transformers/integrations/awq.py"", line 462, in post_init_awq_exllama_modules - model = exllama_post_init(model) - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllama.py"", line 144, in exllama_post_init - submodule.post_init() - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllama.py"", line 77, in post_init - self.q4 = exl_ext.make_q4( -NameError: name 'exl_ext' is not defined - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,1,64,1,,, -4bit-awq-gemv-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,.,.,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - 
config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 373, in cached_file - raise EnvironmentError( -OSError: . does not appear to have a file named config.json. Checkout 'https://huggingface.co/./tree/None' for available files. - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemv-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,l,l,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 304, in hf_raise_for_status - response.raise_for_status() - File ""/usr/local/lib/python3.10/dist-packages/requests/models.py"", line 1024, in raise_for_status - raise HTTPError(http_error_msg, response=self) -requests.exceptions.HTTPError: 404 Client Error: Not Found for url: https://huggingface.co/l/resolve/main/config.json - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1221, in hf_hub_download - return _hf_hub_download_to_cache_dir( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1325, in _hf_hub_download_to_cache_dir - _raise_on_head_call_error(head_call_error, force_download, local_files_only) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1823, in _raise_on_head_call_error - raise head_call_error - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1722, in _get_metadata_or_catch_error - metadata = get_hf_file_metadata(url=url, proxies=proxies, timeout=etag_timeout, headers=headers) - File 
""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1645, in get_hf_file_metadata - r = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 372, in _request_wrapper - response = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 396, in _request_wrapper - hf_raise_for_status(response) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status - raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-669491a2-32161b7622c1adf138eafd32;918e0598-ac81-4e21-8a78-e5a774418c9a) - -Repository Not Found for url: https://huggingface.co/l/resolve/main/config.json. -Please make sure you specified the correct `repo_id` and `repo_type`. -If you are trying to access a private or gated repo, make sure you are authenticated. - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 425, in cached_file - raise EnvironmentError( -OSError: l is not a local folder and is not a valid model identifier listed on 'https://huggingface.co/models' -If this is a private repository, make sure to pass a token having permission to this repo either by logging in with `huggingface-cli login` or by passing `token=` - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemv-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,Qwen/Qwen1.5-32B,Qwen/Qwen1.5-32B,cuda,0,42,,,True,,,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD 
EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,qwen2,MB,17832.497152,21357.920256,0.0,20772.29056,20328.900608,s,1,14.2491611328125,14.2491611328125,0.0,14.2491611328125,14.2491611328125,14.2491611328125,14.2491611328125,[14.2491611328125],,kWh,8.741333717705655e-05,4.7893715678335785e-05,0.0001253170446978702,0.00026062409755326255,,MB,4436.164608,21821.390848,0.0,21174.943744,20640.73728,s,10,5.006998779296876,0.5006998779296875,0.00016618590777213625,0.5007086639404297,0.5009123413085937,0.5009257446289063,0.5009364672851563,"[0.5008215026855469, 0.5004599914550781, 0.5003928527832031, 0.500716552734375, 0.5007007751464844, 0.5007508544921875, 0.5009391479492188, 0.5006280822753906, 0.5006796569824219, 0.5009093627929687]",tokens/s,511.28432676780017,kWh,5.918419905730793e-06,3.2420787526634532e-06,3.4536610962598854e-05,4.369710962099311e-05,tokens/kWh,5858511.05989426,MB,4440.428544,21846.556672,0.0,21200.109568,20640.73984,s,10,41.15347802734375,4.115347802734375,0.035408619736762376,4.10353369140625,4.154489697265626,4.182166235351563,4.204307465820312,"[4.2098427734375, 4.10558740234375, 4.10941064453125, 4.093538818359375, 4.088434814453125, 4.10147998046875, 4.097609375, 4.14833935546875, 4.08993115234375, 4.1093037109375]",tokens/s,15.308548151906065,kWh,4.872912359440964e-05,2.670784660953215e-05,0.00019714454660441083,0.0002725815168083526,tokens/kWh,231123.52127783565,,s,630,41.150965740203866,0.06531899323841882,0.0009867483740785814,0.06493439865112305,0.06684221725463867,0.06751431579589844,0.06857057350158692,"[0.06560768127441406, 0.06506495666503906, 0.06480486297607421, 0.06769664001464844, 0.06826598358154297, 0.06642278289794921, 0.0648980484008789, 0.06722457885742188, 0.06583910369873047, 0.06740582275390625, 0.06751846313476563, 0.06732902526855469, 0.06761984252929687, 0.06766694641113281, 0.06722252655029297, 0.06762188720703124, 0.06675865936279297, 0.06807756805419922, 0.06771814727783203, 0.06738944244384766, 0.06739148712158204, 0.06518169403076172, 0.0671457290649414, 0.06775193786621093, 0.06763008117675781, 0.06698188781738282, 0.06608179473876953, 0.064932861328125, 0.06705971527099609, 0.06576025390625, 0.06712627410888672, 0.06784307098388671, 0.06832332611083984, 0.07042662048339844, 0.06866636657714843, 0.06654361724853515, 0.06752051544189454, 0.06747750091552734, 0.06742118072509766, 0.06476287841796875, 0.06737305450439453, 0.06466560363769532, 0.0660101089477539, 0.06743654632568359, 0.06589644622802734, 0.06488575744628906, 0.06479769897460938, 0.06668902587890625, 0.0652298583984375, 0.06539568328857422, 0.06702796936035156, 0.06791065979003906, 0.06887935638427735, 0.066302978515625, 0.06717440032958985, 0.06755225372314454, 0.06675558471679688, 0.06495539093017579, 0.06709657287597656, 0.0657223663330078, 0.06750924682617188, 0.06776934051513672, 0.06637158203125, 0.06535475158691406, 0.06696038055419921, 0.06578688049316406, 0.0679178237915039, 0.06767718505859376, 0.06758092498779297, 0.06730035400390624, 0.0674150390625, 0.06456217956542969, 0.06528921508789062, 0.06482022094726562, 0.06508236694335938, 0.06447103881835937, 0.06468300628662109, 0.06503321838378906, 0.0649144287109375, 0.06451712036132813, 0.0646789093017578, 0.06443315124511718, 0.06522367858886718, 0.06451200103759766, 0.06463795471191407, 0.06600396728515626, 0.0652390365600586, 0.06471577453613281, 0.06476493072509766, 
0.0647936019897461, 0.06574591827392579, 0.0660469741821289, 0.06657331085205079, 0.06503424072265625, 0.06501273345947266, 0.0646266860961914, 0.06481919860839844, 0.06530764770507813, 0.06480178833007813, 0.06522470092773437, 0.06504550170898438, 0.06500147247314453, 0.06481100463867187, 0.0646666259765625, 0.06475263977050781, 0.06404198455810547, 0.06433792114257812, 0.06462156677246093, 0.06432563018798829, 0.06456320190429687, 0.06484073638916016, 0.06488572692871093, 0.06434099578857422, 0.0651304931640625, 0.06411161804199218, 0.06463488006591797, 0.06568243408203125, 0.06539263916015625, 0.06482943725585938, 0.0647936019897461, 0.06426726531982421, 0.06491852569580078, 0.06485401916503906, 0.06490316772460937, 0.0650987548828125, 0.06492876434326172, 0.06523391723632813, 0.06479974365234376, 0.06481613159179687, 0.06477823638916015, 0.06475775909423828, 0.06489702606201173, 0.06487347412109375, 0.0648652801513672, 0.06438092803955078, 0.06490624237060547, 0.06543462371826173, 0.06565171051025391, 0.06456524658203125, 0.06551551818847656, 0.06562815856933593, 0.0645928955078125, 0.06470553588867188, 0.06404402923583985, 0.06408806610107422, 0.06443827056884766, 0.06493593597412109, 0.06459699249267578, 0.06548274993896484, 0.06490316772460937, 0.06416690826416016, 0.06508236694335938, 0.06504959869384766, 0.0654028778076172, 0.06734745788574219, 0.06553804779052734, 0.06504243469238281, 0.06477311706542968, 0.0649175033569336, 0.06476799774169922, 0.06850150299072266, 0.06549811553955077, 0.06485504150390625, 0.06490930938720703, 0.06467072296142579, 0.06477619171142578, 0.06500045013427734, 0.06475161743164062, 0.0651151351928711, 0.06482431793212891, 0.06602857971191406, 0.06591075134277344, 0.06917529296875, 0.06480076599121094, 0.06419455718994141, 0.06530457305908204, 0.06539878082275391, 0.06589030456542969, 0.06558515167236328, 0.06458367919921874, 0.06522675323486328, 0.06516429138183594, 0.06650777435302735, 0.06550118255615234, 0.06533631896972657, 0.067989501953125, 0.06519602966308594, 0.064753662109375, 0.06473420715332032, 0.06461542510986328, 0.06513766479492188, 0.06499737548828124, 0.0648089599609375, 0.0672368621826172, 0.06669004821777344, 0.06667366027832031, 0.06540185546875, 0.06492569732666016, 0.06570291137695312, 0.06506905364990234, 0.06729523468017579, 0.06517759704589844, 0.0647188491821289, 0.06495846557617188, 0.06433280181884765, 0.06491340637207031, 0.06501990509033204, 0.0643246078491211, 0.06499225616455079, 0.06472499084472656, 0.06468096160888671, 0.06463488006591797, 0.06507520294189453, 0.06483353424072266, 0.06465638732910156, 0.06464511871337891, 0.0646645736694336, 0.06469939422607422, 0.0645898208618164, 0.06415666961669922, 0.06445568084716796, 0.06485094451904297, 0.06467276763916016, 0.06481305694580078, 0.06636544036865234, 0.06457241821289063, 0.06435327911376953, 0.06460928344726563, 0.06462770843505859, 0.06483455657958985, 0.0673802261352539, 0.06567526245117188, 0.06448844909667968, 0.06449971008300781, 0.06443417358398437, 0.06449152374267578, 0.06505267333984376, 0.06433382415771484, 0.06467072296142579, 0.06521548461914063, 0.06536396789550782, 0.0648304672241211, 0.06471782684326172, 0.06461849975585937, 0.06459494018554687, 0.0645959701538086, 0.06458367919921874, 0.06455500793457031, 0.06457344055175782, 0.06467686462402343, 0.06438912200927735, 0.06504345703125, 0.06582886505126953, 0.06454374694824219, 0.06455398559570312, 0.06434611511230469, 0.06444544219970703, 0.064321533203125, 0.06440140533447265, 0.06486016082763672, 
0.064216064453125, 0.06405939483642578, 0.06515609741210937, 0.0653854751586914, 0.06473420715332032, 0.06476390075683594, 0.06505471801757813, 0.06466764831542969, 0.06459801483154297, 0.06464102172851563, 0.06568038177490235, 0.06488063812255859, 0.06485401916503906, 0.06467993927001953, 0.06528921508789062, 0.06445772552490234, 0.06473216247558594, 0.0645580825805664, 0.06631321716308594, 0.06463385772705078, 0.06416588592529297, 0.06448332977294922, 0.06477005004882813, 0.06465229034423828, 0.0646297607421875, 0.06460518646240235, 0.06514076995849609, 0.0649277114868164, 0.06460518646240235, 0.06469939422607422, 0.06448435211181641, 0.0645191650390625, 0.06411980438232422, 0.06496153259277344, 0.06510387420654297, 0.06476799774169922, 0.06463180541992188, 0.06458367919921874, 0.06636646270751953, 0.06614425659179687, 0.06462156677246093, 0.06478540802001953, 0.0643942413330078, 0.06467686462402343, 0.06462566375732422, 0.06481100463867187, 0.0648253402709961, 0.06476697540283204, 0.06521139526367188, 0.06635622406005859, 0.06495231628417969, 0.06799974060058593, 0.06586163330078125, 0.06411571502685547, 0.06514073944091797, 0.06498611450195313, 0.06593023681640625, 0.06548172760009766, 0.06472704315185547, 0.06441779327392579, 0.06474547576904296, 0.06397849655151368, 0.06397132873535157, 0.06439218902587891, 0.06456217956542969, 0.06447718048095703, 0.06492876434326172, 0.06454681396484375, 0.06514278411865235, 0.06503424072265625, 0.06472499084472656, 0.06411878204345703, 0.06796390533447266, 0.06564966583251954, 0.06514995574951171, 0.06498713684082032, 0.06482125091552735, 0.06634291076660156, 0.06549094390869141, 0.0662845458984375, 0.06566297912597656, 0.064574462890625, 0.06596915435791016, 0.06490214538574218, 0.06552780914306641, 0.06485708618164063, 0.06479872131347657, 0.06667878723144531, 0.06601522827148437, 0.06641766357421874, 0.06437478637695312, 0.06457241821289063, 0.06471167755126953, 0.0652072982788086, 0.06580633544921875, 0.0643420181274414, 0.06407782745361328, 0.06474752044677734, 0.06450688171386719, 0.06591180419921874, 0.06448025512695313, 0.0647567367553711, 0.06540799713134765, 0.06602649688720703, 0.06467788696289062, 0.06447615814208985, 0.0645406723022461, 0.06411264038085937, 0.06861824035644531, 0.06653440093994141, 0.06460518646240235, 0.06458777618408203, 0.06472601318359375, 0.0645560302734375, 0.0650035171508789, 0.06466560363769532, 0.06454579162597657, 0.064395263671875, 0.06505369567871094, 0.06459085083007812, 0.06522982025146484, 0.06474240112304687, 0.06533529663085938, 0.06483865356445312, 0.06472294616699219, 0.06564556884765625, 0.06578278350830079, 0.06524006652832032, 0.06546534729003907, 0.06559232330322265, 0.06447103881835937, 0.06474240112304687, 0.06499225616455079, 0.06468402862548828, 0.06461440277099609, 0.06466563415527343, 0.06542947387695312, 0.06550016021728515, 0.064827392578125, 0.06482841491699219, 0.06477107238769532, 0.06576025390625, 0.0649717788696289, 0.06437580871582031, 0.06482841491699219, 0.06482022094726562, 0.06481100463867187, 0.0647188491821289, 0.06555852508544922, 0.06478233337402343, 0.06496153259277344, 0.06479564666748047, 0.06485094451904297, 0.06508953857421874, 0.06470246124267579, 0.06488473510742188, 0.06580838775634766, 0.06545101165771484, 0.06487347412109375, 0.06483968353271484, 0.06489600372314454, 0.06461337280273438, 0.06471782684326172, 0.06476185607910157, 0.06476902770996094, 0.0644659194946289, 0.06463692474365235, 0.06457855987548829, 0.06507520294189453, 0.06468812561035156, 
0.06567935943603516, 0.06504550170898438, 0.06489190673828125, 0.06480691528320312, 0.06461440277099609, 0.06477823638916015, 0.06588313293457031, 0.06788301086425781, 0.06555443572998047, 0.06512127685546874, 0.0652390365600586, 0.06556671905517578, 0.06859878540039062, 0.06674432373046875, 0.06493695831298828, 0.06504550170898438, 0.06549913787841796, 0.0648622055053711, 0.06514278411865235, 0.06465023803710937, 0.06484480285644531, 0.06570188903808594, 0.06513152313232422, 0.06460825347900391, 0.06568243408203125, 0.06636441802978515, 0.06545203399658203, 0.0658892822265625, 0.06659174346923828, 0.06732697296142579, 0.0668753890991211, 0.06652108764648437, 0.06680268859863281, 0.06668185424804687, 0.06683853149414062, 0.06654361724853515, 0.06681190490722656, 0.0664115219116211, 0.06609305572509766, 0.06703411102294922, 0.06793318176269532, 0.06705458831787109, 0.0661934051513672, 0.06678323364257813, 0.06636544036865234, 0.06662451171875, 0.06608895874023438, 0.0665354232788086, 0.06626201629638671, 0.06623027038574218, 0.06625177764892579, 0.06680883026123047, 0.06657536315917968, 0.06639103698730468, 0.0648826904296875, 0.06500863647460937, 0.06489292907714844, 0.06485094451904297, 0.06469529724121094, 0.06603366088867188, 0.0654571533203125, 0.06500454711914062, 0.06495334625244141, 0.06470758056640626, 0.06501785278320313, 0.06459391784667969, 0.06528819274902344, 0.06446797180175781, 0.06500556945800781, 0.0649512939453125, 0.0667914276123047, 0.06492569732666016, 0.06467072296142579, 0.06454681396484375, 0.06556774139404296, 0.06455910491943359, 0.06479052734375, 0.06514482879638672, 0.06555238342285157, 0.06508748626708985, 0.06525030517578125, 0.06535475158691406, 0.06499839782714843, 0.0646666259765625, 0.06472601318359375, 0.06457036590576172, 0.06451609802246094, 0.0652564468383789, 0.06521446228027344, 0.06472396850585938, 0.06472806549072266, 0.06487347412109375, 0.06494822692871094, 0.06580838775634766, 0.06479564666748047, 0.06495539093017579, 0.06466867065429688, 0.06498918151855469, 0.06500863647460937, 0.06471065521240234, 0.06492876434326172, 0.06474547576904296, 0.06476799774169922, 0.06456320190429687, 0.06472499084472656, 0.06519705963134766, 0.06491238403320312, 0.06503936004638672, 0.06536294555664063, 0.06522982025146484, 0.0646789093017578, 0.06490930938720703, 0.06490316772460937, 0.06480281829833984, 0.06486016082763672, 0.0646645736694336, 0.06534451293945312, 0.06467174530029297, 0.0648642578125, 0.06461337280273438, 0.06508748626708985, 0.06473420715332032, 0.06484992218017578, 0.06487859344482422, 0.06600498962402344, 0.06466764831542969, 0.06544281768798828, 0.06501273345947266, 0.064110595703125, 0.06456422424316406, 0.0647014389038086, 0.06461746978759765, 0.06447309112548828, 0.0648089599609375, 0.06438195037841797, 0.06486835479736328, 0.06523187255859375, 0.0663367691040039, 0.06528717041015625, 0.06485606384277344, 0.06473625946044922, 0.06485913848876954, 0.06486732482910157, 0.0652042236328125, 0.06425190734863281, 0.06471475219726562, 0.06469324493408203, 0.06573363494873047, 0.0649512939453125, 0.06470758056640626, 0.06640332794189453, 0.06583193969726563, 0.06481919860839844, 0.06495846557617188, 0.06572646331787109, 0.06705254364013671, 0.06505983734130859, 0.06513970947265625, 0.06494515228271484, 0.06513970947265625, 0.06469427490234375, 0.06482431793212891, 0.06552371215820313, 0.06528819274902344, 0.06477721405029296, 0.06491852569580078, 0.06459699249267578, 0.06485606384277344, 0.06481203460693359, 0.06498099517822266, 
0.06509056091308593, 0.06474444580078124, 0.06490624237060547, 0.06486118316650391, 0.06514073944091797, 0.06469734191894531, 0.06460518646240235, 0.06464307403564454, 0.06862950134277344, 0.06584524536132813, 0.06515200042724609, 0.06464921569824218, 0.06473420715332032, 0.06614220428466797, 0.06521139526367188, 0.06493695831298828, 0.06525849914550781, 0.06514073944091797, 0.06698802947998046, 0.06510489654541016, 0.0651704330444336, 0.06518988800048828, 0.06512025451660156, 0.06682418823242188, 0.065617919921875, 0.06467993927001953, 0.06498406219482422, 0.06552780914306641, 0.06497586822509765, 0.06491852569580078]",tokens/s,15.309482746464434,,,,,,,, -4bit-awq-gemv-eager,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,openai-community/gpt2,openai-community/gpt2,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.0,,0.30.1,,,,1.19.2,,,,0.11.1,,,,,,,,,,,,,,,,,,,,,,,,,,,MB,1234.579456,1005.060096,0.0,358.612992,318.913024,s,21,0.18631392097473143,0.008872091474987211,0.000924222773129299,0.008636768341064454,0.008913311958312988,0.009082719802856446,0.012191328048706058,"[0.012968480110168457, 0.008715680122375488, 0.008649439811706542, 0.008653440475463868, 0.008547360420227051, 0.008636223793029784, 0.00852006435394287, 0.008628000259399414, 0.008578911781311035, 0.008601471900939942, 0.008556415557861328, 0.00872332763671875, 0.008607872009277344, 0.008735967636108398, 0.008636768341064454, 0.008690496444702149, 0.008913311958312988, 0.00862070369720459, 0.008642592430114747, 0.009082719802856446, 0.008604672431945801]",tokens/s,28854.526660566135,kWh,1.0560125751725471e-07,5.786425132276262e-08,2.2601994052193067e-07,3.89485449361948e-07,tokens/kWh,657277442.3778274,MB,1234.579456,1005.060096,0.0,358.612992,328.809472,s,21,10.324910400390625,0.4916624000186012,0.00544805281141677,0.49037628173828124,0.4947781066894531,0.5050043334960937,0.5079629760742188,"[0.50870263671875, 0.4930988159179687, 0.48819091796875, 0.4877738342285156, 0.4884733581542969, 0.48753598022460937, 0.488943115234375, 0.48698284912109374, 0.48965280151367185, 0.4866800231933594, 0.49037628173828124, 0.4889322509765625, 0.4905810546875, 0.48850283813476564, 0.4947781066894531, 0.49458047485351564, 0.4923774108886719, 0.4912550964355469, 0.49059906005859377, 0.49188916015625, 0.5050043334960937]",tokens/s,128.13670518147515,kWh,5.982969098851764e-06,3.278237305428838e-06,1.0177446715578437e-05,1.9438653119859037e-05,tokens/kWh,3240965.287643183,,s,1322,10.486766648292543,0.007932501246817355,0.0011072131495569327,0.007730175971984863,0.008002457332611084,0.008157030057907104,0.016623786373138428,"[0.011603967666625976, 0.010896384239196777, 0.009056256294250489, 0.008364031791687012, 0.007814144134521485, 0.007742464065551758, 0.0078919677734375, 0.007983104228973388, 0.007925759792327881, 0.007923711776733398, 0.007890944004058837, 0.00773529577255249, 0.00810598373413086, 0.007941120147705078, 0.007896063804626464, 0.00778547191619873, 0.0077209601402282715, 0.007713791847229004, 0.00801689624786377, 0.007914495944976807, 0.00795750379562378, 
0.007947264194488525, 0.007755775928497314, 0.007707647800445557, 0.007718912124633789, 0.0077281279563903805, 0.007827455997467042, 0.0077281279563903805, 0.007740416049957275, 0.00769536018371582, 0.007731200218200684, 0.0077281279563903805, 0.0077578239440917966, 0.007866432189941406, 0.007905216217041015, 0.007806975841522217, 0.007733280181884765, 0.007848959922790527, 0.007939040184020996, 0.007905280113220215, 0.007763967990875244, 0.007702527999877929, 0.007729152202606201, 0.0076871681213378906, 0.007738368034362793, 0.007699456214904785, 0.00774451208114624, 0.007715839862823487, 0.007705599784851074, 0.007665664196014404, 0.007705599784851074, 0.007772160053253174, 0.00832307243347168, 0.008374272346496582, 0.008364031791687012, 0.008137727737426758, 0.008676351547241211, 0.008647680282592773, 0.008061951637268066, 0.007941120147705078, 0.007804927825927735, 0.007699456214904785, 0.016753664016723634, 0.007749631881713868, 0.007865344047546387, 0.007974912166595459, 0.008176639556884765, 0.008267775535583496, 0.008225791931152344, 0.008252415657043457, 0.008347647666931152, 0.008347647666931152, 0.008121343612670898, 0.00808140754699707, 0.00795750379562378, 0.007962624073028564, 0.007906303882598878, 0.00773529577255249, 0.007738368034362793, 0.007763967990875244, 0.007729152202606201, 0.0077506561279296875, 0.0077281279563903805, 0.007724031925201416, 0.007824384212493896, 0.007721983909606934, 0.007715839862823487, 0.007718912124633789, 0.007739391803741455, 0.00787660789489746, 0.007906303882598878, 0.0078919677734375, 0.007863296031951903, 0.0077281279563903805, 0.007847936153411865, 0.007693312168121338, 0.007734272003173828, 0.007749631881713868, 0.007703551769256592, 0.007707680225372314, 0.007713791847229004, 0.00769532823562622, 0.007730175971984863, 0.007704576015472412, 0.007762944221496582, 0.007703551769256592, 0.007713791847229004, 0.007731200218200684, 0.0077209601402282715, 0.007721983909606934, 0.0076912641525268555, 0.007723008155822754, 0.007797760009765625, 0.007732223987579345, 0.007863296031951903, 0.007836671829223632, 0.007715839862823487, 0.007715839862823487, 0.0076984319686889645, 0.007693312168121338, 0.007702527999877929, 0.007692287921905518, 0.007733248233795166, 0.007749631881713868, 0.007714816093444824, 0.016701440811157226, 0.007828479766845703, 0.0077547521591186525, 0.007716864109039307, 0.007701504230499268, 0.007740416049957275, 0.007717887878417969, 0.007721983909606934, 0.007828479766845703, 0.007741471767425537, 0.007738336086273194, 0.007726079940795898, 0.00785100793838501, 0.007770112037658691, 0.007825407981872558, 0.0077506561279296875, 0.0077506561279296875, 0.007702527999877929, 0.007725056171417236, 0.007788544178009033, 0.007708672046661377, 0.007740416049957275, 0.00773529577255249, 0.0077209601402282715, 0.007741439819335938, 0.007802879810333252, 0.00779366397857666, 0.007837696075439453, 0.007726079940795898, 0.007718912124633789, 0.007726079940795898, 0.007703584194183349, 0.007741407871246338, 0.007717887878417969, 0.007705599784851074, 0.007717887878417969, 0.007699456214904785, 0.007767039775848389, 0.007716864109039307, 0.007705599784851074, 0.007756800174713135, 0.007726079940795898, 0.007723008155822754, 0.00773632001876831, 0.0077333121299743655, 0.007885759830474853, 0.00774348783493042, 0.007778304100036621, 0.007740416049957275, 0.007734335899353027, 0.0077424001693725585, 0.007721983909606934, 0.007734272003173828, 0.007772160053253174, 0.007711743831634522, 0.007724031925201416, 0.007748608112335205, 
0.007730175971984863, 0.0077281279563903805, 0.007767039775848389, 0.007715839862823487, 0.007709695816040039, 0.007712768077850342, 0.016694271087646484, 0.007724031925201416, 0.00775270414352417, 0.007763967990875244, 0.0077578239440917966, 0.007700479984283447, 0.0077281279563903805, 0.007669760227203369, 0.007833600044250488, 0.007715839862823487, 0.007715839862823487, 0.007729152202606201, 0.007787519931793213, 0.00773529577255249, 0.007711743831634522, 0.00774451208114624, 0.007697408199310303, 0.007684095859527588, 0.007714816093444824, 0.0076984319686889645, 0.007693312168121338, 0.007684095859527588, 0.007703551769256592, 0.007802879810333252, 0.007802879810333252, 0.007792640209197998, 0.00774348783493042, 0.007737343788146973, 0.007920639991760254, 0.007919616222381591, 0.007796735763549805, 0.007738431930541992, 0.007714752197265625, 0.0077209601402282715, 0.007729152202606201, 0.007764992237091065, 0.007960576057434082, 0.007717887878417969, 0.007692287921905518, 0.007679999828338623, 0.007699456214904785, 0.007741471767425537, 0.007738336086273194, 0.007733248233795166, 0.007748608112335205, 0.007697408199310303, 0.007705599784851074, 0.007739391803741455, 0.007689216136932373, 0.007723008155822754, 0.007702527999877929, 0.007710720062255859, 0.007726079940795898, 0.007693312168121338, 0.007741439819335938, 0.007713791847229004, 0.00773529577255249, 0.0076943359375, 0.007782400131225586, 0.007725056171417236, 0.007685120105743408, 0.007715839862823487, 0.007738368034362793, 0.016563199996948243, 0.0077547521591186525, 0.007727104187011719, 0.007756800174713135, 0.0076984319686889645, 0.00783564805984497, 0.007682047843933106, 0.00773632001876831, 0.0076984319686889645, 0.007762944221496582, 0.007725056171417236, 0.007751679897308349, 0.007710720062255859, 0.007872511863708496, 0.007729152202606201, 0.00774451208114624, 0.007726079940795898, 0.007779327869415284, 0.00779366397857666, 0.007710720062255859, 0.007713791847229004, 0.007693312168121338, 0.007716864109039307, 0.007709695816040039, 0.007707647800445557, 0.007715839862823487, 0.007712768077850342, 0.0076871681213378906, 0.00773529577255249, 0.007710720062255859, 0.007712768077850342, 0.007706624031066894, 0.0076902399063110355, 0.007711743831634522, 0.007706624031066894, 0.007689216136932373, 0.007766016006469726, 0.0076912641525268555, 0.007726079940795898, 0.007915520191192627, 0.00795750379562378, 0.007804927825927735, 0.007710720062255859, 0.007705599784851074, 0.007717887878417969, 0.007700479984283447, 0.0076943359375, 0.008194047927856446, 0.008091648101806641, 0.007954432010650634, 0.00774451208114624, 0.007711743831634522, 0.007692319869995117, 0.007700448036193848, 0.007789567947387695, 0.007700479984283447, 0.007714816093444824, 0.007712768077850342, 0.007706624031066894, 0.007718912124633789, 0.007716864109039307, 0.0077292160987854006, 0.007698368072509765, 0.016657407760620118, 0.007730175971984863, 0.007724031925201416, 0.0078438401222229, 0.00769536018371582, 0.007745535850524903, 0.007696383953094482, 0.007697408199310303, 0.007726079940795898, 0.00785203218460083, 0.00775270414352417, 0.007676928043365478, 0.007704576015472412, 0.00774348783493042, 0.007731200218200684, 0.007701504230499268, 0.007804927825927735, 0.007708672046661377, 0.007710720062255859, 0.007707647800445557, 0.007711743831634522, 0.007700479984283447, 0.007700479984283447, 0.007699456214904785, 0.007712768077850342, 0.007697472095489502, 0.007697343826293945, 0.0076871681213378906, 0.007741471767425537, 0.007927775859832764, 
0.00779366397857666, 0.00778547191619873, 0.007738368034362793, 0.00773529577255249, 0.007708672046661377, 0.007841792106628418, 0.007705599784851074, 0.007713791847229004, 0.007704576015472412, 0.007732223987579345, 0.007716928005218506, 0.007747519969940186, 0.007733248233795166, 0.007702527999877929, 0.007742464065551758, 0.007692287921905518, 0.007708672046661377, 0.007714848041534424, 0.007716832160949707, 0.007709695816040039, 0.007716864109039307, 0.0077209601402282715, 0.007709695816040039, 0.0077506561279296875, 0.0077578239440917966, 0.007712768077850342, 0.007811071872711181, 0.007751679897308349, 0.007782400131225586, 0.007685120105743408, 0.007799808025360107, 0.007716864109039307, 0.007768064022064209, 0.016578559875488282, 0.0077199358940124516, 0.007724031925201416, 0.007730175971984863, 0.007751679897308349, 0.007838719844818116, 0.007721983909606934, 0.007738368034362793, 0.007946239948272706, 0.007903232097625732, 0.007706624031066894, 0.007726079940795898, 0.007748608112335205, 0.007771135807037354, 0.007941120147705078, 0.00794220781326294, 0.007843776226043701, 0.007714816093444824, 0.007815167903900147, 0.007742464065551758, 0.007722015857696533, 0.007703519821166992, 0.007739391803741455, 0.007772160053253174, 0.007702527999877929, 0.007701504230499268, 0.007732223987579345, 0.007699456214904785, 0.008051775932312011, 0.008126399993896484, 0.007746560096740723, 0.0077281279563903805, 0.007717887878417969, 0.0076943359375, 0.0076871681213378906, 0.007692287921905518, 0.007706624031066894, 0.007808000087738037, 0.0077209601402282715, 0.007701504230499268, 0.007711743831634522, 0.0078438401222229, 0.007739391803741455, 0.007706624031066894, 0.007716864109039307, 0.007707647800445557, 0.007768064022064209, 0.007886847972869874, 0.007692287921905518, 0.007712768077850342, 0.007726079940795898, 0.007709695816040039, 0.007706624031066894, 0.00773529577255249, 0.007704576015472412, 0.00773529577255249, 0.007718912124633789, 0.007711743831634522, 0.007676928043365478, 0.0076871681213378906, 0.007708703994750977, 0.007705567836761474, 0.007707647800445557, 0.016647167205810547, 0.007724031925201416, 0.007713791847229004, 0.007688191890716553, 0.007745600223541259, 0.007724991798400879, 0.007704576015472412, 0.007768064022064209, 0.007717887878417969, 0.007707647800445557, 0.007766016006469726, 0.007725056171417236, 0.00769536018371582, 0.00769536018371582, 0.007714816093444824, 0.007707647800445557, 0.007798783779144287, 0.00790118408203125, 0.00781004810333252, 0.0076574721336364745, 0.007778304100036621, 0.007678976058959961, 0.007707647800445557, 0.0077209601402282715, 0.007697408199310303, 0.007703551769256592, 0.0076902399063110355, 0.007699456214904785, 0.007769087791442871, 0.0077844481468200685, 0.007714816093444824, 0.007693312168121338, 0.0077209601402282715, 0.007701504230499268, 0.0077578239440917966, 0.007845888137817383, 0.007723008155822754, 0.007715839862823487, 0.007715839862823487, 0.007713791847229004, 0.0076943359375, 0.007692287921905518, 0.007703551769256592, 0.0076943359375, 0.007688191890716553, 0.007770112037658691, 0.0076912641525268555, 0.007683072090148926, 0.007733248233795166, 0.0076984319686889645, 0.007782400131225586, 0.007789567947387695, 0.007717919826507568, 0.007701471805572509, 0.007716864109039307, 0.007721983909606934, 0.007721983909606934, 0.007702527999877929, 0.0077547521591186525, 0.007731200218200684, 0.0076943359375, 0.007699456214904785, 0.007689216136932373, 0.016616479873657226, 0.007721951961517334, 0.0076984319686889645, 
0.007682079792022705, 0.0077536959648132325, 0.007910399913787843, 0.0077199358940124516, 0.007729152202606201, 0.007723008155822754, 0.007775231838226319, 0.007875584125518798, 0.007718912124633789, 0.007697408199310303, 0.007702527999877929, 0.007699456214904785, 0.007777279853820801, 0.008135680198669434, 0.007952383995056152, 0.007699456214904785, 0.007688191890716553, 0.0076912641525268555, 0.007688191890716553, 0.007723008155822754, 0.007730175971984863, 0.007710720062255859, 0.00773529577255249, 0.007718912124633789, 0.0076912641525268555, 0.007692319869995117, 0.0076973757743835445, 0.007742464065551758, 0.007873536109924317, 0.007718912124633789, 0.008615936279296875, 0.007956480026245117, 0.007966720104217529, 0.007947264194488525, 0.007872511863708496, 0.0077608962059021, 0.007699456214904785, 0.007715839862823487, 0.00769536018371582, 0.007725056171417236, 0.007732223987579345, 0.007871488094329833, 0.007798783779144287, 0.0077209601402282715, 0.007711743831634522, 0.007717887878417969, 0.0077281279563903805, 0.007717887878417969, 0.007701504230499268, 0.007701504230499268, 0.007703551769256592, 0.007751679897308349, 0.007730175971984863, 0.007711743831634522, 0.0076984319686889645, 0.007762944221496582, 0.007710720062255859, 0.00773632001876831, 0.007712800025939942, 0.007716832160949707, 0.01661030387878418, 0.007712768077850342, 0.007726079940795898, 0.007716864109039307, 0.007732223987579345, 0.007729152202606201, 0.007705599784851074, 0.007732223987579345, 0.007708672046661377, 0.007714816093444824, 0.007675903797149658, 0.007910399913787843, 0.007725056171417236, 0.007729152202606201, 0.007730175971984863, 0.007763967990875244, 0.007731200218200684, 0.007730175971984863, 0.0076871681213378906, 0.007688191890716553, 0.007684095859527588, 0.007711743831634522, 0.0076687359809875484, 0.007706624031066894, 0.007774208068847656, 0.007678976058959961, 0.007700479984283447, 0.0076902399063110355, 0.007696447849273681, 0.007699391841888428, 0.007823359966278077, 0.007854080200195313, 0.007693312168121338, 0.007725056171417236, 0.007701504230499268, 0.00783564805984497, 0.007677951812744141, 0.007689216136932373, 0.007685120105743408, 0.007682047843933106, 0.007734272003173828, 0.007696383953094482, 0.007682047843933106, 0.007710720062255859, 0.007696383953094482, 0.007702527999877929, 0.0076984319686889645, 0.00775270414352417, 0.007681024074554443, 0.007682047843933106, 0.007686143875122071, 0.007697408199310303, 0.0077199358940124516, 0.007731200218200684, 0.007727104187011719, 0.007754784107208252, 0.0077209281921386715, 0.007737343788146973, 0.007764992237091065, 0.007710720062255859, 0.007709695816040039, 0.007701504230499268, 0.007770112037658691, 0.016547840118408205, 0.007730175971984863, 0.007739391803741455, 0.007730175971984863, 0.007729152202606201, 0.00789299201965332, 0.0077619199752807615, 0.007703551769256592, 0.007711743831634522, 0.007745535850524903, 0.007731200218200684, 0.0078438401222229, 0.007710720062255859, 0.0077619199752807615, 0.007710720062255859, 0.007699456214904785, 0.0077281279563903805, 0.008186944007873535, 0.008094655990600585, 0.007806975841522217, 0.007730175971984863, 0.00773529577255249, 0.007699456214904785, 0.0077199358940124516, 0.007721983909606934, 0.007737343788146973, 0.007670783996582031, 0.007726079940795898, 0.007745535850524903, 0.007716864109039307, 0.007806975841522217, 0.007882751941680909, 0.007704576015472412, 0.007703551769256592, 0.0077209601402282715, 0.007707647800445557, 0.007676928043365478, 0.008251456260681153, 
0.008133567810058594, 0.007824384212493896, 0.007700479984283447, 0.0076984319686889645, 0.0077209601402282715, 0.007747583866119385, 0.007709695816040039, 0.007673855781555176, 0.007715839862823487, 0.007702527999877929, 0.007711743831634522, 0.008233983993530274, 0.007969791889190675, 0.007902207851409913, 0.00793497610092163, 0.007878655910491944, 0.007746560096740723, 0.007704576015472412, 0.007705599784851074, 0.007717887878417969, 0.007718912124633789, 0.007718944072723389, 0.0077291841506958005, 0.00770246410369873, 0.007727104187011719, 0.01676288032531738, 0.007725056171417236, 0.007721983909606934, 0.007742464065551758, 0.00769536018371582, 0.007717887878417969, 0.007709760189056397, 0.007786431789398193, 0.007712768077850342, 0.007718912124633789, 0.007725056171417236, 0.007823359966278077, 0.007729152202606201, 0.007708672046661377, 0.007699456214904785, 0.007780352115631104, 0.007726079940795898, 0.007726079940795898, 0.007740416049957275, 0.007729152202606201, 0.007700479984283447, 0.007715839862823487, 0.007773183822631836, 0.007751679897308349, 0.0077199358940124516, 0.007716864109039307, 0.007704576015472412, 0.007727104187011719, 0.007710720062255859, 0.007731200218200684, 0.007723008155822754, 0.007889920234680176, 0.00790937614440918, 0.008008735656738281, 0.00790012788772583, 0.007946239948272706, 0.00785100793838501, 0.007679999828338623, 0.007686143875122071, 0.007853055953979492, 0.00769536018371582, 0.007714816093444824, 0.007721983909606934, 0.007976960182189942, 0.007935999870300293, 0.007832575798034667, 0.007708672046661377, 0.007726079940795898, 0.007707647800445557, 0.007676928043365478, 0.007708672046661377, 0.00773632001876831, 0.007734272003173828, 0.007734272003173828, 0.007725056171417236, 0.007711743831634522, 0.00774451208114624, 0.007724031925201416, 0.00780185604095459, 0.007727104187011719, 0.0077281279563903805, 0.007778304100036621, 0.007770112037658691, 0.01660211181640625, 0.007740416049957275, 0.007758848190307617, 0.007717887878417969, 0.0076984319686889645, 0.007717887878417969, 0.007723008155822754, 0.007696383953094482, 0.00782643222808838, 0.0076902399063110355, 0.007689216136932373, 0.007712768077850342, 0.00794316816329956, 0.007795711994171142, 0.007724031925201416, 0.007756800174713135, 0.007738368034362793, 0.007890944004058837, 0.007928832054138184, 0.007711743831634522, 0.007721983909606934, 0.007827455997467042, 0.007703551769256592, 0.007713791847229004, 0.007709695816040039, 0.007723008155822754, 0.007705599784851074, 0.007701504230499268, 0.00779366397857666, 0.007730175971984863, 0.007710720062255859, 0.007721983909606934, 0.00773632001876831, 0.007712768077850342, 0.007732223987579345, 0.007749695777893067, 0.007699391841888428, 0.007712768077850342, 0.00769536018371582, 0.007714816093444824, 0.007675903797149658, 0.007679999828338623, 0.007706624031066894, 0.007713791847229004, 0.007707647800445557, 0.007725056171417236, 0.0077199358940124516, 0.0077199358940124516, 0.007711743831634522, 0.007701504230499268, 0.008041472434997558, 0.007925759792327881, 0.007964672088623047, 0.008812543869018554, 0.00818892765045166, 0.007971839904785156, 0.007931903839111328, 0.00797388792037964, 0.007821311950683594, 0.00773632001876831, 0.007737343788146973, 0.0076984319686889645, 0.0077506561279296875, 0.016773120880126953, 0.007713791847229004, 0.007703551769256592, 0.007712768077850342, 0.0077199358940124516, 0.007724031925201416, 0.007887872219085693, 0.007883776187896728, 0.007776256084442139, 0.0076943359375, 0.007706655979156494, 
0.007736288070678711, 0.007830527782440186, 0.0077619199752807615, 0.0077281279563903805, 0.007740416049957275, 0.007749631881713868, 0.007731200218200684, 0.007759871959686279, 0.007758848190307617, 0.007709695816040039, 0.0077209601402282715, 0.007706624031066894, 0.007707647800445557, 0.007697408199310303, 0.007702527999877929, 0.0077209601402282715, 0.00769536018371582, 0.007708672046661377, 0.007721983909606934, 0.008154111862182617, 0.008143872261047362, 0.007910399913787843, 0.007795711994171142, 0.007726079940795898, 0.007689280033111572, 0.007698368072509765, 0.007763967990875244, 0.007821311950683594, 0.007713791847229004, 0.007711743831634522, 0.007733248233795166, 0.007748608112335205, 0.007656447887420655, 0.00775270414352417, 0.007725056171417236, 0.0077281279563903805, 0.007710720062255859, 0.007682047843933106, 0.007711743831634522, 0.007732223987579345, 0.007700479984283447, 0.007749631881713868, 0.00773632001876831, 0.007706624031066894, 0.007697408199310303, 0.00774348783493042, 0.00773632001876831, 0.007778304100036621, 0.00773632001876831, 0.007763967990875244, 0.007724031925201416, 0.007725056171417236, 0.01663488006591797, 0.0077281279563903805, 0.0077292160987854006, 0.00771782398223877, 0.007703551769256592, 0.007724031925201416, 0.007721983909606934, 0.007717887878417969, 0.007869440078735352, 0.007717887878417969, 0.007703551769256592, 0.007697408199310303, 0.00781824016571045, 0.007864319801330566, 0.007912447929382324, 0.007799808025360107, 0.007717887878417969, 0.007748608112335205, 0.007753727912902832, 0.007782400131225586, 0.007894015789031983, 0.007772160053253174, 0.007773183822631836, 0.007839744091033935, 0.007825407981872558, 0.007860223770141601, 0.007705599784851074, 0.007798783779144287, 0.007741439819335938, 0.007748608112335205, 0.007869440078735352, 0.0077209601402282715, 0.007725056171417236, 0.007706624031066894, 0.0076984319686889645, 0.007726079940795898, 0.007718912124633789, 0.007711743831634522, 0.0077547521591186525, 0.0077281279563903805, 0.007716864109039307, 0.0076943359375, 0.007706624031066894, 0.007708735942840576, 0.008207296371459961, 0.00828006362915039, 0.008375295639038086, 0.008964159965515136, 0.008193984031677246, 0.008244223594665527, 0.008076288223266602, 0.008083456039428711, 0.008111104011535645, 0.008119296073913575, 0.007963647842407226, 0.007763967990875244, 0.007849984169006348, 0.007905344009399413, 0.00797382402420044, 0.007958528041839599, 0.007767039775848389, 0.007678976058959961, 0.007705599784851074, 0.016735231399536133, 0.007749631881713868, 0.007739391803741455, 0.007732223987579345, 0.007741439819335938, 0.007729152202606201, 0.007734272003173828, 0.007770112037658691, 0.007745535850524903, 0.007739391803741455, 0.0076912641525268555, 0.007707680225372314, 0.0077803201675415035, 0.007710720062255859, 0.007718912124633789, 0.00785920000076294, 0.007742464065551758, 0.007711743831634522, 0.007757855892181397, 0.007923679828643799, 0.00797388792037964, 0.007967743873596191, 0.007703551769256592, 0.007781375885009765, 0.007738368034362793, 0.0077199358940124516, 0.0077199358940124516, 0.00773632001876831, 0.007770112037658691, 0.007689216136932373, 0.007716864109039307, 0.00784281587600708, 0.007911424160003662, 0.007947264194488525, 0.007904255867004394, 0.00787766408920288, 0.007705567836761474, 0.007828479766845703, 0.007711743831634522, 0.007731200218200684, 0.007724031925201416, 0.007885824203491211, 0.0077281279563903805, 0.007792640209197998, 0.00790937614440918, 0.007910399913787843, 
0.00787660789489746, 0.007709695816040039, 0.007669760227203369, 0.007706655979156494, 0.007722976207733154, 0.007747583866119385, 0.009075712203979493, 0.0091146240234375, 0.008141823768615723, 0.007981056213378907, 0.007968768119812012, 0.007946239948272706, 0.008045568466186523, 0.007699456214904785, 0.00791756820678711, 0.00808243179321289, 0.007932928085327149, 0.01699430465698242, 0.007724031925201416, 0.007711743831634522, 0.007674880027770996, 0.007733248233795166, 0.007865344047546387, 0.008018943786621094, 0.007984127998352051, 0.007921664237976075, 0.0078919677734375, 0.007894015789031983, 0.007947264194488525, 0.0077209601402282715, 0.007711743831634522, 0.007716864109039307, 0.007726079940795898, 0.008037376403808593, 0.008160256385803222, 0.008114175796508789, 0.007946239948272706, 0.007955455780029297, 0.007880703926086426, 0.007709695816040039, 0.0076902399063110355, 0.007707647800445557, 0.007819263935089112, 0.0077066879272460935, 0.0077127041816711425, 0.007713791847229004, 0.007715839862823487, 0.007741439819335938, 0.007872511863708496, 0.008002559661865234, 0.007911424160003662, 0.007808000087738037, 0.007950335979461669, 0.007971839904785156, 0.007889920234680176, 0.007895040035247802, 0.007922688007354736, 0.007844863891601562, 0.007697408199310303, 0.007679999828338623, 0.007716864109039307, 0.007702527999877929, 0.0077547521591186525, 0.007723008155822754, 0.007700479984283447, 0.0076943359375, 0.007708672046661377, 0.00769536018371582, 0.007707647800445557, 0.0076902399063110355, 0.0076943359375, 0.007699456214904785, 0.007935999870300293, 0.007902207851409913, 0.007918591976165772, 0.00793398380279541, 0.007732192039489746, 0.007676928043365478, 0.007679999828338623, 0.007780352115631104, 0.016642080307006837, 0.007711711883544922, 0.007697408199310303, 0.007676928043365478, 0.007706624031066894, 0.007731200218200684, 0.007718976020812988, 0.00803116798400879, 0.007894015789031983, 0.007731200218200684, 0.007890944004058837, 0.008027135848999023, 0.008517631530761719, 0.007713791847229004, 0.007714816093444824, 0.00786947202682495, 0.007754720211029052, 0.007733248233795166, 0.007702527999877929, 0.007700479984283447, 0.007743519783020019, 0.007692255973815918, 0.007667712211608887, 0.007686143875122071, 0.007748608112335205, 0.007677951812744141, 0.007717887878417969, 0.007771135807037354, 0.007829504013061523, 0.007766016006469726, 0.00800153636932373, 0.007898111820220948, 0.007880703926086426, 0.007862271785736084, 0.007678016185760498, 0.007819200038909912, 0.007671840190887451, 0.007682015895843506, 0.007673855781555176, 0.007704576015472412, 0.0076871681213378906, 0.007699456214904785, 0.007682047843933106, 0.007710720062255859, 0.007718944072723389, 0.0076902079582214355, 0.007753727912902832, 0.007715839862823487, 0.007767039775848389, 0.007705599784851074, 0.007976960182189942, 0.007907328128814697, 0.007895040035247802, 0.007870463848114014, 0.008042495727539062, 0.007890975952148438, 0.007929823875427245, 0.007815167903900147, 0.008281087875366211, 0.007686143875122071, 0.007705599784851074, 0.007724031925201416, 0.00773632001876831, 0.01665023994445801, 0.0076984319686889645, 0.007708672046661377, 0.007708672046661377, 0.007717887878417969, 0.007837696075439453, 0.007702527999877929, 0.00774451208114624, 0.007753727912902832, 0.007738368034362793, 0.007723008155822754, 0.007798783779144287, 0.007696383953094482, 0.007700479984283447, 0.007701504230499268, 0.007718912124633789, 0.007715839862823487, 0.007762944221496582, 0.00785203218460083, 
0.007715839862823487, 0.007769087791442871, 0.007730175971984863, 0.007700479984283447, 0.0077199358940124516, 0.007688191890716553, 0.007705599784851074, 0.0076943359375, 0.007681024074554443, 0.0076984319686889645, 0.007692287921905518, 0.007710720062255859, 0.007753727912902832, 0.00773529577255249, 0.007738368034362793, 0.007740416049957275, 0.007705599784851074, 0.0077281279563903805, 0.007871488094329833, 0.007733248233795166, 0.007703551769256592, 0.007703551769256592, 0.007701504230499268, 0.007677951812744141, 0.007689216136932373, 0.007725056171417236, 0.007723008155822754, 0.007781375885009765, 0.007701504230499268, 0.007701504230499268, 0.00852684783935547, 0.0082227201461792, 0.007988224029541016, 0.00809881591796875, 0.007950335979461669, 0.007958528041839599, 0.007836671829223632, 0.007874559879302979, 0.007763967990875244, 0.007967743873596191, 0.0078438401222229, 0.007737343788146973, 0.007927807807922363, 0.00799948787689209, 0.017304576873779298, 0.0077199358940124516, 0.007718912124633789, 0.007681024074554443, 0.0076871681213378906, 0.007727104187011719, 0.0076984319686889645, 0.007781375885009765, 0.007724031925201416, 0.007789567947387695, 0.007988224029541016, 0.008054783821105957, 0.007925759792327881, 0.00790937614440918, 0.00774348783493042, 0.007699456214904785, 0.00774348783493042, 0.007709695816040039, 0.007708672046661377, 0.007677951812744141, 0.00774348783493042, 0.007771135807037354, 0.007776256084442139, 0.007746560096740723, 0.00773529577255249, 0.007714816093444824, 0.007729152202606201, 0.00793497610092163, 0.007860223770141601, 0.007898111820220948, 0.007904255867004394, 0.007738368034362793, 0.00795244789123535, 0.007912415981292725, 0.0076861119270324706, 0.00797388792037964, 0.007954432010650634, 0.007786496162414551, 0.007844863891601562, 0.007825407981872558, 0.007705599784851074, 0.007705599784851074, 0.007717887878417969, 0.00774451208114624, 0.007718912124633789, 0.0076912641525268555, 0.00773632001876831, 0.007782400131225586, 0.007776256084442139, 0.007937024116516114, 0.007910399913787843, 0.007729152202606201, 0.00830668830871582, 0.00813158416748047, 0.007849984169006348, 0.007703551769256592, 0.007739391803741455, 0.007732223987579345, 0.008006655693054199, 0.007778304100036621, 0.00773632001876831, 0.0076984319686889645, 0.007703551769256592, 0.016625728607177735, 0.007673791885375977, 0.00773529577255249, 0.0076912641525268555, 0.007684095859527588, 0.007864319801330566, 0.007684095859527588, 0.007782400131225586, 0.008040448188781739, 0.007699456214904785, 0.007816192150115966, 0.00787660789489746, 0.007726079940795898, 0.0076912641525268555, 0.007709695816040039, 0.007703551769256592, 0.007727104187011719, 0.007713791847229004, 0.007734272003173828, 0.007740416049957275, 0.007954432010650634, 0.008110079765319824, 0.008164352416992187, 0.008158207893371582, 0.008134655952453614, 0.008079360008239746, 0.008034303665161132, 0.008069120407104492, 0.008080384254455567, 0.008045568466186523, 0.008086527824401855, 0.008064000129699708, 0.008039423942565918, 0.008177663803100586, 0.008115232467651367, 0.008054752349853516, 0.008057855606079101, 0.00807423973083496, 0.008069120407104492, 0.00808243179321289, 0.008110079765319824, 0.008058879852294922, 0.008219648361206054, 0.008138751983642578, 0.008049663543701171, 0.008075263977050781, 0.008070143699645996, 0.008043519973754883, 0.008030207633972167, 0.008031231880187988, 0.008062975883483887, 0.008071167945861817, 0.008064000129699708, 0.008061951637268066, 0.00809881591796875, 
0.008316927909851075, 0.008250368118286134, 0.00838963222503662, 0.008570879936218261, 0.008749055862426757, 0.008308735847473145, 0.008157183647155761, 0.0081080961227417]",tokens/s,126.06364233490866,,,,,,,, -4bit-awq-gemv-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,Qwen/Qwen1.5-4B,Qwen/Qwen1.5-4B,cuda,0,42,,,True,,,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run - report = scenario.run(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run - self.run_text_generation_memory_tracking(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 205, in run_text_generation_memory_tracking - _ = backend.generate(self.inputs, self.config.generate_kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context - return func(*args, **kwargs) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 454, in generate - return self.pretrained_model.generate(**inputs, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context - return func(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate - result = self._sample( - File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample - outputs = self(**model_inputs, return_dict=True) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 1104, in forward - outputs = self.model( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 915, in forward - layer_outputs = decoder_layer( 
- File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 655, in forward - hidden_states, self_attn_weights, present_key_value = self.self_attn( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 335, in forward - attn_weights = nn.functional.softmax(attn_weights, dim=-1, dtype=torch.float32).to(query_states.dtype) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/functional.py"", line 1890, in softmax - ret = input.softmax(dim, dtype=dtype) -RuntimeError: CUDA error: an illegal memory access was encountered -CUDA kernel errors might be asynchronously reported at some other API call, so the stacktrace below might be incorrect. -For debugging consider passing CUDA_LAUNCH_BLOCKING=1 -Compile with `TORCH_USE_CUDA_DSA` to enable device-side assertions. - - -",qwen2,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemv-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,Qwen/Qwen1.5-0.5B,Qwen/Qwen1.5-0.5B,cuda,0,42,,,True,,,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,qwen2,MB,1333.014528,1292.36992,0.0,706.740224,675.13344,s,1,7.6254833984375,7.6254833984375,0.0,7.6254833984375,7.6254833984375,7.6254833984375,7.6254833984375,[7.6254833984375],,kWh,9.990961554178234e-06,5.460104368617458e-06,1.4473067133979889e-05,2.9924133056775582e-05,,MB,1582.522368,1642.594304,0.0,996.1472,942.733312,s,10,0.27663871955871583,0.027663871955871583,0.0011222434405430129,0.027161983489990237,0.0287592414855957,0.02971774883270263,0.030484554710388183,"[0.02719340705871582, 0.027825183868408203, 0.02713055992126465, 0.027544256210327148, 0.03067625617980957, 0.02704342460632324, 0.026865407943725585, 0.02679091262817383, 0.028546239852905272, 0.0270230712890625]",tokens/s,9253.94682307531,kWh,3.2511186323835646e-07,1.781457501536158e-07,9.554403247915664e-07,1.4586979381835385e-06,tokens/kWh,175498979.80851823,MB,1622.360064,1650.982912,0.0,1004.535808,942.735872,s,10,14.279858642578125,1.4279858642578125,0.005582562406454156,1.4284188842773438,1.4341954467773437,1.435299627685547,1.4361829724121093,"[1.4339500732421875, 1.43640380859375, 1.4284742431640625, 1.4176553955078126, 1.4193118896484376, 1.425897705078125, 1.4318565673828125, 1.428363525390625, 1.4298638916015625, 
1.42808154296875]",tokens/s,44.11808378281384,kWh,1.8023913988636687e-05,9.877106974897525e-06,2.976822314299942e-05,5.766924410653363e-05,tokens/kWh,1092436.7221394258,,s,630,14.27786032295227,0.02266327035389249,0.0004517663781688229,0.022526975631713866,0.023390309715270997,0.023661056423187256,0.024018913612365723,"[0.02260479927062988, 0.022751232147216797, 0.022781951904296875, 0.022390783309936522, 0.022394880294799805, 0.02245529556274414, 0.02270310401916504, 0.02243174362182617, 0.022384639739990234, 0.022607872009277344, 0.02251263999938965, 0.0224849910736084, 0.023262208938598632, 0.02374143981933594, 0.023225343704223633, 0.022724607467651366, 0.02304921531677246, 0.02271232032775879, 0.022676479339599608, 0.022153215408325197, 0.022395904541015626, 0.022487039566040038, 0.022626304626464845, 0.022405120849609376, 0.02269388771057129, 0.022725631713867187, 0.023803903579711915, 0.023427072525024413, 0.023200767517089844, 0.023616512298583983, 0.023488512039184572, 0.023533567428588868, 0.023785472869873047, 0.022963199615478515, 0.022773759841918945, 0.022726655960083008, 0.02269081687927246, 0.022467584609985353, 0.02245427131652832, 0.022460416793823244, 0.02244095993041992, 0.022254592895507814, 0.0228351993560791, 0.02286489677429199, 0.02250444793701172, 0.02266111946105957, 0.02272870445251465, 0.023196672439575194, 0.022605823516845702, 0.022709247589111328, 0.02246348762512207, 0.022335487365722655, 0.02226483154296875, 0.022365184783935548, 0.022708223342895507, 0.02251468849182129, 0.022568960189819336, 0.022764543533325195, 0.022746112823486327, 0.02284851264953613, 0.022929407119750975, 0.02290380859375, 0.022785024642944338, 0.022967296600341795, 0.022569984436035157, 0.022467584609985353, 0.02267136001586914, 0.023076864242553712, 0.023615488052368162, 0.024020992279052734, 0.024026111602783205, 0.023155712127685548, 0.023390207290649414, 0.023367679595947266, 0.023373823165893554, 0.022494207382202147, 0.022361087799072265, 0.022377471923828125, 0.022326271057128907, 0.0222873592376709, 0.02248908805847168, 0.022861824035644532, 0.02267136001586914, 0.02231808090209961, 0.022581247329711913, 0.022733823776245117, 0.0224532470703125, 0.022385663986206054, 0.022411264419555665, 0.022374399185180666, 0.02291814422607422, 0.02388275146484375, 0.023723007202148438, 0.02305433654785156, 0.022763519287109374, 0.022647808074951172, 0.022460416793823244, 0.02245631980895996, 0.02246348762512207, 0.022707199096679686, 0.022799360275268556, 0.022785055160522462, 0.02257711982727051, 0.02327244758605957, 0.02259660720825195, 0.022746112823486327, 0.02265907287597656, 0.022452224731445314, 0.023211008071899415, 0.02330931282043457, 0.02251366424560547, 0.022820863723754883, 0.022936576843261718, 0.022599679946899414, 0.023229440689086913, 0.02365542411804199, 0.022796287536621093, 0.02259660720825195, 0.022419456481933595, 0.022759424209594727, 0.022757375717163086, 0.022379520416259766, 0.022359039306640623, 0.02231705665588379, 0.022417407989501953, 0.022352895736694335, 0.023159807205200195, 0.022674432754516603, 0.02251263999938965, 0.02243174362182617, 0.022311935424804686, 0.022396928787231447, 0.02229555130004883, 0.023144447326660156, 0.023152639389038086, 0.0227061767578125, 0.02240716743469238, 0.022396928787231447, 0.022212608337402344, 0.022371328353881836, 0.022367231369018553, 0.022607872009277344, 0.02269491195678711, 0.022353919982910156, 0.02230271911621094, 0.02290790367126465, 0.02271027183532715, 0.02272870445251465, 0.022779903411865234, 
0.022573055267333983, 0.022731775283813475, 0.02269593620300293, 0.022366207122802736, 0.02265088081359863, 0.022460416793823244, 0.02323865509033203, 0.023644159317016602, 0.02286079978942871, 0.02285055923461914, 0.022508544921875, 0.02225971221923828, 0.022408191680908202, 0.022459392547607423, 0.02245631980895996, 0.022577152252197266, 0.022391807556152343, 0.022305791854858398, 0.022824960708618162, 0.022527999877929687, 0.02241433525085449, 0.022960128784179686, 0.024595455169677736, 0.024365055084228517, 0.023640064239501952, 0.02254643249511719, 0.022268928527832032, 0.02259660720825195, 0.02230988883972168, 0.022406143188476564, 0.022578176498413087, 0.02290892791748047, 0.022738943099975584, 0.022177791595458983, 0.02229452705383301, 0.02229555130004883, 0.022175743103027345, 0.02346188735961914, 0.0228853759765625, 0.02226278305053711, 0.022687744140625, 0.022377471923828125, 0.022305791854858398, 0.022228992462158204, 0.02243174362182617, 0.022406143188476564, 0.022345727920532226, 0.022709247589111328, 0.022386688232421875, 0.02369536018371582, 0.022939647674560547, 0.022403072357177735, 0.022404096603393556, 0.022718463897705078, 0.022572032928466795, 0.02271232032775879, 0.022691839218139647, 0.022551551818847656, 0.022378496170043945, 0.022359039306640623, 0.022371360778808594, 0.022381536483764647, 0.022401023864746093, 0.022377471923828125, 0.022724607467651366, 0.022747135162353514, 0.022573055267333983, 0.022351871490478514, 0.02229248046875, 0.022311935424804686, 0.02235699272155762, 0.022346752166748047, 0.022387712478637696, 0.022336511611938475, 0.022354944229125977, 0.02239897537231445, 0.02231603240966797, 0.02227916717529297, 0.022370304107666016, 0.022213632583618165, 0.02267955207824707, 0.022946815490722656, 0.022553600311279298, 0.022666240692138673, 0.022220800399780274, 0.02232729530334473, 0.022260736465454102, 0.022246400833129884, 0.022107135772705077, 0.022239231109619142, 0.022254592895507814, 0.022174720764160157, 0.02231091117858887, 0.022190080642700196, 0.023021568298339845, 0.022674432754516603, 0.022675455093383787, 0.022931455612182617, 0.022403072357177735, 0.022311935424804686, 0.023194623947143556, 0.023358463287353515, 0.022534143447875975, 0.023804927825927736, 0.023746559143066406, 0.0230328311920166, 0.0223242244720459, 0.02229452705383301, 0.02253107261657715, 0.022175743103027345, 0.022380544662475587, 0.022336511611938475, 0.022228992462158204, 0.022358015060424806, 0.022380544662475587, 0.02231808090209961, 0.022355968475341798, 0.0223191032409668, 0.022297599792480468, 0.022441984176635742, 0.022210559844970702, 0.022252544403076172, 0.02228428840637207, 0.0225218563079834, 0.02253926467895508, 0.02248294448852539, 0.022363136291503907, 0.02283417510986328, 0.02267136001586914, 0.022601728439331056, 0.022432767868041992, 0.02247270393371582, 0.022269952774047853, 0.02231603240966797, 0.022393856048583984, 0.02243891143798828, 0.02287718391418457, 0.022838272094726563, 0.02267033576965332, 0.02267955207824707, 0.022707199096679686, 0.022595584869384764, 0.022297599792480468, 0.023001087188720702, 0.023438335418701172, 0.023548927307128906, 0.02245529556274414, 0.02234982490539551, 0.022354944229125977, 0.022370304107666016, 0.02232729530334473, 0.02230988883972168, 0.02229862403869629, 0.02233241653442383, 0.022359039306640623, 0.022424575805664062, 0.022331392288208008, 0.022378496170043945, 0.022803455352783202, 0.02269491195678711, 0.022525951385498046, 0.022429695129394533, 0.02231603240966797, 0.022378496170043945, 
0.022245376586914063, 0.022381568908691408, 0.022597631454467772, 0.022397951126098634, 0.022359039306640623, 0.022260736465454102, 0.02287308883666992, 0.022577152252197266, 0.022625280380249024, 0.022579200744628908, 0.022116352081298828, 0.02226688003540039, 0.02239897537231445, 0.022487039566040038, 0.02231500816345215, 0.022511615753173828, 0.02368409538269043, 0.023517183303833008, 0.023440383911132814, 0.023538688659667968, 0.023653375625610353, 0.023151615142822265, 0.02347520065307617, 0.023447551727294923, 0.023391231536865235, 0.02269900894165039, 0.022379520416259766, 0.022297599792480468, 0.023021568298339845, 0.022767616271972657, 0.0223191032409668, 0.022347776412963868, 0.022199296951293947, 0.02254745674133301, 0.022709247589111328, 0.022607872009277344, 0.022591487884521484, 0.02260479927062988, 0.022608896255493165, 0.02234060859680176, 0.022466560363769532, 0.022726655960083008, 0.022297599792480468, 0.022143999099731446, 0.02225971221923828, 0.022166528701782227, 0.022338560104370117, 0.02262015914916992, 0.022543359756469726, 0.022303743362426756, 0.022938623428344726, 0.022536191940307617, 0.022288383483886717, 0.022360063552856444, 0.02264678382873535, 0.022172672271728516, 0.022326271057128907, 0.022330368041992187, 0.0223191032409668, 0.022389759063720704, 0.02262118339538574, 0.022445056915283205, 0.02267750358581543, 0.022938623428344726, 0.023136255264282226, 0.02244095993041992, 0.02230988883972168, 0.022579200744628908, 0.02272972869873047, 0.022337535858154296, 0.022365184783935548, 0.02226380729675293, 0.022409215927124023, 0.023009279251098632, 0.024013824462890625, 0.023970815658569337, 0.023665664672851562, 0.023444480895996093, 0.023037952423095705, 0.022602752685546876, 0.022756351470947265, 0.02268671989440918, 0.022483968734741212, 0.022585344314575196, 0.022525951385498046, 0.022715391159057616, 0.022777856826782225, 0.022640640258789063, 0.022411264419555665, 0.022573055267333983, 0.022758399963378906, 0.022743040084838868, 0.02283417510986328, 0.02246553611755371, 0.02246553611755371, 0.02226278305053711, 0.022420480728149415, 0.022353919982910156, 0.02231705665588379, 0.022434816360473633, 0.02250752067565918, 0.02245734405517578, 0.022549503326416014, 0.023798784255981444, 0.02286079978942871, 0.022609920501708985, 0.022344703674316405, 0.022368255615234374, 0.022597631454467772, 0.022202367782592772, 0.022362112045288086, 0.025776128768920898, 0.023769088745117187, 0.02309222412109375, 0.022789119720458984, 0.022627328872680662, 0.022252544403076172, 0.02223308753967285, 0.02227712059020996, 0.02232524871826172, 0.02222489547729492, 0.02241433525085449, 0.02307276725769043, 0.022944768905639647, 0.022931455612182617, 0.02312499237060547, 0.022553600311279298, 0.022218751907348632, 0.022347776412963868, 0.02231603240966797, 0.022191104888916017, 0.022245376586914063, 0.02255564880371094, 0.02265292739868164, 0.02264371109008789, 0.022322175979614257, 0.02267750358581543, 0.022417407989501953, 0.022351871490478514, 0.022330368041992187, 0.02230886459350586, 0.022141952514648438, 0.02235699272155762, 0.0225218563079834, 0.0223191032409668, 0.022408191680908202, 0.02390937614440918, 0.023587839126586914, 0.02250547218322754, 0.023362560272216795, 0.023739391326904297, 0.022898687362670898, 0.024013824462890625, 0.023415807723999024, 0.022649856567382814, 0.023347200393676756, 0.023446527481079102, 0.023023616790771483, 0.02225971221923828, 0.02246963119506836, 0.02269388771057129, 0.02267238426208496, 0.022701055526733398, 0.022230016708374024, 
0.022347776412963868, 0.022286336898803712, 0.02234982490539551, 0.022468608856201173, 0.022286336898803712, 0.02307583999633789, 0.022459392547607423, 0.022331392288208008, 0.02233344078063965, 0.022338560104370117, 0.022227968215942383, 0.022305791854858398, 0.022503423690795898, 0.02265190315246582, 0.022687744140625, 0.022756351470947265, 0.02315673637390137, 0.022768640518188478, 0.0227194881439209, 0.022444032669067384, 0.022801408767700194, 0.022658048629760744, 0.02253824043273926, 0.02265497589111328, 0.022402048110961914, 0.022393856048583984, 0.024010751724243166, 0.023707616806030275, 0.023799808502197265, 0.023983104705810547, 0.02389606475830078, 0.023735296249389647, 0.023555072784423828, 0.022931455612182617, 0.022730752944946288, 0.02270207977294922, 0.022681600570678712, 0.02270515251159668, 0.022519807815551757, 0.023573503494262696, 0.023443456649780273, 0.02253926467895508, 0.02268876838684082, 0.02267033576965332, 0.02264678382873535, 0.02328371238708496, 0.023179264068603517, 0.022384639739990234, 0.022518783569335937, 0.02263654327392578, 0.02252288055419922, 0.022600704193115235, 0.022286336898803712, 0.02269593620300293, 0.02268057632446289, 0.02271232032775879, 0.02231705665588379, 0.022328319549560546, 0.02290790367126465, 0.022380544662475587, 0.02287001609802246, 0.022562816619873048, 0.02249728012084961, 0.022305791854858398, 0.02245529556274414, 0.02266316795349121, 0.022228992462158204, 0.02226483154296875, 0.022658048629760744, 0.022529024124145508, 0.022358015060424806, 0.022416383743286132, 0.02223308753967285, 0.02226380729675293, 0.02223411178588867, 0.02202726364135742, 0.022342655181884767, 0.022633472442626954, 0.02267852783203125, 0.022252544403076172, 0.022326271057128907, 0.022537216186523438, 0.022336511611938475, 0.022202367782592772, 0.022420480728149415, 0.022642688751220705, 0.022656000137329102, 0.023236608505249022, 0.022375423431396483, 0.022518783569335937, 0.022405120849609376, 0.022477823257446287, 0.02228223991394043, 0.02262937545776367, 0.02225766372680664, 0.02230271911621094, 0.022222848892211915, 0.02227712059020996, 0.022610944747924806, 0.022413312911987306, 0.022334495544433595, 0.022873056411743163, 0.02263039970397949, 0.022344703674316405, 0.02223308753967285, 0.02230169677734375, 0.02234163284301758, 0.022079488754272462, 0.02227302360534668, 0.022339584350585938, 0.022404096603393556, 0.02226483154296875, 0.023029760360717775, 0.022790143966674805, 0.022435840606689454, 0.022368255615234374, 0.022352895736694335, 0.022383615493774413, 0.022269952774047853, 0.022362112045288086, 0.022386688232421875, 0.022286336898803712, 0.022420480728149415, 0.02251468849182129, 0.02266726493835449, 0.022761472702026365, 0.02250752067565918, 0.022529024124145508, 0.023972864151000976, 0.023837696075439452, 0.023352319717407227, 0.02274406433105469, 0.022424575805664062, 0.022492160797119142, 0.02252390480041504, 0.022426624298095704, 0.02253209686279297, 0.023407615661621094, 0.022664192199707032, 0.022402048110961914, 0.02222489547729492, 0.02226585578918457, 0.023426048278808592, 0.024250368118286132, 0.024169471740722655, 0.02346905517578125, 0.023427072525024413, 0.022748159408569335, 0.02248089599609375, 0.022683647155761717, 0.022701055526733398, 0.02343731117248535, 0.02346291160583496]",tokens/s,44.124258519832146,,,,,,,, 
-4bit-awq-gemv-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,EleutherAI/pythia-1.3b,EleutherAI/pythia-1.3b,cuda,0,42,,,True,,,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,gpt_neox,MB,1578.635264,1753.74336,0.0,1168.113664,1154.613248,s,1,7.95435791015625,7.95435791015625,0.0,7.95435791015625,7.95435791015625,7.95435791015625,7.95435791015625,[7.95435791015625],,kWh,1.3682665982622036e-05,7.483663230869344e-06,1.7861958734044503e-05,3.9028287947535885e-05,,MB,1638.551552,2034.761728,0.0,1388.314624,1334.065152,s,10,0.3348356513977051,0.03348356513977051,0.0002595851424421842,0.03341880035400391,0.03381886100769042,0.033959031105041504,0.034071167182922364,"[0.03409920120239258, 0.033482208251953124, 0.03327135848999024, 0.03355974578857422, 0.03343065643310547, 0.033406944274902345, 0.033787712097167966, 0.033253440856933596, 0.0332562255859375, 0.033288158416748044]",tokens/s,7645.541892907124,kWh,3.9386318057175783e-07,2.1581727826777738e-07,1.75493195950005e-06,2.364612418339585e-06,tokens/kWh,108262985.51699288,MB,1672.17152,2034.761728,0.0,1388.314624,1372.847616,s,10,13.725848632812498,1.37258486328125,0.011763144530754442,1.3756602783203125,1.3851809936523438,1.3852950866699218,1.3853863610839845,"[1.378732421875, 1.3800684814453126, 1.3821722412109374, 1.3723170166015626, 1.3851556396484375, 1.372588134765625, 1.3488226318359375, 1.358224853515625, 1.3623580322265625, 1.3854091796875]",tokens/s,45.89880136765792,kWh,1.608486772484466e-05,8.814333230135203e-06,3.008945462709911e-05,5.498865558207899e-05,tokens/kWh,1145690.858107321,,s,630,13.72185497093201,0.02178072217608255,0.00047145727658441677,0.021917695999145507,0.02227312602996826,0.022423807430267333,0.02310226926803589,"[0.02206003189086914, 0.0222740478515625, 0.022245376586914063, 0.022350847244262697, 0.021922815322875978, 0.022009855270385743, 0.021995519638061522, 0.022146047592163084, 0.022010879516601564, 0.022578176498413087, 0.021982208251953125, 0.021369855880737306, 0.021803007125854493, 0.021805055618286134, 0.021396480560302734, 0.021309440612792968, 0.021222400665283202, 0.021818368911743165, 0.022140928268432617, 0.02227609634399414, 0.02207027244567871, 0.021982208251953125, 0.022303775787353517, 0.022453216552734374, 0.02209689521789551, 0.02205388832092285, 0.0220948486328125, 0.02205392074584961, 0.022177759170532226, 0.02225868797302246, 0.022203392028808593, 0.022106111526489256, 0.022188032150268554, 0.022120447158813478, 0.02206003189086914, 0.02206924819946289, 0.021980159759521483, 0.0220948486328125, 0.02169753646850586, 0.02130534362792969, 0.02127872085571289, 0.0212807674407959, 0.021341184616088867, 0.021300224304199217, 0.021207040786743164, 0.021283840179443358, 0.021340160369873046, 0.021526527404785157, 0.021429248809814453, 0.021392383575439454, 0.02129715156555176, 0.021285888671875, 0.02131455993652344, 0.02128486442565918, 0.021328895568847657, 0.02234880065917969, 0.022146047592163084, 0.022208511352539064, 0.022183935165405275, 0.022320127487182616, 0.022768640518188478, 0.022355968475341798, 
0.022008832931518556, 0.021046272277832033, 0.021210111618041993, 0.02123776054382324, 0.021197824478149413, 0.021234687805175782, 0.02126028823852539, 0.02123366355895996, 0.021309440612792968, 0.02127872085571289, 0.021366783142089844, 0.02147430419921875, 0.02129305648803711, 0.02130227279663086, 0.02125312042236328, 0.0212490234375, 0.021405696868896484, 0.021364736557006835, 0.021917695999145507, 0.023362560272216795, 0.02306355285644531, 0.022063104629516602, 0.02123263931274414, 0.02130534362792969, 0.021365760803222656, 0.021985279083251954, 0.022141952514648438, 0.02228428840637207, 0.022107135772705077, 0.02146713638305664, 0.022181888580322266, 0.022147071838378905, 0.022399999618530272, 0.02204364776611328, 0.02211020851135254, 0.022107135772705077, 0.022148096084594726, 0.022362112045288086, 0.022205440521240235, 0.022215679168701173, 0.022211584091186523, 0.022122495651245116, 0.02211737632751465, 0.022495231628417968, 0.022716415405273437, 0.02243071937561035, 0.022205440521240235, 0.022137855529785155, 0.022191104888916017, 0.02207027244567871, 0.0224532470703125, 0.022297599792480468, 0.022168575286865236, 0.02208870315551758, 0.02230886459350586, 0.02200371170043945, 0.022016000747680665, 0.022040576934814454, 0.02203545570373535, 0.021983232498168945, 0.021997568130493163, 0.022122495651245116, 0.022139904022216796, 0.021391359329223633, 0.021011455535888672, 0.021283840179443358, 0.021356544494628905, 0.021921791076660157, 0.021816320419311523, 0.022148096084594726, 0.022125568389892578, 0.022017023086547852, 0.02206003189086914, 0.02206105613708496, 0.02240716743469238, 0.022240255355834963, 0.021971967697143553, 0.0218787841796875, 0.021951488494873047, 0.02131865692138672, 0.021184511184692383, 0.022280191421508787, 0.021611520767211914, 0.021283840179443358, 0.021313535690307618, 0.021279743194580078, 0.021978111267089845, 0.02221772766113281, 0.02211123275756836, 0.022537216186523438, 0.02244710350036621, 0.022120479583740235, 0.02198422431945801, 0.021999616622924805, 0.022004735946655272, 0.02227302360534668, 0.022195199966430663, 0.021984256744384766, 0.021982208251953125, 0.022150144577026368, 0.021933055877685546, 0.02208358383178711, 0.022181888580322266, 0.022193151473999022, 0.022013952255249023, 0.02208768081665039, 0.02208460807800293, 0.02208051109313965, 0.021980159759521483, 0.0220579833984375, 0.021967872619628907, 0.02211840057373047, 0.021323776245117186, 0.02128998374938965, 0.021801984786987305, 0.022188032150268554, 0.022192127227783204, 0.022022144317626953, 0.02201190376281738, 0.022334463119506837, 0.02209689521789551, 0.022170623779296874, 0.022001663208007814, 0.022031360626220704, 0.022123519897460937, 0.021585920333862304, 0.021325824737548828, 0.021130239486694336, 0.02124595260620117, 0.021783552169799804, 0.022232063293457033, 0.02213580894470215, 0.021390335083007812, 0.021212160110473634, 0.02165862464904785, 0.022026239395141603, 0.021989376068115234, 0.022034431457519533, 0.022076416015625, 0.022580223083496095, 0.021799936294555664, 0.0213309440612793, 0.021375999450683594, 0.021606399536132814, 0.02225152015686035, 0.021958656311035156, 0.022128639221191407, 0.021527551651000978, 0.02127052879333496, 0.02123161506652832, 0.0214968318939209, 0.021744640350341796, 0.02164735984802246, 0.022988832473754883, 0.02268976020812988, 0.02337283134460449, 0.022196191787719727, 0.02246553611755371, 0.022560768127441407, 0.02224332809448242, 0.022004735946655272, 0.021259263992309572, 0.021577728271484374, 0.021389312744140625, 
0.02241535949707031, 0.02201190376281738, 0.022014976501464844, 0.0212992000579834, 0.021176319122314453, 0.021363712310791014, 0.02131046485900879, 0.021796863555908205, 0.021420032501220702, 0.021316608428955077, 0.021345279693603517, 0.021401599884033205, 0.021337087631225587, 0.021312511444091797, 0.02128691291809082, 0.021317632675170898, 0.02127769660949707, 0.0212992000579834, 0.021897247314453125, 0.021690336227416993, 0.021338111877441408, 0.02144972801208496, 0.021386240005493166, 0.02309427261352539, 0.022268928527832032, 0.022475776672363282, 0.020933631896972657, 0.021379072189331053, 0.02210304069519043, 0.023735296249389647, 0.02287001609802246, 0.0222740478515625, 0.022223871231079103, 0.022174720764160157, 0.02215936088562012, 0.022278144836425783, 0.022162431716918944, 0.021933055877685546, 0.021984256744384766, 0.02292736053466797, 0.022187007904052734, 0.022169599533081053, 0.022114303588867186, 0.022132736206054687, 0.022329343795776366, 0.022115327835083007, 0.02208665657043457, 0.021970943450927736, 0.021156864166259767, 0.021348352432250976, 0.021189632415771483, 0.02169343948364258, 0.022072320938110353, 0.022164480209350586, 0.02201190376281738, 0.021635072708129883, 0.02126131248474121, 0.021328895568847657, 0.021207040786743164, 0.021182464599609374, 0.021134336471557616, 0.021186559677124024, 0.021778432846069336, 0.02150099182128906, 0.02298873519897461, 0.02310553550720215, 0.022360063552856444, 0.022139904022216796, 0.021994495391845705, 0.022165504455566407, 0.022128639221191407, 0.022033407211303712, 0.02206719970703125, 0.02223209571838379, 0.021999584197998048, 0.02206105613708496, 0.021936128616333008, 0.021761024475097656, 0.022227968215942383, 0.022133760452270508, 0.022012928009033202, 0.022160383224487306, 0.022005760192871093, 0.021917695999145507, 0.022148096084594726, 0.02208051109313965, 0.02210406494140625, 0.021584896087646483, 0.02132275199890137, 0.021098495483398438, 0.021334016799926758, 0.021359615325927735, 0.02127359962463379, 0.021292032241821288, 0.0212992000579834, 0.021543935775756837, 0.022223871231079103, 0.02208153533935547, 0.022131711959838866, 0.0220579833984375, 0.022039552688598633, 0.021968896865844727, 0.021337087631225587, 0.02125721549987793, 0.021203968048095705, 0.021316608428955077, 0.02131046485900879, 0.021255168914794922, 0.021332992553710937, 0.021833728790283204, 0.022007808685302735, 0.02272870445251465, 0.022399999618530272, 0.022167552947998048, 0.02209689521789551, 0.02207539176940918, 0.022017023086547852, 0.022009855270385743, 0.021712896347045898, 0.02125004768371582, 0.021345279693603517, 0.02122547149658203, 0.021213184356689452, 0.02123776054382324, 0.021243904113769533, 0.02246143913269043, 0.022239231109619142, 0.021964799880981444, 0.02228223991394043, 0.021954559326171876, 0.022031360626220704, 0.022076416015625, 0.022115327835083007, 0.022194175720214843, 0.022068256378173827, 0.022137823104858397, 0.02224742317199707, 0.021638143539428712, 0.021156864166259767, 0.021168127059936523, 0.021331968307495116, 0.02183065605163574, 0.022130687713623046, 0.02202726364135742, 0.022129663467407225, 0.02211020851135254, 0.023508991241455078, 0.022410240173339844, 0.022014976501464844, 0.02127052879333496, 0.021179391860961915, 0.02126643180847168, 0.021153791427612305, 0.021365760803222656, 0.021295103073120117, 0.021811199188232423, 0.02166374397277832, 0.021324800491333007, 0.021358591079711914, 0.021316608428955077, 0.021153791427612305, 0.021296127319335938, 0.021271551132202148, 0.02125823974609375, 
0.02127872085571289, 0.021227519989013673, 0.02127769660949707, 0.021317632675170898, 0.02132275199890137, 0.021301248550415038, 0.021369855880737306, 0.02126028823852539, 0.021171199798583985, 0.02127462387084961, 0.021295103073120117, 0.021369855880737306, 0.021353471755981446, 0.021255168914794922, 0.021164031982421876, 0.021283840179443358, 0.02128486442565918, 0.02131455993652344, 0.02125619125366211, 0.021356544494628905, 0.02109644889831543, 0.021181440353393553, 0.02208051109313965, 0.022090751647949217, 0.022116352081298828, 0.021873664855957032, 0.022131711959838866, 0.022136831283569337, 0.021703680038452147, 0.021412864685058593, 0.021352447509765626, 0.021308416366577147, 0.021307392120361326, 0.02142720031738281, 0.021295103073120117, 0.021405696868896484, 0.021393407821655275, 0.02145894432067871, 0.021384191513061524, 0.021924863815307616, 0.021711872100830077, 0.021202943801879884, 0.021234687805175782, 0.021312511444091797, 0.020953088760375976, 0.021147647857666017, 0.021331968307495116, 0.0213309440612793, 0.021350400924682617, 0.021519359588623048, 0.02124799919128418, 0.021131263732910157, 0.02289664077758789, 0.02225663948059082, 0.022013952255249023, 0.02209587287902832, 0.02185932731628418, 0.021304319381713867, 0.021315584182739256, 0.02124083137512207, 0.02122035217285156, 0.02128281593322754, 0.02130534362792969, 0.021134336471557616, 0.021319679260253906, 0.021991424560546875, 0.02208870315551758, 0.023311359405517578, 0.023221248626708983, 0.022269952774047853, 0.02209587287902832, 0.022107135772705077, 0.021405696868896484, 0.021308416366577147, 0.02188595199584961, 0.02164019203186035, 0.021309440612792968, 0.021350400924682617, 0.022122495651245116, 0.021989376068115234, 0.021564416885375977, 0.021421056747436523, 0.02131455993652344, 0.0212674560546875, 0.021335039138793945, 0.021738496780395508, 0.021812223434448243, 0.021414911270141602, 0.021246976852416992, 0.021358591079711914, 0.021406719207763672, 0.021354496002197267, 0.021493759155273438, 0.021304319381713867, 0.02132275199890137, 0.02127872085571289, 0.021309440612792968, 0.021196800231933592, 0.021317632675170898, 0.02123161506652832, 0.02110873603820801, 0.021203968048095705, 0.021368831634521485, 0.021316608428955077, 0.021212160110473634, 0.020925439834594727, 0.02109951972961426, 0.0212992000579834, 0.021385215759277345, 0.02127257537841797, 0.02121625518798828, 0.021382144927978516, 0.02122035217285156, 0.021576704025268553, 0.02109951972961426, 0.021187583923339845, 0.021311487197875977, 0.021326847076416015, 0.02125209617614746, 0.021301279067993163, 0.021261280059814452, 0.021313535690307618, 0.021169151306152344, 0.021176319122314453, 0.021200895309448242, 0.0212674560546875, 0.021501951217651367, 0.021408767700195314, 0.021317632675170898, 0.02129817581176758, 0.02132275199890137, 0.021317632675170898, 0.02182655906677246, 0.02170982360839844, 0.021198848724365234, 0.02127769660949707, 0.021213184356689452, 0.021335039138793945, 0.02124492835998535, 0.02124083137512207, 0.02125209617614746, 0.02128486442565918, 0.021279743194580078, 0.021246976852416992, 0.021242879867553712, 0.021350400924682617, 0.021377023696899415, 0.022222848892211915, 0.02204569625854492, 0.022024192810058595, 0.02183475112915039, 0.02124185562133789, 0.021997568130493163, 0.021534719467163087, 0.02182655906677246, 0.022026239395141603, 0.021605375289916993, 0.021378047943115236, 0.02150297546386719, 0.022149120330810547, 0.02206719970703125, 0.022112255096435548, 0.02213478469848633, 0.021999616622924805, 
0.022123519897460937, 0.021995519638061522, 0.0220446720123291, 0.022115327835083007, 0.022024192810058595, 0.02205695915222168, 0.022016000747680665, 0.02205183982849121, 0.02231603240966797, 0.022114303588867186, 0.022013952255249023, 0.02221772766113281, 0.02205183982849121, 0.021354496002197267, 0.02185113525390625, 0.021582847595214845, 0.02129715156555176, 0.021751808166503905, 0.021963775634765623, 0.022010879516601564, 0.022160383224487306, 0.021926912307739257, 0.021251071929931642, 0.021348352432250976, 0.021613567352294923, 0.02208870315551758, 0.0220948486328125, 0.022001663208007814, 0.02213580894470215, 0.02211942481994629, 0.02208665657043457, 0.022231039047241212, 0.022366207122802736, 0.022177791595458983, 0.022125568389892578, 0.02206924819946289, 0.021767168045043944, 0.02127359962463379, 0.021137407302856445, 0.022208511352539064, 0.02211327934265137, 0.0220579833984375, 0.021788671493530275, 0.022244352340698242, 0.022382591247558595, 0.02215936088562012, 0.02229862403869629, 0.0222873592376709, 0.022161407470703123, 0.022154239654541014, 0.02206208038330078, 0.02208153533935547, 0.022147071838378905, 0.022433792114257813, 0.022213632583618165, 0.02210099220275879, 0.02210304069519043, 0.022245376586914063, 0.02231705665588379, 0.022112255096435548, 0.022339584350585938, 0.022624256134033204, 0.022405120849609376, 0.02171392059326172, 0.02127872085571289, 0.021296127319335938, 0.021279743194580078, 0.02191257667541504, 0.022172672271728516, 0.022041599273681642, 0.022009855270385743, 0.022146047592163084, 0.021873664855957032, 0.02205388832092285, 0.022157312393188477, 0.022235136032104492]",tokens/s,45.912159932791475,,,,,,,, -4bit-awq-gemv-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,Qwen/Qwen1.5-72B,Qwen/Qwen1.5-72B,cuda,0,42,,,True,,,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run - report = scenario.run(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run - self.run_model_loading_tracking(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking - backend.load() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load - self.load_transformers_model() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in 
load_transformers_model - self.load_transformers_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights - self.load_transformers_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained - self.pretrained_model = self.automodel_loader.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4034, in from_pretrained - dispatch_model(model, **device_map_kwargs) - File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 494, in dispatch_model - model.to(device) - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 2905, in to - return super().to(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1174, in to - return self._apply(convert) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply - module._apply(fn) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply - module._apply(fn) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply - module._apply(fn) - [Previous line repeated 2 more times] - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 854, in _apply - self._buffers[key] = fn(buf) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1160, in convert - return t.to( -torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 96.00 MiB. GPU 0 has a total capacity of 22.18 GiB of which 68.50 MiB is free. Process 65857 has 22.11 GiB memory in use. Of the allocated memory 21.86 GiB is allocated by PyTorch, and 10.74 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) - -",qwen2,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemv-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,EleutherAI/pythia-1.4b,EleutherAI/pythia-1.4b,cuda,0,42,,,True,,,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,gpt_neox,MB,1547.821056,1753.74336,0.0,1168.113664,1154.613248,s,1,7.9190673828125,7.9190673828125,0.0,7.9190673828125,7.9190673828125,7.9190673828125,7.9190673828125,[7.9190673828125],,kWh,1.2742630039576853e-05,6.9682511033714155e-06,1.8006125516001337e-05,3.77170066589496e-05,,MB,1606.459392,2034.761728,0.0,1388.314624,1334.065152,s,10,0.3324529609680176,0.033245296096801755,9.968645420827918e-05,0.03322847938537597,0.033334228515625,0.03340026664733887,0.033453097152709964,"[0.03312547302246094, 0.03318659210205078, 0.03331164932250977, 0.03315987014770508, 0.03327036666870117, 0.03317561721801758, 0.03328441619873047, 0.03315311813354492, 0.033466304779052734, 0.03331955337524414]",tokens/s,7700.33749299732,kWh,3.9231405692143796e-07,2.1496591732249542e-07,1.7846657029338023e-06,2.391945677177736e-06,tokens/kWh,107025841.95058109,MB,1640.07936,2034.761728,0.0,1388.314624,1372.847616,s,10,13.46328454589844,1.346328454589844,0.012805023167436584,1.3477009887695313,1.359507373046875,1.3602975097656251,1.360929619140625,"[1.3235755615234375, 1.3426622314453125, 1.3365908203125, 1.3317373046875, 1.361087646484375, 1.35273974609375, 1.359331787109375, 1.358909423828125, 1.3584874267578124, 1.33816259765625]",tokens/s,46.79393040028469,kWh,1.566280547696848e-05,8.5830073876174e-06,2.9632276097260364e-05,5.387808896184624e-05,tokens/kWh,1169306.506855012,,s,630,13.45949800872803,0.021364282553536548,0.0004590080757190155,0.021411328315734862,0.021853491020202634,0.022028851222991943,0.0225524634552002,"[0.02105548858642578, 0.020963327407836914, 0.02091110420227051, 0.02086604881286621, 0.021773311614990236, 0.021559295654296876, 0.02087424087524414, 0.02102374458312988, 0.02087116813659668, 0.02082815933227539, 0.0209039363861084, 0.020915199279785156, 0.02084864044189453, 0.020999168395996092, 0.02084966468811035, 0.020892671585083008, 0.020986879348754883, 0.020847616195678712, 0.02102783966064453, 0.020989952087402345, 0.0208721923828125, 0.02079539108276367, 0.02085068893432617, 0.021388288497924804, 0.021375999450683594, 0.020876287460327148, 0.02087833595275879, 0.022791168212890626, 0.022171648025512695, 0.021174272537231444, 0.020936704635620116, 0.020959232330322267, 0.020959232330322267, 0.02088960075378418, 0.020818944931030273, 0.020780031204223632, 0.02088140869140625, 0.02085990333557129, 0.020917247772216797, 0.02105036735534668, 0.02107494354248047, 0.02085273551940918, 0.020959232330322267, 0.02084556770324707, 0.02081177520751953, 0.02089779281616211, 0.020847616195678712, 0.0209039363861084, 0.02084864044189453, 0.020956159591674805, 0.020999168395996092, 0.021523456573486328, 0.020892671585083008, 
0.020929536819458007, 0.020921344757080077, 0.021045248031616212, 0.020858879089355468, 0.020576255798339844, 0.02067251205444336, 0.020752384185791017, 0.021014528274536134, 0.020930559158325195, 0.020888576507568358, 0.02062233543395996, 0.02085478401184082, 0.020824064254760744, 0.02079539108276367, 0.02084249687194824, 0.020904960632324218, 0.02088755226135254, 0.020946943283081054, 0.02151628875732422, 0.02129305648803711, 0.020908031463623047, 0.02086911964416504, 0.0209039363861084, 0.022068288803100584, 0.02178451156616211, 0.02103910446166992, 0.02106777572631836, 0.020926464080810548, 0.02087833595275879, 0.020915199279785156, 0.02088140869140625, 0.020891647338867187, 0.020944896697998046, 0.021120000839233398, 0.02086502456665039, 0.021013504028320314, 0.020890623092651366, 0.020921344757080077, 0.021165056228637694, 0.021174272537231444, 0.020970495223999023, 0.02101043128967285, 0.02086297607421875, 0.02091929626464844, 0.02089369583129883, 0.021128192901611328, 0.020996095657348633, 0.020937728881835937, 0.020817920684814452, 0.020966400146484376, 0.02086502456665039, 0.02087731170654297, 0.021501951217651367, 0.022029312133789062, 0.02168627166748047, 0.021711872100830077, 0.02187161636352539, 0.02185625648498535, 0.021737472534179687, 0.02172313690185547, 0.022334463119506837, 0.02349158477783203, 0.02211942481994629, 0.021749759674072267, 0.021715967178344727, 0.021796863555908205, 0.021934080123901366, 0.022373376846313478, 0.021914623260498048, 0.021700607299804688, 0.02167398452758789, 0.021790719985961913, 0.021609472274780273, 0.021309440612792968, 0.021634048461914062, 0.021646335601806642, 0.021575679779052736, 0.02169856071472168, 0.02162073516845703, 0.021757951736450197, 0.021850112915039063, 0.02168115234375, 0.021812223434448243, 0.021608448028564452, 0.021832704544067383, 0.02169753646850586, 0.02103091239929199, 0.020883455276489257, 0.021024768829345702, 0.02086502456665039, 0.020943872451782225, 0.02088960075378418, 0.02087321662902832, 0.02088652801513672, 0.02084556770324707, 0.020896768569946288, 0.020965375900268556, 0.021303295135498047, 0.02163609504699707, 0.021625856399536132, 0.021158912658691405, 0.02109542465209961, 0.021126144409179686, 0.02109132766723633, 0.02188697624206543, 0.02169753646850586, 0.021593088150024413, 0.021581823348999024, 0.02169343948364258, 0.020920320510864256, 0.020980735778808594, 0.020944896697998046, 0.021122047424316406, 0.02132275199890137, 0.021530624389648437, 0.021098495483398438, 0.0210513916015625, 0.020896768569946288, 0.020980735778808594, 0.020853759765625, 0.021014528274536134, 0.02085273551940918, 0.02084249687194824, 0.020918272018432618, 0.020989952087402345, 0.02091929626464844, 0.020896768569946288, 0.02107596778869629, 0.020937728881835937, 0.020987903594970703, 0.020975616455078124, 0.020925439834594727, 0.020925439834594727, 0.02107494354248047, 0.02088652801513672, 0.020978687286376953, 0.020773887634277344, 0.020935680389404295, 0.020931583404541015, 0.020926464080810548, 0.020899839401245117, 0.02088652801513672, 0.020933631896972657, 0.02102681541442871, 0.020918272018432618, 0.02084147262573242, 0.02084659194946289, 0.020917247772216797, 0.020946943283081054, 0.021204992294311522, 0.021537792205810546, 0.020959232330322267, 0.020926464080810548, 0.020890623092651366, 0.02089574432373047, 0.020853759765625, 0.02086092758178711, 0.020884479522705078, 0.021164031982421876, 0.023290880203247072, 0.022252544403076172, 0.021523456573486328, 0.021242879867553712, 0.02088140869140625, 
0.020876287460327148, 0.02083328056335449, 0.020982784271240236, 0.02086604881286621, 0.020677631378173827, 0.020909055709838868, 0.02081996726989746, 0.020979711532592774, 0.020867071151733398, 0.02142617607116699, 0.021613567352294923, 0.021813247680664064, 0.021805055618286134, 0.021721088409423828, 0.021748735427856446, 0.02163711929321289, 0.021703680038452147, 0.02126233673095703, 0.020941823959350587, 0.02101043128967285, 0.020960256576538085, 0.020920320510864256, 0.020954111099243163, 0.020891647338867187, 0.020912128448486327, 0.0208855037689209, 0.020986879348754883, 0.021142528533935546, 0.020883455276489257, 0.02085273551940918, 0.020968448638916014, 0.020935680389404295, 0.021024768829345702, 0.021728256225585937, 0.02165657615661621, 0.020666368484497072, 0.020904960632324218, 0.020896768569946288, 0.020900863647460938, 0.02090598487854004, 0.02092748832702637, 0.020974592208862306, 0.022569984436035157, 0.02229862403869629, 0.02263654327392578, 0.022143999099731446, 0.021733375549316408, 0.021687295913696288, 0.021716991424560548, 0.021724159240722657, 0.021750783920288085, 0.02162073516845703, 0.021646335601806642, 0.02169753646850586, 0.021703680038452147, 0.021729280471801758, 0.021908479690551756, 0.021957632064819335, 0.02189516830444336, 0.021733375549316408, 0.02162892723083496, 0.02165043258666992, 0.021753856658935547, 0.02165452766418457, 0.021614591598510743, 0.021715967178344727, 0.021777408599853516, 0.02168320083618164, 0.02169036865234375, 0.021700607299804688, 0.021643264770507813, 0.021994495391845705, 0.021801984786987305, 0.02172211265563965, 0.021429248809814453, 0.021562368392944335, 0.02166681671142578, 0.02173030471801758, 0.021734399795532225, 0.021757951736450197, 0.02168422317504883, 0.021747711181640626, 0.021857280731201172, 0.021555200576782226, 0.02130227279663086, 0.021719039916992186, 0.02171801567077637, 0.021720064163208007, 0.02185932731628418, 0.021585920333862304, 0.02143539237976074, 0.021497856140136717, 0.021924863815307616, 0.021408767700195314, 0.020853759765625, 0.02087116813659668, 0.020794368743896483, 0.020913152694702147, 0.020968448638916014, 0.021779455184936524, 0.021707775115966797, 0.021679103851318358, 0.021819391250610352, 0.021604352951049805, 0.021982208251953125, 0.02172313690185547, 0.02168832015991211, 0.021610496520996093, 0.021575679779052736, 0.02166783905029297, 0.021535743713378908, 0.021611520767211914, 0.021617664337158202, 0.021720064163208007, 0.02168217658996582, 0.021627904891967774, 0.02185318374633789, 0.0218603515625, 0.02163609504699707, 0.02183065605163574, 0.02205183982849121, 0.021790719985961913, 0.021785600662231445, 0.021831680297851562, 0.021761024475097656, 0.021711872100830077, 0.02166067123413086, 0.021728256225585937, 0.02165247917175293, 0.021327871322631836, 0.02084659194946289, 0.020944896697998046, 0.0208721923828125, 0.02087936019897461, 0.020904960632324218, 0.02084249687194824, 0.02088140869140625, 0.021155839920043946, 0.021720064163208007, 0.021215232849121093, 0.020958208084106447, 0.020909055709838868, 0.020924415588378906, 0.020925439834594727, 0.020930559158325195, 0.021316608428955077, 0.02182246398925781, 0.021810176849365235, 0.0218787841796875, 0.021795839309692384, 0.022130687713623046, 0.021793792724609375, 0.02169343948364258, 0.02128179168701172, 0.021355520248413085, 0.021747711181640626, 0.021045248031616212, 0.020936704635620116, 0.020942848205566408, 0.020956159591674805, 0.02088652801513672, 0.021180416107177736, 0.02184806442260742, 0.021094400405883788, 
0.020900863647460938, 0.02087321662902832, 0.02090598487854004, 0.021235712051391603, 0.021736448287963867, 0.02166886329650879, 0.021712896347045898, 0.02166374397277832, 0.021711872100830077, 0.021771263122558594, 0.021574655532836915, 0.021708831787109376, 0.02167190361022949, 0.02167807960510254, 0.021659648895263672, 0.02168627166748047, 0.021708799362182618, 0.021780479431152345, 0.021625856399536132, 0.021757951736450197, 0.022178815841674804, 0.02205695915222168, 0.02165657615661621, 0.02162380790710449, 0.021574655532836915, 0.02164531135559082, 0.02168422317504883, 0.02165555191040039, 0.0216494083404541, 0.0216494083404541, 0.021413888931274414, 0.022214656829833986, 0.021794815063476563, 0.021751808166503905, 0.021711872100830077, 0.021695487976074217, 0.02189619255065918, 0.02164121627807617, 0.021719039916992186, 0.021786624908447266, 0.021963775634765623, 0.021959680557250977, 0.021795839309692384, 0.021925888061523437, 0.02202828788757324, 0.021743616104125976, 0.02165760040283203, 0.021850112915039063, 0.021779455184936524, 0.021795839309692384, 0.021749759674072267, 0.021111808776855468, 0.021008384704589843, 0.020925439834594727, 0.0209039363861084, 0.020909055709838868, 0.02091929626464844, 0.020965375900268556, 0.020986879348754883, 0.02084351921081543, 0.02067251205444336, 0.020840448379516603, 0.02090598487854004, 0.020791296005249024, 0.02091007995605469, 0.02081279945373535, 0.02105753517150879, 0.02163199996948242, 0.0212807674407959, 0.020984832763671874, 0.02085068893432617, 0.02106572723388672, 0.02167705535888672, 0.02167807960510254, 0.02166988754272461, 0.021777408599853516, 0.022025215148925782, 0.02187468719482422, 0.021828607559204103, 0.021783552169799804, 0.021618688583374023, 0.02165350341796875, 0.02166476821899414, 0.02187264060974121, 0.021149696350097655, 0.020969472885131835, 0.020975616455078124, 0.020965375900268556, 0.021413888931274414, 0.021971967697143553, 0.021593088150024413, 0.02168934440612793, 0.02164838409423828, 0.021595136642456055, 0.021651456832885742, 0.02169343948364258, 0.021729280471801758, 0.021736448287963867, 0.021625856399536132, 0.02169753646850586, 0.021531648635864258, 0.021729280471801758, 0.021328895568847657, 0.021797887802124022, 0.021444608688354492, 0.022509567260742186, 0.021993471145629884, 0.021964799880981444, 0.021803007125854493, 0.021777408599853516, 0.02171801567077637, 0.021775360107421874, 0.021733375549316408, 0.021771263122558594, 0.02207846450805664, 0.022468608856201173, 0.02201190376281738, 0.02170675277709961, 0.021757951736450197, 0.021614591598510743, 0.02169343948364258, 0.021704704284667968, 0.021526527404785157, 0.02103193664550781, 0.020961280822753905, 0.02088140869140625, 0.020966400146484376, 0.02090598487854004, 0.020898815155029296, 0.0208353271484375, 0.020948991775512696, 0.020913152694702147, 0.02084659194946289, 0.02108518409729004, 0.021158912658691405, 0.020997119903564454, 0.0224399356842041, 0.02189107131958008, 0.021813247680664064, 0.021797887802124022, 0.02166476821899414, 0.021707775115966797, 0.02165657615661621, 0.021724159240722657, 0.02165760040283203, 0.021627904891967774, 0.021757951736450197, 0.021711872100830077, 0.0216627197265625, 0.021679103851318358, 0.021615615844726564, 0.021758975982666014, 0.021819391250610352, 0.022648895263671875, 0.02222892761230469, 0.021159936904907226, 0.020896768569946288, 0.020847616195678712, 0.020993024826049804, 0.02089779281616211, 0.02169856071472168, 0.02151628875732422, 0.02146611213684082, 0.021712896347045898, 
0.02169753646850586, 0.02153267288208008, 0.02170572853088379, 0.021873664855957032, 0.02204876708984375, 0.021792768478393554, 0.021639167785644533, 0.021739519119262696, 0.022031360626220704, 0.021908479690551756, 0.021734399795532225, 0.02191974449157715, 0.021708799362182618, 0.021732351303100587, 0.021795839309692384, 0.02207539176940918, 0.022038528442382813, 0.021720064163208007, 0.021767168045043944, 0.02170163154602051, 0.02167296028137207, 0.021754880905151368, 0.02066431999206543, 0.020855808258056642, 0.020929536819458007, 0.02091110420227051, 0.020965375900268556, 0.02085990333557129, 0.020934656143188478, 0.020946943283081054, 0.021761024475097656, 0.02165657615661621, 0.021776384353637695, 0.02163609504699707, 0.021696512222290038, 0.021751808166503905, 0.02165350341796875, 0.021799936294555664, 0.021550079345703126, 0.02184806442260742, 0.021738496780395508, 0.02170163154602051, 0.02147942352294922, 0.02124595260620117, 0.021405696868896484, 0.020984832763671874, 0.02091110420227051, 0.020782079696655274, 0.02083839988708496, 0.02088652801513672, 0.020831232070922853, 0.020813823699951172, 0.0208721923828125, 0.02106470489501953, 0.020943872451782225, 0.02084864044189453, 0.02086297607421875, 0.020793344497680662, 0.02084864044189453, 0.02084659194946289, 0.02083020782470703, 0.02125312042236328, 0.02284851264953613, 0.022223871231079103, 0.021736448287963867, 0.02165555191040039, 0.02108723258972168, 0.022280191421508787, 0.021832704544067383, 0.021497856140136717, 0.020883455276489257, 0.02086195182800293, 0.02084864044189453, 0.020813823699951172, 0.02086911964416504, 0.021644287109375, 0.02145280075073242, 0.020992000579833983, 0.02083737564086914, 0.02089472007751465, 0.02087116813659668, 0.021390335083007812, 0.021534719467163087, 0.0208353271484375, 0.020931583404541015]",tokens/s,46.807094855355416,,,,,,,, -4bit-awq-gemv-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,facebook/xglm-7.5B,facebook/xglm-7.5B,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run - report = scenario.run(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 105, in run - _ = backend.generate(self.inputs, self.config.generate_kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context - 
return func(*args, **kwargs) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 400, in generate - return self.pretrained_model.generate(**inputs, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context - return func(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 1914, in generate - result = self._sample( - File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2651, in _sample - outputs = self( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/xglm/modeling_xglm.py"", line 760, in forward - outputs = self.model( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/xglm/modeling_xglm.py"", line 646, in forward - layer_outputs = decoder_layer( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/xglm/modeling_xglm.py"", line 413, in forward - hidden_states, self_attn_weights, present_key_value = self.self_attn( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/xglm/modeling_xglm.py"", line 243, in forward - query_states = self.q_proj(hidden_states) * self.scaling - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context - return func(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/gemv.py"", line 162, in forward - assert AWQ_INSTALLED, ( -AssertionError: AWQ kernels could not be loaded. 
Please install them from https://github.com/casper-hansen/AutoAWQ_kernels - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemv-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,huggyllama/llama-30b,huggyllama/llama-30b,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run - report = scenario.run(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 105, in run - _ = backend.generate(self.inputs, self.config.generate_kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context - return func(*args, **kwargs) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 400, in generate - return self.pretrained_model.generate(**inputs, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context - return func(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 1914, in generate - result = self._sample( - File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2651, in _sample - outputs = self( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1174, in forward - outputs = self.model( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 978, in forward - layer_outputs = decoder_layer( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File 
""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 718, in forward - hidden_states, self_attn_weights, present_key_value = self.self_attn( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 326, in forward - query_states = self.q_proj(hidden_states) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context - return func(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/gemv.py"", line 162, in forward - assert AWQ_INSTALLED, ( -AssertionError: AWQ kernels could not be loaded. Please install them from https://github.com/casper-hansen/AutoAWQ_kernels - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemv-eager,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,openai-community/gpt2-large,openai-community/gpt2-large,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.1,,0.30.1,,,,1.19.2,,,,0.11.1,,,,,,,,,,,,,,,,,,,,,,,,,,,MB,1255.817216,2645.03296,0.0,1998.585856,1692.285952,s,10,0.2421620788574219,0.02421620788574219,0.0007151447258735442,0.02405540752410889,0.025104490089416506,0.025451717281341553,0.02572949903488159,"[0.0257989444732666, 0.02358336067199707, 0.023982303619384766, 0.023516639709472657, 0.023505535125732422, 0.02412851142883301, 0.023632959365844728, 0.024505855560302735, 0.024480640411376955, 0.025027328491210938]",tokens/s,10571.432208043005,kWh,2.7753140546097755e-07,1.5207407634566343e-07,8.297378834037525e-07,1.2593433652103935e-06,tokens/kWh,203280540.5356871,MB,1257.193472,2645.03296,0.0,1998.585856,1740.091904,s,10,13.934907348632812,1.3934907348632812,0.025773039375764546,1.3917061157226562,1.4230255249023438,1.4235073303222656,1.423892774658203,"[1.4067874755859375, 1.374333740234375, 1.3706243896484376, 1.3586650390625, 1.362622314453125, 1.376624755859375, 1.4168704833984376, 1.42291845703125, 1.4239891357421874, 1.4214715576171875]",tokens/s,45.21020371633909,kWh,1.628260937647143e-05,8.921926230863981e-06,3.281937236919722e-05,5.802390797653265e-05,tokens/kWh,1085759.339503294,,s,629,14.127670267105099,0.02246052506693975,0.0029503195630523993,0.022187007904052734,0.02265027198791504,0.02293166084289551,0.04536406036376953,"[0.022769664764404295, 
0.022405120849609376, 0.022578176498413087, 0.023195648193359376, 0.023209983825683594, 0.022948863983154297, 0.022355968475341798, 0.022914047241210937, 0.023353343963623048, 0.023217151641845703, 0.023201791763305665, 0.022947839736938477, 0.023254016876220703, 0.023146495819091797, 0.023326719284057617, 0.023176191329956054, 0.022323200225830078, 0.022245376586914063, 0.02193715286254883, 0.02272153663635254, 0.023012351989746094, 0.023163904190063478, 0.02294272041320801, 0.02234060859680176, 0.02215936088562012, 0.02208768081665039, 0.022123519897460937, 0.02208460807800293, 0.022091775894165038, 0.02206617546081543, 0.022107135772705077, 0.02208358383178711, 0.022021120071411132, 0.022038528442382813, 0.02208051109313965, 0.0220150089263916, 0.02190025520324707, 0.022139904022216796, 0.022179840087890625, 0.02205904006958008, 0.02192790412902832, 0.022039552688598633, 0.022223871231079103, 0.02220953559875488, 0.02188390350341797, 0.02201190376281738, 0.022675455093383787, 0.02223411178588867, 0.02206105613708496, 0.022090751647949217, 0.021857280731201172, 0.021324800491333007, 0.021366783142089844, 0.021997568130493163, 0.022114303588867186, 0.02204569625854492, 0.022026239395141603, 0.02142207908630371, 0.021380096435546874, 0.021425151824951173, 0.021315584182739256, 0.0213309440612793, 0.04596223831176758, 0.021974016189575195, 0.022114303588867186, 0.022154239654541014, 0.02209280014038086, 0.02191974449157715, 0.02253824043273926, 0.021955583572387697, 0.022146047592163084, 0.022205440521240235, 0.02226585578918457, 0.02211737632751465, 0.02268262481689453, 0.02250649642944336, 0.021916671752929686, 0.02152448081970215, 0.021397504806518555, 0.021328895568847657, 0.021774335861206053, 0.021780479431152345, 0.021586944580078125, 0.021465087890625, 0.022072320938110353, 0.021960704803466798, 0.022072320938110353, 0.022037504196166992, 0.022006784439086914, 0.022033407211303712, 0.021959680557250977, 0.022055936813354493, 0.022128639221191407, 0.022040576934814454, 0.0220948486328125, 0.022106111526489256, 0.02224742317199707, 0.02228531265258789, 0.02211327934265137, 0.021979135513305666, 0.022032384872436524, 0.0219289608001709, 0.021993471145629884, 0.02209791946411133, 0.022039552688598633, 0.0212992000579834, 0.021410816192626952, 0.021403648376464843, 0.02163609504699707, 0.02124595260620117, 0.021142528533935546, 0.021389312744140625, 0.021386240005493166, 0.0214466552734375, 0.021399551391601563, 0.021315584182739256, 0.021326847076416015, 0.021357568740844726, 0.021409791946411134, 0.021377023696899415, 0.021366783142089844, 0.021364736557006835, 0.021374975204467773, 0.021353471755981446, 0.021223424911499023, 0.04640563201904297, 0.02209791946411133, 0.022133760452270508, 0.02215936088562012, 0.022190080642700196, 0.02220953559875488, 0.022187007904052734, 0.022112255096435548, 0.022176767349243166, 0.022145023345947267, 0.022112255096435548, 0.022082559585571288, 0.022288383483886717, 0.02206211280822754, 0.022308832168579103, 0.02231091117858887, 0.02206924819946289, 0.022254592895507814, 0.02222489547729492, 0.022042623519897463, 0.0214835205078125, 0.021370880126953123, 0.021332992553710937, 0.021533695220947266, 0.021412864685058593, 0.02131455993652344, 0.021326847076416015, 0.021283840179443358, 0.021344255447387696, 0.02128895950317383, 0.0213309440612793, 0.021343231201171875, 0.02143027114868164, 0.021319679260253906, 0.021373952865600586, 0.0214783992767334, 0.021329919815063478, 0.021348352432250976, 0.021346303939819337, 0.021257247924804688, 
0.021285856246948242, 0.02147225570678711, 0.021311487197875977, 0.02129817581176758, 0.021356544494628905, 0.021321727752685548, 0.021562368392944335, 0.021621759414672852, 0.02251571273803711, 0.022560768127441407, 0.02229555130004883, 0.022179840087890625, 0.02206003189086914, 0.022012928009033202, 0.021967872619628907, 0.02206105613708496, 0.02226688003540039, 0.021485567092895508, 0.021444608688354492, 0.021311487197875977, 0.02147532844543457, 0.02142617607116699, 0.02152448081970215, 0.04532940673828125, 0.021456960678100587, 0.0213636474609375, 0.021334016799926758, 0.021410816192626952, 0.02126540756225586, 0.021332992553710937, 0.021356544494628905, 0.021361663818359376, 0.021425151824951173, 0.021332992553710937, 0.021401599884033205, 0.02150297546386719, 0.021410816192626952, 0.02147635269165039, 0.021433343887329103, 0.021370880126953123, 0.021360639572143555, 0.021415935516357423, 0.02128179168701172, 0.02147225570678711, 0.02146611213684082, 0.021404672622680664, 0.021355520248413085, 0.021335039138793945, 0.02127052879333496, 0.02127462387084961, 0.021300224304199217, 0.02129408073425293, 0.02132275199890137, 0.021327871322631836, 0.021271551132202148, 0.02127462387084961, 0.021307392120361326, 0.02125312042236328, 0.021362688064575194, 0.02129817581176758, 0.0212674560546875, 0.02125721549987793, 0.021336063385009766, 0.021368831634521485, 0.021367807388305664, 0.022182912826538087, 0.02204364776611328, 0.022477823257446287, 0.022156288146972656, 0.022164480209350586, 0.022153215408325197, 0.02215116882324219, 0.022112255096435548, 0.02223308753967285, 0.021774335861206053, 0.021360639572143555, 0.021424127578735352, 0.02147430419921875, 0.021820415496826173, 0.022039552688598633, 0.021996543884277343, 0.021987327575683592, 0.021978111267089845, 0.022215679168701173, 0.021952512741088868, 0.021716991424560548, 0.045139968872070314, 0.02131865692138672, 0.02143337631225586, 0.021398496627807618, 0.021307392120361326, 0.021493759155273438, 0.021393407821655275, 0.021301248550415038, 0.021374975204467773, 0.02145280075073242, 0.021390335083007812, 0.02150297546386719, 0.021489664077758788, 0.021440511703491212, 0.02142720031738281, 0.021526527404785157, 0.021372928619384765, 0.021565439224243164, 0.02143129539489746, 0.02141798400878906, 0.021360639572143555, 0.021386240005493166, 0.02146303939819336, 0.021389312744140625, 0.021300224304199217, 0.021390335083007812, 0.02137500762939453, 0.02170979118347168, 0.021416959762573243, 0.02166169548034668, 0.021412864685058593, 0.02129100799560547, 0.021393407821655275, 0.021315584182739256, 0.02129817581176758, 0.021374975204467773, 0.02126540756225586, 0.021320703506469727, 0.021309440612792968, 0.021335039138793945, 0.02105958366394043, 0.021311487197875977, 0.021408767700195314, 0.02206208038330078, 0.021833728790283204, 0.021940223693847655, 0.022131711959838866, 0.022004735946655272, 0.022141952514648438, 0.022601728439331056, 0.022064128875732423, 0.022550527572631835, 0.022140928268432617, 0.022034431457519533, 0.022132736206054687, 0.022040576934814454, 0.02220134353637695, 0.022141952514648438, 0.02208563232421875, 0.02213478469848633, 0.022071296691894532, 0.022076416015625, 0.02202828788757324, 0.04676198577880859, 0.022076416015625, 0.022165504455566407, 0.022025215148925782, 0.022189056396484375, 0.022040576934814454, 0.022009855270385743, 0.02191155242919922, 0.021346303939819337, 0.021363712310791014, 0.021751808166503905, 0.022120447158813478, 0.022120447158813478, 0.022024192810058595, 0.022178815841674804, 
0.022125568389892578, 0.0214466552734375, 0.021332992553710937, 0.021196832656860353, 0.021298143386840822, 0.02126131248474121, 0.021347328186035155, 0.021440511703491212, 0.021309440612792968, 0.02128998374938965, 0.02130636787414551, 0.02127462387084961, 0.021506048202514647, 0.021967872619628907, 0.02267852783203125, 0.022377471923828125, 0.022228992462158204, 0.021961727142333985, 0.02238057518005371, 0.022480863571166992, 0.022366207122802736, 0.022130687713623046, 0.022410240173339844, 0.022207487106323243, 0.02203647994995117, 0.02210406494140625, 0.022013952255249023, 0.022139904022216796, 0.02206208038330078, 0.022072320938110353, 0.022063104629516602, 0.022055936813354493, 0.022146047592163084, 0.021373952865600586, 0.021384191513061524, 0.0214517765045166, 0.021425151824951173, 0.021285888671875, 0.02165247917175293, 0.02213580894470215, 0.023093248367309572, 0.022350847244262697, 0.021918720245361328, 0.02126950454711914, 0.02141798400878906, 0.021342208862304687, 0.02129100799560547, 0.02127872085571289, 0.04537753677368164, 0.021362688064575194, 0.02168012809753418, 0.021445632934570313, 0.021562368392944335, 0.023380992889404296, 0.02294988822937012, 0.022619136810302733, 0.022610944747924806, 0.022618112564086915, 0.02253107261657715, 0.022600704193115235, 0.022537216186523438, 0.02262937545776367, 0.022548479080200197, 0.02261299133300781, 0.022598655700683593, 0.022401023864746093, 0.02266828727722168, 0.022421503067016603, 0.02246348762512207, 0.02240716743469238, 0.02251263999938965, 0.022780927658081054, 0.02266111946105957, 0.022450176239013672, 0.022542335510253905, 0.02250547218322754, 0.02253107261657715, 0.022403072357177735, 0.02250752067565918, 0.0224399356842041, 0.022510591506958007, 0.022391807556152343, 0.02243071937561035, 0.022434816360473633, 0.022566911697387695, 0.02246451187133789, 0.02249830436706543, 0.022501375198364256, 0.022556671142578123, 0.02242252731323242, 0.02248294448852539, 0.022494207382202147, 0.02264473533630371, 0.022535167694091796, 0.02271027183532715, 0.022768640518188478, 0.02254745674133301, 0.022559743881225586, 0.022617088317871094, 0.022611967086791994, 0.022586368560791017, 0.022608896255493165, 0.022517759323120116, 0.022565887451171874, 0.022566911697387695, 0.02251571273803711, 0.022542335510253905, 0.02262118339538574, 0.022635520935058592, 0.02247065544128418, 0.022392831802368163, 0.04779520034790039, 0.022597631454467772, 0.022558719635009765, 0.02245734405517578, 0.022569984436035157, 0.022467584609985353, 0.02248192024230957, 0.022486015319824217, 0.02246143913269043, 0.02248294448852539, 0.022742015838623047, 0.022492160797119142, 0.022527008056640624, 0.02257302474975586, 0.022579200744628908, 0.02274508857727051, 0.02345881652832031, 0.023000064849853515, 0.02261299133300781, 0.02246246337890625, 0.02247987174987793, 0.022460416793823244, 0.022501375198364256, 0.022377471923828125, 0.022587392807006838, 0.022579200744628908, 0.022409215927124023, 0.022552576065063477, 0.022545408248901368, 0.022526975631713866, 0.022583295822143554, 0.02240924835205078, 0.02254640007019043, 0.022492191314697266, 0.022502368927001953, 0.02255462455749512, 0.022578176498413087, 0.022533119201660155, 0.022567935943603516, 0.022429695129394533, 0.02269900894165039, 0.022544384002685547, 0.022562816619873048, 0.022451200485229493, 0.022467584609985353, 0.022537216186523438, 0.022425600051879883, 0.0225218563079834, 0.02251571273803711, 0.022526975631713866, 0.022443008422851563, 0.022509567260742186, 0.02247987174987793, 
0.022441984176635742, 0.02262019157409668, 0.02277884864807129, 0.022132736206054687, 0.022345727920532226, 0.023601152420043944, 0.022863872528076173, 0.022647808074951172, 0.02253107261657715, 0.02285875129699707, 0.04789452743530274, 0.022619136810302733, 0.022617088317871094, 0.022478847503662108, 0.02246963119506836, 0.022590463638305663, 0.022616064071655274, 0.022584320068359375, 0.02249830436706543, 0.022614015579223632, 0.02253824043273926, 0.022575103759765625, 0.022889471054077147, 0.022776832580566408, 0.022552576065063477, 0.02266316795349121, 0.022708223342895507, 0.022755327224731444, 0.023163904190063478, 0.023447551727294923, 0.022767616271972657, 0.022598655700683593, 0.022563840866088865, 0.022608896255493165, 0.022380544662475587, 0.022608896255493165, 0.022518783569335937, 0.02244915199279785, 0.022536224365234375, 0.022473695755004883, 0.02252390480041504, 0.022567935943603516, 0.02270412826538086, 0.022590463638305663, 0.022618112564086915, 0.022537216186523438, 0.022511615753173828, 0.022529024124145508, 0.022445056915283205, 0.022580223083496095, 0.022503423690795898, 0.022542335510253905, 0.022564863204956053, 0.02249728012084961, 0.022584320068359375, 0.022634496688842775, 0.022619136810302733, 0.022861824035644532, 0.02261299133300781, 0.022543359756469726, 0.02264575958251953, 0.022572032928466795, 0.022399999618530272, 0.022557695388793944, 0.022495231628417968, 0.022634496688842775, 0.02253107261657715, 0.022536191940307617, 0.02249625587463379, 0.0224716796875, 0.022382591247558595, 0.022590463638305663, 0.02266012763977051, 0.0479508171081543, 0.022475776672363282, 0.02249625587463379, 0.02250547218322754, 0.02250547218322754, 0.022533119201660155, 0.02248089599609375, 0.022503423690795898, 0.022444032669067384, 0.022507551193237305, 0.022583263397216797, 0.02248089599609375, 0.022525951385498046, 0.022509567260742186, 0.022599679946899414, 0.022622207641601562, 0.022597631454467772, 0.022395904541015626, 0.022401023864746093, 0.02250547218322754, 0.022385663986206054, 0.022564863204956053, 0.02261299133300781, 0.02285260772705078, 0.022624256134033204, 0.0225167350769043, 0.022809600830078124, 0.022617088317871094, 0.022585344314575196, 0.022565887451171874, 0.022597631454467772, 0.022520832061767578, 0.022486015319824217, 0.022468608856201173, 0.022527999877929687, 0.022373376846313478, 0.02243891143798828, 0.022764575958251952, 0.022584287643432618, 0.022425600051879883, 0.02244607925415039, 0.022509567260742186, 0.02246451187133789, 0.0224532470703125, 0.022565887451171874, 0.02253926467895508, 0.02261299133300781, 0.022559743881225586, 0.022495264053344725, 0.022618080139160158, 0.022915071487426757, 0.02374553680419922, 0.022756351470947265, 0.02259660720825195, 0.022460416793823244, 0.022443008422851563, 0.02251571273803711, 0.022524927139282228, 0.022544384002685547, 0.022533119201660155, 0.022412288665771486, 0.022402048110961914, 0.02247987174987793]",tokens/s,44.522556664177316,,,,,,,, -4bit-awq-gemv-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,tiiuae/falcon-180B,tiiuae/falcon-180B,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 
7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 304, in hf_raise_for_status - response.raise_for_status() - File ""/usr/local/lib/python3.10/dist-packages/requests/models.py"", line 1024, in raise_for_status - raise HTTPError(http_error_msg, response=self) -requests.exceptions.HTTPError: 403 Client Error: Forbidden for url: https://huggingface.co/tiiuae/falcon-180B/resolve/main/config.json - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1722, in _get_metadata_or_catch_error - metadata = get_hf_file_metadata(url=url, proxies=proxies, timeout=etag_timeout, headers=headers) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1645, in get_hf_file_metadata - r = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 372, in _request_wrapper - response = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 396, in _request_wrapper - hf_raise_for_status(response) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 367, in hf_raise_for_status - raise HfHubHTTPError(message, response=response) from e -huggingface_hub.utils._errors.HfHubHTTPError: (Request ID: Root=1-6694990d-5035294a29e862db4134ff12;e3557df7-fe3c-4a82-9341-6e5d52ba71fd) - -403 Forbidden: Please enable access to public gated repositories in your fine-grained token settings to view this repository.. -Cannot access content at: https://huggingface.co/tiiuae/falcon-180B/resolve/main/config.json. -If you are trying to create or update content,make sure you have a token with the `write` role. 
- -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1221, in hf_hub_download - return _hf_hub_download_to_cache_dir( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1325, in _hf_hub_download_to_cache_dir - _raise_on_head_call_error(head_call_error, force_download, local_files_only) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1826, in _raise_on_head_call_error - raise LocalEntryNotFoundError( -huggingface_hub.utils._errors.LocalEntryNotFoundError: An error happened while trying to locate the file on the Hub and we cannot find the requested files in the local cache. Please check your connection and try again or make sure your Internet connection is on. - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 445, in cached_file - raise EnvironmentError( -OSError: We couldn't connect to 'https://huggingface.co' to load this file, couldn't find it in the cached files and it looks like tiiuae/falcon-180B is not the path to a directory containing a file named config.json. -Checkout your internet connection or see how to run the library in offline mode at 'https://huggingface.co/docs/transformers/installation#offline-mode'. 
- -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemv-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,Deci/DeciLM-7B,Deci/DeciLM-7B,cuda,0,42,,,True,,,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,deci,MB,4386.320384,4769.447936,0.0,4183.81824,4182.069248,s,1,10.2497060546875,10.2497060546875,0.0,10.2497060546875,10.2497060546875,10.2497060546875,10.2497060546875,[10.2497060546875],,kWh,4.045410940555536e-05,2.2155764784764374e-05,5.613976713397806e-05,0.00011874964132429778,,MB,1634.553856,4953.997312,0.0,4307.550208,4281.174016,s,10,1.0666554794311522,0.10666554794311525,6.260636581217446e-05,0.10666915130615234,0.1067264663696289,0.10674532852172852,0.1067604182434082,"[0.10653314971923829, 0.10672227478027344, 0.10676419067382813, 0.10660777282714844, 0.10670089721679688, 0.10662054443359376, 0.1067088623046875, 0.10665948486328125, 0.1066600341796875, 0.10667826843261718]",tokens/s,2400.0251715439067,kWh,1.2603028838504795e-06,6.905844916736116e-07,7.296394725999834e-06,9.247282101523925e-06,tokens/kWh,27683809.922681168,MB,1638.62528,4968.677376,0.0,4322.230272,4281.176576,s,10,17.65544177246094,1.765544177246094,0.006942563081612433,1.7622190551757813,1.7775811645507813,1.7781150451660155,1.778542149658203,"[1.7699185791015626, 1.76092138671875, 1.7647049560546875, 1.761789306640625, 1.7579716796875, 1.7606292724609376, 1.760746337890625, 1.7626488037109376, 1.7774625244140625, 1.77864892578125]",tokens/s,35.68304934644443,kWh,2.083327400260803e-05,1.1416995994804612e-05,5.836943558439925e-05,9.061970558181187e-05,tokens/kWh,695213.0289490217,,s,630,17.652671493530278,0.02802011348179408,0.0003915834245447711,0.027889663696289063,0.02844385223388672,0.028749209403991696,0.029804800319671636,"[0.027877376556396483, 0.02773401641845703, 0.027617279052734374, 0.027704320907592773, 0.02777292823791504, 0.027841535568237305, 0.027793407440185547, 0.027966463088989257, 0.02793267250061035, 0.027905023574829102, 0.027817983627319336, 0.028257280349731444, 0.027893760681152343, 0.027971584320068358, 0.027860992431640624, 0.027936767578125, 0.028091392517089843, 0.028018688201904295, 0.02792959976196289, 0.029411327362060546, 0.028442623138427735, 0.02837401580810547, 0.028003328323364256, 0.027901952743530273, 0.027880447387695313, 0.027882495880126954, 0.028227584838867188, 0.030067712783813476, 0.02891366386413574, 0.028237823486328126, 0.028110847473144532, 0.027915264129638673, 0.027792383193969726, 0.027860992431640624, 0.027842559814453126, 0.02784364891052246, 0.02820806312561035, 0.028057600021362306, 0.02778009605407715, 0.027845632553100585, 0.027852800369262694, 0.02795110321044922, 0.028273664474487304, 0.02792755126953125, 0.028083200454711913, 0.027874303817749024, 0.028258304595947265, 0.027849727630615235, 0.027801599502563477, 0.027889663696289063, 0.027855871200561523, 0.027853824615478515, 0.027784191131591796, 0.027800575256347656, 0.02975027275085449, 0.02982707214355469, 0.02855833625793457, 0.028197887420654297, 
0.028192768096923827, 0.02789990425109863, 0.027886592864990234, 0.027864063262939453, 0.02792959976196289, 0.027749376296997072, 0.027752447128295898, 0.027894784927368164, 0.027862016677856444, 0.02776371192932129, 0.02778726387023926, 0.02750771141052246, 0.027701248168945314, 0.027707391738891602, 0.02795417594909668, 0.027801599502563477, 0.02792857551574707, 0.027930624008178712, 0.02792550468444824, 0.02772172737121582, 0.028210176467895507, 0.027724800109863282, 0.02797875213623047, 0.027842559814453126, 0.027800575256347656, 0.02776268768310547, 0.02776780891418457, 0.027811840057373048, 0.028411903381347657, 0.02819584083557129, 0.02774732780456543, 0.028069887161254883, 0.028302335739135744, 0.027685888290405275, 0.027815935134887695, 0.02830438423156738, 0.028210176467895507, 0.02775859260559082, 0.02772684860229492, 0.027792383193969726, 0.027878400802612304, 0.028099584579467773, 0.028233728408813476, 0.02836582374572754, 0.028027904510498046, 0.02795212745666504, 0.02859929656982422, 0.02768076705932617, 0.027860992431640624, 0.028478464126586913, 0.028252159118652344, 0.02792959976196289, 0.028422143936157225, 0.028243967056274414, 0.02790809631347656, 0.027773952484130858, 0.02798899269104004, 0.027784191131591796, 0.02797056007385254, 0.027785215377807617, 0.02778726387023926, 0.027838464736938476, 0.027822080612182616, 0.027840511322021484, 0.028672000885009766, 0.027808767318725586, 0.02774732780456543, 0.0279685115814209, 0.027792383193969726, 0.027769855499267578, 0.027642879486083984, 0.028047359466552735, 0.029074432373046875, 0.028889087677001952, 0.027868160247802733, 0.02855526351928711, 0.027880447387695313, 0.027828224182128908, 0.02855116844177246, 0.02796236801147461, 0.027835391998291017, 0.028039167404174805, 0.028203008651733398, 0.027851776123046876, 0.028180479049682617, 0.02842624092102051, 0.02794495964050293, 0.02777292823791504, 0.028247039794921876, 0.028181503295898438, 0.029464576721191408, 0.028053504943847656, 0.028023807525634766, 0.028144639968872072, 0.027883520126342775, 0.02787942314147949, 0.02809753608703613, 0.02856345558166504, 0.027971584320068358, 0.02775961685180664, 0.027930624008178712, 0.027609088897705077, 0.027489280700683592, 0.027862016677856444, 0.02774527931213379, 0.027930624008178712, 0.02778828811645508, 0.027995136260986327, 0.027801599502563477, 0.027907072067260744, 0.02779136085510254, 0.02790399932861328, 0.02775551986694336, 0.027752447128295898, 0.027837440490722655, 0.027765760421752928, 0.02773504066467285, 0.02817433547973633, 0.027829248428344725, 0.027760639190673828, 0.028241920471191406, 0.028646400451660156, 0.027867136001586915, 0.02819174385070801, 0.02773094367980957, 0.028058624267578124, 0.02774015998840332, 0.027809791564941407, 0.02770636749267578, 0.027760639190673828, 0.02793369674682617, 0.02798489570617676, 0.02794905662536621, 0.02779545593261719, 0.027838464736938476, 0.028005376815795898, 0.027852800369262694, 0.027889663696289063, 0.027833343505859375, 0.02793164825439453, 0.028064767837524415, 0.028016639709472657, 0.027896831512451172, 0.027865087509155274, 0.027785215377807617, 0.027844608306884764, 0.027913215637207032, 0.027889663696289063, 0.027997184753417968, 0.027884544372558592, 0.027914239883422853, 0.027862016677856444, 0.02795110321044922, 0.027898880004882814, 0.027873279571533204, 0.028224512100219725, 0.027840511322021484, 0.027686912536621092, 0.027844640731811525, 0.027833311080932618, 0.02776268768310547, 0.02817024040222168, 0.028325887680053712, 0.027829248428344725, 
0.029229055404663085, 0.029181951522827147, 0.028622848510742187, 0.02832691192626953, 0.027856895446777344, 0.027814912796020507, 0.02793471908569336, 0.027909120559692382, 0.027794431686401368, 0.027814912796020507, 0.027797504425048827, 0.02776371192932129, 0.027998207092285156, 0.027881471633911133, 0.027863040924072265, 0.027926528930664062, 0.027831296920776367, 0.02789580726623535, 0.028050432205200194, 0.027848703384399414, 0.02771353530883789, 0.027831296920776367, 0.027831296920776367, 0.027843584060668947, 0.02793574333190918, 0.027872255325317383, 0.027854848861694335, 0.027975679397583008, 0.028155935287475585, 0.027933664321899414, 0.027638784408569338, 0.027701248168945314, 0.028293119430541993, 0.027651071548461914, 0.027603967666625977, 0.027800575256347656, 0.027829248428344725, 0.02791116714477539, 0.027854848861694335, 0.027832319259643554, 0.027682815551757813, 0.027703296661376952, 0.027641855239868163, 0.02776268768310547, 0.0277708797454834, 0.02789580726623535, 0.027819007873535157, 0.02787942314147949, 0.027825151443481445, 0.027863040924072265, 0.027403263092041014, 0.027732992172241212, 0.02753126335144043, 0.027456512451171877, 0.027823104858398437, 0.027778047561645508, 0.028220415115356445, 0.028108800888061523, 0.027543552398681642, 0.028247039794921876, 0.027822080612182616, 0.02751283264160156, 0.027671552658081053, 0.027797504425048827, 0.027808767318725586, 0.027824127197265625, 0.027844608306884764, 0.028061695098876953, 0.028383232116699218, 0.02776268768310547, 0.027685888290405275, 0.027790336608886718, 0.02799001693725586, 0.027792383193969726, 0.027830272674560546, 0.02794598388671875, 0.027866111755371094, 0.02789580726623535, 0.027882495880126954, 0.027817983627319336, 0.02868022346496582, 0.029964256286621093, 0.028876800537109375, 0.027906047821044923, 0.02790297508239746, 0.02757734489440918, 0.027641855239868163, 0.027855871200561523, 0.02775961685180664, 0.028083200454711913, 0.02792550468444824, 0.02795827293395996, 0.02872217559814453, 0.028011520385742186, 0.027867136001586915, 0.02797056007385254, 0.028249120712280272, 0.02779132843017578, 0.02775551986694336, 0.028617727279663087, 0.028420095443725587, 0.027821056365966795, 0.027741184234619142, 0.027830272674560546, 0.02773811149597168, 0.027801599502563477, 0.02775449562072754, 0.027839487075805663, 0.027817983627319336, 0.029274112701416017, 0.03013734436035156, 0.028645376205444335, 0.02797875213623047, 0.02795417594909668, 0.02779648017883301, 0.027848703384399414, 0.027851776123046876, 0.02789990425109863, 0.027670528411865233, 0.027867136001586915, 0.027704320907592773, 0.027867136001586915, 0.027798528671264647, 0.028210176467895507, 0.028161024093627928, 0.027851776123046876, 0.028219392776489258, 0.027832319259643554, 0.027520000457763674, 0.027687936782836913, 0.027837440490722655, 0.027682815551757813, 0.02779136085510254, 0.027667455673217774, 0.02771046447753906, 0.028399616241455077, 0.027998207092285156, 0.027853824615478515, 0.027648000717163085, 0.028021760940551758, 0.027458560943603515, 0.02775551986694336, 0.027862016677856444, 0.027883520126342775, 0.028010496139526365, 0.027850751876831056, 0.027877376556396483, 0.027580415725708008, 0.027620351791381836, 0.027769855499267578, 0.02797772789001465, 0.027857919692993165, 0.02819993591308594, 0.027868160247802733, 0.027660287857055665, 0.027732992172241212, 0.028422143936157225, 0.02812723159790039, 0.02772787284851074, 0.028116992950439453, 0.027786239624023438, 0.027814912796020507, 0.028089344024658205, 
0.027871231079101562, 0.027896831512451172, 0.027907072067260744, 0.02777190399169922, 0.028445695877075194, 0.02791219139099121, 0.027833343505859375, 0.027769855499267578, 0.027829248428344725, 0.02799001693725586, 0.027870208740234374, 0.027768831253051757, 0.02795008087158203, 0.027830272674560546, 0.027811840057373048, 0.028023807525634766, 0.028065792083740236, 0.028058624267578124, 0.02796953582763672, 0.028040191650390626, 0.028056575775146485, 0.027744255065917968, 0.028019712448120116, 0.02792755126953125, 0.027843584060668947, 0.02776268768310547, 0.02778009605407715, 0.027801599502563477, 0.028027904510498046, 0.02814156723022461, 0.028083200454711913, 0.027790336608886718, 0.02834534454345703, 0.028280832290649413, 0.02776371192932129, 0.02775654411315918, 0.02775961685180664, 0.027797504425048827, 0.02779955291748047, 0.02780467224121094, 0.029042688369750977, 0.02814259147644043, 0.027831296920776367, 0.027691007614135742, 0.02834636878967285, 0.02775961685180664, 0.02795212745666504, 0.027794431686401368, 0.027785215377807617, 0.027830272674560546, 0.02812211227416992, 0.027749376296997072, 0.02773196792602539, 0.02779648017883301, 0.02777190399169922, 0.02813337516784668, 0.02795110321044922, 0.027865087509155274, 0.027662336349487306, 0.027686912536621092, 0.028071935653686524, 0.02795110321044922, 0.02775859260559082, 0.028269567489624024, 0.028053504943847656, 0.027893760681152343, 0.02796134376525879, 0.027757568359375, 0.027757568359375, 0.02896384048461914, 0.028024831771850587, 0.027827199935913087, 0.02779648017883301, 0.027832319259643554, 0.02776678466796875, 0.02770636749267578, 0.0276889591217041, 0.02773094367980957, 0.02773196792602539, 0.02792755126953125, 0.028030975341796875, 0.027992063522338868, 0.027709440231323244, 0.028103679656982423, 0.030306304931640625, 0.02862387275695801, 0.027717632293701173, 0.02818662452697754, 0.027817983627319336, 0.027824127197265625, 0.028005376815795898, 0.027889663696289063, 0.027813888549804686, 0.027867136001586915, 0.027425792694091795, 0.027936767578125, 0.027923456192016603, 0.02779545593261719, 0.027810815811157227, 0.028256256103515624, 0.028059648513793944, 0.027832319259643554, 0.027840511322021484, 0.027769855499267578, 0.027841535568237305, 0.027877376556396483, 0.027905023574829102, 0.028402687072753906, 0.02858188819885254, 0.027846656799316406, 0.02775961685180664, 0.028598272323608398, 0.028078079223632812, 0.027812864303588865, 0.027892736434936522, 0.027852800369262694, 0.02796953582763672, 0.027852800369262694, 0.027809791564941407, 0.028221439361572266, 0.027901952743530273, 0.02790297508239746, 0.027852800369262694, 0.027797504425048827, 0.027824127197265625, 0.028056575775146485, 0.028249088287353515, 0.027864063262939453, 0.028366847991943358, 0.027870208740234374, 0.027983871459960938, 0.028132352828979492, 0.028786687850952147, 0.028495872497558594, 0.028828672409057617, 0.02813542366027832, 0.028267520904541016, 0.02832896041870117, 0.028630016326904296, 0.02811903953552246, 0.02833305549621582, 0.028266496658325195, 0.028406784057617186, 0.02809753608703613, 0.02776678466796875, 0.02798489570617676, 0.028094463348388672, 0.027897855758666993, 0.030244863510131836, 0.02939801597595215, 0.02879897689819336, 0.028511232376098632, 0.027829248428344725, 0.027840511322021484, 0.028212223052978515, 0.028269567489624024, 0.02792140769958496, 0.027821056365966795, 0.02817024040222168, 0.027971584320068358, 0.028248064041137694, 0.027845632553100585, 0.02783852767944336, 0.027781055450439452, 
0.027855871200561523, 0.027817983627319336, 0.027880447387695313, 0.02815692710876465, 0.027883520126342775, 0.027859968185424806, 0.027757568359375, 0.027744255065917968, 0.02795008087158203, 0.027837440490722655, 0.02775449562072754, 0.029868032455444334, 0.029620223999023438, 0.028871679306030275, 0.02907651138305664, 0.028104671478271483, 0.027983871459960938, 0.028033023834228517, 0.02772172737121582, 0.027749376296997072, 0.028281856536865234, 0.028924928665161134, 0.02859110450744629, 0.027869184494018553, 0.028197887420654297, 0.027884544372558592, 0.027813888549804686, 0.02832383918762207, 0.02813132858276367, 0.02774323272705078, 0.027786239624023438, 0.028310527801513673, 0.02797772789001465, 0.027808767318725586, 0.029058048248291016, 0.028460031509399415, 0.028869632720947266, 0.028083200454711913, 0.028260351181030274, 0.029171712875366212, 0.028515327453613282, 0.02817945671081543, 0.02877132797241211, 0.028454912185668944, 0.02813132858276367, 0.028499967575073244, 0.028309503555297853, 0.027996160507202147, 0.028229631423950196, 0.02791731262207031, 0.028475391387939454, 0.02855014419555664, 0.02852659225463867, 0.028217344284057616, 0.027863040924072265, 0.028089344024658205, 0.027678720474243163, 0.027907072067260744, 0.028094463348388672, 0.02792448043823242, 0.028273664474487304, 0.027996160507202147, 0.028579839706420897, 0.02812723159790039, 0.02813542366027832, 0.02790809631347656, 0.028262399673461915, 0.028428287506103517, 0.028075008392333983, 0.028177408218383788, 0.02775449562072754, 0.02777292823791504, 0.028099584579467773, 0.0281343994140625, 0.027832319259643554, 0.02855219268798828, 0.028443647384643556, 0.028471296310424804, 0.029428735733032226, 0.028291072845458985, 0.028299264907836914]",tokens/s,35.68864917873172,,,,,,,, -4bit-awq-gemv-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,facebook/opt-2.7b,facebook/opt-2.7b,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run - report = scenario.run(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 105, in run - _ = backend.generate(self.inputs, self.config.generate_kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context - return func(*args, **kwargs) - File 
""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 400, in generate - return self.pretrained_model.generate(**inputs, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context - return func(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 1914, in generate - result = self._sample( - File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2651, in _sample - outputs = self( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 1118, in forward - outputs = self.model.decoder( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 884, in forward - layer_outputs = decoder_layer( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 525, in forward - hidden_states, self_attn_weights, present_key_value = self.self_attn( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 155, in forward - query_states = self.q_proj(hidden_states) * self.scaling - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context - return func(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/gemv.py"", line 162, in forward - assert AWQ_INSTALLED, ( -AssertionError: AWQ kernels could not be loaded. 
Please install them from https://github.com/casper-hansen/AutoAWQ_kernels - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemv-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,huggyllama/llama-7b,huggyllama/llama-7b,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run - report = scenario.run(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 105, in run - _ = backend.generate(self.inputs, self.config.generate_kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context - return func(*args, **kwargs) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 400, in generate - return self.pretrained_model.generate(**inputs, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context - return func(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 1914, in generate - result = self._sample( - File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2651, in _sample - outputs = self( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1174, in forward - outputs = self.model( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 978, in forward - layer_outputs = decoder_layer( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File 
""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 718, in forward - hidden_states, self_attn_weights, present_key_value = self.self_attn( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 326, in forward - query_states = self.q_proj(hidden_states) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context - return func(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/gemv.py"", line 162, in forward - assert AWQ_INSTALLED, ( -AssertionError: AWQ kernels could not be loaded. Please install them from https://github.com/casper-hansen/AutoAWQ_kernels - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemv-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,facebook/opt-13b,facebook/opt-13b,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run - report = scenario.run(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 105, in run - _ = backend.generate(self.inputs, self.config.generate_kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context - return func(*args, **kwargs) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 400, in generate - return self.pretrained_model.generate(**inputs, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context - return 
func(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 1914, in generate - result = self._sample( - File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2651, in _sample - outputs = self( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 1118, in forward - outputs = self.model.decoder( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 884, in forward - layer_outputs = decoder_layer( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 525, in forward - hidden_states, self_attn_weights, present_key_value = self.self_attn( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 155, in forward - query_states = self.q_proj(hidden_states) * self.scaling - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context - return func(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/gemv.py"", line 162, in forward - assert AWQ_INSTALLED, ( -AssertionError: AWQ kernels could not be loaded. 
Please install them from https://github.com/casper-hansen/AutoAWQ_kernels - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemv-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,EleutherAI/pythia-70m,EleutherAI/pythia-70m,cuda,0,42,,,True,,,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,gpt_neox,MB,840.839168,745.013248,0.0,159.383552,141.760512,s,1,7.17542431640625,7.17542431640625,0.0,7.17542431640625,7.17542431640625,7.17542431640625,7.17542431640625,[7.17542431640625],,kWh,4.247864186114233e-06,2.3122588350580126e-06,5.008059561995015e-06,1.156818258316726e-05,,MB,1480.597504,868.745216,0.0,222.298112,184.771584,s,26,0.1927558732032776,0.00741368743089529,0.00021546919819844304,0.007349008083343506,0.007567584037780762,0.007636911988258362,0.008195280194282532,"[0.00837491226196289, 0.0074537920951843264, 0.007302207946777344, 0.007506944179534912, 0.007347968101501465, 0.007350048065185547, 0.0073547520637512204, 0.007339680194854736, 0.007316832065582275, 0.007337952136993408, 0.007656383991241455, 0.007578495979309082, 0.007394976139068603, 0.007364895820617676, 0.007355167865753174, 0.0072501120567321775, 0.007355072021484375, 0.007287968158721924, 0.007327231884002685, 0.007337984085083008, 0.0073283519744873045, 0.007556672096252441, 0.007337152004241943, 0.007404255867004395, 0.0072830719947814945, 0.007252992153167725]",tokens/s,34530.724742071434,kWh,8.798706411744079e-08,4.821255057190802e-08,1.8151003186300777e-07,3.177096465523566e-07,tokens/kWh,805767161.2366759,MB,1527.119872,868.745216,0.0,222.298112,184.774144,s,26,10.049767700195313,0.38652952693058895,0.0025573338159841285,0.3856804504394531,0.39026602172851566,0.39155116271972656,0.39310508728027344,"[0.38904452514648435, 0.38557144165039064, 0.38488446044921876, 0.3862674865722656, 0.3856827697753906, 0.38500714111328127, 0.38518685913085937, 0.38537088012695314, 0.3841073913574219, 0.3848045959472656, 0.3834480590820312, 0.38416796875, 0.3907237548828125, 0.3856781311035156, 0.3844452819824219, 0.3833303527832031, 0.38510556030273435, 0.3935311279296875, 0.38677932739257814, 0.38784188842773437, 0.38641961669921876, 0.38570404052734375, 0.3863427429199219, 0.3886870422363281, 0.38980828857421873, 0.39182696533203126]",tokens/s,162.98884201752904,kWh,4.506343167799829e-06,2.4692647621104934e-06,7.045920963752933e-06,1.4021528893663259e-05,tokens/kWh,4493090.623553296,,s,1638,10.039313482761395,0.006129007010232834,0.0001229051475776163,0.006096896171569824,0.006280499172210694,0.006356121563911438,0.006548561954498289,"[0.006731743812561035, 0.00637337589263916, 0.006304768085479737, 0.006355967998504639, 0.006358016014099121, 0.006288383960723877, 0.006428671836853027, 0.00628326416015625, 0.006343679904937744, 0.006335487842559814, 0.0062494721412658695, 0.0061337599754333495, 0.006158336162567139, 0.006392831802368164, 0.006184959888458252, 0.006218751907348633, 0.0063836159706115725, 0.006341631889343262, 0.006132736206054688, 0.006004735946655273, 0.006112256050109863, 
0.006095871925354004, 0.006207488059997559, 0.006090752124786377, 0.006114304065704346, 0.006095871925354004, 0.006116352081298828, 0.006087679862976075, 0.006127615928649902, 0.006086656093597412, 0.0061296639442443845, 0.006086656093597412, 0.006116352081298828, 0.0061265921592712404, 0.006102015972137451, 0.006123519897460937, 0.006074368000030517, 0.0061265921592712404, 0.006081535816192627, 0.006159359931945801, 0.006082560062408447, 0.006108160018920898, 0.006080512046813965, 0.006109183788299561, 0.006082560062408447, 0.006108160018920898, 0.006076416015625, 0.006097919940948486, 0.006087679862976075, 0.006128640174865723, 0.006165503978729248, 0.006176767826080322, 0.006091775894165039, 0.006079487800598145, 0.0060999679565429685, 0.006078464031219482, 0.006109183788299561, 0.006090752124786377, 0.006203392028808594, 0.006100992202758789, 0.006057983875274659, 0.006062079906463623, 0.006109183788299561, 0.006105088233947754, 0.0061348481178283695, 0.006106048107147217, 0.0061265921592712404, 0.0061224961280822755, 0.006146048069000244, 0.006107135772705078, 0.006142975807189942, 0.006105088233947754, 0.0060999679565429685, 0.006120448112487793, 0.006094848155975342, 0.0061224961280822755, 0.006113279819488526, 0.006110208034515381, 0.006079487800598145, 0.006112256050109863, 0.006078464031219482, 0.006118400096893311, 0.006077439785003662, 0.006161407947540283, 0.0060999679565429685, 0.006118400096893311, 0.006067200183868408, 0.006107135772705078, 0.00608460807800293, 0.006093823909759521, 0.006116352081298828, 0.00622489595413208, 0.006220799922943115, 0.006072319984436035, 0.006104063987731933, 0.006046720027923584, 0.006119423866271972, 0.006109183788299561, 0.006118400096893311, 0.006533120155334473, 0.006132736206054688, 0.006081535816192627, 0.006120448112487793, 0.006107135772705078, 0.006086656093597412, 0.006093823909759521, 0.006077439785003662, 0.006127615928649902, 0.006074368000030517, 0.006102015972137451, 0.006090752124786377, 0.006104063987731933, 0.006073344230651856, 0.006108160018920898, 0.006109216213226318, 0.006107103824615478, 0.006073344230651856, 0.006169600009918213, 0.006074368000030517, 0.006108160018920898, 0.006061056137084961, 0.006103040218353272, 0.006090752124786377, 0.006096896171569824, 0.0060928001403808595, 0.006089727878570556, 0.006096896171569824, 0.006131711959838867, 0.006045695781707764, 0.005996543884277344, 0.0063170561790466305, 0.006306816101074219, 0.006498303890228272, 0.006096928119659424, 0.006110176086425782, 0.00608358383178711, 0.00613478422164917, 0.006082560062408447, 0.006145023822784424, 0.0059955201148986816, 0.006066207885742188, 0.006089695930480957, 0.006098944187164307, 0.006112256050109863, 0.006089727878570556, 0.0061562881469726565, 0.006085631847381592, 0.006096896171569824, 0.006077439785003662, 0.00616755199432373, 0.00607539176940918, 0.006088704109191895, 0.006057983875274659, 0.0060999679565429685, 0.0060702719688415525, 0.006111231803894043, 0.00606928014755249, 0.006105055809020996, 0.006091775894165039, 0.006124544143676758, 0.006116352081298828, 0.0060999679565429685, 0.006131711959838867, 0.006087679862976075, 0.006159359931945801, 0.006076416015625, 0.006136832237243653, 0.0061296639442443845, 0.006119423866271972, 0.006050816059112549, 0.0060282878875732426, 0.00601804780960083, 0.006046720027923584, 0.006014976024627685, 0.006045695781707764, 0.0060026879310607914, 0.006034431934356689, 0.006076416015625, 0.006112256050109863, 0.006096896171569824, 0.006106112003326416, 0.005987328052520752, 
0.006118400096893311, 0.00608460807800293, 0.006179840087890625, 0.006107135772705078, 0.006114304065704346, 0.006146048069000244, 0.006083615779876709, 0.0063211522102355954, 0.006293504238128662, 0.006346752166748047, 0.0062863359451293946, 0.006146048069000244, 0.006116352081298828, 0.0061337599754333495, 0.006135807991027832, 0.006097919940948486, 0.006118400096893311, 0.006095871925354004, 0.006128640174865723, 0.006090752124786377, 0.006115327835083008, 0.006103040218353272, 0.006111264228820801, 0.006079455852508545, 0.006106143951416016, 0.006082528114318848, 0.00611737585067749, 0.006097919940948486, 0.006160384178161621, 0.006111231803894043, 0.00611952018737793, 0.006082464218139649, 0.0062679038047790524, 0.0061224961280822755, 0.0061562881469726565, 0.006107135772705078, 0.006104063987731933, 0.006144000053405762, 0.006007808208465576, 0.0061337599754333495, 0.006077439785003662, 0.0061859841346740725, 0.005986303806304932, 0.006057983875274659, 0.006081535816192627, 0.006119423866271972, 0.0060702719688415525, 0.006130688190460205, 0.006077439785003662, 0.006110208034515381, 0.006108160018920898, 0.006089727878570556, 0.006119423866271972, 0.006112256050109863, 0.006129695892333985, 0.006093791961669922, 0.006128640174865723, 0.006146048069000244, 0.0062197761535644535, 0.00601087999343872, 0.0060364799499511715, 0.006196224212646485, 0.0061337599754333495, 0.006105088233947754, 0.006102015972137451, 0.006091839790344238, 0.006136767864227295, 0.006076416015625, 0.006178815841674804, 0.006108160018920898, 0.006103040218353272, 0.00612662410736084, 0.006076384067535401, 0.006128640174865723, 0.006076416015625, 0.0061224961280822755, 0.006112256050109863, 0.0061562881469726565, 0.006033472061157227, 0.006127552032470703, 0.006105088233947754, 0.0061296639442443845, 0.006089727878570556, 0.006145023822784424, 0.0060928001403808595, 0.006124544143676758, 0.0060928001403808595, 0.006139904022216797, 0.00613478422164917, 0.006109183788299561, 0.006120448112487793, 0.006100992202758789, 0.006121471881866455, 0.006090752124786377, 0.006120448112487793, 0.006079487800598145, 0.00613478422164917, 0.006100992202758789, 0.006116352081298828, 0.0060999679565429685, 0.006118400096893311, 0.006160384178161621, 0.006115327835083008, 0.006085631847381592, 0.006141952037811279, 0.006150144100189209, 0.006104063987731933, 0.006131711959838867, 0.006071296215057373, 0.006150144100189209, 0.006061056137084961, 0.006161407947540283, 0.006074368000030517, 0.006080512046813965, 0.005996543884277344, 0.006519807815551758, 0.006364160060882569, 0.0062566399574279785, 0.006130688190460205, 0.006001664161682129, 0.00616755199432373, 0.006088704109191895, 0.006132768154144287, 0.0060917439460754395, 0.006138879776000977, 0.006072319984436035, 0.006124544143676758, 0.006080512046813965, 0.006110208034515381, 0.006031360149383545, 0.0060364799499511715, 0.0059955201148986816, 0.006031360149383545, 0.00608358383178711, 0.006159359931945801, 0.006069248199462891, 0.006103040218353272, 0.0060702719688415525, 0.006100992202758789, 0.006073344230651856, 0.006160384178161621, 0.006088704109191895, 0.006181888103485108, 0.006090752124786377, 0.0060702719688415525, 0.006157311916351318, 0.006086656093597412, 0.006195199966430664, 0.006072319984436035, 0.006108160018920898, 0.006093823909759521, 0.006108160018920898, 0.00606822395324707, 0.006182911872863769, 0.006086656093597412, 0.006103040218353272, 0.0060702719688415525, 0.006120448112487793, 0.006087679862976075, 0.006034431934356689, 0.0060590081214904785, 
0.006089727878570556, 0.006091775894165039, 0.006139904022216797, 0.0060999679565429685, 0.006079552173614502, 0.006118336200714111, 0.006097919940948486, 0.0062679038047790524, 0.006082560062408447, 0.006106112003326416, 0.006091775894165039, 0.006096896171569824, 0.006095871925354004, 0.006109183788299561, 0.006064191818237304, 0.0061132159233093265, 0.00606822395324707, 0.0060928001403808595, 0.006082560062408447, 0.006119423866271972, 0.006104063987731933, 0.006090752124786377, 0.006094848155975342, 0.006069248199462891, 0.006106112003326416, 0.006053887844085694, 0.006127615928649902, 0.006113279819488526, 0.006166528224945069, 0.006074368000030517, 0.006091775894165039, 0.006072319984436035, 0.006102015972137451, 0.006180863857269287, 0.00617574405670166, 0.00608358383178711, 0.006116352081298828, 0.006052864074707031, 0.0059996161460876465, 0.00623308801651001, 0.006137856006622314, 0.006079487800598145, 0.006056960105895996, 0.006087679862976075, 0.006054912090301514, 0.006108160018920898, 0.006076416015625, 0.00611027193069458, 0.006081471920013428, 0.006096896171569824, 0.006132736206054688, 0.0061296639442443845, 0.006091775894165039, 0.006109248161315918, 0.006107071876525879, 0.00612556791305542, 0.006074368000030517, 0.006162432193756104, 0.006083648204803466, 0.006147007942199707, 0.006114304065704346, 0.006095871925354004, 0.0062740478515625, 0.006154240131378174, 0.0061265921592712404, 0.00607539176940918, 0.006118400096893311, 0.006076416015625, 0.006103040218353272, 0.006161407947540283, 0.006220799922943115, 0.006085631847381592, 0.006124544143676758, 0.00608460807800293, 0.006184959888458252, 0.006118400096893311, 0.006089727878570556, 0.006301695823669433, 0.006137856006622314, 0.0060405759811401364, 0.00606822395324707, 0.006144000053405762, 0.006102015972137451, 0.0061296639442443845, 0.00608358383178711, 0.006120448112487793, 0.006095871925354004, 0.006072319984436035, 0.005991424083709717, 0.006047743797302246, 0.006028351783752442, 0.006086592197418213, 0.006087679862976075, 0.006102015972137451, 0.006159359931945801, 0.006085631847381592, 0.00607539176940918, 0.006085631847381592, 0.006078464031219482, 0.0060928001403808595, 0.006073344230651856, 0.00606822395324707, 0.006104063987731933, 0.006080512046813965, 0.006095871925354004, 0.006086656093597412, 0.006199295997619629, 0.00607539176940918, 0.006098944187164307, 0.00606822395324707, 0.006095871925354004, 0.006071296215057373, 0.006095871925354004, 0.006079487800598145, 0.006107135772705078, 0.0061296639442443845, 0.006103040218353272, 0.006032383918762207, 0.006031360149383545, 0.006300672054290772, 0.006451200008392334, 0.006262784004211426, 0.006079487800598145, 0.0061337599754333495, 0.006104063987731933, 0.006124544143676758, 0.006080512046813965, 0.006168575763702393, 0.006089727878570556, 0.006132736206054688, 0.00608460807800293, 0.006095871925354004, 0.006100992202758789, 0.006071296215057373, 0.006089727878570556, 0.006086656093597412, 0.006129727840423584, 0.006082496166229248, 0.006105088233947754, 0.006080512046813965, 0.0062259202003479, 0.006067200183868408, 0.006088704109191895, 0.006056960105895996, 0.00610207986831665, 0.006056896209716797, 0.0060999679565429685, 0.0060661759376525876, 0.006097919940948486, 0.00606822395324707, 0.006087679862976075, 0.0060928001403808595, 0.006103040218353272, 0.006098944187164307, 0.006076416015625, 0.006080512046813965, 0.006078464031219482, 0.006106112003326416, 0.006074399948120117, 0.006135776042938233, 0.006395904064178467, 0.006087679862976075, 
0.006065152168273926, 0.006077439785003662, 0.006046720027923584, 0.006085631847381592, 0.006057983875274659, 0.006095903873443604, 0.006065120220184326, 0.006111231803894043, 0.00606822395324707, 0.006079487800598145, 0.006056960105895996, 0.006153215885162353, 0.0060661759376525876, 0.006106112003326416, 0.006091775894165039, 0.006096896171569824, 0.006088704109191895, 0.006098944187164307, 0.006109183788299561, 0.006089727878570556, 0.006114304065704346, 0.006093823909759521, 0.006116352081298828, 0.006111231803894043, 0.006100992202758789, 0.006075456142425537, 0.006122432231903076, 0.006093823909759521, 0.006110208034515381, 0.0060631041526794435, 0.006103040218353272, 0.00607539176940918, 0.006121471881866455, 0.006161407947540283, 0.0061265921592712404, 0.006113279819488526, 0.006093823909759521, 0.0061224961280822755, 0.006020095825195313, 0.006090752124786377, 0.006053887844085694, 0.006089727878570556, 0.006067200183868408, 0.006097951889038086, 0.006049759864807129, 0.00618393611907959, 0.006069248199462891, 0.006091775894165039, 0.0060631041526794435, 0.006139904022216797, 0.006086656093597412, 0.006112256050109863, 0.006048768043518066, 0.006091775894165039, 0.006057983875274659, 0.006090752124786377, 0.006081535816192627, 0.006078464031219482, 0.006094848155975342, 0.006071296215057373, 0.006113279819488526, 0.006052864074707031, 0.006096896171569824, 0.006033408164978027, 0.006039552211761475, 0.005994495868682862, 0.006033408164978027, 0.006017024040222168, 0.006107135772705078, 0.006074368000030517, 0.006022143840789795, 0.005983232021331787, 0.006024191856384278, 0.006048768043518066, 0.00623308801651001, 0.006347775936126709, 0.006344704151153564, 0.006259712219238281, 0.006228991985321045, 0.006061056137084961, 0.0060999679565429685, 0.006116352081298828, 0.006086656093597412, 0.006127615928649902, 0.006091775894165039, 0.006149119853973388, 0.00608358383178711, 0.006109183788299561, 0.006074368000030517, 0.006105088233947754, 0.00606822395324707, 0.006090784072875976, 0.0060845761299133305, 0.006118400096893311, 0.006057983875274659, 0.006105088233947754, 0.00608358383178711, 0.006091775894165039, 0.006082560062408447, 0.006067200183868408, 0.006082560062408447, 0.006085631847381592, 0.006081535816192627, 0.005990399837493897, 0.00601907205581665, 0.0059955201148986816, 0.006031360149383545, 0.0060405759811401364, 0.006155295848846436, 0.006073311805725097, 0.006124544143676758, 0.006086656093597412, 0.006148096084594727, 0.006071296215057373, 0.00611737585067749, 0.00607539176940918, 0.006113279819488526, 0.006081535816192627, 0.006106112003326416, 0.006034431934356689, 0.006104063987731933, 0.0060702719688415525, 0.006094912052154541, 0.006342591762542725, 0.006345727920532227, 0.00613478422164917, 0.006076416015625, 0.006114304065704346, 0.006058015823364258, 0.006104032039642334, 0.006074368000030517, 0.006090752124786377, 0.0060631041526794435, 0.006085631847381592, 0.006064127922058105, 0.006098944187164307, 0.006074368000030517, 0.006104063987731933, 0.006071296215057373, 0.006095871925354004, 0.006105088233947754, 0.006082560062408447, 0.006103040218353272, 0.006080512046813965, 0.006114304065704346, 0.006137856006622314, 0.0061265921592712404, 0.006069248199462891, 0.00608358383178711, 0.006081535816192627, 0.006119423866271972, 0.006072319984436035, 0.006110208034515381, 0.006094848155975342, 0.006100992202758789, 0.006098944187164307, 0.006107135772705078, 0.006009856224060059, 0.006015999794006348, 0.005943295955657959, 0.006072319984436035, 
0.006080512046813965, 0.006102015972137451, 0.005987328052520752, 0.006172671794891358, 0.0059985918998718265, 0.0060067839622497555, 0.006044672012329101, 0.006073344230651856, 0.006146048069000244, 0.006051839828491211, 0.0060999679565429685, 0.006056960105895996, 0.006094848155975342, 0.006041600227355957, 0.006086656093597412, 0.006048768043518066, 0.006110208034515381, 0.006056960105895996, 0.006135807991027832, 0.006056960105895996, 0.006094848155975342, 0.0060631041526794435, 0.006109183788299561, 0.006081535816192627, 0.0060928001403808595, 0.006054912090301514, 0.006071296215057373, 0.006078464031219482, 0.0061265921592712404, 0.006100992202758789, 0.006109183788299561, 0.006119423866271972, 0.006049824237823486, 0.006115295886993408, 0.006052864074707031, 0.006096896171569824, 0.0060631041526794435, 0.006123519897460937, 0.0059361281394958495, 0.006007808208465576, 0.0059955201148986816, 0.006055935859680176, 0.005984255790710449, 0.00603545618057251, 0.006064127922058105, 0.0060999679565429685, 0.006091775894165039, 0.006109183788299561, 0.006067200183868408, 0.006116352081298828, 0.0060590081214904785, 0.006174719810485839, 0.006071296215057373, 0.006120448112487793, 0.006116352081298828, 0.006033408164978027, 0.0059996161460876465, 0.006104063987731933, 0.0060702719688415525, 0.0060026879310607914, 0.006032383918762207, 0.006023168087005615, 0.0060282878875732426, 0.006112256050109863, 0.006120448112487793, 0.006069248199462891, 0.0060999679565429685, 0.006057983875274659, 0.006091775894165039, 0.0060999679565429685, 0.006139904022216797, 0.006047743797302246, 0.006082560062408447, 0.006104063987731933, 0.006081535816192627, 0.006176767826080322, 0.006639616012573242, 0.006371327877044678, 0.006255616188049316, 0.006100992202758789, 0.006013951778411865, 0.006089727878570556, 0.006060031890869141, 0.0060928001403808595, 0.006065152168273926, 0.006155263900756836, 0.006072319984436035, 0.006097919940948486, 0.006023168087005615, 0.006024191856384278, 0.006055935859680176, 0.006087679862976075, 0.0060661759376525876, 0.006086656093597412, 0.006061056137084961, 0.006102015972137451, 0.006072319984436035, 0.006109183788299561, 0.006118400096893311, 0.006087679862976075, 0.006104063987731933, 0.006056992053985596, 0.006090720176696777, 0.006080512046813965, 0.006113279819488526, 0.006097919940948486, 0.006110208034515381, 0.006081535816192627, 0.006123519897460937, 0.006080512046813965, 0.006105088233947754, 0.006142975807189942, 0.0061224961280822755, 0.006096896171569824, 0.006103040218353272, 0.006075424194335937, 0.006127583980560303, 0.006102015972137451, 0.006131711959838867, 0.006253568172454834, 0.006085631847381592, 0.006030335903167725, 0.006003712177276611, 0.006051839828491211, 0.0060702719688415525, 0.0061265921592712404, 0.006001664161682129, 0.006065152168273926, 0.006408192157745361, 0.006465536117553711, 0.006239232063293457, 0.0061972479820251464, 0.006564864158630371, 0.006683648109436035, 0.007256063938140869, 0.006971392154693603, 0.006464511871337891, 0.00633241605758667, 0.006388735771179199, 0.006311935901641846, 0.006400000095367431, 0.006281216144561768, 0.006360064029693604, 0.006340608119964599, 0.006266880035400391, 0.00632422399520874, 0.006090752124786377, 0.006124544143676758, 0.006079487800598145, 0.006127615928649902, 0.006037504196166992, 0.006110208034515381, 0.006000639915466309, 0.006251520156860352, 0.006157311916351318, 0.006303743839263916, 0.006316031932830811, 0.006253568172454834, 0.006069248199462891, 0.006050816059112549, 
0.006106112003326416, 0.006077439785003662, 0.006093823909759521, 0.006060031890869141, 0.006114304065704346, 0.006089727878570556, 0.006098944187164307, 0.006081535816192627, 0.006096896171569824, 0.006050816059112549, 0.006088704109191895, 0.00607539176940918, 0.006098944187164307, 0.006096896171569824, 0.006088704109191895, 0.006106112003326416, 0.006072319984436035, 0.006118400096893311, 0.006080512046813965, 0.006098944187164307, 0.006081535816192627, 0.006081535816192627, 0.006078464031219482, 0.0060999679565429685, 0.0061265921592712404, 0.0064204797744750975, 0.0064778242111206055, 0.006300672054290772, 0.006228991985321045, 0.006160384178161621, 0.006194176197052002, 0.006078464031219482, 0.006104063987731933, 0.006082560062408447, 0.006177792072296143, 0.006057983875274659, 0.006102015972137451, 0.00603545618057251, 0.006089727878570556, 0.005865471839904785, 0.006020095825195313, 0.006013984203338623, 0.0060989117622375485, 0.006065152168273926, 0.006104063987731933, 0.006061056137084961, 0.006097919940948486, 0.006096896171569824, 0.0060928001403808595, 0.006100992202758789, 0.006076416015625, 0.006116384029388427, 0.006070240020751953, 0.006086656093597412, 0.006089727878570556, 0.0060590081214904785, 0.0062269439697265625, 0.0063805441856384275, 0.006252543926239014, 0.006309887886047363, 0.006303743839263916, 0.00612556791305542, 0.006073344230651856, 0.006067200183868408, 0.006109183788299561, 0.00606822395324707, 0.006100992202758789, 0.006073344230651856, 0.006180863857269287, 0.0060928001403808595, 0.006069248199462891, 0.006090752124786377, 0.006102015972137451, 0.00606822395324707, 0.006086656093597412, 0.006062079906463623, 0.006113279819488526, 0.006066207885742188, 0.006040544033050537, 0.006012928009033203, 0.00610313606262207, 0.0060834879875183104, 0.006201344013214111, 0.006085631847381592, 0.006062079906463623, 0.006095871925354004, 0.0060702719688415525, 0.006089727878570556, 0.006055935859680176, 0.0060928001403808595, 0.006064127922058105, 0.006154240131378174, 0.006041600227355957, 0.005997568130493164, 0.005961728096008301, 0.006062079906463623, 0.006061056137084961, 0.0060928001403808595, 0.006042623996734619, 0.006094848155975342, 0.0060405759811401364, 0.006088704109191895, 0.006077439785003662, 0.006089727878570556, 0.006031360149383545, 0.006091775894165039, 0.006050816059112549, 0.006095871925354004, 0.006089727878570556, 0.006088704109191895, 0.006054912090301514, 0.006082560062408447, 0.00606822395324707, 0.0060702719688415525, 0.006090752124786377, 0.006069248199462891, 0.006080512046813965, 0.006082560062408447, 0.006095871925354004, 0.0059658241271972655, 0.006072319984436035, 0.006045760154724121, 0.006068160057067871, 0.006035520076751709, 0.006136767864227295, 0.00603545618057251, 0.00607539176940918, 0.00603545618057251, 0.006080512046813965, 0.006057983875274659, 0.0060364799499511715, 0.0059770879745483394, 0.00601804780960083, 0.0063569917678833006, 0.006495232105255127, 0.006238207817077636, 0.006076416015625, 0.006065152168273926, 0.006064127922058105, 0.006088704109191895, 0.006053887844085694, 0.006077439785003662, 0.006069248199462891, 0.006086656093597412, 0.006042623996734619, 0.006082560062408447, 0.006064127922058105, 0.006079487800598145, 0.006056960105895996, 0.006090752124786377, 0.00606822395324707, 0.006097919940948486, 0.006057983875274659, 0.006072319984436035, 0.006067200183868408, 0.006089727878570556, 0.006062079906463623, 0.006095871925354004, 0.006103040218353272, 0.006081535816192627, 0.005970975875854492, 
0.00608355188369751, 0.006048768043518066, 0.006093823909759521, 0.0060928001403808595, 0.006061056137084961, 0.006067200183868408, 0.006049791812896729, 0.006069248199462891, 0.00606822395324707, 0.006094848155975342, 0.0060364799499511715, 0.006072319984436035, 0.006043712139129639, 0.0060835199356079105, 0.006044672012329101, 0.006077439785003662, 0.006045695781707764, 0.006089727878570556, 0.006062079906463623, 0.0060999679565429685, 0.006057983875274659, 0.006080512046813965, 0.006054912090301514, 0.006091775894165039, 0.006045695781707764, 0.006554624080657959, 0.0061972479820251464, 0.006240255832672119, 0.00631606388092041, 0.006263775825500488, 0.006221824169158936, 0.006072319984436035, 0.006081535816192627, 0.006057983875274659, 0.006095871925354004, 0.00606822395324707, 0.006091775894165039, 0.006080512046813965, 0.0060908799171447755, 0.005992320060729981, 0.006107135772705078, 0.006088704109191895, 0.006102015972137451, 0.006118400096893311, 0.006094848155975342, 0.006116352081298828, 0.006065152168273926, 0.006116352081298828, 0.006073344230651856, 0.006119423866271972, 0.006113279819488526, 0.006102015972137451, 0.0060702719688415525, 0.006104063987731933, 0.006073344230651856, 0.006090752124786377, 0.006057983875274659, 0.006103040218353272, 0.006095871925354004, 0.006094848155975342, 0.006055935859680176, 0.006088704109191895, 0.006072319984436035, 0.006094848155975342, 0.006127615928649902, 0.006104063987731933, 0.0060928001403808595, 0.006100992202758789, 0.006128640174865723, 0.0060661759376525876, 0.006112256050109863, 0.006060031890869141, 0.006110208034515381, 0.006039552211761475, 0.006151167869567871, 0.006114304065704346, 0.0061562881469726565, 0.0060631041526794435, 0.00611027193069458, 0.006056896209716797, 0.0060026879310607914, 0.006032383918762207, 0.00608460807800293, 0.006062079906463623, 0.006091775894165039, 0.006123519897460937, 0.006220799922943115, 0.006184959888458252, 0.0059955201148986816, 0.006086656093597412, 0.006065152168273926, 0.00608358383178711, 0.006057983875274659, 0.006096896171569824, 0.006060031890869141, 0.0061859841346740725, 0.006284351825714111, 0.0061777281761169435, 0.0063836159706115725, 0.0063508481979370115, 0.006987775802612305, 0.007231488227844239, 0.007269375801086426, 0.0065382399559021, 0.006340608119964599, 0.006305791854858398, 0.006370304107666015, 0.006303743839263916, 0.006347775936126709, 0.006238207817077636, 0.00622489595413208, 0.006177792072296143, 0.006112256050109863, 0.0061123199462890625, 0.006175680160522461, 0.006156352043151856, 0.006168511867523194, 0.0063170561790466305, 0.006452223777770996, 0.006246399879455566, 0.00638976001739502, 0.0063201279640197755, 0.006343679904937744, 0.0062791681289672855, 0.0062791681289672855, 0.006260735988616943, 0.006247424125671387, 0.006107135772705078, 0.006074368000030517, 0.006149119853973388, 0.006009856224060059, 0.006039552211761475, 0.005985280036926269, 0.006096896171569824, 0.006095871925354004, 0.006106112003326416, 0.006015999794006348, 0.006051839828491211, 0.006360064029693604, 0.006498335838317871, 0.0062146239280700686, 0.006108160018920898, 0.006119423866271972, 0.006108160018920898, 0.0061224961280822755, 0.006130688190460205, 0.00609388780593872, 0.006064127922058105, 0.006109183788299561, 0.006065152168273926, 0.00608460807800293, 0.006034431934356689, 0.006093823909759521, 0.006042623996734619, 0.00601087999343872, 0.005991424083709717, 0.00602726411819458, 0.005985280036926269, 0.006053887844085694, 0.00597811222076416, 0.006094848155975342, 
0.006076416015625, 0.006150144100189209, 0.006053887844085694, 0.006098944187164307, 0.006074368000030517, 0.006100992202758789, 0.006071296215057373, 0.006146048069000244, 0.0060590081214904785, 0.00609388780593872, 0.0060507521629333495, 0.006090752124786377, 0.006082560062408447, 0.006090752124786377, 0.006097919940948486, 0.0060661759376525876, 0.006095871925354004, 0.006065216064453125, 0.006092735767364502, 0.006069248199462891, 0.006113279819488526, 0.006056960105895996, 0.006090752124786377, 0.006273024082183838, 0.006924287796020508, 0.006445055961608887, 0.006292479991912842, 0.006334464073181153, 0.00626585578918457, 0.0063498239517211915, 0.006412288188934326, 0.006252543926239014, 0.006138879776000977, 0.0060702719688415525, 0.006082560062408447, 0.00612556791305542, 0.0060999999046325686, 0.006059999942779541, 0.006111231803894043, 0.006171648025512695, 0.0064102401733398436, 0.006154240131378174, 0.00622489595413208, 0.006105088233947754, 0.006091775894165039, 0.006095871925354004, 0.006064127922058105, 0.006136832237243653, 0.0060661759376525876, 0.006090752124786377, 0.006195199966430664, 0.006111231803894043, 0.00606822395324707, 0.006086656093597412, 0.006301695823669433, 0.0062873601913452145, 0.006149119853973388, 0.006108191967010498, 0.006077407836914062, 0.006090752124786377, 0.0062863359451293946, 0.006312960147857666, 0.006191103935241699, 0.006012928009033203, 0.006038527965545654, 0.00601907205581665, 0.006034431934356689, 0.006372352123260498, 0.0061859841346740725, 0.006088704109191895, 0.006330368041992188, 0.006680575847625733, 0.006338560104370118, 0.006248447895050049, 0.0071198720932006835, 0.006429696083068847, 0.006387712001800537, 0.006161407947540283, 0.006039552211761475, 0.006062079906463623, 0.006097919940948486, 0.006057983875274659, 0.006106112003326416, 0.006053919792175293, 0.0060927681922912594, 0.006001664161682129, 0.006098944187164307, 0.006069248199462891, 0.006111231803894043, 0.006045695781707764, 0.0059770879745483394, 0.006020095825195313, 0.005974016189575195, 0.006014976024627685, 0.0060282878875732426, 0.0060661759376525876, 0.006073344230651856, 0.006094848155975342, 0.006049791812896729, 0.006077439785003662, 0.006045695781707764, 0.006155263900756836, 0.006046720027923584, 0.0063836159706115725, 0.006171648025512695, 0.006098944187164307, 0.006078559875488282, 0.006087584018707275, 0.006082560062408447, 0.006078464031219482, 0.006080512046813965, 0.006111231803894043, 0.006045695781707764, 0.006090752124786377, 0.0060405759811401364, 0.006105088233947754, 0.006091775894165039, 0.00602623987197876, 0.006155263900756836, 0.0060590081214904785, 0.006109183788299561, 0.006079487800598145, 0.006078464031219482, 0.00606822395324707, 0.006106112003326416, 0.006079487800598145, 0.006116352081298828, 0.006052864074707031, 0.006115327835083008, 0.006079487800598145, 0.006253568172454834, 0.006269951820373535, 0.00632422399520874, 0.006281248092651367, 0.006392799854278565, 0.006034431934356689, 0.0060026879310607914, 0.006049791812896729, 0.006081535816192627, 0.006115327835083008, 0.006041696071624756, 0.006038432121276856, 0.00637337589263916, 0.006509568214416504, 0.006289408206939697, 0.006112256050109863, 0.006132736206054688, 0.006037504196166992, 0.006024191856384278, 0.0060026879310607914, 0.006113279819488526, 0.006344704151153564, 0.006315008163452148, 0.006281216144561768, 0.006159359931945801, 0.00612556791305542, 0.006103040218353272, 0.006097919940948486, 0.00602623987197876, 0.006056960105895996, 0.00606006383895874, 
0.006094848155975342, 0.006061024188995361, 0.006089727878570556, 0.006074368000030517, 0.00608460807800293, 0.0060661759376525876, 0.006259712219238281, 0.0062269439697265625, 0.006090752124786377, 0.006072319984436035, 0.006098944187164307, 0.006074368000030517, 0.006095935821533203, 0.006065087795257568, 0.006103040218353272, 0.006072319984436035, 0.006065152168273926, 0.006088704109191895, 0.006065152168273926, 0.006087679862976075, 0.00607539176940918, 0.0062679038047790524, 0.00628326416015625, 0.006073344230651856, 0.006004735946655273, 0.00602623987197876, 0.005992447853088379, 0.006102015972137451, 0.006052864074707031, 0.006142975807189942, 0.006073344230651856, 0.0061337599754333495, 0.00607539176940918, 0.0060999679565429685, 0.006082560062408447, 0.0061296639442443845, 0.006078464031219482, 0.006138879776000977, 0.00638259220123291, 0.006392831802368164, 0.006294528007507324, 0.006077439785003662, 0.006153215885162353, 0.00607539176940918, 0.006103040218353272, 0.00606822395324707, 0.006112256050109863, 0.006062079906463623, 0.006093823909759521, 0.006096896171569824, 0.006128640174865723, 0.006108160018920898, 0.006087679862976075, 0.006109183788299561, 0.0060702719688415525, 0.006095871925354004, 0.006052864074707031, 0.006096896171569824, 0.00606822395324707, 0.006078464031219482, 0.006055935859680176, 0.006158336162567139, 0.006056960105895996, 0.006088704109191895, 0.0060661759376525876, 0.006096896171569824, 0.006076416015625, 0.006097919940948486, 0.006057983875274659, 0.006090816020965576, 0.006074304103851318, 0.006113279819488526, 0.006177792072296143, 0.00617574405670166, 0.006465536117553711, 0.006184959888458252, 0.006146048069000244, 0.005994495868682862, 0.0061337919235229495, 0.006108128070831299, 0.006207488059997559, 0.006071296215057373, 0.0061337599754333495, 0.006062079906463623, 0.00612556791305542, 0.006062079906463623, 0.006329343795776367, 0.006323200225830078, 0.00628326416015625, 0.006307839870452881, 0.006275072097778321, 0.006147071838378906, 0.006085631847381592, 0.006108160018920898, 0.006096896171569824, 0.006094880104064941, 0.006091775894165039, 0.006087647914886475, 0.006113279819488526, 0.006088768005371094, 0.006523839950561524, 0.006293504238128662, 0.006351871967315674, 0.006162432193756104, 0.006127615928649902, 0.006087679862976075, 0.006161407947540283, 0.006082560062408447, 0.006109183788299561, 0.0060928001403808595, 0.006077439785003662, 0.006108160018920898, 0.006097919940948486, 0.006116352081298828, 0.00608358383178711, 0.006111231803894043, 0.006065152168273926, 0.006119423866271972, 0.005974016189575195, 0.006106112003326416, 0.00608460807800293, 0.006091775894165039, 0.006053887844085694, 0.006114304065704346, 0.006069248199462891, 0.006088704109191895, 0.006069248199462891, 0.006106112003326416, 0.006072319984436035, 0.006107135772705078, 0.006054912090301514, 0.006088704109191895, 0.006093823909759521, 0.006079487800598145, 0.006109183788299561, 0.006121471881866455, 0.006107135772705078, 0.006089727878570556, 0.006118400096893311, 0.006042623996734619, 0.006088704109191895, 0.006109183788299561, 0.006155263900756836, 0.006065152168273926, 0.006098944187164307, 0.006300672054290772, 0.00638976001739502, 0.006394879817962646, 0.006155263900756836, 0.006180863857269287, 0.006397952079772949, 0.006131711959838867, 0.006097919940948486, 0.006123519897460937, 0.006073408126831055, 0.00612550401687622, 0.006081535816192627, 0.006104063987731933, 0.006008831977844238, 0.006090784072875976, 0.006099936008453369, 
0.006130688190460205, 0.00608358383178711, 0.006090752124786377, 0.006094848155975342, 0.006088704109191895, 0.006150144100189209, 0.006298624038696289, 0.006151167869567871, 0.006067200183868408, 0.006097919940948486, 0.00607539176940918, 0.006102015972137451, 0.006142975807189942, 0.006094848155975342, 0.006102015972137451, 0.00617574405670166, 0.006079487800598145, 0.006090752124786377, 0.006085631847381592, 0.006074368000030517, 0.006105088233947754, 0.006061056137084961, 0.006359039783477783, 0.0062679038047790524, 0.006319104194641113, 0.006278143882751465, 0.006313983917236328, 0.006326272010803223, 0.006333439826965332, 0.006311935901641846, 0.00626585578918457, 0.006280191898345947, 0.006275072097778321, 0.006278143882751465, 0.006301695823669433, 0.006196224212646485, 0.006240255832672119, 0.006166528224945069, 0.006071296215057373, 0.005991424083709717, 0.006005760192871094, 0.005993472099304199, 0.0060928001403808595, 0.006045695781707764, 0.006144000053405762, 0.006303743839263916, 0.006302720069885254, 0.006269951820373535, 0.006289408206939697, 0.0062975997924804685, 0.006189055919647217, 0.006111231803894043, 0.0060702719688415525, 0.0061296639442443845, 0.0060702719688415525, 0.006105088233947754, 0.006067200183868408, 0.0063201279640197755, 0.006094848155975342, 0.006089727878570556, 0.006065152168273926, 0.00613478422164917, 0.006186016082763672, 0.006800352096557617, 0.006496255874633789, 0.0062740478515625, 0.006301695823669433, 0.006278143882751465, 0.007095295906066895, 0.006378496170043945, 0.006301695823669433, 0.006362112045288086, 0.006372352123260498, 0.00628223991394043, 0.0061634559631347655, 0.006123519897460937, 0.006081696033477783, 0.005996416091918946, 0.0061552319526672365, 0.006056960105895996, 0.006107200145721436, 0.006058944225311279, 0.006113279819488526, 0.006065152168273926, 0.006112256050109863, 0.006098944187164307, 0.006093823909759521, 0.006116352081298828, 0.0060661759376525876, 0.006114304065704346, 0.006089759826660156, 0.006124512195587158, 0.00608460807800293, 0.006100992202758789, 0.0060999679565429685, 0.006110208034515381, 0.006076416015625, 0.006131711959838867, 0.006100992202758789, 0.006116352081298828, 0.006094848155975342, 0.006105088233947754, 0.00608460807800293, 0.006130688190460205, 0.006078464031219482, 0.006096896171569824, 0.006080512046813965, 0.006106112003326416, 0.006098944187164307, 0.006081535816192627, 0.006174719810485839, 0.006106112003326416, 0.006141952037811279, 0.006104063987731933, 0.00613478422164917, 0.006102015972137451, 0.006107135772705078, 0.006037504196166992, 0.006427648067474365, 0.006368256092071533, 0.006386688232421875, 0.006446080207824707, 0.006343711853027343, 0.0065771198272705075, 0.007109632015228271, 0.006481919765472412, 0.006403071880340576, 0.006292511940002441, 0.006367199897766114, 0.006340608119964599, 0.006280191898345947, 0.006348800182342529, 0.006269951820373535, 0.0064174079895019534, 0.0064204797744750975, 0.006400000095367431, 0.0064143362045288085, 0.006221824169158936, 0.006098944187164307, 0.006090752124786377, 0.006104063987731933, 0.006131711959838867, 0.006106112003326416, 0.0060631041526794435, 0.006160384178161621, 0.00612556791305542, 0.006069248199462891, 0.006110208034515381, 0.006150144100189209, 0.006487040042877197, 0.00638976001739502, 0.006231040000915527, 0.006079487800598145, 0.0061562881469726565, 0.006064127922058105, 0.006111231803894043, 0.006076416015625, 0.0060067839622497555, 0.006094848155975342, 0.006055935859680176, 0.006128640174865723, 
0.006065152168273926, 0.006120448112487793, 0.006094848155975342, 0.006114304065704346, 0.006072319984436035, 0.006111231803894043, 0.00606822395324707]",tokens/s,163.15856684947912,,,,,,,, -4bit-awq-gemv-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,meta-llama/Llama-2-13b-hf,meta-llama/Llama-2-13b-hf,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run - report = scenario.run(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 105, in run - _ = backend.generate(self.inputs, self.config.generate_kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context - return func(*args, **kwargs) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 400, in generate - return self.pretrained_model.generate(**inputs, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context - return func(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 1914, in generate - result = self._sample( - File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2651, in _sample - outputs = self( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1174, in forward - outputs = self.model( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 978, in forward - layer_outputs = decoder_layer( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File 
""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 718, in forward - hidden_states, self_attn_weights, present_key_value = self.self_attn( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 326, in forward - query_states = self.q_proj(hidden_states) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context - return func(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/gemv.py"", line 162, in forward - assert AWQ_INSTALLED, ( -AssertionError: AWQ kernels could not be loaded. Please install them from https://github.com/casper-hansen/AutoAWQ_kernels - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemv-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,x,x,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 304, in hf_raise_for_status - response.raise_for_status() - File ""/usr/local/lib/python3.10/dist-packages/requests/models.py"", line 1024, in raise_for_status - raise HTTPError(http_error_msg, response=self) -requests.exceptions.HTTPError: 404 Client Error: Not Found for url: https://huggingface.co/x/resolve/main/config.json - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File 
""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1221, in hf_hub_download - return _hf_hub_download_to_cache_dir( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1325, in _hf_hub_download_to_cache_dir - _raise_on_head_call_error(head_call_error, force_download, local_files_only) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1823, in _raise_on_head_call_error - raise head_call_error - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1722, in _get_metadata_or_catch_error - metadata = get_hf_file_metadata(url=url, proxies=proxies, timeout=etag_timeout, headers=headers) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1645, in get_hf_file_metadata - r = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 372, in _request_wrapper - response = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 396, in _request_wrapper - hf_raise_for_status(response) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status - raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-669492ab-36dbcf4433448b790be9062b;dcab6e1e-89c7-4ff5-b63a-e5b0dddd39a1) - -Repository Not Found for url: https://huggingface.co/x/resolve/main/config.json. -Please make sure you specified the correct `repo_id` and `repo_type`. -If you are trying to access a private or gated repo, make sure you are authenticated. 
- -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 425, in cached_file - raise EnvironmentError( -OSError: x is not a local folder and is not a valid model identifier listed on 'https://huggingface.co/models' -If this is a private repository, make sure to pass a token having permission to this repo either by logging in with `huggingface-cli login` or by passing `token=` - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemv-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,/,/,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - 
self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 373, in cached_file - raise EnvironmentError( -OSError: / does not appear to have a file named config.json. Checkout 'https://huggingface.co///tree/None' for available files. - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemv-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,togethercomputer/RedPajama-INCITE-Base-7B-v0.1,togethercomputer/RedPajama-INCITE-Base-7B-v0.1,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run - report = scenario.run(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 105, in run - _ = backend.generate(self.inputs, self.config.generate_kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context - return func(*args, **kwargs) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 400, in generate - return self.pretrained_model.generate(**inputs, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context - return func(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 1914, in generate - result = self._sample( - File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2651, in _sample - outputs = self( - 
File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1097, in forward - outputs = self.gpt_neox( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 988, in forward - outputs = layer( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 753, in forward - attention_layer_outputs = self.attention( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 170, in forward - query, key, value, present = self._attn_projections_and_rope( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 224, in _attn_projections_and_rope - qkv = self.query_key_value(hidden_states) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context - return func(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/gemv.py"", line 162, in forward - assert AWQ_INSTALLED, ( -AssertionError: AWQ kernels could not be loaded. 
Please install them from https://github.com/casper-hansen/AutoAWQ_kernels - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemv-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,facebook/xglm-564M,facebook/xglm-564M,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run - report = scenario.run(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 105, in run - _ = backend.generate(self.inputs, self.config.generate_kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context - return func(*args, **kwargs) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 400, in generate - return self.pretrained_model.generate(**inputs, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context - return func(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 1914, in generate - result = self._sample( - File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2651, in _sample - outputs = self( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/xglm/modeling_xglm.py"", line 760, in forward - outputs = self.model( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/xglm/modeling_xglm.py"", line 646, in forward - layer_outputs = decoder_layer( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", 
line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/xglm/modeling_xglm.py"", line 413, in forward - hidden_states, self_attn_weights, present_key_value = self.self_attn( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/xglm/modeling_xglm.py"", line 243, in forward - query_states = self.q_proj(hidden_states) * self.scaling - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context - return func(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/gemv.py"", line 162, in forward - assert AWQ_INSTALLED, ( -AssertionError: AWQ kernels could not be loaded. Please install them from https://github.com/casper-hansen/AutoAWQ_kernels - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemv-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,EleutherAI/gpt-neo-125m,EleutherAI/gpt-neo-125m,cuda,0,42,,,True,,,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,gpt_neo,MB,897.687552,763.887616,0.0,178.25792,176.546816,s,1,7.25760205078125,7.25760205078125,0.0,7.25760205078125,7.25760205078125,7.25760205078125,7.25760205078125,[7.25760205078125],,kWh,5.657514008326341e-06,3.085058189871051e-06,8.307784424038678e-06,1.705035662223607e-05,,MB,1446.834176,900.202496,0.0,253.755392,221.108736,s,17,0.19176710414886472,0.011280417891109689,0.0001563468257334057,0.011185471534729003,0.011520153427124024,0.011575692558288574,0.011587714653015137,"[0.01129964828491211, 0.011178560256958008, 0.011193792343139648, 0.011113056182861328, 0.011571935653686524, 0.011485631942749024, 0.011452927589416503, 0.011173215866088868, 0.011176735877990722, 0.011215968132019043, 0.011454527854919433, 0.011185471534729003, 0.011179776191711425, 0.011165632247924804, 0.011178272247314453, 0.011151231765747071, 0.011590720176696778]",tokens/s,22694.194707250914,kWh,1.3260372443414546e-07,7.265836982327291e-08,3.472044179304496e-07,5.52466512187868e-07,tokens/kWh,463376502.1995512,MB,1492.238336,914.88256,0.0,268.435456,221.111296,s,17,9.948509460449216,0.585206438849954,0.0074164256778039605,0.5836074829101563,0.5929084228515625,0.5978193359375,0.6071757812499999,"[0.5826773071289062, 0.584201171875, 0.5776990356445313, 0.5836074829101563, 0.591583740234375, 0.586267333984375, 0.5810518188476562, 0.583821044921875, 0.5824584350585937, 
0.5823733520507812, 0.585213623046875, 0.5795903930664063, 0.5854503173828125, 0.5788968505859375, 0.5792072143554687, 0.5948954467773437, 0.609514892578125]",tokens/s,107.65431789131954,kWh,6.846787779038098e-06,3.751685254953086e-06,1.1130076005716207e-05,2.172854903970739e-05,tokens/kWh,2899411.2715428877,,s,1071,9.94141191768646,0.009282364068801553,0.0002327468440190461,0.00918835163116455,0.009577471733093262,0.009657343864440917,0.010174361610412595,"[0.009128959655761718, 0.009302016258239745, 0.009184255599975585, 0.009207807540893554, 0.009168959617614747, 0.00920569610595703, 0.009195520401000976, 0.009172991752624511, 0.009215999603271484, 0.009168895721435547, 0.00918015956878662, 0.009166848182678223, 0.009784319877624511, 0.009641983985900878, 0.010131456375122071, 0.009986047744750976, 0.009644031524658203, 0.009652223587036133, 0.00920576000213623, 0.00918835163116455, 0.009077759742736816, 0.009187328338623046, 0.00920473575592041, 0.009178112030029297, 0.009164799690246582, 0.00918015956878662, 0.009208831787109375, 0.009187392234802246, 0.009206720352172851, 0.009191424369812011, 0.009161727905273438, 0.009158656120300293, 0.009211903572082519, 0.009234432220458985, 0.009141247749328613, 0.009193471908569336, 0.009190400123596192, 0.009163776397705077, 0.009198592185974122, 0.009194496154785157, 0.009133055686950683, 0.009146368026733399, 0.009156607627868652, 0.009155584335327148, 0.00913920021057129, 0.009126912117004395, 0.009192447662353515, 0.009157631874084473, 0.009046015739440917, 0.009189375877380371, 0.009288703918457031, 0.009161760330200195, 0.009257951736450195, 0.009260031700134277, 0.009209856033325196, 0.009171968460083007, 0.009197567939758301, 0.009142271995544434, 0.009159680366516113, 0.009210880279541016, 0.009178112030029297, 0.00921292781829834, 0.009176063537597656, 0.009073696136474609, 0.009223199844360352, 0.009217984199523925, 0.009736191749572755, 0.00951296043395996, 0.009485312461853027, 0.009433088302612304, 0.009162752151489258, 0.009179136276245118, 0.009254912376403808, 0.00918015956878662, 0.009055232048034668, 0.009236543655395509, 0.009193408012390136, 0.009215999603271484, 0.009210880279541016, 0.009220095634460449, 0.009621503829956055, 0.009338879585266113, 0.00990822410583496, 0.00952019214630127, 0.009529279708862306, 0.009466912269592285, 0.00946787166595459, 0.009515007972717286, 0.009291775703430176, 0.009201663970947266, 0.00920576000213623, 0.009210880279541016, 0.009179136276245118, 0.009254912376403808, 0.009178112030029297, 0.009206784248352052, 0.009256959915161133, 0.009215999603271484, 0.009203712463378906, 0.009256959915161133, 0.009242624282836913, 0.00913100814819336, 0.009174015998840332, 0.009201663970947266, 0.009195520401000976, 0.009181183815002441, 0.00920576000213623, 0.009152511596679687, 0.00921292781829834, 0.009186304092407227, 0.009177087783813476, 0.009209856033325196, 0.00918943977355957, 0.009206720352172851, 0.009171968460083007, 0.009202688217163087, 0.009242624282836913, 0.00923033618927002, 0.00928767967224121, 0.00922316837310791, 0.009178112030029297, 0.009161727905273438, 0.009194496154785157, 0.00923136043548584, 0.009209856033325196, 0.009170944213867188, 0.009026559829711914, 0.009134112358093261, 0.009131999969482421, 0.009192447662353515, 0.009184255599975585, 0.009137151718139648, 0.009151488304138184, 0.009292799949645996, 0.00930611228942871, 0.009299967765808105, 0.009172991752624511, 0.009208831787109375, 0.009175040245056153, 0.009253888130187989, 0.009209856033325196, 
0.009195520401000976, 0.00919961643218994, 0.009207807540893554, 0.0092293119430542, 0.009182208061218262, 0.009200639724731445, 0.009169919967651367, 0.009142271995544434, 0.009160703659057617, 0.009186304092407227, 0.009152511596679687, 0.009126912117004395, 0.009157631874084473, 0.009073663711547851, 0.009133055686950683, 0.009146368026733399, 0.009156607627868652, 0.009170944213867188, 0.00920576000213623, 0.009174015998840332, 0.009115648269653321, 0.009058303833007812, 0.009095168113708496, 0.009175040245056153, 0.009119744300842286, 0.009101311683654785, 0.009195520401000976, 0.009145343780517578, 0.009169919967651367, 0.009145343780517578, 0.009152511596679687, 0.009143296241760255, 0.009109503746032714, 0.009135104179382325, 0.009140224456787109, 0.00912384033203125, 0.009120767593383788, 0.009233407974243164, 0.00912384033203125, 0.009124863624572753, 0.009186304092407227, 0.009040896415710448, 0.00913100814819336, 0.009200639724731445, 0.00921395206451416, 0.009172991752624511, 0.009162752151489258, 0.009208831787109375, 0.009056256294250489, 0.009209856033325196, 0.009175040245056153, 0.009208895683288575, 0.009141183853149415, 0.009168895721435547, 0.009159680366516113, 0.009206784248352052, 0.009144319534301757, 0.009161791801452636, 0.009184191703796387, 0.009170944213867188, 0.009134079933166504, 0.009194496154785157, 0.009232447624206544, 0.009160639762878418, 0.009137151718139648, 0.009157631874084473, 0.009175040245056153, 0.00904918384552002, 0.009327520370483398, 0.009148415565490722, 0.009168895721435547, 0.00913920021057129, 0.00919654369354248, 0.009161727905273438, 0.009135104179382325, 0.00914739227294922, 0.009200639724731445, 0.009140224456787109, 0.009120767593383788, 0.009175040245056153, 0.009128959655761718, 0.009156607627868652, 0.00921395206451416, 0.009137151718139648, 0.009207839965820313, 0.009221088409423829, 0.009127936363220214, 0.009120767593383788, 0.009126912117004395, 0.009174015998840332, 0.00919654369354248, 0.009121824264526367, 0.009161696434020997, 0.009152511596679687, 0.009128959655761718, 0.009051136016845703, 0.009158656120300293, 0.009135104179382325, 0.009125887870788574, 0.009156607627868652, 0.009116671562194823, 0.009124863624572753, 0.009182208061218262, 0.01064243221282959, 0.010942463874816894, 0.010226688385009765, 0.009702400207519531, 0.009598976135253906, 0.009513983726501465, 0.009433088302612304, 0.009412639617919922, 0.009369600296020507, 0.009439231872558594, 0.009450495719909668, 0.00959488010406494, 0.009455615997314454, 0.009442303657531738, 0.009530367851257325, 0.009443327903747559, 0.009687040328979492, 0.009464832305908203, 0.009613311767578125, 0.010552319526672363, 0.00961740779876709, 0.00961638355255127, 0.009615360260009765, 0.009540639877319335, 0.009609184265136718, 0.009425919532775879, 0.00923136043548584, 0.009145343780517578, 0.009154560089111329, 0.009157631874084473, 0.009186304092407227, 0.009172991752624511, 0.00930303955078125, 0.009442303657531738, 0.009423871994018555, 0.009465855598449707, 0.009457663536071777, 0.009438207626342773, 0.009547776222229003, 0.009484288215637206, 0.009278464317321777, 0.009158656120300293, 0.009136128425598144, 0.009134207725524902, 0.009018239974975585, 0.009132032394409179, 0.009135104179382325, 0.009120767593383788, 0.009130016326904297, 0.009131999969482421, 0.009563136100769042, 0.009461759567260742, 0.00940236759185791, 0.00939724826812744, 0.010095616340637208, 0.009563136100769042, 0.009549823760986328, 0.009455615997314454, 0.009485312461853027, 
0.009613311767578125, 0.009545727729797364, 0.009185312271118164, 0.009130975723266602, 0.009140288352966309, 0.009180095672607422, 0.009207807540893554, 0.00921292781829834, 0.009138175964355469, 0.009174015998840332, 0.009150464057922364, 0.009079808235168458, 0.009119744300842286, 0.009233407974243164, 0.009195520401000976, 0.00931430435180664, 0.00922214412689209, 0.009175040245056153, 0.009499648094177245, 0.009473024368286133, 0.009439264297485352, 0.009456607818603515, 0.009483263969421387, 0.009179136276245118, 0.009167872428894042, 0.009706496238708496, 0.009564191818237304, 0.009499615669250488, 0.009396224021911622, 0.009160703659057617, 0.009157631874084473, 0.009375743865966797, 0.009506815910339356, 0.009195520401000976, 0.009218048095703125, 0.009154560089111329, 0.009166848182678223, 0.009204895973205566, 0.009200480461120606, 0.009133055686950683, 0.009162752151489258, 0.009167872428894042, 0.009157631874084473, 0.009219072341918945, 0.009190400123596192, 0.009164799690246582, 0.00932147216796875, 0.009637887954711915, 0.009565183639526367, 0.009477120399475097, 0.009480192184448242, 0.009291775703430176, 0.009197567939758301, 0.00919654369354248, 0.009174015998840332, 0.009111552238464356, 0.009252863883972168, 0.009153535842895508, 0.009140224456787109, 0.009169919967651367, 0.009202688217163087, 0.009154560089111329, 0.009158656120300293, 0.009202688217163087, 0.009102335929870605, 0.0091976318359375, 0.009549759864807128, 0.009432064056396485, 0.009446399688720703, 0.009615360260009765, 0.009463808059692384, 0.009460736274719238, 0.009352191925048828, 0.009364480018615723, 0.009281536102294922, 0.009381888389587402, 0.00952627182006836, 0.009486335754394531, 0.00951193618774414, 0.009486335754394531, 0.00952627182006836, 0.009456640243530273, 0.009407487869262696, 0.009479167938232422, 0.009549823760986328, 0.009195520401000976, 0.00921395206451416, 0.009134079933166504, 0.009170944213867188, 0.009210880279541016, 0.009164799690246582, 0.009151488304138184, 0.009170944213867188, 0.009171968460083007, 0.009183232307434081, 0.009138175964355469, 0.009182208061218262, 0.00912384033203125, 0.00910848045349121, 0.009197567939758301, 0.009185279846191406, 0.009126912117004395, 0.009134079933166504, 0.009172991752624511, 0.00913920021057129, 0.00919961643218994, 0.009174015998840332, 0.009112575531005859, 0.009128959655761718, 0.009154560089111329, 0.009198592185974122, 0.009178112030029297, 0.009140224456787109, 0.009218048095703125, 0.009151488304138184, 0.009174015998840332, 0.009135104179382325, 0.009166848182678223, 0.009060352325439454, 0.009156607627868652, 0.009163776397705077, 0.009111552238464356, 0.009210880279541016, 0.0092293119430542, 0.009175040245056153, 0.009165823936462402, 0.009094143867492676, 0.009161727905273438, 0.009143296241760255, 0.009157631874084473, 0.009179136276245118, 0.009138175964355469, 0.009138175964355469, 0.009424896240234374, 0.009266271591186523, 0.009126815795898437, 0.009138175964355469, 0.009185279846191406, 0.009128959655761718, 0.009291775703430176, 0.009162752151489258, 0.009357312202453612, 0.009129983901977539, 0.009127936363220214, 0.009019392013549805, 0.009120767593383788, 0.009136128425598144, 0.009181183815002441, 0.00918835163116455, 0.00913100814819336, 0.009207807540893554, 0.009174015998840332, 0.009283583641052246, 0.009183232307434081, 0.009250816345214843, 0.009176063537597656, 0.009129983901977539, 0.009126912117004395, 0.009167872428894042, 0.009162752151489258, 0.00919654369354248, 0.009232383728027344, 
0.009126912117004395, 0.00911359977722168, 0.009136128425598144, 0.009162752151489258, 0.009149439811706543, 0.01002291202545166, 0.011361280441284179, 0.01036083221435547, 0.009587712287902832, 0.009434111595153808, 0.009540608406066894, 0.009333760261535644, 0.00933683204650879, 0.009414655685424805, 0.009157631874084473, 0.009146368026733399, 0.009171968460083007, 0.009163776397705077, 0.009141247749328613, 0.009134079933166504, 0.009169919967651367, 0.009116671562194823, 0.00921497631072998, 0.009151488304138184, 0.009197567939758301, 0.009277440071105958, 0.009183232307434081, 0.009142271995544434, 0.00920473575592041, 0.009273344039916993, 0.009174015998840332, 0.00923852825164795, 0.009160703659057617, 0.0091146240234375, 0.00910540771484375, 0.009151488304138184, 0.009022463798522949, 0.00912281608581543, 0.009127936363220214, 0.009019392013549805, 0.00921395206451416, 0.009159680366516113, 0.009198592185974122, 0.009156607627868652, 0.009136128425598144, 0.009138175964355469, 0.009155584335327148, 0.009134079933166504, 0.009249792098999024, 0.009197567939758301, 0.009187328338623046, 0.009151488304138184, 0.009157631874084473, 0.009182239532470703, 0.009195487976074218, 0.009154560089111329, 0.009194496154785157, 0.009049087524414063, 0.00898252773284912, 0.009055232048034668, 0.009043968200683594, 0.009120767593383788, 0.009060352325439454, 0.009093119621276855, 0.009165823936462402, 0.009127936363220214, 0.00923136043548584, 0.009141247749328613, 0.009141247749328613, 0.00910540771484375, 0.009127936363220214, 0.009176063537597656, 0.009143296241760255, 0.009126912117004395, 0.009179136276245118, 0.009524224281311035, 0.01021235179901123, 0.00990822410583496, 0.010553343772888184, 0.009582592010498046, 0.009579520225524902, 0.009492511749267579, 0.009521120071411133, 0.009441280364990234, 0.009499648094177245, 0.00923136043548584, 0.009125887870788574, 0.009157631874084473, 0.009135104179382325, 0.009037919998168945, 0.00911248016357422, 0.00911359977722168, 0.00908083152770996, 0.00910028839111328, 0.00913920021057129, 0.009132032394409179, 0.009166848182678223, 0.009326592445373535, 0.009298944473266601, 0.009226240158081055, 0.009154560089111329, 0.009142271995544434, 0.00901734447479248, 0.009217023849487305, 0.009208831787109375, 0.009157631874084473, 0.00914739227294922, 0.009160703659057617, 0.009178112030029297, 0.009154560089111329, 0.009119744300842286, 0.009393152236938476, 0.009593855857849122, 0.009158656120300293, 0.00920576000213623, 0.00909721565246582, 0.009135104179382325, 0.009155584335327148, 0.009242624282836913, 0.009215999603271484, 0.00912384033203125, 0.00919865608215332, 0.009125823974609376, 0.009156607627868652, 0.009181183815002441, 0.009156607627868652, 0.009211903572082519, 0.00918015956878662, 0.009254912376403808, 0.009138175964355469, 0.009148415565490722, 0.009219072341918945, 0.00909823989868164, 0.009134079933166504, 0.009233407974243164, 0.009293824195861817, 0.009414655685424805, 0.009572352409362793, 0.009477120399475097, 0.00941875171661377, 0.009446399688720703, 0.00942899227142334, 0.009347071647644043, 0.009448448181152343, 0.009485312461853027, 0.009234432220458985, 0.009291775703430176, 0.009251839637756347, 0.009126912117004395, 0.00923136043548584, 0.009145343780517578, 0.009137151718139648, 0.009135104179382325, 0.009144351959228516, 0.009119711875915527, 0.00914739227294922, 0.00919660758972168, 0.009169856071472167, 0.009110527992248535, 0.00912179183959961, 0.009155584335327148, 0.009184255599975585, 0.009543680191040039, 
0.009615360260009765, 0.00943616008758545, 0.00934502410888672, 0.009469951629638672, 0.009480192184448242, 0.009424896240234374, 0.009432064056396485, 0.00941158390045166, 0.009570303916931153, 0.009442303657531738, 0.009430015563964844, 0.009446399688720703, 0.009409536361694336, 0.009439231872558594, 0.00952627182006836, 0.009522175788879395, 0.009517056465148926, 0.009432064056396485, 0.00951296043395996, 0.009675775527954102, 0.009468928337097168, 0.00921292781829834, 0.009170944213867188, 0.009110527992248535, 0.009236479759216308, 0.009164799690246582, 0.009163776397705077, 0.009191424369812011, 0.009184255599975585, 0.009150464057922364, 0.009133055686950683, 0.009504768371582031, 0.009416831970214843, 0.00920358371734619, 0.009155584335327148, 0.009191424369812011, 0.009135104179382325, 0.009168895721435547, 0.009226271629333496, 0.009298912048339844, 0.009152511596679687, 0.009463808059692384, 0.00921395206451416, 0.009159680366516113, 0.00921292781829834, 0.009219072341918945, 0.009143296241760255, 0.009163776397705077, 0.009194496154785157, 0.009157631874084473, 0.009203712463378906, 0.009207807540893554, 0.009151488304138184, 0.00921497631072998, 0.009191424369812011, 0.009210880279541016, 0.009148415565490722, 0.00914739227294922, 0.00921292781829834, 0.009120767593383788, 0.00910643196105957, 0.009240575790405273, 0.009286656379699706, 0.009162752151489258, 0.009138175964355469, 0.009026559829711914, 0.009232383728027344, 0.009175040245056153, 0.009166848182678223, 0.009160703659057617, 0.009185279846191406, 0.009110527992248535, 0.009203712463378906, 0.009158656120300293, 0.00914739227294922, 0.009155584335327148, 0.00921497631072998, 0.009371647834777832, 0.009166848182678223, 0.00919654369354248, 0.009318400382995605, 0.009184255599975585, 0.00920473575592041, 0.009158656120300293, 0.009166848182678223, 0.009182208061218262, 0.009063424110412598, 0.009125887870788574, 0.009135104179382325, 0.009201663970947266, 0.009177087783813476, 0.00913920021057129, 0.00920473575592041, 0.009161727905273438, 0.009175040245056153, 0.009118783950805663, 0.009878463745117187, 0.00949350357055664, 0.009323519706726074, 0.00918015956878662, 0.009141247749328613, 0.009156607627868652, 0.009201663970947266, 0.009159680366516113, 0.00914739227294922, 0.00919654369354248, 0.009144319534301757, 0.009129983901977539, 0.009150464057922364, 0.009185279846191406, 0.009159680366516113, 0.009159680366516113, 0.009201663970947266, 0.009022463798522949, 0.009171968460083007, 0.009257984161376954, 0.009269248008728028, 0.009247743606567382, 0.009146368026733399, 0.009190400123596192, 0.009157631874084473, 0.009130016326904297, 0.009206751823425292, 0.009150464057922364, 0.009161727905273438, 0.00931430435180664, 0.00921395206451416, 0.009140224456787109, 0.008994815826416015, 0.00920473575592041, 0.009111552238464356, 0.00954265594482422, 0.009484288215637206, 0.009496576309204101, 0.009464896202087403, 0.009481151580810547, 0.009480192184448242, 0.009482239723205567, 0.009452544212341308, 0.00912281608581543, 0.009127936363220214, 0.009163776397705077, 0.00913920021057129, 0.009129983901977539, 0.009185279846191406, 0.009312255859375, 0.009155584335327148, 0.009352191925048828, 0.009210880279541016, 0.009151488304138184, 0.009157631874084473, 0.009209856033325196, 0.00913920021057129, 0.009104384422302245, 0.00914739227294922, 0.009206784248352052, 0.009142271995544434, 0.009175040245056153, 0.009296895980834961, 0.009165823936462402, 0.009224191665649414, 0.009506815910339356, 0.009744383811950684, 
0.009487360000610352, 0.009561087608337402, 0.00951296043395996, 0.00951296043395996, 0.009447423934936524, 0.009662464141845703, 0.009775103569030762, 0.009499679565429688, 0.009511903762817383, 0.009404416084289552, 0.009179167747497559, 0.009443296432495116, 0.009151488304138184, 0.009132032394409179, 0.00910848045349121, 0.009227328300476074, 0.00903264045715332, 0.008987648010253906, 0.009135104179382325, 0.009157631874084473, 0.009137151718139648, 0.009272319793701172, 0.009177087783813476, 0.009298944473266601, 0.009266176223754884, 0.009195520401000976, 0.009146368026733399, 0.009155584335327148, 0.009013248443603515, 0.009151488304138184, 0.00910540771484375, 0.00914739227294922, 0.009162752151489258, 0.009162752151489258, 0.009167872428894042, 0.009166848182678223, 0.00922214412689209, 0.009149439811706543, 0.009158687591552734, 0.009228256225585938, 0.009141247749328613, 0.00920473575592041, 0.009191424369812011, 0.009198592185974122, 0.009184255599975585, 0.009156607627868652, 0.009198592185974122, 0.00914739227294922, 0.009162752151489258, 0.009160703659057617, 0.00918835163116455, 0.009164799690246582, 0.009153535842895508, 0.009177087783813476, 0.00918835163116455, 0.009183232307434081, 0.009174015998840332, 0.009055232048034668, 0.009302016258239745, 0.009183232307434081, 0.009200703620910645, 0.009078720092773437, 0.009041952133178712, 0.009062368392944336, 0.00900710391998291, 0.009136128425598144, 0.009181183815002441, 0.00921292781829834, 0.009128959655761718, 0.009500672340393066, 0.00922111988067627, 0.009170944213867188, 0.009210880279541016, 0.009193471908569336, 0.009481216430664062, 0.009588735580444336, 0.009482239723205567, 0.009192447662353515, 0.009126912117004395, 0.009182208061218262, 0.009177087783813476, 0.009135104179382325, 0.009125887870788574, 0.009160767555236817, 0.009034687995910645, 0.009136159896850586, 0.009224160194396973, 0.009165823936462402, 0.009152511596679687, 0.009225215911865235, 0.009199647903442383, 0.009096192359924317, 0.009163776397705077, 0.009185279846191406, 0.009187328338623046, 0.009151488304138184, 0.009151488304138184, 0.009167872428894042, 0.009112575531005859, 0.009160703659057617, 0.00921395206451416, 0.009183232307434081, 0.009149439811706543, 0.009164799690246582, 0.009228287696838379, 0.009158656120300293, 0.009142271995544434, 0.009195520401000976, 0.009164799690246582, 0.009153535842895508, 0.009115648269653321, 0.009124863624572753, 0.009029631614685058, 0.009118720054626465, 0.009218048095703125, 0.009789440155029297, 0.009326592445373535, 0.00920576000213623, 0.009138175964355469, 0.009169919967651367, 0.00921497631072998, 0.009181183815002441, 0.009210880279541016, 0.009149439811706543, 0.009207807540893554, 0.009171968460083007, 0.009166848182678223, 0.009178112030029297, 0.009241600036621094, 0.009164799690246582, 0.009317376136779786, 0.009128959655761718, 0.009174015998840332, 0.009158656120300293, 0.009224191665649414, 0.00918015956878662, 0.009187328338623046, 0.00919155216217041, 0.009061247825622558, 0.009132032394409179, 0.009176063537597656, 0.009174015998840332, 0.009133055686950683, 0.009178112030029297, 0.009234432220458985, 0.009144319534301757, 0.009141247749328613, 0.009160703659057617, 0.009172991752624511, 0.009193471908569336, 0.009167872428894042, 0.009539584159851074, 0.009190400123596192, 0.009181183815002441, 0.009003007888793945, 0.009202688217163087, 0.009111552238464356, 0.009185279846191406, 0.009197567939758301, 0.009170944213867188, 0.009170944213867188, 0.00921292781829834, 
0.009209919929504394, 0.009170880317687988, 0.00921292781829834, 0.009210880279541016, 0.009162752151489258, 0.009183232307434081, 0.009203712463378906, 0.009176063537597656, 0.009218048095703125, 0.010158080101013184, 0.010158080101013184, 0.009790464401245117, 0.00993177604675293, 0.009844799995422364, 0.009648063659667968, 0.010369024276733398, 0.009623552322387695, 0.009567232131958007, 0.00954265594482422, 0.009529343605041504, 0.009562111854553223, 0.009648127555847168, 0.009622528076171874, 0.009673727989196777, 0.00954470443725586, 0.009589759826660157, 0.009613311767578125, 0.009913344383239747, 0.009473024368286133, 0.009555968284606933, 0.009554944038391112, 0.009560064315795898, 0.009574399948120118, 0.009567232131958007, 0.009565183639526367, 0.009787391662597657, 0.009535488128662109, 0.009738240242004394, 0.009574399948120118, 0.009425919532775879, 0.009177087783813476, 0.009161727905273438, 0.009126912117004395, 0.009246720314025878, 0.009224191665649414, 0.009158656120300293, 0.009141247749328613, 0.009195520401000976, 0.00914739227294922, 0.009161727905273438, 0.009367551803588867, 0.009404416084289552, 0.009179136276245118, 0.009172991752624511, 0.009356287956237793, 0.009441280364990234, 0.009779199600219727, 0.009608192443847656, 0.009628735542297363, 0.009593791961669922, 0.00952012825012207, 0.009589759826660157, 0.009575424194335937, 0.009543680191040039, 0.009555007934570313, 0.01003718376159668, 0.009634816169738769, 0.0095098876953125, 0.009538559913635255, 0.00982323169708252, 0.009583616256713867, 0.009564160346984863, 0.009549823760986328, 0.009597951889038087, 0.010098688125610352, 0.010272768020629883, 0.009614336013793945, 0.009540608406066894, 0.00962764835357666, 0.00984166431427002, 0.010084351539611817, 0.009706496238708496, 0.009720831871032716, 0.009593855857849122, 0.009675775527954102, 0.009612288475036621, 0.009787391662597657, 0.009767935752868653, 0.009583616256713867, 0.009681920051574706, 0.009623552322387695, 0.00961023998260498, 0.009648127555847168, 0.009543680191040039, 0.00963584041595459, 0.009719807624816895, 0.009588735580444336, 0.009638912200927734, 0.00953651237487793, 0.009631744384765625, 0.009630720138549804, 0.00959488010406494, 0.009650176048278808, 0.009547776222229003, 0.00962764835357666, 0.009593855857849122, 0.009491456031799317, 0.009606143951416016, 0.009563136100769042, 0.009755647659301758, 0.00959488010406494, 0.009577471733093262, 0.009555968284606933, 0.009686016082763671, 0.009670656204223632, 0.009595904350280762, 0.010513407707214355, 0.00971776008605957]",tokens/s,107.73117630249448,,,,,,,, -4bit-awq-gemv-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,EleutherAI/gpt-j-6b,EleutherAI/gpt-j-6b,cuda,0,42,,,True,,,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA 
A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,gptj,MB,3841.552384,4578.607104,0.0,3992.977408,3875.045888,s,1,9.125650390625,9.125650390625,0.0,9.125650390625,9.125650390625,9.125650390625,9.125650390625,[9.125650390625],,kWh,2.6822235668751978e-05,1.4684602765636326e-05,3.700169626799643e-05,7.850853470238473e-05,,MB,2106.318848,4834.459648,0.0,4188.012544,4099.58912,s,10,0.9932444839477539,0.09932444839477539,0.0001841089078033206,0.09926801681518554,0.09943730163574219,0.0996375617980957,0.09979776992797851,"[0.09983782196044921, 0.09930764770507812, 0.09922838592529297, 0.0993927993774414, 0.09921440124511718, 0.09919900512695312, 0.09933961486816406, 0.09920038604736328, 0.09919145965576172, 0.09933296203613282]",tokens/s,2577.41174642623,kWh,1.1740670158003439e-06,6.430793321076198e-07,6.247766274336541e-06,8.064912622244505e-06,tokens/kWh,31742439.377942562,MB,2115.473408,4918.345728,0.0,4271.898624,4198.345728,s,10,20.463321044921877,2.0463321044921874,0.022925317093763248,2.048007141113281,2.075736474609375,2.0775850341796875,2.0790638818359377,"[2.07532568359375, 2.07943359375, 2.057294677734375, 2.070273193359375, 2.052175537109375, 2.0438387451171875, 2.01924560546875, 2.016760986328125, 2.0210909423828123, 2.027882080078125]",tokens/s,30.786791577818654,kWh,2.3951674101352627e-05,1.3126402816113106e-05,6.026814804946379e-05,9.734622496692951e-05,tokens/kWh,647174.5568089813,,s,630,20.461041671752916,0.03247784392341735,0.0007998145857065298,0.03214028739929199,0.03322275657653809,0.03348992156982422,0.03598411815643311,"[0.03212287902832031, 0.03201228713989258, 0.03259187316894531, 0.033067008972167966, 0.03291340637207031, 0.03304857635498047, 0.033107967376708985, 0.03348992156982422, 0.03327385711669922, 0.03346739196777344, 0.0330332145690918, 0.033075199127197266, 0.03314585494995117, 0.033040382385253905, 0.033099777221679685, 0.033105918884277344, 0.03368243026733399, 0.03319910430908203, 0.03454262542724609, 0.03299631881713867, 0.03314176177978516, 0.032933887481689454, 0.03283763122558594, 0.033116161346435545, 0.0325591049194336, 0.03196211242675781, 0.031903743743896484, 0.03210137557983399, 0.03261337661743164, 0.036951038360595705, 0.03242086410522461, 0.03211775970458984, 0.03180441665649414, 0.03193139266967773, 0.0319866886138916, 0.03189248085021973, 0.03196006393432617, 0.03242905426025391, 0.033104896545410156, 0.03334860610961914, 0.03300454330444336, 0.03307417678833008, 0.033040382385253905, 0.03300352096557617, 0.03303424072265625, 0.03302912139892578, 0.03273113632202149, 0.033050624847412106, 0.033562625885009766, 0.03324620819091797, 0.0328611831665039, 0.033124351501464845, 0.03284275054931641, 0.0319109115600586, 0.032, 0.03197542381286621, 0.03194879913330078, 0.03300556945800781, 0.03314176177978516, 0.03309363174438477, 0.035046398162841795, 0.03490611267089844, 0.03333631896972656, 0.03196108818054199, 0.03189760017395019, 0.03336294555664063, 0.0328908805847168, 0.033016895294189455, 0.0335164794921875, 0.03297894287109375, 0.03318374252319336, 0.0330618896484375, 0.03301990509033203, 0.03376332855224609, 0.03348992156982422, 0.03301375961303711, 0.03300454330444336, 0.03313049697875976, 0.033105918884277344, 0.033007614135742186, 0.03303014373779297, 0.033023998260498046, 0.03283763122558594, 0.032894977569580076, 0.033020927429199216, 0.03185868835449219, 0.0319866886138916, 0.031898624420166014, 0.03274956893920898, 0.03617587280273438, 0.0333199348449707, 0.0329881591796875, 0.032906238555908206, 0.033127422332763674, 
0.03299225616455078, 0.033116161346435545, 0.03311718368530273, 0.033014785766601565, 0.03300454330444336, 0.03290419387817383, 0.03289702224731445, 0.03182694435119629, 0.033014785766601565, 0.03296460723876953, 0.03390771102905273, 0.03371110534667969, 0.03285811233520508, 0.03168767929077149, 0.033116161346435545, 0.03309363174438477, 0.03301990509033203, 0.0331141128540039, 0.033058815002441407, 0.03302809524536133, 0.033116161346435545, 0.033740798950195314, 0.033511425018310545, 0.03339571380615235, 0.0331141128540039, 0.03330252838134766, 0.036347904205322266, 0.032927745819091796, 0.031699968338012696, 0.03158835220336914, 0.03189452743530274, 0.03189555168151856, 0.0319498233795166, 0.03184025573730469, 0.031768575668334964, 0.031893503189086916, 0.031920127868652344, 0.03154841613769531, 0.032435199737548825, 0.03246387100219727, 0.03191500854492187, 0.03167436790466309, 0.03165184020996094, 0.031883264541625975, 0.031764480590820314, 0.03197849655151367, 0.03179520034790039, 0.03196620750427246, 0.03151155281066895, 0.03179929542541504, 0.031902719497680664, 0.031883264541625975, 0.03188121604919433, 0.03182284736633301, 0.032230400085449216, 0.03620556640625, 0.0334837760925293, 0.0329431037902832, 0.032914432525634765, 0.0330967025756836, 0.03304652786254883, 0.033104896545410156, 0.032903297424316406, 0.033135486602783204, 0.031900672912597655, 0.031851520538330076, 0.03188121604919433, 0.0318474235534668, 0.0324136962890625, 0.03341926574707031, 0.03308441543579101, 0.03300966262817383, 0.03297689437866211, 0.03299327850341797, 0.0331069450378418, 0.03297894287109375, 0.03301683044433594, 0.03300556945800781, 0.033068031311035154, 0.03313868713378906, 0.03310899353027344, 0.03307212829589844, 0.03295743942260742, 0.033463294982910154, 0.03194675254821777, 0.033152000427246094, 0.03402751922607422, 0.03569049453735352, 0.033018878936767575, 0.03329945755004883, 0.033140735626220705, 0.03301990509033203, 0.03316223907470703, 0.032982017517089846, 0.03302604675292969, 0.03199180793762207, 0.032328704833984374, 0.03298611068725586, 0.032996353149414064, 0.032960575103759764, 0.03298297500610352, 0.03301683044433594, 0.033083393096923826, 0.03297587203979492, 0.032996353149414064, 0.03301484680175781, 0.03309151840209961, 0.03298918533325195, 0.033181697845458984, 0.033006591796875, 0.03315814590454102, 0.03303936004638672, 0.03304652786254883, 0.033113086700439456, 0.033331199645996096, 0.0362158088684082, 0.03338137435913086, 0.03319807815551758, 0.032246784210205076, 0.03204710388183594, 0.03198054313659668, 0.03184127998352051, 0.03199692726135254, 0.031736831665039066, 0.03181260871887207, 0.031851520538330076, 0.033113086700439456, 0.033306625366210936, 0.03309363174438477, 0.0329881591796875, 0.03287142562866211, 0.03341107177734375, 0.03309260940551758, 0.033035263061523434, 0.033137664794921876, 0.032336894989013674, 0.03190784072875977, 0.032059391021728514, 0.03182387161254883, 0.031916032791137694, 0.031955968856811526, 0.03191705513000488, 0.03211264038085938, 0.03316121673583984, 0.03343769454956055, 0.03314176177978516, 0.033701889038085936, 0.03776102447509765, 0.03344486236572266, 0.03328204727172852, 0.032718849182128903, 0.0328089599609375, 0.03302195358276367, 0.03222732925415039, 0.032277503967285154, 0.032833534240722655, 0.031898624420166014, 0.03263692855834961, 0.03181260871887207, 0.03172863960266113, 0.03182387161254883, 0.03144908714294434, 0.031850496292114255, 0.03200204849243164, 0.033276927947998046, 0.03338444900512695, 0.03274956893920898, 
0.03310182571411133, 0.032814079284667966, 0.03218227386474609, 0.031647743225097655, 0.0318156795501709, 0.03172659111022949, 0.0318382396697998, 0.03141526412963867, 0.03584307098388672, 0.03363225555419922, 0.033083393096923826, 0.03276800155639648, 0.03285708618164063, 0.03309568023681641, 0.03288883209228516, 0.03194777679443359, 0.031699968338012696, 0.032901119232177735, 0.033312767028808594, 0.03286732864379883, 0.033007614135742186, 0.03285606384277344, 0.033035263061523434, 0.032865280151367186, 0.03281919860839844, 0.03191193580627441, 0.03167027282714844, 0.031736831665039066, 0.03176243209838867, 0.03182592010498047, 0.03184025573730469, 0.03197337532043457, 0.03158527946472168, 0.0318474235534668, 0.03173785591125488, 0.03155763244628906, 0.03153510475158691, 0.0316682243347168, 0.031936511993408204, 0.03318476867675781, 0.03604172897338867, 0.03339468765258789, 0.03300966262817383, 0.03288576126098633, 0.03300249481201172, 0.03285299301147461, 0.03314176177978516, 0.03300352096557617, 0.03359027099609375, 0.033018878936767575, 0.03335475158691406, 0.03299020767211914, 0.03287756729125976, 0.03290419387817383, 0.0345354232788086, 0.03322163009643555, 0.03315302276611328, 0.03301273727416992, 0.03317452621459961, 0.033296382904052735, 0.03296768188476563, 0.03310899353027344, 0.033031169891357424, 0.03304550552368164, 0.03316121673583984, 0.03301068878173828, 0.03340595245361328, 0.0331069450378418, 0.03526144027709961, 0.03492454528808594, 0.03300249481201172, 0.032712703704833986, 0.031771743774414066, 0.031664031982421875, 0.03185868835449219, 0.03199795150756836, 0.033476608276367184, 0.03305779266357422, 0.032517120361328124, 0.03190483283996582, 0.03199788856506348, 0.03194470405578613, 0.031908863067626955, 0.0319109115600586, 0.03207680130004883, 0.03197235107421875, 0.031970304489135744, 0.03187814331054688, 0.031883264541625975, 0.031974399566650394, 0.031871999740600586, 0.03183923149108887, 0.03194367980957031, 0.03199692726135254, 0.03194777679443359, 0.031927295684814457, 0.03180441665649414, 0.031927295684814457, 0.03199897575378418, 0.032292865753173826, 0.03233894348144531, 0.032925697326660154, 0.03246182250976563, 0.0320634880065918, 0.031888383865356446, 0.03198464012145996, 0.032215038299560544, 0.032092159271240234, 0.03199488067626953, 0.0319866886138916, 0.03191193580627441, 0.0318525447845459, 0.03167231941223145, 0.03194675254821777, 0.03196211242675781, 0.03186892890930176, 0.0319866886138916, 0.03196211242675781, 0.03208396911621094, 0.03197235107421875, 0.03189248085021973, 0.031889408111572266, 0.03196006393432617, 0.031888383865356446, 0.032023551940917966, 0.031939584732055666, 0.031888383865356446, 0.0322242546081543, 0.03193139266967773, 0.03186892890930176, 0.032674816131591795, 0.03213824081420898, 0.03194572830200195, 0.03187609672546387, 0.03223244857788086, 0.03255295944213867, 0.03199692726135254, 0.03180031967163086, 0.031903743743896484, 0.0316180477142334, 0.03178188705444336, 0.03185766410827637, 0.03187814331054688, 0.03175833511352539, 0.03194675254821777, 0.03208396911621094, 0.03441664123535156, 0.0328642578125, 0.0325591049194336, 0.03189657592773437, 0.0318023681640625, 0.03179929542541504, 0.031883264541625975, 0.031887359619140625, 0.032020481109619144, 0.03199180793762207, 0.03201126480102539, 0.03195289611816406, 0.03194367980957031, 0.03228057479858398, 0.03151769638061523, 0.03172147178649903, 0.03250688171386719, 0.03232665634155273, 0.03203583908081055, 0.03236153411865234, 0.032384960174560544, 0.03189760017395019, 
0.03195699119567871, 0.031867904663085936, 0.031936511993408204, 0.031959039688110355, 0.03203788757324219, 0.031937536239624024, 0.03205836868286133, 0.031922176361083986, 0.03190681648254395, 0.031954944610595705, 0.03193343925476074, 0.031920127868652344, 0.03165798377990723, 0.03177267265319824, 0.03181670379638672, 0.03168460845947266, 0.03200921630859375, 0.03198054313659668, 0.03189043235778809, 0.032352256774902347, 0.03187302398681641, 0.03190787124633789, 0.03189039993286133, 0.03249356842041016, 0.03248230361938476, 0.03197747230529785, 0.03295743942260742, 0.03236249542236328, 0.03191705513000488, 0.03199590492248535, 0.03197644805908203, 0.031898624420166014, 0.03187302398681641, 0.03188742446899414, 0.031975360870361326, 0.03197235107421875, 0.031882240295410154, 0.031904767990112305, 0.03180748748779297, 0.03316633605957031, 0.03213926315307617, 0.03198975944519043, 0.03187404823303223, 0.03215769577026367, 0.03208294296264649, 0.03184435272216797, 0.031903743743896484, 0.03217203140258789, 0.03214131164550781, 0.03215871810913086, 0.03236556625366211, 0.03197747230529785, 0.03174092864990234, 0.032568321228027344, 0.03197644805908203, 0.032471038818359374, 0.032290817260742184, 0.032385025024414066, 0.03281817626953125, 0.032292865753173826, 0.03187814331054688, 0.03150643157958984, 0.031751167297363284, 0.03221401596069336, 0.031920127868652344, 0.03187507247924805, 0.0315996150970459, 0.03180646324157715, 0.03147164726257324, 0.03148796844482422, 0.031526912689208986, 0.031543296813964845, 0.03180953598022461, 0.03156991958618164, 0.031855615615844726, 0.03157606315612793, 0.03184230422973633, 0.03184025573730469, 0.03219772720336914, 0.03180739212036133, 0.0315863037109375, 0.03157606315612793, 0.03161292839050293, 0.03141939163208008, 0.031562751770019534, 0.03306496047973633, 0.03304140853881836, 0.03188121604919433, 0.03189043235778809, 0.031835136413574217, 0.03183616065979004, 0.03188121604919433, 0.031871999740600586, 0.03185868835449219, 0.031921152114868165, 0.03189657592773437, 0.03187302398681641, 0.031920127868652344, 0.03199385643005371, 0.031932416915893554, 0.031884288787841795, 0.0318474235534668, 0.03188019180297851, 0.032039936065673826, 0.03190169525146484, 0.03195699119567871, 0.03198361587524414, 0.032008190155029294, 0.033291263580322264, 0.03256115341186523, 0.0325294075012207, 0.03183616065979004, 0.032056320190429685, 0.031921152114868165, 0.0319498233795166, 0.03203583908081055, 0.03228876876831055, 0.03316223907470703, 0.032440319061279296, 0.03184332847595215, 0.03197747230529785, 0.032043006896972655, 0.03199283218383789, 0.03202150344848633, 0.031908863067626955, 0.0319498233795166, 0.032623615264892575, 0.03355648040771484, 0.03237273788452148, 0.032361473083496094, 0.03220479965209961, 0.03279359817504883, 0.03252633666992188, 0.03194879913330078, 0.03182284736633301, 0.03191193580627441, 0.031908863067626955, 0.03180748748779297, 0.032118785858154295, 0.03182796859741211, 0.03186175918579102, 0.031835168838500974, 0.03179926490783692, 0.032039936065673826, 0.03248844909667969, 0.03323289489746094, 0.032917503356933595, 0.03235123062133789, 0.03189657592773437, 0.031900672912597655, 0.03189145660400391, 0.03195289611816406, 0.03184435272216797, 0.031932416915893554, 0.03184639930725098, 0.03182592010498047, 0.032023551940917966, 0.031870975494384765, 0.034769920349121096, 0.03258777618408203, 0.03220889663696289, 0.03236761474609375, 0.032156673431396485, 0.032004096984863284, 0.03195084762573242, 0.03183616065979004, 0.03285504150390625, 
0.03214950561523437, 0.03244851303100586, 0.03257241439819336, 0.0322344970703125, 0.03208499145507813, 0.03205017471313477, 0.03192831993103027, 0.03223551940917969, 0.03370086288452148, 0.03307622528076172, 0.03198464012145996, 0.03262054443359375, 0.0324771842956543, 0.03212083053588867, 0.03190784072875977, 0.03204198455810547, 0.032110591888427735, 0.031922176361083986, 0.03202764892578125, 0.03201638412475586, 0.0321525764465332, 0.03171225547790527, 0.03179929542541504, 0.03205222320556641, 0.03185971260070801, 0.03189760017395019, 0.03199590492248535, 0.0318474235534668, 0.03193343925476074, 0.03180748748779297, 0.03264409637451172, 0.03197747230529785, 0.03216998291015625, 0.03189555168151856]",tokens/s,30.790221246151585,,,,,,,, -4bit-awq-gemv-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,stabilityai/stablelm-2-12b,stabilityai/stablelm-2-12b,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run - report = scenario.run(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 105, in run - _ = backend.generate(self.inputs, self.config.generate_kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context - return func(*args, **kwargs) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 400, in generate - return self.pretrained_model.generate(**inputs, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context - return func(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 1914, in generate - result = self._sample( - File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2651, in _sample - outputs = self( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/stablelm/modeling_stablelm.py"", line 1268, in forward - outputs = self.model( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", 
line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/stablelm/modeling_stablelm.py"", line 1062, in forward - layer_outputs = decoder_layer( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/stablelm/modeling_stablelm.py"", line 792, in forward - self_attn_output, self_attn_weights, present_key_value = self.self_attn( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/stablelm/modeling_stablelm.py"", line 325, in forward - query_states = self.q_proj(hidden_states) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context - return func(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/gemv.py"", line 162, in forward - assert AWQ_INSTALLED, ( -AssertionError: AWQ kernels could not be loaded. 
Please install them from https://github.com/casper-hansen/AutoAWQ_kernels - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemv-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,google/gemma-2b,google/gemma-2b,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 304, in hf_raise_for_status - response.raise_for_status() - File ""/usr/local/lib/python3.10/dist-packages/requests/models.py"", line 1024, in raise_for_status - raise HTTPError(http_error_msg, response=self) -requests.exceptions.HTTPError: 403 Client Error: Forbidden for url: https://huggingface.co/google/gemma-2b/resolve/main/config.json - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1722, in _get_metadata_or_catch_error - metadata = get_hf_file_metadata(url=url, proxies=proxies, timeout=etag_timeout, headers=headers) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1645, in get_hf_file_metadata - r = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 372, in _request_wrapper - response = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 396, in _request_wrapper - hf_raise_for_status(response) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 367, in hf_raise_for_status - raise HfHubHTTPError(message, response=response) from e -huggingface_hub.utils._errors.HfHubHTTPError: (Request ID: Root=1-669480f7-4b3307f05b6207322f5489e1;b8965509-315a-4f97-9989-40d61dfff628) - -403 Forbidden: Please enable access to public gated repositories in your fine-grained token settings to view this repository.. -Cannot access content at: https://huggingface.co/google/gemma-2b/resolve/main/config.json. -If you are trying to create or update content,make sure you have a token with the `write` role. 
- -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1221, in hf_hub_download - return _hf_hub_download_to_cache_dir( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1325, in _hf_hub_download_to_cache_dir - _raise_on_head_call_error(head_call_error, force_download, local_files_only) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1826, in _raise_on_head_call_error - raise LocalEntryNotFoundError( -huggingface_hub.utils._errors.LocalEntryNotFoundError: An error happened while trying to locate the file on the Hub and we cannot find the requested files in the local cache. Please check your connection and try again or make sure your Internet connection is on. - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 445, in cached_file - raise EnvironmentError( -OSError: We couldn't connect to 'https://huggingface.co' to load this file, couldn't find it in the cached files and it looks like google/gemma-2b is not the path to a directory containing a file named config.json. -Checkout your internet connection or see how to run the library in offline mode at 'https://huggingface.co/docs/transformers/installation#offline-mode'. 
- -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemv-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,EleutherAI/polyglot-ko-12.8b,EleutherAI/polyglot-ko-12.8b,cuda,0,42,,,True,,,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,gpt_neox,MB,7423.840256,8015.839232,0.0,7430.209536,7414.23104,s,1,10.501890625,10.501890625,0.0,10.501890625,10.501890625,10.501890625,10.501890625,[10.501890625],,kWh,4.415293539375019e-05,2.418387855852522e-05,6.248560554400484e-05,0.00013082241949628024,,MB,1800.974336,8823.242752,0.0,8176.795648,8052.041728,s,10,2.106992095947265,0.21069920959472652,9.06962529781896e-05,0.21067728424072266,0.21081198577880858,0.21083020248413087,0.21084477584838868,"[0.2108079376220703, 0.21084841918945313, 0.2107332763671875, 0.21064204406738282, 0.210577880859375, 0.21080339050292968, 0.21067510986328125, 0.21067945861816406, 0.21064460754394532, 0.21057997131347655]",tokens/s,1215.0021848321508,kWh,2.491171864250727e-06,1.3649966684291692e-06,1.3641556052125062e-05,1.749772458480496e-05,tokens/kWh,14630473.737271568,MB,1813.85216,8970.043392,0.0,8323.596288,8263.496192,s,10,21.914744140625,2.1914744140625,0.01859272370841556,2.1836665039062497,2.2059440673828123,2.223183703613281,2.236975412597656,"[2.24042333984375, 2.196933837890625, 2.171068603515625, 2.18291552734375, 2.18441748046875, 2.195355224609375, 2.202113037109375, 2.1817333984375, 2.179576416015625, 2.180207275390625]",tokens/s,28.74776889738456,kWh,2.5997875414014213e-05,1.4247876856572992e-05,8.955752650707157e-05,0.00012980327877765876,tokens/kWh,485349.8354838423,,s,630,21.912178730010996,0.034781236079382515,0.0006119224992297861,0.03450470352172851,0.03560007820129395,0.035801343154907225,0.03667585121154786,"[0.035448833465576174, 0.0356126708984375, 0.03537510299682617, 0.035504127502441404, 0.03673088073730469, 0.03624755096435547, 0.03543142318725586, 0.03541401672363281, 0.03480886459350586, 0.03521225738525391, 0.035335166931152344, 0.03545395278930664, 0.03524095916748047, 0.03625369644165039, 0.03565055847167969, 0.03545395278930664, 0.03536076736450195, 0.03544678497314453, 0.035448833465576174, 0.03564755249023437, 0.03552761459350586, 0.03549900817871094, 0.03539152145385742, 0.03552355194091797, 0.035197952270507815, 0.035492862701416016, 0.03582668685913086, 0.03517440032958984, 0.03549900817871094, 0.0354785270690918, 0.03560857772827149, 0.03585228729248047, 0.03580825424194336, 0.035591167449951173, 0.035211265563964846, 0.03558195114135742, 0.035775489807128906, 0.035606529235839846, 0.0354969596862793, 0.03560857772827149, 0.0355153923034668, 0.035388416290283206, 0.03529420852661133, 0.03513958358764648, 0.0356577262878418, 0.03561062240600586, 0.03553177642822265, 0.035248126983642575, 0.03604172897338867, 0.035767295837402346, 0.03523481750488281, 0.03566796875, 0.035681278228759765, 0.0364031982421875, 0.036397056579589845, 0.035552257537841796, 0.03523993682861328, 0.03550822448730469, 0.035527679443359376, 0.03529011154174805, 0.03548876953125, 
0.03553996658325195, 0.03555430221557617, 0.0355153923034668, 0.03756748962402344, 0.03612364959716797, 0.034786304473876956, 0.03439308929443359, 0.03455078506469727, 0.034683902740478514, 0.034958335876464845, 0.035746814727783204, 0.035558399200439454, 0.0357212142944336, 0.035296257019042966, 0.03439923095703125, 0.03480575942993164, 0.034385921478271485, 0.034323455810546875, 0.03447808074951172, 0.03445248031616211, 0.0343296012878418, 0.03462656021118164, 0.034348033905029295, 0.034388992309570314, 0.03423027038574219, 0.034508800506591795, 0.03450368118286133, 0.038675457000732424, 0.03549593734741211, 0.0345354232788086, 0.03529318237304688, 0.03447711944580078, 0.034501567840576175, 0.03517337417602539, 0.03518975830078125, 0.034457599639892575, 0.03552979278564453, 0.03551737594604492, 0.03583488082885742, 0.03563008117675781, 0.03438796615600586, 0.03507302474975586, 0.03490611267089844, 0.03527782440185547, 0.034359294891357424, 0.034375679016113284, 0.03425484848022461, 0.034418689727783204, 0.03521023941040039, 0.03438387298583984, 0.0343296012878418, 0.03433267211914062, 0.034525184631347655, 0.035076095581054685, 0.034522113800048826, 0.03427532958984375, 0.03450470352172851, 0.03439820861816406, 0.034298881530761716, 0.0345425910949707, 0.0341739501953125, 0.03489791870117188, 0.03467264175415039, 0.03423129653930664, 0.03426303863525391, 0.03474227142333984, 0.034298881530761716, 0.03481497573852539, 0.03445158386230469, 0.034242431640625, 0.03448627090454102, 0.03441049575805664, 0.034402305603027344, 0.03487539291381836, 0.034500606536865236, 0.03430912017822266, 0.03435212707519531, 0.03426713562011719, 0.034353248596191405, 0.034280353546142575, 0.03426816177368164, 0.03425177764892578, 0.03440639877319336, 0.034331649780273435, 0.03439411163330078, 0.034523136138916014, 0.03449343872070312, 0.03433369445800781, 0.03455692672729492, 0.03427840042114258, 0.03441561508178711, 0.03434393692016602, 0.034408447265625, 0.03437158584594727, 0.034476032257080076, 0.03468294525146484, 0.03443296051025391, 0.03441049575805664, 0.03458867263793945, 0.03448012924194336, 0.03446579360961914, 0.03434086227416992, 0.034460670471191404, 0.03441459274291992, 0.03439616012573242, 0.0343296012878418, 0.034574337005615234, 0.03438796615600586, 0.03443916702270508, 0.03437363052368164, 0.0343111686706543, 0.034315265655517575, 0.034353153228759765, 0.034229248046875, 0.03504844665527344, 0.03445555114746094, 0.03439411163330078, 0.034435073852539064, 0.034344959259033206, 0.03421388626098633, 0.03436851119995117, 0.03438489532470703, 0.034304000854492187, 0.03437055969238281, 0.03478937530517578, 0.03575296020507813, 0.035113983154296875, 0.034219009399414066, 0.03424051284790039, 0.034362400054931644, 0.03419644927978516, 0.034385921478271485, 0.03426201629638672, 0.03432447814941406, 0.03494604873657227, 0.03670732879638672, 0.0352174072265625, 0.03548262405395508, 0.03435007858276367, 0.03521331024169922, 0.03492454528808594, 0.034290687561035156, 0.03477503967285156, 0.03478015899658203, 0.03429580688476563, 0.03441459274291992, 0.034361343383789066, 0.03458560180664062, 0.03428147125244141, 0.03467264175415039, 0.03422003173828125, 0.03441254425048828, 0.03420467376708984, 0.03434598541259765, 0.03455897521972656, 0.034358272552490236, 0.03453952026367187, 0.03435011291503906, 0.03432342529296875, 0.03438796615600586, 0.03432755279541016, 0.03481087875366211, 0.034516990661621096, 0.03457535934448242, 0.034359294891357424, 0.03431219100952149, 0.03494911956787109, 
0.03445452880859375, 0.03439616012573242, 0.03462144088745117, 0.0341739501953125, 0.03462963104248047, 0.03437977600097656, 0.034371646881103515, 0.03447391891479492, 0.03457535934448242, 0.03477913665771484, 0.03460505676269531, 0.03448627090454102, 0.034356224060058595, 0.03464704132080078, 0.03552358245849609, 0.03454364776611328, 0.03532489776611328, 0.03446681594848633, 0.03438796615600586, 0.0342845458984375, 0.03501465606689453, 0.03743641662597656, 0.03570175933837891, 0.03439513778686523, 0.034429950714111326, 0.034375679016113284, 0.03430092620849609, 0.03433574295043945, 0.0342999038696289, 0.03437670516967774, 0.03435520172119141, 0.03440332794189453, 0.034713600158691404, 0.03450470352172851, 0.03436441421508789, 0.03427840042114258, 0.034546688079833986, 0.0344453125, 0.03484985733032227, 0.03659564971923828, 0.03554304122924805, 0.03456409454345703, 0.035244033813476565, 0.03465318298339844, 0.034402305603027344, 0.03460505676269531, 0.034223102569580076, 0.03401728057861328, 0.03403366470336914, 0.033964031219482424, 0.034323486328125, 0.03472380828857422, 0.03442073440551758, 0.03444736099243164, 0.03472076797485352, 0.03450470352172851, 0.03475046539306641, 0.03464601516723633, 0.03483443069458008, 0.035418113708496096, 0.034375679016113284, 0.03462963104248047, 0.03497987365722656, 0.03449750518798828, 0.034484222412109376, 0.03530137634277344, 0.034459648132324217, 0.03537408065795898, 0.035046398162841795, 0.034444286346435544, 0.03439718246459961, 0.03507510375976562, 0.03439715194702148, 0.034625537872314455, 0.03482112121582031, 0.03446681594848633, 0.035234848022460935, 0.035631072998046874, 0.03453238296508789, 0.03551433563232422, 0.034751487731933595, 0.03448934555053711, 0.03483647918701172, 0.03484262466430664, 0.034631679534912106, 0.03508531188964844, 0.03404083251953125, 0.03443916702270508, 0.03455590438842773, 0.03430809783935547, 0.03430092620849609, 0.03502796936035156, 0.034513919830322266, 0.03464191818237305, 0.035009536743164066, 0.03450678253173828, 0.03460502243041992, 0.034830337524414064, 0.034530303955078126, 0.034718719482421875, 0.035227649688720705, 0.03632537460327148, 0.03540172958374024, 0.03525120162963867, 0.034508800506591795, 0.03498495864868164, 0.03449856185913086, 0.03438796615600586, 0.035151870727539065, 0.03437875366210937, 0.034723838806152346, 0.035320831298828126, 0.034776065826416014, 0.034402305603027344, 0.03547340774536133, 0.03500646209716797, 0.03441664123535156, 0.03477503967285156, 0.03451596832275391, 0.034522113800048826, 0.035266559600830076, 0.03522867202758789, 0.0347770881652832, 0.03404390335083008, 0.0344453125, 0.036367359161376955, 0.03641753768920898, 0.03445248031616211, 0.034871295928955076, 0.03544678497314453, 0.034408447265625, 0.03504230499267578, 0.034423809051513675, 0.0342927360534668, 0.03449343872070312, 0.035211265563964846, 0.03447808074951172, 0.03446169662475586, 0.03470848083496094, 0.03567718505859375, 0.03567001724243164, 0.035883007049560545, 0.03540787124633789, 0.03431935882568359, 0.03435116958618164, 0.03423225784301758, 0.034436161041259766, 0.0345968017578125, 0.035110912322998046, 0.034557952880859374, 0.034423809051513675, 0.03440435028076172, 0.0342927360534668, 0.03434188842773438, 0.03436646270751953, 0.03434905624389648, 0.034249729156494144, 0.034920448303222655, 0.03432550430297852, 0.03448012924194336, 0.03433267211914062, 0.03457228851318359, 0.034912254333496096, 0.036001792907714845, 0.03441049575805664, 0.034462718963623046, 0.03433369445800781, 0.03437977600097656, 
0.03419753646850586, 0.03442172622680664, 0.0346951675415039, 0.03489382553100586, 0.034336769104003906, 0.03442483139038086, 0.034323455810546875, 0.03436646270751953, 0.034514942169189454, 0.034756607055664065, 0.034388992309570314, 0.034377727508544925, 0.034358272552490236, 0.03437158584594727, 0.03417702484130859, 0.034484222412109376, 0.03443916702270508, 0.03429580688476563, 0.03503411102294922, 0.03559936141967773, 0.03578572845458984, 0.03566796875, 0.03559833526611328, 0.03579289627075195, 0.03595673751831055, 0.03570687866210937, 0.0357918701171875, 0.03583590316772461, 0.03542425537109375, 0.035672065734863284, 0.03559219360351563, 0.03562905502319336, 0.03555123138427734, 0.03550515365600586, 0.03544371032714844, 0.03564646530151367, 0.03527782440185547, 0.03561881637573242, 0.035394561767578124, 0.03554611206054688, 0.03552153778076172, 0.03540991973876953, 0.0354785270690918, 0.03555430221557617, 0.035451904296875, 0.03551027297973633, 0.03564851379394531, 0.03583795166015625, 0.035514366149902346, 0.03539046478271484, 0.035094528198242186, 0.0342476806640625, 0.034285568237304685, 0.034590721130371094, 0.034405376434326174, 0.03433062362670898, 0.03434905624389648, 0.034274303436279296, 0.03415859222412109, 0.034260990142822266, 0.03435520172119141, 0.034301952362060545, 0.03470131301879883, 0.035296257019042966, 0.03482828903198242, 0.03429683303833008, 0.03429171371459961, 0.03434905624389648, 0.03436646270751953, 0.03428761672973633, 0.034233345031738284, 0.034305023193359374, 0.03434086227416992, 0.034328575134277346, 0.03436441421508789, 0.03436544036865234, 0.03473612976074219, 0.034391040802001956, 0.034402305603027344, 0.03428761672973633, 0.03422515106201172, 0.03429171371459961, 0.034260990142822266, 0.03432755279541016, 0.03437363052368164, 0.0346060791015625, 0.03488665771484375, 0.03424265670776367, 0.03435203170776367, 0.03427840042114258, 0.034369537353515625, 0.03437369537353516, 0.034419742584228516, 0.03432028961181641, 0.03437875366210937, 0.03424358367919922, 0.034408447265625, 0.03860172653198242, 0.0359628791809082, 0.03486412811279297, 0.03459993743896484, 0.034735103607177735, 0.03498297500610351, 0.03439097595214844, 0.03491123199462891, 0.03443199920654297, 0.03444121551513672, 0.03443199920654297, 0.0349409294128418, 0.03513958358764648, 0.034484222412109376, 0.03463884735107422, 0.034361343383789066, 0.034482177734375, 0.03424665451049805, 0.03444940948486328, 0.03436236953735351, 0.034444286346435544, 0.03420467376708984, 0.03447808074951172, 0.034322494506835936, 0.034458560943603514, 0.034359294891357424, 0.034523136138916014, 0.034391040802001956, 0.03531673431396484, 0.034516990661621096, 0.034699264526367186, 0.03430912017822266, 0.034344959259033206, 0.03441664123535156, 0.03458150482177735, 0.03433369445800781, 0.034336769104003906, 0.036598785400390625, 0.03622092819213867, 0.03555737686157227, 0.034746368408203124, 0.03423846435546875, 0.03469311904907227, 0.034133087158203124, 0.03427318572998047, 0.03426303863525391, 0.03442176055908203, 0.03426201629638672, 0.034310142517089845, 0.035195903778076174, 0.034440193176269535, 0.03486105728149414, 0.03438489532470703, 0.034844703674316406, 0.03465315246582031, 0.034339839935302735, 0.03438079833984375, 0.036187137603759766, 0.035037185668945314, 0.03460710525512695, 0.03489382553100586, 0.03445043182373047, 0.03438796615600586, 0.03444736099243164, 0.03433779144287109, 0.03471462249755859, 0.034531326293945314, 0.0343900146484375, 0.03433062362670898, 0.03419033432006836, 
0.034391040802001956, 0.034157569885253904, 0.034157569885253904, 0.034157569885253904, 0.034477088928222654, 0.03427939224243164, 0.03425075149536133, 0.0347586555480957, 0.03434393692016602, 0.0346429443359375, 0.034902015686035154, 0.03434393692016602, 0.03496550369262695, 0.03439923095703125, 0.03451289749145508, 0.03479859161376953, 0.03469209671020508, 0.03460095977783203, 0.035286014556884765, 0.03462041473388672, 0.034318336486816405, 0.03439923095703125, 0.0343818244934082, 0.03427635192871094, 0.03430297470092773, 0.03410739135742188, 0.03435520172119141, 0.03434700775146484, 0.034590721130371094, 0.03591167831420899, 0.035672065734863284, 0.03445043182373047, 0.034121726989746096, 0.03457126235961914, 0.03425689697265625, 0.034348033905029295, 0.034601982116699216, 0.03456819152832031, 0.03476684951782227, 0.03530137634277344, 0.034191390991210935, 0.03477603149414062, 0.03434188842773438, 0.034418689727783204, 0.03436441421508789, 0.03463270568847656, 0.03442483139038086, 0.034974720001220705, 0.034440193176269535, 0.034032638549804685, 0.034371646881103515, 0.03446163177490234, 0.03449753570556641, 0.034405376434326174, 0.03422617721557617, 0.037341182708740234, 0.035914752960205076, 0.035517440795898435, 0.03439513778686523, 0.034356224060058595, 0.034331649780273435, 0.03427635192871094, 0.03424460983276367, 0.03419443130493164, 0.034353153228759765, 0.034612224578857424, 0.03434188842773438]",tokens/s,28.751134597909708,,,,,,,, -4bit-awq-gemv-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,Qwen/Qwen-72B,Qwen/Qwen-72B,cuda,0,42,,,True,,,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run - report = scenario.run(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run - self.run_model_loading_tracking(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking - backend.load() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load - self.load_transformers_model() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model - self.load_transformers_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights - 
self.load_transformers_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained - self.pretrained_model = self.automodel_loader.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 551, in from_pretrained - model_class = get_class_from_dynamic_module( - File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 502, in get_class_from_dynamic_module - final_module = get_cached_module_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 327, in get_cached_module_file - modules_needed = check_imports(resolved_module_file) - File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 182, in check_imports - raise ImportError( -ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. Run `pip install transformers_stream_generator` - -",qwen,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemv-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,i,i,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 304, in hf_raise_for_status - response.raise_for_status() - File ""/usr/local/lib/python3.10/dist-packages/requests/models.py"", line 1024, in raise_for_status - raise HTTPError(http_error_msg, response=self) -requests.exceptions.HTTPError: 404 Client Error: Not Found for url: https://huggingface.co/i/resolve/main/config.json - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1221, in hf_hub_download - return _hf_hub_download_to_cache_dir( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1325, in _hf_hub_download_to_cache_dir - _raise_on_head_call_error(head_call_error, force_download, local_files_only) - File 
""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1823, in _raise_on_head_call_error - raise head_call_error - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1722, in _get_metadata_or_catch_error - metadata = get_hf_file_metadata(url=url, proxies=proxies, timeout=etag_timeout, headers=headers) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1645, in get_hf_file_metadata - r = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 372, in _request_wrapper - response = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 396, in _request_wrapper - hf_raise_for_status(response) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status - raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-66948ffb-4794033e6b24e05329510985;c8a6740e-2e2a-4d3d-87ba-6d0cf00b7a4d) - -Repository Not Found for url: https://huggingface.co/i/resolve/main/config.json. -Please make sure you specified the correct `repo_id` and `repo_type`. -If you are trying to access a private or gated repo, make sure you are authenticated. - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 425, in cached_file - raise EnvironmentError( -OSError: i is not a local folder and is not a valid model identifier listed on 'https://huggingface.co/models' -If this is a private repository, make sure to pass a token having permission to this repo either by logging in with `huggingface-cli login` or by passing `token=` - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 
-4bit-awq-gemv-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,stabilityai/stablelm-3b-4e1t,stabilityai/stablelm-3b-4e1t,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run - report = scenario.run(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 105, in run - _ = backend.generate(self.inputs, self.config.generate_kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context - return func(*args, **kwargs) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 400, in generate - return self.pretrained_model.generate(**inputs, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context - return func(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 1914, in generate - result = self._sample( - File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2651, in _sample - outputs = self( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/stablelm/modeling_stablelm.py"", line 1268, in forward - outputs = self.model( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/stablelm/modeling_stablelm.py"", line 1062, in forward - layer_outputs = decoder_layer( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/stablelm/modeling_stablelm.py"", line 792, in forward - self_attn_output, self_attn_weights, present_key_value = self.self_attn( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/stablelm/modeling_stablelm.py"", line 325, in forward - query_states = self.q_proj(hidden_states) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context - return func(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/gemv.py"", line 162, in forward - assert AWQ_INSTALLED, ( -AssertionError: AWQ kernels could not be loaded. Please install them from https://github.com/casper-hansen/AutoAWQ_kernels - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemv-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,tiiuae/falcon-rw-1b,tiiuae/falcon-rw-1b,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run - report = scenario.run(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 105, in run - _ = backend.generate(self.inputs, self.config.generate_kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context - return func(*args, **kwargs) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 400, in generate - return self.pretrained_model.generate(**inputs, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context - return func(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 1914, in 
generate - result = self._sample( - File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2651, in _sample - outputs = self( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/root/.cache/huggingface/modules/transformers_modules/tiiuae/falcon-rw-1b/e4b9872bb803165eb22f0a867d4e6a64d34fce19/modeling_falcon.py"", line 900, in forward - transformer_outputs = self.transformer( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/root/.cache/huggingface/modules/transformers_modules/tiiuae/falcon-rw-1b/e4b9872bb803165eb22f0a867d4e6a64d34fce19/modeling_falcon.py"", line 797, in forward - outputs = block( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/root/.cache/huggingface/modules/transformers_modules/tiiuae/falcon-rw-1b/e4b9872bb803165eb22f0a867d4e6a64d34fce19/modeling_falcon.py"", line 453, in forward - attn_outputs = self.self_attention( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/root/.cache/huggingface/modules/transformers_modules/tiiuae/falcon-rw-1b/e4b9872bb803165eb22f0a867d4e6a64d34fce19/modeling_falcon.py"", line 291, in forward - fused_qkv = self.query_key_value(hidden_states) # [batch_size, seq_length, 3 x hidden_size] - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context - return func(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/gemv.py"", line 162, in forward - assert AWQ_INSTALLED, ( -AssertionError: AWQ kernels could not be loaded. 
Please install them from https://github.com/casper-hansen/AutoAWQ_kernels - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemv-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,Qwen/Qwen-14B,Qwen/Qwen-14B,cuda,0,42,,,True,,,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run - report = scenario.run(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run - self.run_model_loading_tracking(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking - backend.load() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load - self.load_transformers_model() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model - self.load_transformers_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights - self.load_transformers_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained - self.pretrained_model = self.automodel_loader.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 551, in from_pretrained - model_class = get_class_from_dynamic_module( - File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 502, in get_class_from_dynamic_module - final_module = get_cached_module_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 327, in get_cached_module_file - modules_needed = check_imports(resolved_module_file) - File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 182, in check_imports - raise ImportError( -ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. 
Run `pip install transformers_stream_generator` - -",qwen,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemv-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,stabilityai/stablelm-2-1_6b,stabilityai/stablelm-2-1_6b,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run - report = scenario.run(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 105, in run - _ = backend.generate(self.inputs, self.config.generate_kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context - return func(*args, **kwargs) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 400, in generate - return self.pretrained_model.generate(**inputs, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context - return func(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 1914, in generate - result = self._sample( - File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2651, in _sample - outputs = self( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/stablelm/modeling_stablelm.py"", line 1268, in forward - outputs = self.model( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/stablelm/modeling_stablelm.py"", line 1062, in forward - layer_outputs = decoder_layer( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File 
""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/stablelm/modeling_stablelm.py"", line 792, in forward - self_attn_output, self_attn_weights, present_key_value = self.self_attn( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/stablelm/modeling_stablelm.py"", line 325, in forward - query_states = self.q_proj(hidden_states) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context - return func(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/gemv.py"", line 162, in forward - assert AWQ_INSTALLED, ( -AssertionError: AWQ kernels could not be loaded. Please install them from https://github.com/casper-hansen/AutoAWQ_kernels - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemv-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,EleutherAI/pythia-6.7b,EleutherAI/pythia-6.7b,cuda,0,42,,,True,,,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,gpt_neox,MB,4278.767616,4933.025792,0.0,4347.396096,4328.833024,s,1,9.3279384765625,9.3279384765625,0.0,9.3279384765625,9.3279384765625,9.3279384765625,9.3279384765625,[9.3279384765625],,kWh,3.0188583802791733e-05,1.6521705044914097e-05,4.2426700607944845e-05,8.913698945565067e-05,,MB,1598.19776,5459.410944,0.0,4812.96384,4756.928512,s,10,1.0709219284057616,0.10709219284057617,0.00014371386316469308,0.10704881668090821,0.10718108749389649,0.10731815910339355,0.10742781639099121,"[0.10702918243408203, 0.10745523071289062, 0.10715062713623047, 0.10714950561523437, 0.10705814361572266, 0.10703948974609374, 0.10714012908935547, 0.10703244781494141, 0.10688912200927735, 0.10697805023193359]",tokens/s,2390.463704306596,kWh,1.2656633846114078e-06,6.935198181145191e-07,6.890046883191123e-06,8.84923008591705e-06,tokens/kWh,28929070.38403337,MB,1640.001536,5564.268544,0.0,4917.82144,4876.091904,s,10,18.049930297851564,1.8049930297851564,0.01599911548615732,1.8043642578125,1.8269099853515625,1.831535009765625,1.835235029296875,"[1.8029969482421875, 1.8361600341796875, 1.8258822021484375, 1.784880615234375, 1.8047509765625, 1.8052191162109374, 1.8040362548828126, 1.8046922607421876, 1.7777861328125, 
1.8035257568359375]",tokens/s,34.90318187405894,kWh,2.090809546101429e-05,1.1455714653153487e-05,5.755506022840256e-05,8.991887034257033e-05,tokens/kWh,700631.5777765492,,s,630,18.047715345382645,0.02864716721489316,0.0007013824359610274,0.02834483242034912,0.029521920776367186,0.02974853048324585,0.03069430835723877,"[0.028015615463256836, 0.02787942314147949, 0.027914239883422853, 0.027947008132934572, 0.028824575424194337, 0.02936422348022461, 0.029321216583251954, 0.02935398483276367, 0.0294021110534668, 0.029248512268066407, 0.02873036766052246, 0.02945030403137207, 0.028104639053344725, 0.028014591217041016, 0.029100032806396486, 0.029252607345581053, 0.028668928146362304, 0.02834534454345703, 0.028318719863891603, 0.028398591995239256, 0.02812518310546875, 0.02818355178833008, 0.028042240142822264, 0.02817843246459961, 0.028473344802856446, 0.02838937568664551, 0.029305856704711915, 0.02913382339477539, 0.029007871627807616, 0.029023231506347655, 0.029008895874023437, 0.028817407608032225, 0.02872831916809082, 0.02879283142089844, 0.028886016845703126, 0.028869632720947266, 0.027765760421752928, 0.028020736694335937, 0.028006399154663086, 0.029455360412597657, 0.028107776641845703, 0.028111871719360353, 0.02902118492126465, 0.028997631072998048, 0.029026304244995117, 0.028844032287597656, 0.028059648513793944, 0.028087295532226563, 0.028036096572875976, 0.0289300479888916, 0.029477888107299805, 0.02931711959838867, 0.028396543502807618, 0.029138944625854493, 0.029065216064453125, 0.029461503982543946, 0.027947008132934572, 0.02798182487487793, 0.028009471893310548, 0.02819891166687012, 0.028005376815795898, 0.028102655410766602, 0.028590080261230468, 0.028216320037841795, 0.02797772789001465, 0.02797670364379883, 0.028065792083740236, 0.027620351791381836, 0.028013568878173828, 0.028088319778442384, 0.02891263961791992, 0.029313024520874024, 0.028926975250244142, 0.029182975769042968, 0.02893721580505371, 0.029088768005371093, 0.029120512008666992, 0.028039167404174805, 0.0281395206451416, 0.028342271804809572, 0.029107200622558595, 0.029388799667358398, 0.029015039443969725, 0.028297216415405273, 0.028297216415405273, 0.029034496307373047, 0.031153152465820313, 0.029863935470581054, 0.030845951080322266, 0.029412351608276367, 0.029411327362060546, 0.028445695877075194, 0.029626367568969726, 0.029690879821777344, 0.029512704849243163, 0.02940108871459961, 0.02916147232055664, 0.029360128402709962, 0.029303808212280274, 0.029295616149902344, 0.029881343841552735, 0.02912665557861328, 0.027999231338500977, 0.028180479049682617, 0.028472320556640625, 0.028272640228271483, 0.030031871795654298, 0.02955673599243164, 0.029445119857788086, 0.02997452735900879, 0.02974515151977539, 0.029716480255126954, 0.029638656616210936, 0.029479936599731447, 0.02978508758544922, 0.030679040908813477, 0.030612480163574218, 0.02962124824523926, 0.02939801597595215, 0.02919526481628418, 0.029273088455200196, 0.029275136947631834, 0.029254655838012695, 0.029387775421142577, 0.02912563133239746, 0.029224960327148438, 0.028290048599243164, 0.028104703903198244, 0.02798591995239258, 0.027897855758666993, 0.02800230407714844, 0.027915264129638673, 0.028063743591308594, 0.02975129508972168, 0.029264896392822266, 0.029252607345581053, 0.029494272232055665, 0.029691904067993165, 0.02932633590698242, 0.029714431762695313, 0.029655040740966795, 0.02957107162475586, 0.029455360412597657, 0.029848575592041016, 0.0294072322845459, 0.029617151260375976, 0.029570047378540038, 0.02918502426147461, 0.029313024520874024, 
0.02940006446838379, 0.029215744018554687, 0.029313024520874024, 0.029937664031982423, 0.029487104415893556, 0.029259775161743166, 0.029243392944335936, 0.029387775421142577, 0.029256704330444337, 0.029233152389526368, 0.030121984481811522, 0.029097984313964844, 0.02813747215270996, 0.028042240142822264, 0.02815180778503418, 0.028111871719360353, 0.02816819190979004, 0.02815488052368164, 0.028052480697631835, 0.028060672760009765, 0.02814156723022461, 0.028096511840820314, 0.028298240661621094, 0.02814156723022461, 0.02815180778503418, 0.02871603202819824, 0.029520896911621092, 0.02913382339477539, 0.02920857620239258, 0.029216768264770508, 0.029231103897094726, 0.029427711486816405, 0.029560831069946288, 0.029318143844604492, 0.029256704330444337, 0.029266944885253908, 0.029435903549194335, 0.029162496566772462, 0.029638656616210936, 0.02853068733215332, 0.03079782485961914, 0.029717504501342775, 0.030096384048461915, 0.029045759201049806, 0.028050432205200194, 0.027972639083862303, 0.02802377510070801, 0.02815488052368164, 0.027979776382446288, 0.027843584060668947, 0.028013568878173828, 0.02793574333190918, 0.02814361572265625, 0.028092416763305664, 0.02796953582763672, 0.028021760940551758, 0.02870783996582031, 0.02954547119140625, 0.02898124885559082, 0.029113344192504883, 0.02913587188720703, 0.028280832290649413, 0.028057600021362306, 0.028029951095581054, 0.02797772789001465, 0.02794291114807129, 0.02796953582763672, 0.02792959976196289, 0.02793984031677246, 0.02796544075012207, 0.02795929527282715, 0.028124160766601562, 0.02797670364379883, 0.027963392257690428, 0.027947008132934572, 0.02800230407714844, 0.027821056365966795, 0.028083200454711913, 0.028040191650390626, 0.028866559982299804, 0.028291072845458985, 0.028070911407470703, 0.02816716766357422, 0.027865087509155274, 0.028025856018066408, 0.027842559814453126, 0.028128255844116212, 0.028091392517089843, 0.027817983627319336, 0.02795417594909668, 0.02800230407714844, 0.02799308776855469, 0.029149183273315428, 0.02916966438293457, 0.02915225601196289, 0.02938163185119629, 0.029041664123535156, 0.028055551528930665, 0.028013568878173828, 0.02818764877319336, 0.028052480697631835, 0.028047359466552735, 0.0279418888092041, 0.02794598388671875, 0.02794393539428711, 0.027918336868286132, 0.02797670364379883, 0.02794495964050293, 0.02789990425109863, 0.02796031951904297, 0.028023807525634766, 0.029222911834716796, 0.0287324161529541, 0.028206079483032227, 0.028053504943847656, 0.028033023834228517, 0.028080127716064454, 0.028013568878173828, 0.028223487854003908, 0.027900928497314452, 0.028120063781738282, 0.028200960159301756, 0.02817433547973633, 0.028654592514038086, 0.02955673599243164, 0.029420543670654296, 0.029646848678588866, 0.02959667205810547, 0.028886016845703126, 0.02958028793334961, 0.029025279998779296, 0.028039167404174805, 0.028411903381347657, 0.028257280349731444, 0.02819993591308594, 0.02817228889465332, 0.028072959899902345, 0.028065792083740236, 0.02791935920715332, 0.028089344024658205, 0.028054527282714844, 0.028058624267578124, 0.028673023223876954, 0.027974655151367187, 0.0285849609375, 0.029229055404663085, 0.029262847900390625, 0.029345792770385744, 0.029864959716796875, 0.02953113555908203, 0.029419519424438476, 0.02920345687866211, 0.029162496566772462, 0.029289472579956056, 0.029211648941040037, 0.029507583618164062, 0.029666303634643554, 0.02935398483276367, 0.029578239440917968, 0.02840166473388672, 0.028010496139526365, 0.028144639968872072, 0.02961305618286133, 0.029473791122436522, 
0.029414400100708008, 0.02832896041870117, 0.028050432205200194, 0.028014591217041016, 0.028023807525634766, 0.02834124755859375, 0.02815590476989746, 0.028016639709472657, 0.02799001693725586, 0.028064767837524415, 0.02836172866821289, 0.02815385627746582, 0.02795315170288086, 0.028039167404174805, 0.028058624267578124, 0.028485631942749022, 0.02956492805480957, 0.028627967834472655, 0.02850918388366699, 0.027909120559692382, 0.02810982322692871, 0.028116992950439453, 0.028188671112060547, 0.028169216156005858, 0.028082176208496092, 0.02831564712524414, 0.028110847473144532, 0.028092416763305664, 0.027918336868286132, 0.028880895614624022, 0.02933964729309082, 0.029329408645629884, 0.029261823654174804, 0.029262847900390625, 0.0292096004486084, 0.029270015716552734, 0.029023231506347655, 0.029173759460449217, 0.02919321632385254, 0.029246463775634765, 0.028589056015014647, 0.028062719345092774, 0.0299550724029541, 0.03057254409790039, 0.029500415802001953, 0.029783039093017577, 0.028325887680053712, 0.02834534454345703, 0.029240320205688477, 0.029501440048217774, 0.02910310363769531, 0.02922598457336426, 0.029717504501342775, 0.029894655227661132, 0.028957696914672853, 0.028013568878173828, 0.028015615463256836, 0.028128255844116212, 0.028494848251342773, 0.029100032806396486, 0.028124160766601562, 0.028110847473144532, 0.02793984031677246, 0.028021760940551758, 0.029658111572265625, 0.02814668846130371, 0.029191167831420898, 0.02899865531921387, 0.02922598457336426, 0.029163520812988283, 0.028980224609375, 0.028868608474731446, 0.028140544891357422, 0.02796236801147461, 0.028030975341796875, 0.02814771270751953, 0.029130752563476563, 0.03101900863647461, 0.030700544357299804, 0.02978508758544922, 0.029287424087524414, 0.028083200454711913, 0.02778112030029297, 0.028041215896606447, 0.029617151260375976, 0.029921279907226563, 0.029239295959472656, 0.02919219207763672, 0.029275136947631834, 0.0289751033782959, 0.02795724868774414, 0.028027904510498046, 0.027967487335205078, 0.027843584060668947, 0.028006399154663086, 0.02941542434692383, 0.029877248764038085, 0.029451263427734374, 0.029083648681640626, 0.02921779251098633, 0.02918604850769043, 0.029246463775634765, 0.029313024520874024, 0.029137920379638672, 0.029293567657470702, 0.029047807693481444, 0.02839756774902344, 0.02795315170288086, 0.027966463088989257, 0.02789990425109863, 0.027907072067260744, 0.028055551528930665, 0.02797260856628418, 0.02779136085510254, 0.02794086456298828, 0.028034048080444338, 0.02792448043823242, 0.027926528930664062, 0.028056575775146485, 0.027888639450073242, 0.028173311233520508, 0.028817407608032225, 0.028161024093627928, 0.028083200454711913, 0.02795929527282715, 0.027996160507202147, 0.027966463088989257, 0.027966463088989257, 0.029877248764038085, 0.03097804832458496, 0.029645824432373048, 0.029071359634399413, 0.029264896392822266, 0.02919424057006836, 0.029165567398071288, 0.02854400062561035, 0.02832793617248535, 0.029205503463745116, 0.029261823654174804, 0.029079551696777343, 0.029062143325805666, 0.028903423309326173, 0.029060096740722657, 0.02920038414001465, 0.029222911834716796, 0.029149183273315428, 0.028169216156005858, 0.027915264129638673, 0.027996160507202147, 0.02894745635986328, 0.02934783935546875, 0.029261823654174804, 0.028442623138427735, 0.028674047470092775, 0.028400640487670898, 0.02817024040222168, 0.02819174385070801, 0.02813030433654785, 0.028076032638549804, 0.028136447906494142, 0.02800127983093262, 0.02811392021179199, 0.028078079223632812, 0.02815999984741211, 
0.028246015548706056, 0.028044288635253906, 0.028062719345092774, 0.028271615982055662, 0.028054527282714844, 0.028108800888061523, 0.027947008132934572, 0.028244991302490235, 0.028066816329956053, 0.028064767837524415, 0.027822080612182616, 0.02797772789001465, 0.027877376556396483, 0.028437503814697264, 0.029221887588500976, 0.029084672927856447, 0.02912665557861328, 0.02921779251098633, 0.029261823654174804, 0.02914303970336914, 0.02896281623840332, 0.02910207939147949, 0.028983295440673826, 0.027858943939208985, 0.027894784927368164, 0.028669952392578125, 0.028273664474487304, 0.027773952484130858, 0.02793779182434082, 0.027813888549804686, 0.02878873634338379, 0.02917580795288086, 0.029031423568725585, 0.02900480079650879, 0.029259775161743166, 0.028888063430786134, 0.028140544891357422, 0.027876352310180662, 0.027839487075805663, 0.02779545593261719, 0.027975679397583008, 0.027886592864990234, 0.028472320556640625, 0.03055718421936035, 0.02793267250061035, 0.027886592864990234, 0.027880447387695313, 0.02775859260559082, 0.02777497673034668, 0.02773196792602539, 0.02800230407714844, 0.02779955291748047, 0.02795929527282715, 0.02751590347290039, 0.02739200019836426, 0.028015615463256836, 0.027844608306884764, 0.027907072067260744, 0.02854400062561035, 0.02817024040222168, 0.027999231338500977, 0.027971584320068358, 0.02798899269104004, 0.02790297508239746, 0.02816716766357422, 0.028060672760009765, 0.028180479049682617, 0.028085248947143555, 0.02817843246459961, 0.027966463088989257, 0.028679168701171875, 0.028911615371704103, 0.02922598457336426, 0.029301759719848632, 0.029213695526123046, 0.028880895614624022, 0.02795212745666504, 0.027840511322021484, 0.02795417594909668, 0.027694080352783205, 0.028342271804809572, 0.02792959976196289, 0.02813542366027832, 0.027918336868286132, 0.02812211227416992, 0.027870208740234374, 0.02834432029724121, 0.028031999588012696, 0.028464128494262695, 0.02792755126953125, 0.02819993591308594, 0.028711936950683595, 0.028237823486328126, 0.028022783279418945, 0.028082176208496092, 0.02791219139099121, 0.02815692710876465, 0.02840985679626465, 0.02818662452697754, 0.02817945671081543, 0.028251136779785156, 0.028180479049682617, 0.02816819190979004, 0.028090368270874022, 0.028252159118652344, 0.030285823822021486, 0.02997760009765625, 0.02940928077697754, 0.02915123176574707, 0.02921062469482422, 0.02936627197265625, 0.029064191818237304, 0.02937651252746582, 0.029306880950927733, 0.029627391815185547, 0.02922700881958008, 0.028017663955688478, 0.028045312881469726, 0.028042240142822264, 0.02813849639892578, 0.028083200454711913, 0.028128255844116212, 0.028045312881469726, 0.02816409683227539, 0.028071935653686524, 0.027889663696289063, 0.02815590476989746, 0.028649471282958985, 0.028383232116699218, 0.02817535972595215, 0.02812211227416992, 0.028022783279418945, 0.0307589111328125, 0.030488576889038086, 0.02938368034362793, 0.029336576461791993, 0.029284351348876952, 0.029281280517578126, 0.02933350372314453, 0.029286399841308593, 0.02835558319091797, 0.02810982322692871, 0.02795417594909668, 0.028017663955688478, 0.028054527282714844, 0.028084224700927734, 0.027987968444824218, 0.028053504943847656, 0.028018688201904295, 0.02876518440246582, 0.029283327102661134, 0.02911027145385742, 0.02916044807434082]",tokens/s,34.90746545718201,,,,,,,, 
-4bit-awq-gemv-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,01-ai/Yi-34B,01-ai/Yi-34B,cuda,0,42,,,True,,,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,llama,MB,17659.572224,20059.783168,0.0,19474.153472,19254.604288,s,1,15.43833203125,15.43833203125,0.0,15.43833203125,15.43833203125,15.43833203125,15.43833203125,[15.43833203125],,kWh,9.9364866951389e-05,5.4439847860871957e-05,0.00013778177689199866,0.0002915864917042596,,MB,1730.912256,20407.9104,0.0,19761.463296,19446.641152,s,10,5.331217285156251,0.5331217285156251,7.870914088013054e-05,0.5331379089355469,0.5332213012695313,0.5332226196289063,0.5332236743164063,"[0.5331542358398438, 0.5331610107421875, 0.533046875, 0.5332239379882813, 0.5331771850585938, 0.5329619140625, 0.53312158203125, 0.5330892333984375, 0.5332210083007812, 0.533060302734375]",tokens/s,480.1905199264392,kWh,6.30467365127913e-06,3.4532379436978886e-06,3.545608099642106e-05,4.5213992591398074e-05,tokens/kWh,5661964.036520495,MB,1762.402304,20433.076224,0.0,19786.62912,19446.643712,s,10,34.75059521484375,3.4750595214843747,0.023859445207763643,3.4665679931640625,3.50187744140625,3.5135504150390626,3.5228887939453126,"[3.481065185546875, 3.4963193359375, 3.470353271484375, 3.4588046875, 3.499283447265625, 3.455359375, 3.46278271484375, 3.45739892578125, 3.4440048828125, 3.525223388671875]",tokens/s,18.129185877394555,kWh,4.196978028997099e-05,2.300378586910234e-05,0.00019147965610757863,0.000256453222266652,tokens/kWh,245658.83572519352,,s,630,34.74806676483156,0.05515566153147865,0.0011685302629421501,0.054568447113037113,0.056791038513183595,0.05708149623870849,0.05832241184234619,"[0.05542604827880859, 0.054602752685546874, 0.05443174362182617, 0.054263809204101565, 0.0545904655456543, 0.05444812774658203, 0.05534207916259765, 0.05692313766479492, 0.0563240966796875, 0.05446246337890625, 0.055008255004882815, 0.054788097381591794, 0.05449523162841797, 0.054675457000732425, 0.05462527847290039, 0.054572032928466796, 0.056052734375, 0.057024513244628906, 0.056559616088867185, 0.05617049789428711, 0.05644800186157226, 0.05457920074462891, 0.057390079498291016, 0.05472972869873047, 0.05429555130004883, 0.055408641815185546, 0.05425356674194336, 0.053935104370117185, 0.054212608337402345, 0.05564416122436523, 0.054191104888916014, 0.05429759979248047, 0.05423513412475586, 0.05468876647949219, 0.05706649780273437, 0.057591808319091796, 0.056791038513183595, 0.05479935836791992, 0.05453107070922852, 0.054401023864746094, 0.05414092636108398, 0.05570867156982422, 0.054148094177246094, 0.0541317138671875, 0.054392833709716794, 0.05427609634399414, 0.05446758270263672, 0.05643366241455078, 0.05680640029907227, 0.05612748718261719, 0.05441331100463867, 0.054642688751220705, 0.05465087890625, 0.054591487884521485, 0.05436415863037109, 0.054809600830078124, 0.056592384338378904, 0.05649407958984375, 0.05640192031860351, 0.05523660659790039, 0.05584384155273438, 0.057191425323486325, 0.056668159484863284, 0.05546393585205078, 0.057082878112792966, 0.05465804672241211, 0.055221248626708984, 
0.05825843048095703, 0.05699071884155273, 0.05684428787231445, 0.05572710418701172, 0.05465497589111328, 0.05439180755615235, 0.056436737060546874, 0.05653708648681641, 0.05639475250244141, 0.055408641815185546, 0.05429759979248047, 0.05659033584594726, 0.054886398315429685, 0.05438361740112305, 0.05422796630859375, 0.05453823852539062, 0.05436723327636719, 0.05429248046875, 0.054232063293457033, 0.054468608856201174, 0.05426073455810547, 0.05600460815429688, 0.05666304016113281, 0.056455169677734375, 0.05714022445678711, 0.05662515258789062, 0.05427916717529297, 0.05458943939208984, 0.05416243362426758, 0.05589606475830078, 0.056828929901123044, 0.05547110366821289, 0.0565401611328125, 0.05475942230224609, 0.05461708831787109, 0.0590643196105957, 0.05499084854125977, 0.0545167350769043, 0.054280193328857425, 0.05422694396972656, 0.05419007873535156, 0.05423820877075195, 0.05525196838378906, 0.056787967681884766, 0.05534822463989258, 0.05419417572021484, 0.054100990295410156, 0.05549260711669922, 0.056615936279296876, 0.05587046432495117, 0.05638860702514648, 0.05676339340209961, 0.05646847915649414, 0.054397953033447265, 0.05697228622436523, 0.05620326232910156, 0.05429555130004883, 0.05614694213867188, 0.05461503982543945, 0.05511065673828125, 0.05422284698486328, 0.054182910919189455, 0.054010879516601565, 0.05421158218383789, 0.054174720764160154, 0.05418905639648437, 0.05429248046875, 0.05392588806152344, 0.05451980972290039, 0.055126014709472655, 0.05406105422973633, 0.05423616027832031, 0.05413888168334961, 0.054117374420166016, 0.05463040161132812, 0.05428531265258789, 0.0542023696899414, 0.05410201644897461, 0.05414400100708008, 0.054125568389892575, 0.055731201171875, 0.05789388656616211, 0.05486284637451172, 0.05455769729614258, 0.05631078338623047, 0.056153087615966796, 0.05637529754638672, 0.05669478225708008, 0.05439385604858398, 0.05379891204833984, 0.05428326416015625, 0.054117374420166016, 0.05433651351928711, 0.05695897674560547, 0.05638348770141602, 0.05409894561767578, 0.05664767837524414, 0.055175167083740234, 0.05442969512939453, 0.05489868927001953, 0.05736140823364258, 0.056097793579101565, 0.0542105598449707, 0.054196224212646485, 0.054363136291503904, 0.055580673217773435, 0.05712691116333008, 0.05484543991088867, 0.05432729721069336, 0.054251518249511715, 0.05532876968383789, 0.05756620788574219, 0.05446656036376953, 0.05637222290039062, 0.05744537734985351, 0.05668864059448242, 0.056513534545898435, 0.05653299331665039, 0.054719486236572266, 0.05434982299804687, 0.05506150436401367, 0.05458227157592774, 0.05673881530761719, 0.056586238861083986, 0.054435840606689455, 0.054392833709716794, 0.05434265518188477, 0.05423513412475586, 0.05528371047973633, 0.05575065612792969, 0.054079486846923826, 0.0542023696899414, 0.05438361740112305, 0.05422182464599609, 0.054447105407714844, 0.054128639221191405, 0.05512704086303711, 0.05448396682739258, 0.054435840606689455, 0.0544450569152832, 0.05426995086669922, 0.05433958435058594, 0.054177791595458984, 0.054184959411621096, 0.054027263641357424, 0.054488063812255856, 0.05414912033081055, 0.05605376052856445, 0.054184959411621096, 0.05412351989746094, 0.05566361618041992, 0.05655449676513672, 0.054384639739990234, 0.05454438400268555, 0.054193153381347656, 0.053787647247314455, 0.05691289520263672, 0.05450035095214844, 0.05448704147338867, 0.05441228866577148, 0.0549222412109375, 0.05664153671264648, 0.05761843109130859, 0.05740031814575195, 0.054624256134033204, 0.054035457611083984, 0.05401190567016602, 
0.053836799621582034, 0.05436723327636719, 0.054593536376953126, 0.05415731048583984, 0.05465497589111328, 0.05699993515014649, 0.05427302551269531, 0.054695934295654294, 0.05424639892578125, 0.05509222412109375, 0.05588070297241211, 0.05657190322875977, 0.05630976104736328, 0.056268798828125, 0.05424435043334961, 0.0540948486328125, 0.05424332809448242, 0.055602176666259766, 0.054919166564941405, 0.054489086151123044, 0.06142873764038086, 0.059665409088134766, 0.056569854736328126, 0.05631590270996094, 0.05667327880859375, 0.056600574493408204, 0.056753150939941405, 0.05413888168334961, 0.05561753463745117, 0.054730751037597655, 0.0562503662109375, 0.05386956787109375, 0.05404159927368164, 0.05924863815307617, 0.05699174499511719, 0.05489664077758789, 0.05451878356933594, 0.05448294448852539, 0.054714366912841796, 0.05636403274536133, 0.05432320022583008, 0.05537484741210937, 0.056981502532958986, 0.055741439819335936, 0.05662310409545898, 0.05658726501464844, 0.05672243118286133, 0.054542335510253906, 0.054370304107666016, 0.05468364715576172, 0.05424639892578125, 0.05591756820678711, 0.05701529693603516, 0.05651251220703125, 0.05655756759643555, 0.05532364654541016, 0.05444403076171875, 0.05455462265014648, 0.05600665664672851, 0.05453209686279297, 0.05429964828491211, 0.05433651351928711, 0.05425254440307617, 0.05411328125, 0.05530931091308594, 0.054645759582519535, 0.05430681610107422, 0.054125568389892575, 0.05406105422973633, 0.05429145431518555, 0.05461811065673828, 0.05660671997070312, 0.056513534545898435, 0.05557759857177735, 0.0543559684753418, 0.057995262145996096, 0.057079807281494144, 0.056085502624511716, 0.054176769256591796, 0.05415731048583984, 0.05377536010742188, 0.05500928115844726, 0.05449318313598633, 0.054504447937011716, 0.054106113433837894, 0.053992446899414064, 0.054217727661132815, 0.05421670532226563, 0.05393407821655274, 0.05442969512939453, 0.05494681549072265, 0.056791038513183595, 0.05442047882080078, 0.05456588745117188, 0.054035457611083984, 0.05483212661743164, 0.056374271392822264, 0.05651763153076172, 0.05684838485717773, 0.05634969711303711, 0.05473996734619141, 0.05403443145751953, 0.05398323059082031, 0.05401398468017578, 0.05433340835571289, 0.053969921112060545, 0.05399859237670898, 0.0541102066040039, 0.05394636917114258, 0.05423513412475586, 0.05406105422973633, 0.053969921112060545, 0.05413785552978516, 0.05380915069580078, 0.05607833480834961, 0.05763481521606445, 0.056118270874023435, 0.05434470367431641, 0.0566558723449707, 0.05485567855834961, 0.053866497039794924, 0.054351871490478515, 0.05550899124145508, 0.05389926528930664, 0.054470657348632816, 0.056202239990234375, 0.05572710418701172, 0.05478400039672852, 0.05453926467895508, 0.055537662506103515, 0.05504512023925781, 0.054215679168701174, 0.05488844680786133, 0.054191104888916014, 0.05433446502685547, 0.054148094177246094, 0.05679513549804688, 0.0546611213684082, 0.0543109130859375, 0.056180736541748044, 0.05581721496582031, 0.05451366424560547, 0.055341056823730465, 0.05415935897827148, 0.05546188735961914, 0.056804351806640625, 0.055332862854003906, 0.05834854507446289, 0.05686272048950195, 0.054659072875976565, 0.056683521270751956, 0.05489152145385742, 0.05448191833496094, 0.05653504180908203, 0.05808844757080078, 0.05643775939941406, 0.055196670532226565, 0.05454950332641602, 0.05480550384521484, 0.05447782516479492, 0.054416385650634766, 0.054768638610839845, 0.054675457000732425, 0.054406143188476565, 0.05438054275512695, 0.054182910919189455, 0.053822463989257815, 
0.05434265518188477, 0.054316032409667966, 0.054255615234375, 0.05428940963745117, 0.054182910919189455, 0.054563838958740236, 0.05422387313842773, 0.05424844741821289, 0.05455769729614258, 0.05404467010498047, 0.054489086151123044, 0.05455974578857422, 0.05427916717529297, 0.055392257690429686, 0.05605990219116211, 0.054801406860351565, 0.05711974334716797, 0.05754163360595703, 0.05650534439086914, 0.05552435302734375, 0.05425049591064453, 0.05424025726318359, 0.05414400100708008, 0.05409894561767578, 0.0546693115234375, 0.05423923110961914, 0.054019073486328124, 0.054141952514648435, 0.05394636917114258, 0.05424025726318359, 0.05396377563476563, 0.05407231903076172, 0.05451264190673828, 0.054329345703125, 0.05400166320800781, 0.05623295974731445, 0.05623603057861328, 0.05520588684082031, 0.054209537506103515, 0.054209537506103515, 0.05475942230224609, 0.05454643249511719, 0.05429964828491211, 0.055298046112060545, 0.05426892852783203, 0.054010879516601565, 0.054212608337402345, 0.054406143188476565, 0.053926910400390625, 0.055175167083740234, 0.054139904022216793, 0.0540682258605957, 0.054100990295410156, 0.054093822479248044, 0.054073345184326174, 0.05467647933959961, 0.05425459289550781, 0.05401497650146484, 0.05374259185791016, 0.05522431945800781, 0.054550529479980465, 0.059273216247558595, 0.05647564697265625, 0.054626304626464846, 0.05451878356933594, 0.05468979263305664, 0.05436928176879883, 0.054191104888916014, 0.05612441635131836, 0.05436006546020508, 0.05437542343139649, 0.054684703826904296, 0.05435081481933594, 0.0546693115234375, 0.05697228622436523, 0.05629030227661133, 0.054825984954833984, 0.05680025482177734, 0.05630771255493164, 0.056605697631835934, 0.05501337432861328, 0.05520588684082031, 0.05431808090209961, 0.05419417572021484, 0.05429043197631836, 0.054330368041992184, 0.054338558197021485, 0.05450137710571289, 0.05422694396972656, 0.054150142669677735, 0.05428531265258789, 0.05422079849243164, 0.055572479248046876, 0.05439180755615235, 0.05439487838745117, 0.05463347244262695, 0.05439180755615235, 0.05692927932739258, 0.05729894256591797, 0.05638143920898438, 0.05461708831787109, 0.054624256134033204, 0.054484992980957034, 0.05560627365112305, 0.05463142395019531, 0.054604801177978515, 0.05463961410522461, 0.05448396682739258, 0.05441228866577148, 0.054317054748535154, 0.05444095993041992, 0.054317054748535154, 0.054329345703125, 0.05447884750366211, 0.05436006546020508, 0.05449625778198242, 0.05452288055419922, 0.05429145431518555, 0.05449932861328125, 0.05457715225219727, 0.054363136291503904, 0.054179840087890625, 0.05412044906616211, 0.054160385131835936, 0.05430169677734375, 0.05430169677734375, 0.05456588745117188, 0.054201343536376956, 0.05408153533935547, 0.054168575286865236, 0.05418188858032227, 0.054150142669677735, 0.05429964828491211, 0.0544450569152832, 0.05663129425048828, 0.05422387313842773, 0.05419827270507813, 0.05486489486694336, 0.05435801696777344, 0.05415935897827148, 0.05391257476806641, 0.05403647994995117, 0.05437849426269531, 0.05777612686157227, 0.057608192443847656, 0.05677772903442383, 0.05657292938232422, 0.056068096160888675, 0.05428531265258789, 0.055820289611816405, 0.054645759582519535, 0.05428838348388672, 0.05433958435058594, 0.05437747192382812, 0.054370304107666016, 0.05427814483642578, 0.05568716812133789, 0.05441228866577148, 0.05451980972290039, 0.054387710571289063, 0.05443174362182617, 0.054317054748535154, 0.05425766372680664, 0.05454950332641602, 0.054400001525878906, 0.05428531265258789, 0.055005184173583986, 
0.05452492904663086, 0.05465497589111328, 0.05428224182128906, 0.05440204620361328, 0.059003902435302735, 0.05680332946777344, 0.054340606689453126, 0.05436108779907227, 0.054217727661132815, 0.05452492904663086, 0.05454438400268555, 0.05428121566772461, 0.05434982299804687, 0.0543109130859375, 0.054691841125488284, 0.054719486236572266, 0.054542335510253906, 0.05427711868286133, 0.05441228866577148, 0.05457100677490234, 0.05428531265258789, 0.05598515319824219, 0.054793216705322265, 0.05651456069946289, 0.056599552154541016, 0.05658828735351563, 0.05653913497924805, 0.056659969329833984, 0.056562686920166014, 0.056543231964111325, 0.05707263946533203, 0.057115646362304685, 0.056567806243896485, 0.05732556915283203, 0.0567275505065918, 0.05671628952026367, 0.05678387069702148, 0.05674086380004883, 0.05685248184204102, 0.0567193603515625, 0.056840190887451174, 0.056807422637939455, 0.056497150421142575, 0.056441856384277345, 0.056651775360107424, 0.056766464233398435, 0.056684543609619144, 0.056809471130371096, 0.05655859375, 0.05675929641723633, 0.057059326171875, 0.05677875137329102, 0.05673267364501953, 0.056736766815185545, 0.05709209442138672, 0.05678092956542969, 0.058084224700927736, 0.05695180892944336, 0.05636710357666016, 0.05665280151367187, 0.05423923110961914, 0.054168575286865236]",tokens/s,18.13050505122264,,,,,,,, -4bit-awq-gemv-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,EleutherAI/gpt-neo-1.3B,EleutherAI/gpt-neo-1.3B,cuda,0,42,,,True,,,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,gpt_neo,MB,1530.171392,1531.445248,0.0,945.815552,943.480832,s,1,8.4138681640625,8.4138681640625,0.0,8.4138681640625,8.4138681640625,8.4138681640625,8.4138681640625,[8.4138681640625],,kWh,1.5775408730552778e-05,8.630256074187768e-06,2.1589739494010107e-05,4.5995404298750656e-05,,MB,1640.783872,1726.480384,0.0,1080.03328,1046.519808,s,10,0.3391534729003906,0.03391534729003906,0.00019113792813514998,0.03394009590148926,0.034150530242919926,0.03420641632080078,0.03425112518310547,"[0.03362406539916992, 0.03375443267822266, 0.033962718963623045, 0.03402220916748047, 0.033804096221923825, 0.03426230239868164, 0.033984577178955075, 0.034138111114501955, 0.033683486938476566, 0.03391747283935547]",tokens/s,7548.205177164357,kWh,4.016145432791764e-07,2.2006556225964464e-07,1.780463759586479e-06,2.4021438651253e-06,tokens/kWh,106571468.81027736,MB,1672.43776,1812.463616,0.0,1166.016512,1082.823168,s,10,11.51160498046875,1.151160498046875,0.016164464719865032,1.1532300415039063,1.1703744018554687,1.1704326843261719,1.1704793103027344,"[1.1325938720703126, 1.1301783447265625, 1.124276123046875, 1.1529871826171876, 1.1592452392578125, 1.170490966796875, 1.153472900390625, 1.1703614501953126, 1.149892578125, 
1.1681063232421875]",tokens/s,54.72738172208777,kWh,1.3345177504151255e-05,7.3127460801413645e-06,2.6333225398010234e-05,4.699114898230285e-05,tokens/kWh,1340678.007761125,,s,630,11.507787794113169,0.01826632983192565,0.00046660600853668974,0.01846681594848633,0.018697523307800293,0.01889950714111328,0.019449927253723145,"[0.017408000946044923, 0.017552383422851564, 0.018840576171875, 0.018364416122436524, 0.019357696533203125, 0.018722816467285155, 0.018437120437622072, 0.01827840042114258, 0.01815449523925781, 0.0176680965423584, 0.017690624237060547, 0.017531904220581054, 0.017673215866088866, 0.01799884796142578, 0.01822822380065918, 0.017748992919921876, 0.0175994873046875, 0.017892351150512697, 0.017763328552246094, 0.01945497512817383, 0.01786777687072754, 0.017674240112304687, 0.017565696716308594, 0.01768448066711426, 0.01760358428955078, 0.01760972785949707, 0.017512447357177736, 0.017889280319213868, 0.018142208099365235, 0.01763942337036133, 0.017704959869384765, 0.017827840805053712, 0.0176312313079834, 0.01761587142944336, 0.01765888023376465, 0.017746944427490235, 0.017715200424194336, 0.01760358428955078, 0.017604608535766602, 0.017688575744628905, 0.01755340766906738, 0.018524160385131837, 0.01963212776184082, 0.018679807662963867, 0.018929664611816405, 0.018571264266967775, 0.019478527069091797, 0.018563072204589845, 0.018275327682495117, 0.017864704132080078, 0.01756876754760742, 0.01764761543273926, 0.01782374382019043, 0.017878015518188475, 0.01759436798095703, 0.017656831741333007, 0.017551359176635743, 0.01757900810241699, 0.01760972785949707, 0.017702911376953127, 0.017566719055175782, 0.01801113510131836, 0.0176312313079834, 0.017741823196411134, 0.017590272903442384, 0.01756876754760742, 0.017535999298095704, 0.01761894416809082, 0.01782374382019043, 0.0182609920501709, 0.018110464096069336, 0.0177838077545166, 0.017582080841064454, 0.017612800598144532, 0.017691648483276368, 0.017580032348632812, 0.017605632781982423, 0.01759846305847168, 0.01766912078857422, 0.017760255813598632, 0.018112512588500978, 0.0180633602142334, 0.01823027229309082, 0.017839103698730468, 0.017663999557495116, 0.017949695587158202, 0.018192384719848635, 0.018365440368652345, 0.01846681594848633, 0.018328575134277342, 0.017896448135375977, 0.017978368759155275, 0.017744895935058593, 0.01762099266052246, 0.018076671600341796, 0.018173952102661133, 0.017967103958129883, 0.018313215255737304, 0.019252223968505858, 0.018301952362060548, 0.01827840042114258, 0.01784524726867676, 0.018025472640991212, 0.018101247787475586, 0.017795072555541993, 0.018787328720092773, 0.01835212707519531, 0.01842483139038086, 0.018110464096069336, 0.018147327423095702, 0.017761280059814453, 0.017695743560791014, 0.017879039764404296, 0.017999872207641602, 0.01762816047668457, 0.01776742362976074, 0.018101247787475586, 0.01819647979736328, 0.017681407928466796, 0.01765273666381836, 0.017663999557495116, 0.017657855987548828, 0.017530879974365234, 0.017672191619873046, 0.01761587142944336, 0.017781759262084963, 0.017787904739379884, 0.017657855987548828, 0.017714176177978515, 0.017605632781982423, 0.01763430404663086, 0.01757695960998535, 0.0175861759185791, 0.017743871688842772, 0.017449983596801756, 0.01763430404663086, 0.01763942337036133, 0.01760972785949707, 0.01801625633239746, 0.01824358367919922, 0.01785753631591797, 0.017661951065063478, 0.01765068817138672, 0.017562623977661132, 0.01764761543273926, 0.01757798385620117, 0.017624063491821287, 0.017563648223876953, 0.017502208709716797, 0.01765888023376465, 
0.017892351150512697, 0.018126848220825196, 0.018074623107910158, 0.018326528549194337, 0.018173952102661133, 0.018077695846557617, 0.017557504653930665, 0.017695743560791014, 0.017605632781982423, 0.01761587142944336, 0.01763020706176758, 0.017921024322509766, 0.01761996841430664, 0.017546239852905272, 0.017584127426147463, 0.017726463317871095, 0.017557504653930665, 0.017597440719604493, 0.017596416473388672, 0.01757900810241699, 0.017613824844360353, 0.017682432174682617, 0.018041856765747072, 0.019537919998168944, 0.018654207229614257, 0.018331647872924805, 0.018174976348876954, 0.018191360473632814, 0.017927167892456054, 0.01768448066711426, 0.017625087738037108, 0.017730560302734375, 0.017716224670410157, 0.018873344421386717, 0.01861734390258789, 0.018255872726440428, 0.017648639678955077, 0.017718271255493166, 0.018153472900390624, 0.01741823959350586, 0.017723392486572266, 0.017632255554199217, 0.01789952087402344, 0.018111488342285157, 0.018119680404663087, 0.017914880752563478, 0.01766912078857422, 0.01775103950500488, 0.017729536056518554, 0.017633279800415038, 0.01769267272949219, 0.017656831741333007, 0.017533952713012696, 0.017518592834472657, 0.01786675262451172, 0.017838079452514647, 0.01799884796142578, 0.017541120529174805, 0.017521663665771483, 0.017905664443969727, 0.018700288772583007, 0.018682880401611326, 0.01864192008972168, 0.0186562557220459, 0.018572288513183592, 0.018519039154052733, 0.018526208877563476, 0.01839206314086914, 0.018549760818481444, 0.018488319396972656, 0.018655231475830078, 0.018521087646484375, 0.018489343643188477, 0.01846886444091797, 0.01866649627685547, 0.0184268798828125, 0.018601984024047852, 0.018501632690429686, 0.018537471771240235, 0.01845043182373047, 0.018347007751464844, 0.01844428825378418, 0.018564096450805666, 0.0184770565032959, 0.018618368148803712, 0.018491392135620118, 0.018587648391723634, 0.018734079360961914, 0.01924812889099121, 0.018815999984741212, 0.018457599639892578, 0.018509824752807616, 0.018566144943237304, 0.01862553596496582, 0.018692096710205077, 0.018523136138916017, 0.018521087646484375, 0.01858355140686035, 0.018520063400268554, 0.018544639587402344, 0.018522111892700196, 0.01848320007324219, 0.017479679107666016, 0.017704959869384765, 0.01761689567565918, 0.017731584548950196, 0.017572864532470703, 0.017649663925170898, 0.017763328552246094, 0.018159616470336915, 0.017827840805053712, 0.017747968673706056, 0.01765068817138672, 0.018784255981445314, 0.018647039413452148, 0.018569215774536133, 0.01804083251953125, 0.01859993553161621, 0.01843814468383789, 0.018488319396972656, 0.018397184371948243, 0.018491392135620118, 0.018528255462646484, 0.018549760818481444, 0.018469888687133788, 0.018579456329345705, 0.01846784019470215, 0.018540544509887694, 0.01847091293334961, 0.018542591094970702, 0.018495487213134765, 0.018465791702270508, 0.018538496017456055, 0.018457599639892578, 0.018448383331298827, 0.0184770565032959, 0.01847500801086426, 0.01842790412902832, 0.018513919830322266, 0.01843814468383789, 0.018464767456054687, 0.01859584045410156, 0.018974720001220705, 0.01886617660522461, 0.018509824752807616, 0.018618368148803712, 0.01863065528869629, 0.01885798454284668, 0.018588672637939452, 0.018655231475830078, 0.018526208877563476, 0.018485248565673826, 0.018497535705566406, 0.018549760818481444, 0.018532352447509767, 0.018541568756103514, 0.01843814468383789, 0.01859686470031738, 0.018530303955078126, 0.018506752014160157, 0.018374656677246092, 0.018573312759399413, 0.018504703521728515, 
0.018702335357666015, 0.018495487213134765, 0.018510847091674804, 0.01850060844421387, 0.018572288513183592, 0.0184770565032959, 0.018488319396972656, 0.018564096450805666, 0.018489343643188477, 0.01847603225708008, 0.01846681594848633, 0.01844121551513672, 0.018478080749511717, 0.01858355140686035, 0.018497535705566406, 0.018453504562377928, 0.018479103088378905, 0.01859993553161621, 0.0184770565032959, 0.018473983764648438, 0.018512895584106445, 0.018491392135620118, 0.019125312805175782, 0.01871353530883789, 0.01859686470031738, 0.018523136138916017, 0.018547712326049806, 0.01843507194519043, 0.01843404769897461, 0.018440191268920898, 0.018509824752807616, 0.018533376693725585, 0.01867263984680176, 0.019177471160888672, 0.01884569549560547, 0.0185743350982666, 0.01862144088745117, 0.018518016815185546, 0.0186296329498291, 0.018522111892700196, 0.018421760559082033, 0.01855897521972656, 0.018494464874267577, 0.018469888687133788, 0.018479103088378905, 0.018524160385131837, 0.018565120697021483, 0.018529279708862305, 0.018506752014160157, 0.018592767715454102, 0.018381824493408205, 0.018549760818481444, 0.019116031646728517, 0.01886310386657715, 0.0185743350982666, 0.018547712326049806, 0.018903039932250978, 0.018724863052368163, 0.018565120697021483, 0.018545663833618165, 0.01861222457885742, 0.018551807403564453, 0.018515968322753908, 0.018494464874267577, 0.018568191528320312, 0.017550336837768556, 0.01764454460144043, 0.017745920181274414, 0.017781759262084963, 0.017760255813598632, 0.017758207321166994, 0.017640447616577147, 0.01779199981689453, 0.01768550491333008, 0.017746944427490235, 0.01757695960998535, 0.017735679626464843, 0.017732608795166017, 0.017675264358520508, 0.017760255813598632, 0.017765375137329103, 0.01779302406311035, 0.017779712677001954, 0.017709056854248048, 0.01775103950500488, 0.017761280059814453, 0.017763328552246094, 0.018295808792114256, 0.01839411163330078, 0.018780160903930664, 0.018514944076538087, 0.01861427116394043, 0.018579456329345705, 0.018497535705566406, 0.018507776260375978, 0.018480127334594726, 0.01862758445739746, 0.018550783157348632, 0.018464767456054687, 0.018516992568969725, 0.018526208877563476, 0.018550783157348632, 0.018697216033935548, 0.01842995262145996, 0.018566144943237304, 0.018695167541503906, 0.01868083190917969, 0.018552831649780274, 0.018522111892700196, 0.018898944854736328, 0.018678783416748047, 0.01861631965637207, 0.018694143295288086, 0.018561023712158203, 0.018557952880859374, 0.018490367889404297, 0.01844121551513672, 0.018549760818481444, 0.018485248565673826, 0.018570240020751954, 0.018576383590698242, 0.018751487731933594, 0.01903615951538086, 0.01943756866455078, 0.018993152618408202, 0.01863372802734375, 0.01858252716064453, 0.01857535934448242, 0.018258943557739257, 0.01846886444091797, 0.01860812759399414, 0.018569215774536133, 0.01866649627685547, 0.01859174346923828, 0.018537471771240235, 0.018547712326049806, 0.018529279708862305, 0.018512895584106445, 0.01861324882507324, 0.018505727767944336, 0.01865727996826172, 0.01901260757446289, 0.01884774398803711, 0.018718719482421875, 0.018594816207885743, 0.01848320007324219, 0.018174976348876954, 0.018284543991088868, 0.018637823104858398, 0.01844223976135254, 0.018533376693725585, 0.01865727996826172, 0.018556928634643553, 0.01860403251647949, 0.018562047958374024, 0.01846272087097168, 0.018543615341186523, 0.018979839324951172, 0.01881292724609375, 0.018876415252685547, 0.01860812759399414, 0.018541568756103514, 0.01863065528869629, 0.018584575653076172, 
0.018733055114746093, 0.018225151062011717, 0.01816166305541992, 0.01863270378112793, 0.018551807403564453, 0.01843302345275879, 0.018536447525024414, 0.018603008270263673, 0.018554880142211915, 0.018522111892700196, 0.018503679275512695, 0.01856716728210449, 0.018585599899291993, 0.018487295150756835, 0.018537471771240235, 0.018556928634643553, 0.018979839324951172, 0.018908159255981445, 0.018437120437622072, 0.018507776260375978, 0.018543615341186523, 0.01863884735107422, 0.018511871337890624, 0.018513919830322266, 0.018530303955078126, 0.018516992568969725, 0.018486272811889647, 0.017549312591552735, 0.017822719573974608, 0.017893375396728514, 0.017548288345336914, 0.018408447265625, 0.018120704650878908, 0.01781760025024414, 0.01779199981689453, 0.017756160736083985, 0.017743871688842772, 0.01759436798095703, 0.01768550491333008, 0.017699840545654297, 0.017679359436035155, 0.017622016906738282, 0.01770086479187012, 0.017894399642944335, 0.01754521560668945, 0.01761075210571289, 0.01882931137084961, 0.0186296329498291, 0.01845248031616211, 0.01738956832885742, 0.017754112243652344, 0.017756160736083985, 0.01760870361328125, 0.017711103439331053, 0.018637823104858398, 0.018555904388427736, 0.018578432083129884, 0.0184453125, 0.01840640068054199, 0.01845964813232422, 0.01847091293334961, 0.01846886444091797, 0.018528255462646484, 0.018469888687133788, 0.01846681594848633, 0.01842790412902832, 0.018634752273559572, 0.01843814468383789, 0.018544639587402344, 0.018491392135620118, 0.018518016815185546, 0.01845043182373047, 0.018486272811889647, 0.018524160385131837, 0.01865830421447754, 0.018493440628051756, 0.018581504821777343, 0.018585599899291993, 0.018473983764648438, 0.018497535705566406, 0.01860095977783203, 0.018437120437622072, 0.018561023712158203, 0.018507776260375978, 0.018513919830322266, 0.01839820861816406, 0.018647039413452148, 0.018940000534057616, 0.01915894317626953, 0.01883443260192871, 0.017617919921875, 0.017663999557495116, 0.017688575744628905, 0.017747968673706056, 0.01777561569213867, 0.017756160736083985, 0.017736703872680663, 0.018933759689331055, 0.02001203155517578, 0.019474431991577147, 0.020100095748901366, 0.018899967193603515, 0.018700288772583007, 0.018592767715454102, 0.01860505676269531, 0.018523136138916017, 0.01863680076599121, 0.018610176086425782, 0.01885081672668457, 0.01863884735107422, 0.018472959518432617, 0.018415615081787108, 0.018546688079833985, 0.018516992568969725, 0.018576383590698242, 0.018763776779174804, 0.018516992568969725, 0.018543615341186523, 0.018768896102905275, 0.018562047958374024, 0.018547712326049806, 0.01865318489074707, 0.018584575653076172, 0.018489343643188477, 0.018571264266967775, 0.018519039154052733, 0.018549760818481444, 0.01846272087097168, 0.018568191528320312, 0.01862041664123535, 0.018463743209838866, 0.018488319396972656, 0.018533376693725585, 0.01848320007324219, 0.018495487213134765, 0.018555904388427736, 0.018661376953125, 0.01861324882507324, 0.01843609619140625, 0.018463743209838866, 0.018675712585449217, 0.018906112670898437, 0.019302400588989257, 0.017915903091430666, 0.01778892707824707, 0.01788313674926758, 0.017757183074951173, 0.017739776611328126, 0.018868223190307617, 0.01897881507873535, 0.018940927505493164, 0.01878937530517578, 0.019109888076782225]",tokens/s,54.7455350473423,,,,,,,, 
-4bit-awq-gemv-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,tiiuae/falcon-7b,tiiuae/falcon-7b,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - self.load_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 559, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3724, in from_pretrained - hf_quantizer.preprocess_model( - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/base.py"", line 182, in preprocess_model - return self._process_model_before_weight_loading(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/quantizer_awq.py"", line 85, in _process_model_before_weight_loading - model, has_been_replaced = replace_with_awq_linear( - File ""/usr/local/lib/python3.10/dist-packages/transformers/integrations/awq.py"", line 179, in replace_with_awq_linear - _, has_been_replaced = replace_with_awq_linear( - File ""/usr/local/lib/python3.10/dist-packages/transformers/integrations/awq.py"", line 179, in replace_with_awq_linear - _, has_been_replaced = replace_with_awq_linear( - File ""/usr/local/lib/python3.10/dist-packages/transformers/integrations/awq.py"", line 179, in replace_with_awq_linear - _, has_been_replaced = replace_with_awq_linear( - [Previous line repeated 1 more time] - File ""/usr/local/lib/python3.10/dist-packages/transformers/integrations/awq.py"", line 166, in replace_with_awq_linear - model._modules[name] = target_cls( - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/gemv.py"", line 45, in __init__ - assert self.in_features % self.group_size == 0 -AssertionError - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 
-4bit-awq-gemv-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,huggyllama/llama-65b,huggyllama/llama-65b,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - self.load_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3904, in from_pretrained - dispatch_model(model, **device_map_kwargs) - File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 489, in dispatch_model - model.to(device) - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 2796, in to - return super().to(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1173, in to - return self._apply(convert) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 779, in _apply - module._apply(fn) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 779, in _apply - module._apply(fn) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 779, in _apply - module._apply(fn) - [Previous line repeated 2 more times] - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 853, in _apply - self._buffers[key] = fn(buf) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1159, in convert - return t.to( -torch.cuda.OutOfMemoryError: CUDA out of memory. Tried to allocate 86.00 MiB. 
GPU - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemv-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,1,1,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 304, in hf_raise_for_status - response.raise_for_status() - File ""/usr/local/lib/python3.10/dist-packages/requests/models.py"", line 1024, in raise_for_status - raise HTTPError(http_error_msg, response=self) -requests.exceptions.HTTPError: 404 Client Error: Not Found for url: https://huggingface.co/1/resolve/main/config.json - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1221, in hf_hub_download - return _hf_hub_download_to_cache_dir( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1325, in _hf_hub_download_to_cache_dir - _raise_on_head_call_error(head_call_error, force_download, local_files_only) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1823, in _raise_on_head_call_error - raise head_call_error - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1722, in _get_metadata_or_catch_error - metadata = get_hf_file_metadata(url=url, proxies=proxies, timeout=etag_timeout, headers=headers) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1645, in get_hf_file_metadata - r = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 372, in _request_wrapper - response = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 396, in _request_wrapper - hf_raise_for_status(response) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status - raise RepositoryNotFoundError(message, response) 
from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-6694959a-2d4f38f41e42b3ce4ddd3833;3fd20cf5-6e9d-43aa-8f5c-0d865dd92b88) - -Repository Not Found for url: https://huggingface.co/1/resolve/main/config.json. -Please make sure you specified the correct `repo_id` and `repo_type`. -If you are trying to access a private or gated repo, make sure you are authenticated. - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 425, in cached_file - raise EnvironmentError( -OSError: 1 is not a local folder and is not a valid model identifier listed on 'https://huggingface.co/models' -If this is a private repository, make sure to pass a token having permission to this repo either by logging in with `huggingface-cli login` or by passing `token=` - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemv-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,internlm/internlm-20b,internlm/internlm-20b,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File 
""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run - report = scenario.run(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 105, in run - _ = backend.generate(self.inputs, self.config.generate_kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context - return func(*args, **kwargs) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 400, in generate - return self.pretrained_model.generate(**inputs, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context - return func(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 1914, in generate - result = self._sample( - File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2651, in _sample - outputs = self( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/root/.cache/huggingface/modules/transformers_modules/internlm/internlm-20b/80729bcf52fbc4553d965926b27304ac5e156d98/modeling_internlm.py"", line 976, in forward - outputs = self.model( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/root/.cache/huggingface/modules/transformers_modules/internlm/internlm-20b/80729bcf52fbc4553d965926b27304ac5e156d98/modeling_internlm.py"", line 866, in forward - layer_outputs = decoder_layer( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/root/.cache/huggingface/modules/transformers_modules/internlm/internlm-20b/80729bcf52fbc4553d965926b27304ac5e156d98/modeling_internlm.py"", line 583, in forward - hidden_states, self_attn_weights, present_key_value = self.self_attn( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/root/.cache/huggingface/modules/transformers_modules/internlm/internlm-20b/80729bcf52fbc4553d965926b27304ac5e156d98/modeling_internlm.py"", line 339, in forward - query_states = self.q_proj(hidden_states).view(bsz, q_len, self.num_heads, self.head_dim).transpose(1, 2) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", 
line 115, in decorate_context - return func(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/gemv.py"", line 162, in forward - assert AWQ_INSTALLED, ( -AssertionError: AWQ kernels could not be loaded. Please install them from https://github.com/casper-hansen/AutoAWQ_kernels - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemv-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,internlm/internlm2-20b,internlm/internlm2-20b,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run - report = scenario.run(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 105, in run - _ = backend.generate(self.inputs, self.config.generate_kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context - return func(*args, **kwargs) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 400, in generate - return self.pretrained_model.generate(**inputs, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context - return func(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 1914, in generate - result = self._sample( - File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2651, in _sample - outputs = self( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/root/.cache/huggingface/modules/transformers_modules/internlm/internlm2-20b/dc0130882132de7cb2eb1fa54ba5294b8c922076/modeling_internlm2.py"", line 1204, in forward - outputs = self.model( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File 
""/root/.cache/huggingface/modules/transformers_modules/internlm/internlm2-20b/dc0130882132de7cb2eb1fa54ba5294b8c922076/modeling_internlm2.py"", line 1004, in forward - layer_outputs = decoder_layer( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/root/.cache/huggingface/modules/transformers_modules/internlm/internlm2-20b/dc0130882132de7cb2eb1fa54ba5294b8c922076/modeling_internlm2.py"", line 738, in forward - hidden_states, self_attn_weights, present_key_value = self.attention( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/root/.cache/huggingface/modules/transformers_modules/internlm/internlm2-20b/dc0130882132de7cb2eb1fa54ba5294b8c922076/modeling_internlm2.py"", line 308, in forward - qkv_states = self.wqkv(hidden_states) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context - return func(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/gemv.py"", line 162, in forward - assert AWQ_INSTALLED, ( -AssertionError: AWQ kernels could not be loaded. 
Please install them from https://github.com/casper-hansen/AutoAWQ_kernels - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemv-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,facebook/opt-30b,facebook/opt-30b,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run - report = scenario.run(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 105, in run - _ = backend.generate(self.inputs, self.config.generate_kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context - return func(*args, **kwargs) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 400, in generate - return self.pretrained_model.generate(**inputs, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context - return func(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 1914, in generate - result = self._sample( - File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2651, in _sample - outputs = self( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 1118, in forward - outputs = self.model.decoder( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 884, in forward - layer_outputs = decoder_layer( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", 
line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 525, in forward - hidden_states, self_attn_weights, present_key_value = self.self_attn( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 155, in forward - query_states = self.q_proj(hidden_states) * self.scaling - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context - return func(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/gemv.py"", line 162, in forward - assert AWQ_INSTALLED, ( -AssertionError: AWQ kernels could not be loaded. Please install them from https://github.com/casper-hansen/AutoAWQ_kernels - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemv-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,Qwen/Qwen1.5-MoE-A2.7B,Qwen/Qwen1.5-MoE-A2.7B,cuda,0,42,,,True,,,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run - report = scenario.run(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run - self.run_model_loading_tracking(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking - backend.load() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load - self.load_transformers_model() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model - self.load_transformers_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in 
load_transformers_model_with_no_weights - self.load_transformers_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained - self.pretrained_model = self.automodel_loader.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3846, in from_pretrained - hf_quantizer.preprocess_model( - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/base.py"", line 182, in preprocess_model - return self._process_model_before_weight_loading(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/quantizer_awq.py"", line 85, in _process_model_before_weight_loading - model, has_been_replaced = replace_with_awq_linear( - File ""/usr/local/lib/python3.10/dist-packages/transformers/integrations/awq.py"", line 179, in replace_with_awq_linear - _, has_been_replaced = replace_with_awq_linear( - File ""/usr/local/lib/python3.10/dist-packages/transformers/integrations/awq.py"", line 179, in replace_with_awq_linear - _, has_been_replaced = replace_with_awq_linear( - File ""/usr/local/lib/python3.10/dist-packages/transformers/integrations/awq.py"", line 179, in replace_with_awq_linear - _, has_been_replaced = replace_with_awq_linear( - [Previous line repeated 1 more time] - File ""/usr/local/lib/python3.10/dist-packages/transformers/integrations/awq.py"", line 166, in replace_with_awq_linear - model._modules[name] = target_cls( - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/gemv.py"", line 47, in __init__ - assert out_features % (32 // self.w_bit) == 0 -AssertionError - -",qwen2_moe,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemv-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,mistralai/Mixtral-8x7B-v0.1,mistralai/Mixtral-8x7B-v0.1,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File 
""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - self.load_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3904, in from_pretrained - dispatch_model(model, **device_map_kwargs) - File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 489, in dispatch_model - model.to(device) - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 2796, in to - return super().to(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1173, in to - return self._apply(convert) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 779, in _apply - module._apply(fn) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 779, in _apply - module._apply(fn) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 779, in _apply - module._apply(fn) - [Previous line repeated 2 more times] - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 853, in _apply - self._buffers[key] = fn(buf) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1159, in convert - return t.to( -torch.cuda.OutOfMemoryError: CUDA out of memory. Tried to allocate 20.00 MiB. GPU - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemv-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,EleutherAI/pythia-410m,EleutherAI/pythia-410m,cuda,0,42,,,True,,,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,gpt_neox,MB,1068.519424,1074.266112,0.0,488.636416,482.553856,s,1,7.648740234375,7.648740234375,0.0,7.648740234375,7.648740234375,7.648740234375,7.648740234375,[7.648740234375],,kWh,8.087800811123694e-06,4.417118589456701e-06,1.163250930602544e-05,2.4137428706605835e-05,,MB,1549.27104,1263.009792,0.0,616.562688,581.925888,s,10,0.24054720115661624,0.02405472011566162,0.0006485781518972256,0.02386303997039795,0.024767964744567873,0.025027582836151124,0.025235277309417724,"[0.02467020797729492, 0.023386816024780273, 0.02471027183532715, 0.023615327835083008, 0.02441270446777344, 0.02355036735534668, 0.02411075210571289, 0.023428768157958985, 0.025287200927734375, 
0.023374784469604493]",tokens/s,10642.401938957615,kWh,2.819028678036529e-07,1.5446885797638778e-07,8.602088557615454e-07,1.296580581541586e-06,tokens/kWh,197442414.02692112,MB,1594.79808,1277.689856,0.0,631.242752,597.192192,s,10,13.463059204101562,1.3463059204101562,0.02086983341782205,1.339052978515625,1.3807942016601562,1.3807970031738281,1.3807992443847656,"[1.3807998046875, 1.3183885498046874, 1.3366260986328125, 1.3358287353515625, 1.3414798583984375, 1.3486375732421876, 1.3244510498046875, 1.3325167236328126, 1.3807935791015624, 1.3635372314453125]",tokens/s,46.79471362705355,kWh,1.5768960309975795e-05,8.641186727316e-06,2.5226700902632238e-05,4.9636847939924036e-05,tokens/kWh,1269218.3854270827,,s,630,13.45813200187685,0.021362114288693383,0.0005615700025871868,0.021083711624145508,0.022027804946899415,0.022171633434295655,0.02325849382400513,"[0.02187468719482422, 0.022023168563842774, 0.021978111267089845, 0.02208665657043457, 0.022076416015625, 0.02210918426513672, 0.021983232498168945, 0.022032384872436524, 0.02209587287902832, 0.021907487869262696, 0.022006752014160157, 0.022215679168701173, 0.022042623519897463, 0.022043743133544923, 0.021964736938476562, 0.02214499282836914, 0.021938175201416017, 0.02210304069519043, 0.022018112182617188, 0.02203539276123047, 0.021964799880981444, 0.022032384872436524, 0.021996543884277343, 0.021895200729370116, 0.02150396728515625, 0.022046720504760742, 0.02148761558532715, 0.02169856071472168, 0.02202729606628418, 0.022171615600585937, 0.022046720504760742, 0.022012928009033202, 0.021989376068115234, 0.02230886459350586, 0.022168575286865236, 0.02209689521789551, 0.02224332809448242, 0.02492620849609375, 0.023196672439575194, 0.022459392547607423, 0.02164838409423828, 0.021086208343505858, 0.021007360458374022, 0.02126950454711914, 0.021786624908447266, 0.021994495391845705, 0.022172672271728516, 0.02208460807800293, 0.02191974449157715, 0.022196224212646484, 0.022038528442382813, 0.022321151733398437, 0.021894144058227538, 0.02152448081970215, 0.02144358444213867, 0.02109235191345215, 0.021015552520751952, 0.021609472274780273, 0.021102592468261717, 0.021308416366577147, 0.02107904052734375, 0.02084864044189453, 0.020953088760375976, 0.0206059513092041, 0.02070425605773926, 0.02102681541442871, 0.020644863128662108, 0.02065510368347168, 0.02063974380493164, 0.020620288848876952, 0.020825151443481446, 0.020617151260375978, 0.020567039489746093, 0.020633600234985353, 0.020596736907958983, 0.02066022491455078, 0.020571136474609376, 0.020503551483154296, 0.020685823440551757, 0.021394432067871092, 0.020619264602661135, 0.020549631118774413, 0.020808704376220705, 0.02087731170654297, 0.020926496505737305, 0.02091516876220703, 0.020855808258056642, 0.02086195182800293, 0.020931583404541015, 0.020847616195678712, 0.020939775466918945, 0.020974592208862306, 0.020964351654052735, 0.021140480041503908, 0.021197824478149413, 0.02122854423522949, 0.02108415985107422, 0.020958208084106447, 0.020756479263305663, 0.02068377685546875, 0.020858879089355468, 0.020853759765625, 0.02101759910583496, 0.020744192123413087, 0.020749311447143554, 0.021003263473510742, 0.020996095657348633, 0.021015552520751952, 0.021056543350219725, 0.021068767547607423, 0.020847616195678712, 0.02089574432373047, 0.02102783966064453, 0.020867071151733398, 0.020915199279785156, 0.021013504028320314, 0.021021696090698243, 0.0209039363861084, 0.02146816062927246, 0.020997119903564454, 0.021212160110473634, 0.022023168563842774, 0.021606399536132814, 0.020987903594970703, 
0.021421056747436523, 0.02123366355895996, 0.02209587287902832, 0.021968896865844727, 0.02101759910583496, 0.021001216888427734, 0.020941823959350587, 0.021082111358642578, 0.02187468719482422, 0.02164531135559082, 0.020973567962646485, 0.021045248031616212, 0.020884479522705078, 0.020947967529296875, 0.020912128448486327, 0.021007360458374022, 0.021121023178100586, 0.02148454475402832, 0.021818368911743165, 0.020944896697998046, 0.021194751739501954, 0.02165452766418457, 0.021137407302856445, 0.02102579116821289, 0.021072895050048827, 0.020973567962646485, 0.02106572723388672, 0.021061632156372072, 0.02110873603820801, 0.020938751220703124, 0.02101968002319336, 0.02106159973144531, 0.02108415985107422, 0.02103193664550781, 0.020918272018432618, 0.020976640701293944, 0.021183488845825195, 0.02104422378540039, 0.02088960075378418, 0.020938751220703124, 0.021000192642211913, 0.021030975341796876, 0.021041088104248047, 0.02103091239929199, 0.020914176940917968, 0.020951040267944337, 0.020953088760375976, 0.020995071411132812, 0.020990976333618162, 0.02107084846496582, 0.021013504028320314, 0.021067808151245117, 0.025698272705078126, 0.021815296173095702, 0.021881856918334962, 0.021497856140136717, 0.020946943283081054, 0.02089779281616211, 0.020935680389404295, 0.020945920944213867, 0.021028863906860353, 0.020938751220703124, 0.020997119903564454, 0.0211015682220459, 0.02126335906982422, 0.020853759765625, 0.020969472885131835, 0.021037055969238282, 0.02106982421875, 0.020923391342163086, 0.020853759765625, 0.020985855102539062, 0.02167296028137207, 0.021009408950805664, 0.021061632156372072, 0.021029888153076173, 0.02106879997253418, 0.02124185562133789, 0.02170982360839844, 0.02109644889831543, 0.02096950340270996, 0.021037023544311525, 0.020958208084106447, 0.02099404716491699, 0.02104422378540039, 0.020946943283081054, 0.02144256019592285, 0.021605375289916993, 0.02162073516845703, 0.02124083137512207, 0.021061632156372072, 0.02149478340148926, 0.021072895050048827, 0.02103603172302246, 0.020983808517456053, 0.021136383056640624, 0.021019647598266602, 0.020962303161621093, 0.021123071670532227, 0.02106572723388672, 0.02103398323059082, 0.02104729652404785, 0.02101759910583496, 0.021083263397216796, 0.021018495559692384, 0.02107699203491211, 0.020980735778808594, 0.020936704635620116, 0.020979711532592774, 0.020974592208862306, 0.02104832077026367, 0.021164031982421876, 0.02103091239929199, 0.020950016021728517, 0.021389312744140625, 0.02122854423522949, 0.021013504028320314, 0.02085683250427246, 0.020960256576538085, 0.021165056228637694, 0.021711872100830077, 0.021643327713012694, 0.021662656784057616, 0.021414911270141602, 0.021773311614990236, 0.022328319549560546, 0.022394880294799805, 0.02206208038330078, 0.021090303421020508, 0.020969472885131835, 0.020962303161621093, 0.02104422378540039, 0.021579776763916016, 0.020960256576538085, 0.020946943283081054, 0.0213309440612793, 0.021048416137695314, 0.02122332763671875, 0.021011455535888672, 0.02086911964416504, 0.020964351654052735, 0.021731327056884766, 0.021011455535888672, 0.02097974395751953, 0.021024736404418945, 0.02103500747680664, 0.02145075225830078, 0.021538816452026367, 0.021751808166503905, 0.02333695983886719, 0.02123263931274414, 0.021041215896606444, 0.020980672836303713, 0.02105548858642578, 0.021004287719726563, 0.02102681541442871, 0.021019647598266602, 0.021246976852416992, 0.02104319953918457, 0.02105241584777832, 0.02105855941772461, 0.02124595260620117, 0.02123673629760742, 0.021094400405883788, 
0.021498880386352538, 0.02163711929321289, 0.021530624389648437, 0.02108415985107422, 0.021000192642211913, 0.021204992294311522, 0.02251366424560547, 0.02204364776611328, 0.021933055877685546, 0.020974592208862306, 0.02109132766723633, 0.020846656799316406, 0.022145984649658203, 0.02112512016296387, 0.02102579116821289, 0.021540864944458008, 0.02131046485900879, 0.021021696090698243, 0.021993471145629884, 0.021542911529541017, 0.021519359588623048, 0.021009408950805664, 0.020957183837890626, 0.020972543716430665, 0.021737472534179687, 0.021554176330566405, 0.021019647598266602, 0.021078016281127928, 0.02168320083618164, 0.021165056228637694, 0.020955135345458984, 0.020952064514160155, 0.02145894432067871, 0.02146303939819336, 0.021947391510009767, 0.021716991424560548, 0.02163302421569824, 0.021758975982666014, 0.02148659133911133, 0.021780479431152345, 0.02127052879333496, 0.021037055969238282, 0.0218787841796875, 0.021703680038452147, 0.020983808517456053, 0.020965375900268556, 0.02168320083618164, 0.021489664077758788, 0.02107084846496582, 0.021754911422729492, 0.021602272033691406, 0.022817792892456053, 0.022072320938110353, 0.02149478340148926, 0.020985855102539062, 0.021222400665283202, 0.0214835205078125, 0.021021696090698243, 0.02104319953918457, 0.02142207908630371, 0.02166579246520996, 0.021721088409423828, 0.02162892723083496, 0.021399551391601563, 0.021013504028320314, 0.02149580764770508, 0.020985855102539062, 0.020959232330322267, 0.020952064514160155, 0.02251571273803711, 0.022778879165649413, 0.021862432479858397, 0.021429216384887696, 0.02104934310913086, 0.02100223922729492, 0.0224901123046875, 0.021377023696899415, 0.0210513916015625, 0.02103606414794922, 0.02111075210571289, 0.020900863647460938, 0.020975616455078124, 0.020916223526000977, 0.020973567962646485, 0.021032960891723632, 0.02106368064880371, 0.02104729652404785, 0.020875263214111327, 0.020960256576538085, 0.021783552169799804, 0.020914335250854493, 0.02091913604736328, 0.02090598487854004, 0.020992000579833983, 0.020933631896972657, 0.020891647338867187, 0.02103603172302246, 0.02104319953918457, 0.021398527145385742, 0.020970495223999023, 0.020720640182495118, 0.02083328056335449, 0.021037055969238282, 0.021021696090698243, 0.020988927841186524, 0.020999168395996092, 0.02085171127319336, 0.021046272277832033, 0.02067558479309082, 0.021242879867553712, 0.021081087112426757, 0.020972543716430665, 0.020989952087402345, 0.02126950454711914, 0.020993024826049804, 0.021323776245117186, 0.021769216537475586, 0.020985855102539062, 0.02103398323059082, 0.020736000061035157, 0.020801536560058592, 0.02103091239929199, 0.020970495223999023, 0.02085478401184082, 0.020963327407836914, 0.021004287719726563, 0.0207607364654541, 0.02089353561401367, 0.020953088760375976, 0.020972543716430665, 0.020964351654052735, 0.02123673629760742, 0.021147647857666017, 0.02103398323059082, 0.020941823959350587, 0.021515264511108398, 0.02104729652404785, 0.021034015655517577, 0.020985824584960937, 0.020964351654052735, 0.02106879997253418, 0.02096335983276367, 0.02094691276550293, 0.020915199279785156, 0.020701183319091796, 0.020915199279785156, 0.021373952865600586, 0.021448703765869142, 0.020734975814819336, 0.02066534423828125, 0.021465087890625, 0.021180416107177736, 0.020973567962646485, 0.022383615493774413, 0.02328883171081543, 0.02208563232421875, 0.021242879867553712, 0.021061632156372072, 0.020952064514160155, 0.020876287460327148, 0.020921344757080077, 0.021122047424316406, 0.021092384338378907, 0.020825056076049803, 
0.021000255584716798, 0.021036991119384764, 0.020998144149780275, 0.02108518409729004, 0.021122047424316406, 0.020969472885131835, 0.020973567962646485, 0.020948991775512696, 0.021098495483398438, 0.020909088134765625, 0.020980768203735352, 0.02095097541809082, 0.020964351654052735, 0.02102579116821289, 0.021112831115722656, 0.020989952087402345, 0.020725759506225586, 0.02086809539794922, 0.021175296783447265, 0.020977664947509765, 0.021006336212158205, 0.020993024826049804, 0.02090188789367676, 0.021018688201904296, 0.02101241683959961, 0.021005311965942384, 0.02108723258972168, 0.02103603172302246, 0.02128179168701172, 0.021053440093994142, 0.02102272033691406, 0.020962303161621093, 0.020979711532592774, 0.020780031204223632, 0.020964351654052735, 0.020936704635620116, 0.02104934310913086, 0.0214517765045166, 0.020968448638916014, 0.021235712051391603, 0.02088243293762207, 0.020765695571899414, 0.020956159591674805, 0.020900863647460938, 0.020962303161621093, 0.02130227279663086, 0.021345279693603517, 0.020930559158325195, 0.02109542465209961, 0.02101862335205078, 0.021005311965942384, 0.024156160354614258, 0.02190745544433594, 0.021873664855957032, 0.02184806442260742, 0.021710847854614256, 0.021884992599487306, 0.02194630432128906, 0.021975040435791016, 0.02186240005493164, 0.021971967697143553, 0.021959680557250977, 0.021926912307739257, 0.02190745544433594, 0.022063104629516602, 0.021971967697143553, 0.02191564750671387, 0.021819391250610352, 0.021841920852661133, 0.02188800048828125, 0.022171648025512695, 0.02260479927062988, 0.02206515121459961, 0.022378496170043945, 0.021952512741088868, 0.021801984786987305, 0.021876735687255858, 0.021820415496826173, 0.021735424041748046, 0.021965824127197265, 0.022013952255249023, 0.021801984786987305, 0.02186444854736328, 0.021799936294555664, 0.022146047592163084, 0.021948415756225585, 0.02191974449157715, 0.022026239395141603, 0.02183782386779785, 0.02169856071472168, 0.021798912048339843, 0.02190028762817383, 0.021976064682006836, 0.02188902473449707, 0.02192076873779297, 0.021831680297851562, 0.021876735687255858, 0.021965824127197265, 0.022014976501464844, 0.02189619255065918, 0.02184499168395996, 0.022009855270385743, 0.022017023086547852, 0.021989376068115234, 0.02184806442260742, 0.02332057571411133, 0.02195769691467285, 0.021914560317993163, 0.022000640869140626, 0.021489664077758788, 0.021839872360229492, 0.022038528442382813, 0.02146611213684082, 0.020808704376220705, 0.02103196716308594, 0.021587968826293946, 0.022215679168701173, 0.022779903411865234, 0.022239231109619142, 0.021940256118774416, 0.021854175567626952, 0.02147532844543457, 0.021606559753417968, 0.021836639404296875, 0.02182963180541992, 0.021534719467163087, 0.021409791946411134, 0.021591039657592775, 0.021813247680664064, 0.022112255096435548, 0.02191974449157715, 0.0216494083404541, 0.022929407119750975, 0.02328374481201172, 0.022344671249389648, 0.021944320678710938, 0.021934112548828124, 0.021856224060058594, 0.02188703918457031, 0.021897151947021486, 0.0219238395690918, 0.022074367523193358, 0.022040576934814454, 0.02191564750671387, 0.02188800048828125, 0.021755903244018555, 0.021902336120605468, 0.02186956787109375, 0.02189107131958008, 0.02186240005493164, 0.021902336120605468, 0.021924863815307616, 0.021946367263793946, 0.02190028762817383, 0.021926912307739257, 0.02228223991394043, 0.021191680908203125, 0.02085273551940918, 0.021008384704589843, 0.020938751220703124, 0.021021696090698243, 0.020884479522705078, 0.020947967529296875, 0.020974592208862306, 
0.021960704803466798, 0.021404672622680664, 0.021029888153076173, 0.021005311965942384, 0.02103193664550781, 0.020788223266601562, 0.020976640701293944, 0.020894784927368164, 0.020937664031982422, 0.02090598487854004, 0.020968448638916014, 0.02090188789367676, 0.020930559158325195, 0.02099404716491699]",tokens/s,46.811845797926644,,,,,,,, -4bit-awq-gemv-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,databricks/dbrx-base,databricks/dbrx-base,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 304, in hf_raise_for_status - response.raise_for_status() - File ""/usr/local/lib/python3.10/dist-packages/requests/models.py"", line 1024, in raise_for_status - raise HTTPError(http_error_msg, response=self) -requests.exceptions.HTTPError: 403 Client Error: Forbidden for url: https://huggingface.co/databricks/dbrx-base/resolve/main/config.json - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1722, in _get_metadata_or_catch_error - metadata = get_hf_file_metadata(url=url, proxies=proxies, timeout=etag_timeout, headers=headers) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1645, in get_hf_file_metadata - r = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 372, in _request_wrapper - response = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 396, in _request_wrapper - hf_raise_for_status(response) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 367, in hf_raise_for_status - raise HfHubHTTPError(message, response=response) from e -huggingface_hub.utils._errors.HfHubHTTPError: (Request ID: Root=1-66947b1f-1afd41a335ddeba67849ea25;2e38c0ca-0386-414d-9c86-103e1a9f6c64) - -403 Forbidden: Please enable access to public gated repositories in your fine-grained token settings to view this repository.. -Cannot access content at: https://huggingface.co/databricks/dbrx-base/resolve/main/config.json. -If you are trying to create or update content,make sure you have a token with the `write` role. 
- -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1221, in hf_hub_download - return _hf_hub_download_to_cache_dir( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1325, in _hf_hub_download_to_cache_dir - _raise_on_head_call_error(head_call_error, force_download, local_files_only) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1826, in _raise_on_head_call_error - raise LocalEntryNotFoundError( -huggingface_hub.utils._errors.LocalEntryNotFoundError: An error happened while trying to locate the file on the Hub and we cannot find the requested files in the local cache. Please check your connection and try again or make sure your Internet connection is on. - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 445, in cached_file - raise EnvironmentError( -OSError: We couldn't connect to 'https://huggingface.co' to load this file, couldn't find it in the cached files and it looks like databricks/dbrx-base is not the path to a directory containing a file named config.json. -Checkout your internet connection or see how to run the library in offline mode at 'https://huggingface.co/docs/transformers/installation#offline-mode'. 
- -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemv-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,Qwen/Qwen1.5-7B,Qwen/Qwen1.5-7B,cuda,0,42,,,True,,,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,qwen2,MB,4938.518528,7434.928128,0.0,6849.298432,6445.09696,s,1,10.186806640625,10.186806640625,0.0,10.186806640625,10.186806640625,10.186806640625,10.186806640625,[10.186806640625],,kWh,4.01894579152895e-05,2.2011323305695715e-05,5.6119767118056885e-05,0.0001183205483390421,,MB,2830.974976,7818.706944,0.0,7172.25984,6823.3216,s,10,1.087984031677246,0.1087984031677246,5.409855505544957e-05,0.10879366302490234,0.10883513641357422,0.10888224029541016,0.1089199234008789,"[0.10892934417724609, 0.10882466888427735, 0.10877302551269531, 0.10882189178466797, 0.10872617340087891, 0.10876000213623047, 0.1087600326538086, 0.1088153305053711, 0.10881430053710937, 0.10875926208496094]",tokens/s,2352.9757105474064,kWh,1.2868544335065616e-06,7.048127511146058e-07,7.4131611237600614e-06,9.404828308381228e-06,tokens/kWh,27220060.97355998,MB,2835.300352,7820.804096,0.0,7174.356992,6823.32416,s,10,19.92638525390625,1.9926385253906251,0.01668275242510895,1.987087646484375,2.01650087890625,2.021864514160156,2.0261554223632814,"[1.9826319580078124, 1.9915433349609375, 1.992612548828125, 2.0038236083984375, 1.9790799560546875, 1.9816824951171874, 2.0153089599609375, 2.0272281494140625, 1.9747484130859374, 1.977725830078125]",tokens/s,31.616371558233247,kWh,2.323718328968602e-05,1.2734846833348886e-05,6.617906441664861e-05,0.0001021510945396835,tokens/kWh,616733.4797917986,,s,630,19.92447076797485,0.03162614407615056,0.000625852512862737,0.031364607810974124,0.03243161506652832,0.03258695659637451,0.03361864578247071,"[0.031531007766723636, 0.03127603149414063, 0.03196620750427246, 0.031458303451538085, 0.03121766471862793, 0.03134668731689453, 0.03153817558288574, 0.031508480072021484, 0.032688129425048826, 0.032661502838134765, 0.03122483253479004, 0.031325183868408206, 0.03137126350402832, 0.03117465591430664, 0.031188991546630858, 0.03102207946777344, 0.03079167938232422, 0.030947328567504883, 0.030914560317993164, 0.031323135375976564, 0.031321088790893556, 0.03118489646911621, 0.03120947265625, 0.03204915237426758, 0.032522239685058595, 0.031228927612304686, 0.03119206428527832, 0.031226879119873048, 0.031245311737060546, 0.031164415359497072, 0.03128832054138184, 0.03142758369445801, 0.031281152725219724, 0.03136000061035156, 0.03146444892883301, 0.031127552032470703, 0.03098111915588379, 0.031285247802734374, 0.03136716842651367, 0.0315996150970459, 0.031322111129760744, 0.03172659111022949, 0.03080806350708008, 0.031107072830200196, 0.032522239685058595, 0.03354009628295898, 0.031678464889526366, 0.03185868835449219, 0.03251302337646484, 0.03125964736938477, 0.03118182373046875, 0.0321003532409668, 0.03120639991760254, 0.031120384216308594, 0.031251455307006834, 0.03243622589111328, 0.03234611129760742, 0.031127552032470703, 0.03118694305419922, 0.031204351425170897, 
0.031084543228149415, 0.03078041648864746, 0.031070207595825194, 0.03159859275817871, 0.03224371337890625, 0.03218534469604492, 0.031459327697753905, 0.03253760147094727, 0.032309249877929686, 0.03129241561889649, 0.032336894989013674, 0.03201740646362305, 0.03291340637207031, 0.032555007934570314, 0.03081318473815918, 0.031136768341064453, 0.03180748748779297, 0.030838783264160157, 0.031077375411987306, 0.03211775970458984, 0.031122432708740235, 0.03102617645263672, 0.03120128059387207, 0.0326901741027832, 0.03243110275268555, 0.03207167816162109, 0.03240345764160156, 0.03125760078430176, 0.03160780715942383, 0.032331775665283204, 0.031220735549926756, 0.031074304580688477, 0.03226009750366211, 0.031369216918945314, 0.03133132743835449, 0.03118796730041504, 0.031122432708740235, 0.031101951599121092, 0.031127552032470703, 0.030996480941772462, 0.030785535812377928, 0.030846975326538087, 0.03119923210144043, 0.03131494331359863, 0.03185868835449219, 0.032330753326416016, 0.03196313667297363, 0.031116287231445314, 0.031137792587280274, 0.031129600524902344, 0.03120025634765625, 0.03122585678100586, 0.03126681518554687, 0.031220735549926756, 0.031079423904418944, 0.032525310516357424, 0.031066112518310547, 0.032173057556152344, 0.032500736236572264, 0.031085567474365236, 0.03165798377990723, 0.03228672027587891, 0.032217086791992186, 0.03201126480102539, 0.03121049690246582, 0.03076812744140625, 0.03130572891235352, 0.031204351425170897, 0.03154022407531738, 0.03117158317565918, 0.031016960144042968, 0.031045631408691408, 0.03102003288269043, 0.031111167907714843, 0.031056896209716797, 0.031007743835449218, 0.031044607162475587, 0.031014911651611327, 0.03058176040649414, 0.03080396842956543, 0.031094783782958983, 0.03119206428527832, 0.0310118408203125, 0.031710208892822264, 0.03227238464355469, 0.0319498233795166, 0.031143936157226562, 0.031488000869750975, 0.03118284797668457, 0.03115724754333496, 0.03170816040039062, 0.032328704833984374, 0.03117977523803711, 0.031254528045654296, 0.031286272048950195, 0.030966783523559572, 0.03198054313659668, 0.03134566307067871, 0.03198464012145996, 0.03270553588867187, 0.031955968856811526, 0.031247360229492187, 0.03213107299804688, 0.031704063415527346, 0.031325183868408206, 0.03238195037841797, 0.03228160095214844, 0.03219968032836914, 0.032279552459716795, 0.03235532760620117, 0.031170560836791993, 0.031475711822509765, 0.032304126739501955, 0.03240447998046875, 0.03219968032836914, 0.032497665405273435, 0.032271358489990236, 0.03220172882080078, 0.03221811294555664, 0.03116646385192871, 0.031768575668334964, 0.03194777679443359, 0.03163340759277344, 0.03127910423278808, 0.031955968856811526, 0.03117158317565918, 0.0324505615234375, 0.03228057479858398, 0.03229183959960937, 0.03093708801269531, 0.031936511993408204, 0.03156684875488281, 0.033617919921875, 0.03266559982299805, 0.03222220611572266, 0.03137433624267578, 0.03161497688293457, 0.03129958343505859, 0.031303680419921875, 0.032276481628417966, 0.031438848495483396, 0.03133235168457031, 0.0324771842956543, 0.03280588912963867, 0.03258163070678711, 0.03261644744873047, 0.03220377731323242, 0.031204351425170897, 0.03180851173400879, 0.031664127349853514, 0.0317890567779541, 0.03127705574035645, 0.03217203140258789, 0.03312025451660156, 0.032026622772216795, 0.03394559860229492, 0.033770496368408204, 0.031369216918945314, 0.03131391906738281, 0.03120947265625, 0.031252479553222655, 0.03074764823913574, 0.031297536849975584, 0.032282623291015625, 0.03224883270263672, 0.0319180793762207, 
0.03238195037841797, 0.03193446350097656, 0.03288678359985352, 0.03205734252929687, 0.032246784210205076, 0.03117158317565918, 0.03158323287963867, 0.03127603149414063, 0.03250995254516602, 0.03128217506408691, 0.03129343986511231, 0.031245311737060546, 0.031088640213012695, 0.03103539276123047, 0.03139583969116211, 0.03219558334350586, 0.03125964736938477, 0.03159756851196289, 0.0324505615234375, 0.03137433624267578, 0.03113369560241699, 0.031075328826904298, 0.031119359970092773, 0.031164415359497072, 0.031156223297119142, 0.03101798439025879, 0.03227033615112305, 0.03222732925415039, 0.03078348731994629, 0.03248230361938476, 0.031768575668334964, 0.03078758430480957, 0.03097804832458496, 0.03147776031494141, 0.03105996894836426, 0.031138816833496095, 0.03140403175354004, 0.03159040069580078, 0.031139839172363282, 0.03120128059387207, 0.031025152206420898, 0.031268863677978515, 0.031649791717529296, 0.031070207595825194, 0.030680063247680665, 0.030906368255615234, 0.03259494400024414, 0.03319910430908203, 0.032148479461669925, 0.03125657653808594, 0.031281152725219724, 0.03137126350402832, 0.03141324806213379, 0.03114188766479492, 0.031085567474365236, 0.031055871963500976, 0.03168563270568848, 0.031336448669433595, 0.031388671875, 0.031120384216308594, 0.031137792587280274, 0.030819328308105468, 0.032210945129394535, 0.03240652847290039, 0.03203379058837891, 0.03128217506408691, 0.031251455307006834, 0.03123302459716797, 0.03170816040039062, 0.031235071182250978, 0.031123455047607423, 0.031235071182250978, 0.031291391372680666, 0.031302656173706055, 0.031077375411987306, 0.03115007972717285, 0.03118694305419922, 0.03127807998657227, 0.03115110397338867, 0.03139379119873047, 0.031047679901123046, 0.031291391372680666, 0.031202304840087892, 0.032601089477539064, 0.0314654712677002, 0.031230976104736328, 0.03125964736938477, 0.031188991546630858, 0.031104000091552734, 0.03120332717895508, 0.031139839172363282, 0.03121049690246582, 0.03231129455566406, 0.031955968856811526, 0.031340543746948245, 0.03125657653808594, 0.03176959991455078, 0.03137740707397461, 0.03160063934326172, 0.03141939163208008, 0.031040512084960937, 0.03120128059387207, 0.031226879119873048, 0.0311910400390625, 0.03136614418029785, 0.031270912170410156, 0.03218841552734375, 0.033135616302490234, 0.03129446411132813, 0.03123302459716797, 0.0324771842956543, 0.03200307083129883, 0.032036865234375, 0.032487422943115234, 0.0323133430480957, 0.031409151077270506, 0.031867904663085936, 0.03258879852294922, 0.03314688110351562, 0.03146649551391602, 0.03134464073181152, 0.03125555229187012, 0.03117465591430664, 0.031046655654907225, 0.030642175674438478, 0.030886911392211915, 0.031123455047607423, 0.031080448150634765, 0.032379905700683595, 0.031474687576293944, 0.03124940872192383, 0.03121664047241211, 0.031114240646362305, 0.03099852752685547, 0.030712831497192384, 0.030637056350708007, 0.03099750328063965, 0.031108095169067384, 0.03196928024291992, 0.03184537506103516, 0.031244287490844725, 0.03124019241333008, 0.030843904495239258, 0.031091712951660157, 0.031202304840087892, 0.03099443244934082, 0.031235071182250978, 0.03118284797668457, 0.031083520889282228, 0.031148031234741212, 0.03126067161560059, 0.03118182373046875, 0.031006719589233397, 0.03102003288269043, 0.031123455047607423, 0.03078246307373047, 0.030966783523559572, 0.032892929077148435, 0.03126169586181641, 0.03133337593078613, 0.030909439086914063, 0.030871551513671876, 0.031267839431762694, 0.031321088790893556, 0.031164415359497072, 0.030962688446044922, 
0.03125043106079101, 0.03141632080078125, 0.031698944091796875, 0.03116646385192871, 0.030831615447998048, 0.031513599395751955, 0.032399360656738284, 0.03422822570800781, 0.03287449645996094, 0.032541694641113283, 0.03212287902832031, 0.03237273788452148, 0.03250380706787109, 0.031926271438598636, 0.0320706558227539, 0.03243724822998047, 0.03234918212890625, 0.03256524658203125, 0.032415744781494144, 0.03246284866333008, 0.032471038818359374, 0.03203071975708008, 0.03241984176635742, 0.03241676712036133, 0.032302078247070314, 0.032290817260742184, 0.03193343925476074, 0.03232563018798828, 0.032091136932373046, 0.03215564727783203, 0.03236556625366211, 0.03232563018798828, 0.03242905426025391, 0.03242496109008789, 0.03220889663696289, 0.0323583984375, 0.03213824081420898, 0.03254272079467774, 0.03237273788452148, 0.03242803192138672, 0.03240959930419922, 0.032368640899658206, 0.03223551940917969, 0.03236249542236328, 0.032345088958740234, 0.03209523010253906, 0.031735807418823245, 0.031817728042602536, 0.0316866569519043, 0.03219251251220703, 0.03248230361938476, 0.03219148635864258, 0.03234611129760742, 0.03222323226928711, 0.032121856689453124, 0.03240652847290039, 0.03204915237426758, 0.03219660949707031, 0.032210945129394535, 0.03228672027587891, 0.03242905426025391, 0.03222732925415039, 0.03205836868286133, 0.031783935546875, 0.032639999389648434, 0.0323583984375, 0.032330753326416016, 0.03186483192443847, 0.03162623977661133, 0.03189248085021973, 0.03211775970458984, 0.032069633483886716, 0.03202969741821289, 0.03232153701782227, 0.03221913528442383, 0.032077823638916016, 0.0322949104309082, 0.032176128387451174, 0.031661056518554685, 0.03179110336303711, 0.03184435272216797, 0.03256422424316406, 0.03240652847290039, 0.032333824157714845, 0.03189555168151856, 0.032043006896972655, 0.032399360656738284, 0.03211673736572265, 0.032538623809814454, 0.03199283218383789, 0.03211775970458984, 0.03242291259765625, 0.03261644744873047, 0.032113662719726564, 0.03176345634460449, 0.0323768310546875, 0.032570369720458986, 0.03273113632202149, 0.03242496109008789, 0.031855615615844726, 0.03361894226074219, 0.03380940628051758, 0.03228979110717774, 0.032008190155029294, 0.031497215270996096, 0.03121049690246582, 0.032118785858154295, 0.031455232620239255, 0.03170816040039062, 0.031286272048950195, 0.031941631317138675, 0.03228876876831055, 0.031441919326782225, 0.03095142364501953, 0.030849023818969725, 0.03122585678100586, 0.030915584564208985, 0.03084492874145508, 0.031135744094848632, 0.03141836738586426, 0.031140863418579103, 0.030885887145996094, 0.03116543960571289, 0.034574337005615234, 0.032712703704833986, 0.03131084823608398, 0.031230976104736328, 0.0312412166595459, 0.031253503799438476, 0.03115724754333496, 0.031105024337768555, 0.03175628852844238, 0.03214745712280274, 0.03121049690246582, 0.031096832275390625, 0.03132928085327148, 0.031916032791137694, 0.031084543228149415, 0.03118694305419922, 0.030809087753295897, 0.032368640899658206, 0.032584705352783204, 0.032210945129394535, 0.03225804901123047, 0.03117568016052246, 0.031024127960205077, 0.031339519500732424, 0.030917631149291993, 0.03139379119873047, 0.03117568016052246, 0.03133030319213867, 0.031526912689208986, 0.031267839431762694, 0.031007743835449218, 0.03118284797668457, 0.031265792846679685, 0.031438848495483396, 0.03129651260375976, 0.030846975326538087, 0.03100364875793457, 0.031116287231445314, 0.031029247283935548, 0.030895103454589845, 0.03095142364501953, 0.03114188766479492, 0.031095808029174804, 
0.030864383697509764, 0.03098521614074707, 0.031164415359497072, 0.03116543960571289, 0.03128934478759766, 0.03116646385192871, 0.03136307144165039, 0.03144396781921387, 0.03117158317565918, 0.03136716842651367, 0.031116287231445314, 0.031119359970092773, 0.03141939163208008, 0.03411763381958008, 0.0329543685913086, 0.0314839038848877, 0.03125760078430176, 0.031082496643066407, 0.03123302459716797, 0.03118284797668457, 0.030845951080322266, 0.031097856521606446, 0.03236556625366211, 0.03173785591125488, 0.031286272048950195, 0.0312729606628418, 0.031251455307006834, 0.031127552032470703, 0.031205375671386718, 0.03118182373046875, 0.03117568016052246, 0.031212543487548827, 0.03127193641662598, 0.03123404884338379, 0.0316180477142334, 0.032072704315185545, 0.031438848495483396, 0.03120742416381836, 0.03163033676147461, 0.031488000869750975, 0.03148185539245606, 0.03121151924133301, 0.03131084823608398, 0.03099033546447754, 0.031696895599365234, 0.031322111129760744, 0.03243724822998047, 0.031987712860107424, 0.0313702392578125, 0.03244646453857422, 0.031547391891479495, 0.03099545669555664, 0.030859264373779297, 0.031114240646362305, 0.031185920715332032, 0.030915584564208985, 0.03133337593078613, 0.03137843132019043, 0.030879743576049806, 0.0312412166595459, 0.031023103713989256, 0.030921728134155273, 0.031406080245971676, 0.03101286315917969, 0.031081472396850586, 0.031229951858520507, 0.031543296813964845, 0.03138150405883789, 0.031093759536743162, 0.031139839172363282, 0.03082956886291504, 0.031109119415283205]",tokens/s,31.61940948577747,,,,,,,, -4bit-awq-gemv-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,Qwen/Qwen2-beta-14B,Qwen/Qwen2-beta-14B,cuda,0,42,,,True,,,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,,qwen2,MB,8229.064704,11294.736384,0.0,10701.766656,10468.923392,s,1,11.9398232421875,11.9398232421875,0.0,11.9398232421875,11.9398232421875,11.9398232421875,11.9398232421875,[11.9398232421875],,kWh,5.796567191805701e-05,3.174299352402799e-05,8.378645591800016e-05,0.00017349512136008517,,MB,4038.590464,11816.927232,0.0,11163.140096,10923.3664,s,10,2.0492736206054687,0.20492736206054687,9.353162226330598e-05,0.20491548919677732,0.2050119369506836,0.20508796920776368,0.20514879501342775,"[0.20516400146484376, 0.20493612670898437, 0.2048948516845703, 0.2049950408935547, 0.204830078125, 0.20486735534667969, 0.20482867431640625, 0.20494137573242188, 0.2049398651123047, 0.20487625122070313]",tokens/s,1249.223126799258,kWh,2.421981373752807e-06,1.3270629092076317e-06,1.4565737276163137e-05,1.8314781559123576e-05,tokens/kWh,13977780.688979752,MB,4044.009472,11819.024384,0.0,11165.237248,10923.36896,s,10,25.759557128906252,2.5759557128906247,0.027403258136047843,2.5793834228515626,2.6011118896484375,2.607806188964844,2.613161628417969,"[2.61450048828125, 2.597078857421875, 2.580906494140625, 2.599624267578125, 2.5770458984375, 2.5798447265625, 2.578922119140625, 2.531884765625, 2.521021728515625, 
2.578727783203125]",tokens/s,24.456942207792913,kWh,3.0301808174302234e-05,1.6603774379938096e-05,0.00010593827579363671,0.00015284385834787704,tokens/kWh,412185.3549169779,,s,630,25.75748918151856,0.04088490346272786,0.0011186479654957684,0.041556991577148435,0.04201922721862793,0.04229171047210693,0.042981263236999516,"[0.04204646301269531, 0.041902080535888675, 0.04132556915283203, 0.04206284713745117, 0.0424376335144043, 0.0420945930480957, 0.04141363143920898, 0.04181401443481445, 0.04176588821411133, 0.04170035171508789, 0.041637889862060545, 0.041984001159667966, 0.041708545684814455, 0.04159385681152344, 0.041695232391357424, 0.04310220718383789, 0.04150377655029297, 0.04170646286010742, 0.041635841369628904, 0.041816062927246093, 0.04164505767822266, 0.04208844757080078, 0.040659969329833984, 0.04196044921875, 0.04203212738037109, 0.041864192962646485, 0.041783294677734374, 0.04182220840454102, 0.03995750427246094, 0.04033433532714844, 0.039790592193603515, 0.039792640686035156, 0.039686145782470705, 0.039616512298583983, 0.03962879943847656, 0.04169318389892578, 0.04174643325805664, 0.04174028778076172, 0.04177407836914063, 0.04170956802368164, 0.04164198303222656, 0.04186214447021484, 0.04173315048217773, 0.04179657745361328, 0.041731071472167966, 0.04181401443481445, 0.04177305603027344, 0.04190105438232422, 0.04226559829711914, 0.04193280029296875, 0.04175462341308594, 0.04172902297973633, 0.041565185546875, 0.042001407623291014, 0.04086886215209961, 0.03969126510620117, 0.040948734283447266, 0.04179046249389649, 0.041673728942871094, 0.04197580718994141, 0.041869312286376956, 0.04189388656616211, 0.039815166473388675, 0.03967385482788086, 0.03955916976928711, 0.039608318328857424, 0.03968307113647461, 0.039588863372802735, 0.041373695373535156, 0.041981952667236325, 0.04197478485107422, 0.041839614868164066, 0.04193894577026367, 0.04181094360351562, 0.04072959899902344, 0.03972915267944336, 0.039623680114746096, 0.03969843292236328, 0.04011212921142578, 0.041837566375732424, 0.041793537139892575, 0.04174131011962891, 0.04147609710693359, 0.04184985733032227, 0.041831424713134766, 0.041024513244628906, 0.041869312286376956, 0.041708545684814455, 0.04171059036254883, 0.04159897613525391, 0.04172185516357422, 0.04014591979980469, 0.04238336181640625, 0.0417781753540039, 0.04170035171508789, 0.041596927642822266, 0.04184064102172851, 0.041646080017089845, 0.03987046432495117, 0.04252262496948242, 0.042068992614746094, 0.0396308479309082, 0.03966156768798828, 0.041973758697509765, 0.04181401443481445, 0.03953664016723633, 0.039532543182373044, 0.03948031997680664, 0.04246015930175781, 0.04394598388671875, 0.0424192008972168, 0.04191641616821289, 0.03959500885009765, 0.04068044662475586, 0.041875457763671874, 0.04201881790161133, 0.04221235275268555, 0.03967180633544922, 0.04165324783325195, 0.041439231872558595, 0.041760768890380856, 0.0416184310913086, 0.04171571350097656, 0.041283584594726565, 0.042403839111328126, 0.041924606323242186, 0.040894462585449216, 0.04052787017822266, 0.04153958511352539, 0.04156825637817383, 0.040738815307617186, 0.039567359924316405, 0.0416255989074707, 0.04253081512451172, 0.04216729736328125, 0.041867263793945314, 0.04190924835205078, 0.04174643325805664, 0.04070707321166992, 0.0416255989074707, 0.041589759826660154, 0.041565185546875, 0.041624576568603515, 0.041761791229248044, 0.04263628768920898, 0.041913345336914064, 0.04177510452270508, 0.041728000640869144, 0.04163071823120117, 0.041193473815917966, 0.041692161560058595, 
0.041804798126220705, 0.04178124618530273, 0.041815040588378906, 0.041659423828125, 0.04182115173339844, 0.039521278381347655, 0.03964108657836914, 0.039672832489013675, 0.04148121643066406, 0.04308070373535156, 0.040018943786621096, 0.03965951919555664, 0.039588863372802735, 0.03954483032226563, 0.03968307113647461, 0.039577598571777346, 0.039686145782470705, 0.03964825439453125, 0.04005785751342773, 0.04038553619384765, 0.042092544555664066, 0.04180889511108398, 0.0420239372253418, 0.04165119934082031, 0.04183347320556641, 0.039825408935546876, 0.03985100936889648, 0.039293952941894535, 0.03952025604248047, 0.04073062515258789, 0.0396759033203125, 0.03957145690917969, 0.039605247497558595, 0.04198912048339844, 0.04209356689453125, 0.04187033462524414, 0.0395786247253418, 0.039433216094970705, 0.0401448974609375, 0.04347903823852539, 0.041368576049804685, 0.039554046630859374, 0.039413761138916016, 0.03952230453491211, 0.040700927734375, 0.041760768890380856, 0.041643009185791016, 0.04185702514648437, 0.04174540710449219, 0.041711616516113284, 0.039479297637939455, 0.03948441696166992, 0.03938816070556641, 0.04212736129760742, 0.042033153533935545, 0.04178124618530273, 0.04155801773071289, 0.03953049468994141, 0.03944857788085938, 0.04049100875854492, 0.04163993453979492, 0.04173619079589844, 0.04161228942871094, 0.04200755310058594, 0.042331134796142575, 0.04187033462524414, 0.04150374221801758, 0.03946905517578125, 0.0393779182434082, 0.03943526458740235, 0.03958169555664062, 0.04163481521606445, 0.041573375701904294, 0.04169113540649414, 0.04161228942871094, 0.04159078216552734, 0.04118937683105469, 0.04219903945922852, 0.042008575439453126, 0.04179148864746094, 0.041588737487792966, 0.04177407836914063, 0.04193075180053711, 0.04170547103881836, 0.041605121612548826, 0.04141056060791016, 0.041594879150390625, 0.041632801055908206, 0.04172592163085938, 0.04244377517700195, 0.04163686370849609, 0.041640960693359375, 0.041556991577148435, 0.04163379287719727, 0.041611263275146484, 0.041637889862060545, 0.04158054351806641, 0.041796607971191405, 0.041616382598876955, 0.041626625061035157, 0.041586688995361325, 0.03984384155273438, 0.039501823425292966, 0.03936870574951172, 0.03960934448242188, 0.03950387191772461, 0.0394700813293457, 0.041090049743652345, 0.041880577087402344, 0.04241408157348633, 0.04209356689453125, 0.04172083282470703, 0.03967385482788086, 0.03960422515869141, 0.03956633758544922, 0.039629825592041014, 0.03956838226318359, 0.03958272171020508, 0.03954585647583008, 0.039539710998535156, 0.04112691116333008, 0.0424898567199707, 0.042103809356689455, 0.04182220840454102, 0.04181196975708008, 0.04176793670654297, 0.04163379287719727, 0.04167270278930664, 0.04168294525146484, 0.04113817596435547, 0.0416890869140625, 0.04169420623779297, 0.04206387329101562, 0.041796607971191405, 0.042498046875, 0.0398837776184082, 0.039656448364257815, 0.0416890869140625, 0.041690113067626954, 0.04160409545898437, 0.041747455596923826, 0.04106752014160156, 0.04168499374389648, 0.041692161560058595, 0.04180582427978516, 0.04246835327148438, 0.04212428665161133, 0.04172902297973633, 0.041278465270996094, 0.041752574920654296, 0.041690113067626954, 0.04194611358642578, 0.04156927871704102, 0.041586688995361325, 0.0416102409362793, 0.03948031997680664, 0.03961548614501953, 0.03945471954345703, 0.0395786247253418, 0.039762943267822266, 0.03984998321533203, 0.03959296035766602, 0.03948441696166992, 0.03954073715209961, 0.03990528106689453, 0.04214886474609375, 0.03952435302734375, 
0.03948646545410156, 0.039482368469238284, 0.03957657623291016, 0.03946393585205078, 0.039501823425292966, 0.03952844619750977, 0.03954483032226563, 0.03956224060058594, 0.03949260711669922, 0.03950284957885742, 0.03949772644042969, 0.03942297744750976, 0.03952537536621094, 0.039962623596191404, 0.04293836975097656, 0.045431808471679686, 0.04215193557739258, 0.04185804748535156, 0.04186316680908203, 0.041556991577148435, 0.04161228942871094, 0.04159795379638672, 0.04209766387939453, 0.04190415954589844, 0.041498592376708984, 0.04154163360595703, 0.04165222549438476, 0.03947417449951172, 0.03946188735961914, 0.039518207550048826, 0.03955814361572266, 0.03943116760253906, 0.039479297637939455, 0.03946393585205078, 0.03946393585205078, 0.04034560012817383, 0.042205184936523435, 0.041760768890380856, 0.04005068969726563, 0.04222566223144531, 0.04210176086425781, 0.0418785285949707, 0.041760768890380856, 0.04227481460571289, 0.04289023971557617, 0.04230758285522461, 0.041798656463623046, 0.04181401443481445, 0.041215999603271485, 0.041853950500488284, 0.04182425689697265, 0.04161536026000977, 0.041404415130615234, 0.04162252807617187, 0.041589759826660154, 0.04159795379638672, 0.04169728088378906, 0.03993907165527344, 0.0416255989074707, 0.041543678283691404, 0.04070502471923828, 0.041717758178710936, 0.04163071823120117, 0.04171980667114258, 0.041662464141845705, 0.041611263275146484, 0.0419051513671875, 0.041801727294921875, 0.041616416931152346, 0.041657310485839844, 0.041534465789794923, 0.04014899063110351, 0.03957452774047852, 0.03958476638793945, 0.03959296035766602, 0.03957657623291016, 0.039651329040527344, 0.03968307113647461, 0.03954380798339844, 0.03952435302734375, 0.04126105499267578, 0.04168294525146484, 0.03947110366821289, 0.039894016265869144, 0.04033536148071289, 0.03953664016723633, 0.03943731307983398, 0.039478271484375, 0.04005376052856445, 0.03949055862426758, 0.041540607452392575, 0.041265151977539063, 0.03953561782836914, 0.03947724914550781, 0.0394598388671875, 0.03948748779296875, 0.03959500885009765, 0.04235878372192383, 0.041763839721679685, 0.04174950408935547, 0.041768959045410156, 0.04226969528198242, 0.04175155258178711, 0.041695232391357424, 0.04147814559936523, 0.04166758346557617, 0.041024513244628906, 0.04181196975708008, 0.04166758346557617, 0.04235366439819336, 0.04199116897583008, 0.041855998992919925, 0.04151603317260742, 0.041599998474121096, 0.04163174438476563, 0.041646080017089845, 0.039624702453613284, 0.04156313705444336, 0.0415467529296875, 0.04173004913330078, 0.04157952117919922, 0.04186316680908203, 0.04175872039794922, 0.041847808837890625, 0.040784896850585936, 0.04081356811523437, 0.04280319976806641, 0.04158464050292969, 0.04134809494018555, 0.04108083343505859, 0.04192768096923828, 0.041670654296875, 0.04172390365600586, 0.041708545684814455, 0.041637889862060545, 0.04172697448730469, 0.039667713165283204, 0.03975680160522461, 0.03997081756591797, 0.03945779037475586, 0.04057907104492187, 0.03972198486328125, 0.04176588821411133, 0.04063436889648438, 0.041777153015136716, 0.039731201171875, 0.03981107330322266, 0.039608318328857424, 0.03970048141479492, 0.03960627365112305, 0.03969945526123047, 0.03967385482788086, 0.039686145782470705, 0.04103680038452148, 0.04020633697509766, 0.04014796829223633, 0.03978035354614258, 0.039618560791015625, 0.03964211273193359, 0.04009983825683594, 0.03989811325073242, 0.03960627365112305, 0.03971072006225586, 0.039600128173828124, 0.03962060928344727, 0.03965235137939453, 0.03972608184814453, 
0.039626750946044925, 0.03962879943847656, 0.03955814361572266, 0.039812095642089845, 0.03952844619750977, 0.039613441467285154, 0.03951411056518555, 0.03946188735961914, 0.03938816070556641, 0.03947315216064453, 0.039362560272216796, 0.039793663024902344, 0.0393891830444336, 0.03952947235107422, 0.04015513610839844, 0.03948646545410156, 0.039608318328857424, 0.039501823425292966, 0.039392257690429686, 0.03999846267700195, 0.03947520065307617, 0.041594879150390625, 0.041734142303466795, 0.04165427017211914, 0.03960934448242188, 0.03944038391113281, 0.03951923370361328, 0.03937484741210937, 0.03946700668334961, 0.039347198486328124, 0.039311359405517575, 0.039215103149414066, 0.03951308822631836, 0.03935539245605469, 0.039913471221923826, 0.03989299011230469, 0.03958272171020508, 0.03945167922973633, 0.039604190826416016, 0.039362560272216796, 0.03942092895507812, 0.03942092895507812, 0.03948953628540039, 0.03949158477783203, 0.039498752593994144, 0.03930112075805664, 0.03948441696166992, 0.03927142333984375, 0.03947417449951172, 0.03946495819091797, 0.03943219375610352, 0.03930931091308594, 0.03950796890258789, 0.03935232162475586, 0.0395489273071289, 0.03947417449951172, 0.039597057342529295, 0.03943526458740235, 0.039569408416748046, 0.039451648712158206, 0.039803905487060545, 0.040397823333740236, 0.04060160064697266, 0.04159795379638672, 0.041665534973144534, 0.041632766723632815, 0.04175360107421875, 0.041670654296875, 0.041804798126220705, 0.039593982696533206, 0.039669761657714846, 0.04102963256835938, 0.03955507278442383, 0.03949772644042969, 0.03956121444702149, 0.03947417449951172, 0.03947520065307617, 0.03943833541870117, 0.039583744049072264, 0.042998783111572264, 0.042554367065429685, 0.0420423698425293, 0.04230553436279297, 0.039948287963867186, 0.03969126510620117, 0.0395601921081543, 0.03949977493286133, 0.03966361618041992, 0.04022784042358398, 0.03974758529663086, 0.041657344818115234, 0.041981952667236325, 0.04174028778076172, 0.039766014099121096, 0.039613441467285154, 0.03952435302734375, 0.04226559829711914, 0.04205158233642578, 0.04177407836914063, 0.04186521530151367, 0.04172288131713867, 0.04178636932373047, 0.04174848175048828, 0.041919486999511715, 0.04301107025146484, 0.04239155197143555, 0.042477569580078124, 0.042022911071777344, 0.0416184310913086, 0.04163071823120117, 0.04165529632568359, 0.041831424713134766, 0.041624576568603515, 0.04169420623779297, 0.0417259521484375, 0.04196147155761719, 0.04166758346557617, 0.04166963195800781, 0.0416993293762207, 0.04167782211303711, 0.04164505767822266, 0.04171673583984375, 0.041599998474121096, 0.039523326873779296, 0.039446529388427735, 0.03954073715209961, 0.039406593322753904, 0.03948441696166992, 0.039419902801513675, 0.03947520065307617, 0.03954585647583008, 0.04245094299316406, 0.04081356811523437, 0.041678848266601565, 0.041578495025634765, 0.04168601608276367, 0.0416993293762207, 0.039534591674804685, 0.03942092895507812, 0.03959603118896484, 0.03995647811889649, 0.03990224075317383, 0.03966255950927734, 0.039711742401123046, 0.039610366821289066]",tokens/s,24.45890574039476,,,,,,,, 
-4bit-awq-gemv-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,a,a,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 304, in hf_raise_for_status - response.raise_for_status() - File ""/usr/local/lib/python3.10/dist-packages/requests/models.py"", line 1024, in raise_for_status - raise HTTPError(http_error_msg, response=self) -requests.exceptions.HTTPError: 404 Client Error: Not Found for url: https://huggingface.co/a/resolve/main/config.json - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1221, in hf_hub_download - return _hf_hub_download_to_cache_dir( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1325, in _hf_hub_download_to_cache_dir - _raise_on_head_call_error(head_call_error, force_download, local_files_only) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1823, in _raise_on_head_call_error - raise head_call_error - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1722, in _get_metadata_or_catch_error - metadata = get_hf_file_metadata(url=url, proxies=proxies, timeout=etag_timeout, headers=headers) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1645, in get_hf_file_metadata - r = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 372, in _request_wrapper - response = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 396, in _request_wrapper - hf_raise_for_status(response) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status - raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. 
(Request ID: Root=1-6694914e-3c9d31e241d54aa821b5a02c;1d89675e-d458-4e6f-9eb2-3812b338cf89) - -Repository Not Found for url: https://huggingface.co/a/resolve/main/config.json. -Please make sure you specified the correct `repo_id` and `repo_type`. -If you are trying to access a private or gated repo, make sure you are authenticated. - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 425, in cached_file - raise EnvironmentError( -OSError: a is not a local folder and is not a valid model identifier listed on 'https://huggingface.co/models' -If this is a private repository, make sure to pass a token having permission to this repo either by logging in with `huggingface-cli login` or by passing `token=` - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemv-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,-,-,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = 
hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 106, in _inner_fn - validate_repo_id(arg_value) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 160, in validate_repo_id - raise HFValidationError( -huggingface_hub.errors.HFValidationError: Repo id must use alphanumeric chars or '-', '_', '.', '--' and '..' are forbidden, '-' and '.' cannot start or end the name, max length is 96: '-'. - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 466, in cached_file - raise EnvironmentError( -OSError: Incorrect path_or_model_id: '-'. Please provide either the path to a local folder or the repo_id of a model on the Hub. 
- -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemv-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,facebook/opt-350m,facebook/opt-350m,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run - report = scenario.run(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 105, in run - _ = backend.generate(self.inputs, self.config.generate_kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context - return func(*args, **kwargs) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 400, in generate - return self.pretrained_model.generate(**inputs, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context - return func(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 1914, in generate - result = self._sample( - File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2651, in _sample - outputs = self( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 1118, in forward - outputs = self.model.decoder( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 836, in forward - inputs_embeds = self.project_in(inputs_embeds) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, 
**kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context - return func(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/gemv.py"", line 162, in forward - assert AWQ_INSTALLED, ( -AssertionError: AWQ kernels could not be loaded. Please install them from https://github.com/casper-hansen/AutoAWQ_kernels - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemv-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,m,m,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 304, in hf_raise_for_status - response.raise_for_status() - File ""/usr/local/lib/python3.10/dist-packages/requests/models.py"", line 1024, in raise_for_status - raise HTTPError(http_error_msg, response=self) -requests.exceptions.HTTPError: 404 Client Error: Not Found for url: https://huggingface.co/m/resolve/main/config.json - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1221, in hf_hub_download - return _hf_hub_download_to_cache_dir( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1325, in _hf_hub_download_to_cache_dir - _raise_on_head_call_error(head_call_error, force_download, local_files_only) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1823, in _raise_on_head_call_error - raise head_call_error - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1722, in _get_metadata_or_catch_error - metadata = get_hf_file_metadata(url=url, proxies=proxies, timeout=etag_timeout, headers=headers) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1645, in get_hf_file_metadata - r = _request_wrapper( - File 
""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 372, in _request_wrapper - response = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 396, in _request_wrapper - hf_raise_for_status(response) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status - raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-66948c31-56d5ad366ee7254e2b0990c4;f19ecd97-032a-4c76-86d9-0ee7aa2c3620) - -Repository Not Found for url: https://huggingface.co/m/resolve/main/config.json. -Please make sure you specified the correct `repo_id` and `repo_type`. -If you are trying to access a private or gated repo, make sure you are authenticated. - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 425, in cached_file - raise EnvironmentError( -OSError: m is not a local folder and is not a valid model identifier listed on 'https://huggingface.co/models' -If this is a private repository, make sure to pass a token having permission to this repo either by logging in with `huggingface-cli login` or by passing `token=` - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemv-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,stabilityai/stablelm-base-alpha-7b,stabilityai/stablelm-base-alpha-7b,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File 
""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run - report = scenario.run(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 105, in run - _ = backend.generate(self.inputs, self.config.generate_kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context - return func(*args, **kwargs) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 400, in generate - return self.pretrained_model.generate(**inputs, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context - return func(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 1914, in generate - result = self._sample( - File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2651, in _sample - outputs = self( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1097, in forward - outputs = self.gpt_neox( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 988, in forward - outputs = layer( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 753, in forward - attention_layer_outputs = self.attention( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 170, in forward - query, key, value, present = self._attn_projections_and_rope( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 224, in _attn_projections_and_rope - qkv = 
self.query_key_value(hidden_states) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context - return func(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/gemv.py"", line 162, in forward - assert AWQ_INSTALLED, ( -AssertionError: AWQ kernels could not be loaded. Please install them from https://github.com/casper-hansen/AutoAWQ_kernels - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemv-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,M,M,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 304, in hf_raise_for_status - response.raise_for_status() - File ""/usr/local/lib/python3.10/dist-packages/requests/models.py"", line 1024, in raise_for_status - raise HTTPError(http_error_msg, response=self) -requests.exceptions.HTTPError: 404 Client Error: Not Found for url: https://huggingface.co/M/resolve/main/config.json - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1221, in hf_hub_download - return _hf_hub_download_to_cache_dir( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1325, in _hf_hub_download_to_cache_dir - _raise_on_head_call_error(head_call_error, force_download, local_files_only) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1823, in _raise_on_head_call_error - raise head_call_error - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1722, in _get_metadata_or_catch_error - metadata = get_hf_file_metadata(url=url, proxies=proxies, timeout=etag_timeout, headers=headers) - File 
""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1645, in get_hf_file_metadata - r = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 372, in _request_wrapper - response = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 396, in _request_wrapper - hf_raise_for_status(response) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status - raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-66948fa8-613d450c2f712f3110dd5bbb;c460b950-a2e4-4a33-b773-1b675861d953) - -Repository Not Found for url: https://huggingface.co/M/resolve/main/config.json. -Please make sure you specified the correct `repo_id` and `repo_type`. -If you are trying to access a private or gated repo, make sure you are authenticated. - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 425, in cached_file - raise EnvironmentError( -OSError: M is not a local folder and is not a valid model identifier listed on 'https://huggingface.co/models' -If this is a private repository, make sure to pass a token having permission to this repo either by logging in with `huggingface-cli login` or by passing `token=` - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemv-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,8,8,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 
7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 304, in hf_raise_for_status - response.raise_for_status() - File ""/usr/local/lib/python3.10/dist-packages/requests/models.py"", line 1024, in raise_for_status - raise HTTPError(http_error_msg, response=self) -requests.exceptions.HTTPError: 404 Client Error: Not Found for url: https://huggingface.co/8/resolve/main/config.json - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1221, in hf_hub_download - return _hf_hub_download_to_cache_dir( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1325, in _hf_hub_download_to_cache_dir - _raise_on_head_call_error(head_call_error, force_download, local_files_only) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1823, in _raise_on_head_call_error - raise head_call_error - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1722, in _get_metadata_or_catch_error - metadata = get_hf_file_metadata(url=url, proxies=proxies, timeout=etag_timeout, headers=headers) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1645, in get_hf_file_metadata - r = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 372, in _request_wrapper - response = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 396, in _request_wrapper - hf_raise_for_status(response) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status - raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-66949256-054bc40c134a0df42d637844;3152bcb1-1075-4284-be44-b99c5b849ec7) - -Repository Not Found for url: https://huggingface.co/8/resolve/main/config.json. -Please make sure you specified the correct `repo_id` and `repo_type`. -If you are trying to access a private or gated repo, make sure you are authenticated. 
- -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 425, in cached_file - raise EnvironmentError( -OSError: 8 is not a local folder and is not a valid model identifier listed on 'https://huggingface.co/models' -If this is a private repository, make sure to pass a token having permission to this repo either by logging in with `huggingface-cli login` or by passing `token=` - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemv-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,EleutherAI/gpt-neox-20b,EleutherAI/gpt-neox-20b,cuda,0,42,,,True,,,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,gpt_neox,MB,11154.407424,12432.441344,0.0,11846.811648,11814.785024,s,1,12.0977890625,12.0977890625,0.0,12.0977890625,12.0977890625,12.0977890625,12.0977890625,[12.0977890625],,kWh,6.229142272570193e-05,3.412524387063318e-05,8.689895840802153e-05,0.00018331562500435665,,MB,2152.448,13397.131264,0.0,12750.68416,12641.86368,s,10,3.5912645568847656,0.35912645568847656,0.00014298467746071724,0.3591465911865235,0.35922806396484375,0.3593048583984375,0.3593662939453125,"[0.3590035705566406, 0.3592015380859375, 0.3593816528320313, 0.3590222778320312, 0.35911700439453126, 0.359125244140625, 0.3591679382324219, 0.35921099853515626, 0.35921066284179687, 
0.35882366943359373]",tokens/s,712.840827917358,kWh,4.242819021230334e-06,2.324410711042483e-06,2.3428818346214147e-05,2.9996048078486964e-05,tokens/kWh,8534457.583550883,MB,2166.407168,13690.732544,0.0,13044.28544,12933.698048,s,10,27.270395751953128,2.7270395751953123,0.016563235699652266,2.720166748046875,2.7529238769531252,2.758838037109375,2.7635693652343747,"[2.723796630859375, 2.715986328125, 2.71101025390625, 2.729712158203125, 2.721241455078125, 2.764752197265625, 2.751609619140625, 2.719092041015625, 2.71830810546875, 2.714886962890625]",tokens/s,23.10197496693384,kWh,3.2460032476686255e-05,1.7790285409443384e-05,0.0001296524985631867,0.00017990281644931632,tokens/kWh,350189.07009579183,,s,630,27.2551556854248,0.04326215188162667,0.0006048566668655743,0.04299929618835449,0.044199527740478514,0.04441712703704834,0.0451032381439209,"[0.043363327026367186, 0.04294041442871094, 0.04281958389282227, 0.04280934524536133, 0.042929153442382816, 0.04283494567871094, 0.0430274543762207, 0.04527718353271484, 0.04439244842529297, 0.04322304153442383, 0.04414156723022461, 0.0435865592956543, 0.04326092910766602, 0.042840065002441405, 0.04329369735717774, 0.04394188690185547, 0.04308684921264649, 0.042916862487792966, 0.042692607879638675, 0.04298137664794922, 0.043815937042236325, 0.04284415817260742, 0.04421017456054688, 0.044281856536865234, 0.043848705291748044, 0.04310732650756836, 0.042793983459472655, 0.04324249649047852, 0.04297830581665039, 0.04292505645751953, 0.04331622314453125, 0.04289023971557617, 0.042949630737304685, 0.043189247131347655, 0.04307865524291992, 0.04321791839599609, 0.0428851203918457, 0.04287590408325195, 0.04352204895019531, 0.04340019226074219, 0.04293734359741211, 0.042949630737304685, 0.04290764617919922, 0.04284928131103516, 0.04295987319946289, 0.043312126159667966, 0.04328550338745117, 0.04314112091064453, 0.043033599853515625, 0.042910720825195314, 0.04303564834594727, 0.04291481781005859, 0.04294655990600586, 0.042864639282226565, 0.04284620666503906, 0.04274380874633789, 0.043055103302001956, 0.04298854446411133, 0.04307558441162109, 0.04295270538330078, 0.0430909423828125, 0.04296499252319336, 0.044750846862792966, 0.04335308837890625, 0.04286361694335938, 0.042949630737304685, 0.04295475387573242, 0.04314828872680664, 0.044027904510498046, 0.043087871551513675, 0.0428851203918457, 0.04300697708129883, 0.04294041442871094, 0.04290764617919922, 0.04291891098022461, 0.04249292755126953, 0.04277043151855469, 0.04267212677001953, 0.04356915283203125, 0.044365825653076174, 0.04417536163330078, 0.04276633453369141, 0.04298342514038086, 0.04352614212036133, 0.04316057586669922, 0.043119617462158207, 0.043055103302001956, 0.042775550842285154, 0.0429854736328125, 0.04280319976806641, 0.042738689422607425, 0.04278681564331055, 0.04283699035644531, 0.042813438415527344, 0.04278169631958008, 0.0428328971862793, 0.04286975860595703, 0.042845184326171876, 0.04282777786254883, 0.04274995040893555, 0.0429567985534668, 0.04298649597167969, 0.04354764938354492, 0.043112449645996094, 0.04284108734130859, 0.042674175262451174, 0.042790912628173826, 0.043084800720214846, 0.04299673461914062, 0.04316159820556641, 0.04303564834594727, 0.042861568450927735, 0.04294246292114258, 0.043014144897460936, 0.0429117431640625, 0.04417638397216797, 0.044493824005126956, 0.042900478363037106, 0.042997760772705076, 0.042867710113525394, 0.04289945602416992, 0.04282470321655273, 0.04289535903930664, 0.04419276809692383, 0.04333055877685547, 0.043394046783447264, 0.043177982330322266, 
0.04311040115356445, 0.04296499252319336, 0.042913791656494144, 0.04287692642211914, 0.04281139373779297, 0.042889217376708984, 0.042850303649902347, 0.04256256103515625, 0.042856449127197264, 0.043028480529785154, 0.043052032470703126, 0.04313190460205078, 0.04288716888427734, 0.042889217376708984, 0.04303769683837891, 0.04360806274414063, 0.042823680877685545, 0.04280831909179687, 0.04298035049438476, 0.042945537567138675, 0.04317695999145508, 0.043000831604003906, 0.0439552001953125, 0.043044864654541014, 0.042889217376708984, 0.04294246292114258, 0.04297625732421875, 0.04293734359741211, 0.04280217742919922, 0.04282470321655273, 0.04298035049438476, 0.0429117431640625, 0.04280627059936523, 0.04293119812011719, 0.042805248260498044, 0.04282265472412109, 0.04303974533081055, 0.04299059295654297, 0.04298854446411133, 0.04361318588256836, 0.04302950286865234, 0.044333057403564455, 0.04304793548583984, 0.04296908950805664, 0.042867710113525394, 0.042843135833740234, 0.043104255676269534, 0.04293119812011719, 0.04296499252319336, 0.04284620666503906, 0.04306739044189453, 0.042780670166015625, 0.04298342514038086, 0.04248371124267578, 0.04317593765258789, 0.04290252685546875, 0.04288614273071289, 0.04273766326904297, 0.0430489616394043, 0.043066368103027344, 0.043698177337646485, 0.042962944030761716, 0.043049983978271485, 0.042742782592773435, 0.04299980926513672, 0.043014144897460936, 0.04274585723876953, 0.042968063354492186, 0.04290252685546875, 0.04290662384033203, 0.04294144058227539, 0.04284620666503906, 0.04256358337402344, 0.04308070373535156, 0.04293939208984375, 0.04295167922973633, 0.04285542297363281, 0.046064640045166014, 0.04410572814941406, 0.04326297760009765, 0.04313190460205078, 0.042979328155517575, 0.04288409423828125, 0.04298342514038086, 0.04285235214233398, 0.04292403030395508, 0.04394905471801758, 0.045241344451904295, 0.045104129791259766, 0.04426444625854492, 0.04420505523681641, 0.04314521789550781, 0.04293427276611328, 0.042877952575683595, 0.04394598388671875, 0.04392755126953125, 0.042831871032714845, 0.04315135955810547, 0.043401214599609376, 0.04297727966308594, 0.042913791656494144, 0.04335513687133789, 0.04419276809692383, 0.04316159820556641, 0.043270145416259766, 0.04451839828491211, 0.0429035530090332, 0.04289228820800781, 0.04335513687133789, 0.04311040115356445, 0.04293222427368164, 0.04296192169189453, 0.045071361541748046, 0.042943489074707034, 0.04292300796508789, 0.04300902557373047, 0.0429854736328125, 0.04293222427368164, 0.04298956680297852, 0.04314726257324219, 0.042896385192871096, 0.04294246292114258, 0.04290764617919922, 0.04428083038330078, 0.04283596801757812, 0.043104255676269534, 0.0435865592956543, 0.04373196792602539, 0.043049983978271485, 0.04329983901977539, 0.04287590408325195, 0.04290662384033203, 0.04298854446411133, 0.04398080062866211, 0.04510105514526367, 0.0430458869934082, 0.04348108673095703, 0.04387430572509766, 0.04446515274047851, 0.04387123107910156, 0.045074432373046876, 0.04472012710571289, 0.04340019226074219, 0.042927104949951174, 0.04288819122314453, 0.0430571517944336, 0.04326399993896484, 0.04317184066772461, 0.04298649597167969, 0.042821632385253904, 0.04292505645751953, 0.042859519958496094, 0.04290252685546875, 0.04278579330444336, 0.04279808044433594, 0.04269574356079102, 0.04271916961669922, 0.0427509765625, 0.04323430252075195, 0.043150337219238284, 0.04307865524291992, 0.04292607879638672, 0.04278169631958008, 0.04286873626708984, 0.04294655990600586, 0.04338687896728516, 0.04326502227783203, 
0.04277248001098633, 0.04289535903930664, 0.042840065002441405, 0.043736064910888675, 0.04286259078979492, 0.04287078475952148, 0.042780670166015625, 0.04280012893676758, 0.04286975860595703, 0.04291481781005859, 0.04285542297363281, 0.04283494567871094, 0.04285747146606445, 0.042812416076660156, 0.04290662384033203, 0.043046913146972655, 0.04275609588623047, 0.04294246292114258, 0.04288211059570313, 0.0434400634765625, 0.043166721343994144, 0.04428595352172852, 0.04369305419921875, 0.04283699035644531, 0.044065792083740236, 0.04282470321655273, 0.04381695938110351, 0.04314112091064453, 0.044083198547363284, 0.04433817672729492, 0.044352512359619144, 0.044042240142822264, 0.04388147354125976, 0.044142593383789064, 0.04432793426513672, 0.0435333137512207, 0.04434841537475586, 0.044368896484375, 0.0449617919921875, 0.042799102783203126, 0.04424192047119141, 0.04283699035644531, 0.043758590698242186, 0.04354662322998047, 0.04413337707519531, 0.04431564712524414, 0.04442521667480469, 0.04449484634399414, 0.04355891036987305, 0.043975742340087894, 0.04406367874145508, 0.04401971054077149, 0.043670528411865236, 0.04335923385620117, 0.04407295989990234, 0.04436275100708008, 0.04419891357421875, 0.0447907829284668, 0.04404121780395508, 0.044418048858642575, 0.04359372711181641, 0.0445849609375, 0.04451532745361328, 0.04432179260253906, 0.042856449127197264, 0.04405964660644531, 0.04328755187988281, 0.04385177612304687, 0.043447296142578126, 0.043312126159667966, 0.04370739364624023, 0.043041793823242185, 0.044148735046386715, 0.04325068664550781, 0.044025856018066405, 0.04333055877685547, 0.04341964721679688, 0.044505088806152344, 0.043177982330322266, 0.044499969482421874, 0.04386816024780273, 0.04412211227416992, 0.04304281616210937, 0.042995712280273435, 0.04313190460205078, 0.042933246612548825, 0.04295987319946289, 0.04321177673339844, 0.04305100631713867, 0.042912769317626956, 0.043154430389404294, 0.043030529022216796, 0.04410060882568359, 0.04338380813598633, 0.04322099304199219, 0.04306022262573242, 0.04433407974243164, 0.04420505523681641, 0.04372889709472656, 0.04311040115356445, 0.043769855499267575, 0.044197887420654294, 0.043447296142578126, 0.04469964981079102, 0.04423987197875977, 0.04379852676391602, 0.04352819061279297, 0.04365619277954102, 0.043597824096679685, 0.042998783111572264, 0.044085247039794925, 0.043817985534667966, 0.04403507232666016, 0.04424806213378906, 0.0436756477355957, 0.043796478271484376, 0.04818022537231445, 0.04454604721069336, 0.04313907241821289, 0.04417331314086914, 0.04326707077026367, 0.043824127197265625, 0.04353228759765625, 0.04370534515380859, 0.04441600036621094, 0.04452864074707031, 0.04478464126586914, 0.04291584014892578, 0.04410060882568359, 0.043235328674316405, 0.04337561416625976, 0.04372889709472656, 0.04307865524291992, 0.043428863525390625, 0.04325888061523438, 0.04427775955200195, 0.0433623046875, 0.043905025482177736, 0.043638782501220705, 0.04319744110107422, 0.04363161468505859, 0.04308377456665039, 0.04263628768920898, 0.043030529022216796, 0.04300697708129883, 0.042945537567138675, 0.04278988647460937, 0.0432097282409668, 0.04304383850097656, 0.042799102783203126, 0.04304076766967774, 0.04297011184692383, 0.04309708786010742, 0.04309401702880859, 0.04299980926513672, 0.043184127807617184, 0.043049983978271485, 0.04291481781005859, 0.0429854736328125, 0.04292403030395508, 0.04301004791259765, 0.04276224136352539, 0.04282572937011719, 0.043238399505615234, 0.04325580978393555, 0.042874881744384766, 0.04293734359741211, 
0.04269363021850586, 0.04291584014892578, 0.042774528503417966, 0.042810367584228515, 0.042810367584228515, 0.04508671951293945, 0.04304793548583984, 0.042987518310546875, 0.04292812728881836, 0.04364492797851562, 0.042917953491210935, 0.04541331100463867, 0.04384153747558594, 0.04337152099609375, 0.043417598724365236, 0.04301926422119141, 0.042972190856933594, 0.04297315216064453, 0.042913791656494144, 0.0429117431640625, 0.043420673370361325, 0.043251712799072264, 0.04325068664550781, 0.04314726257324219, 0.04286873626708984, 0.04285747146606445, 0.042845184326171876, 0.043025409698486325, 0.04429312133789062, 0.04416307067871094, 0.043096065521240234, 0.04267929458618164, 0.04285440063476562, 0.0427509765625, 0.04347596740722656, 0.043655166625976564, 0.042790912628173826, 0.042916862487792966, 0.042872833251953124, 0.04295270538330078, 0.042947582244873043, 0.04274687957763672, 0.04292505645751953, 0.04354150390625, 0.04331417465209961, 0.0433070068359375, 0.04301107025146484, 0.042856449127197264, 0.04284620666503906, 0.04445695877075195, 0.042987518310546875, 0.042864639282226565, 0.042933246612548825, 0.04276326370239258, 0.04284415817260742, 0.04281139373779297, 0.0429936637878418, 0.04368076705932617, 0.043979774475097655, 0.04330086517333984, 0.043469825744628904, 0.042782718658447266, 0.042851329803466794, 0.04271513748168945, 0.04278169631958008, 0.044377086639404296, 0.04367462539672851, 0.042900478363037106, 0.04283801651000976, 0.04273049545288086, 0.042927104949951174, 0.04288819122314453, 0.04294144058227539, 0.04261478424072265, 0.04292607879638672, 0.04279500961303711, 0.04290662384033203, 0.04280934524536133, 0.04295884704589844, 0.042777599334716795, 0.04294451141357422, 0.04280217742919922, 0.042891265869140625, 0.04348518371582031, 0.043390975952148435, 0.04270694351196289, 0.04291788864135742, 0.043611137390136716, 0.04440371322631836, 0.043205631256103515, 0.04307251358032226, 0.04280627059936523, 0.042889217376708984, 0.04274687957763672, 0.0427960319519043, 0.043638782501220705, 0.043308032989501956, 0.04287692642211914, 0.042858497619628906, 0.042761215209960936, 0.04279500961303711, 0.04352204895019531, 0.04526079940795898, 0.04291993713378906, 0.04285235214233398, 0.04438937759399414, 0.04293119812011719, 0.04272742462158203, 0.04303155136108398, 0.04280319976806641, 0.042793983459472655, 0.04282572937011719, 0.04292812728881836, 0.04286361694335938, 0.04274892807006836, 0.04271206283569336, 0.04275404739379883, 0.042790912628173826, 0.04266393661499023, 0.0427960319519043, 0.04270694351196289, 0.04356300735473633, 0.04423372650146484, 0.04470272064208984, 0.04311552047729492, 0.04323635101318359, 0.04357120132446289, 0.04285337448120117, 0.04279808044433594, 0.04366745758056641, 0.042929153442382816, 0.04280319976806641, 0.04277964782714844, 0.0433520622253418, 0.04286975860595703, 0.04287180709838867, 0.042775550842285154, 0.042788864135742184, 0.04278681564331055, 0.042848255157470705, 0.04275609588623047, 0.04275302505493164, 0.04282777786254883, 0.04281651306152344, 0.04294655990600586, 0.04279296112060547, 0.04270284652709961, 0.04339712142944336, 0.043123710632324216, 0.04329779052734375, 0.044300289154052735, 0.042787841796875, 0.04273356628417969, 0.04280217742919922, 0.042782718658447266, 0.042859519958496094, 0.04284928131103516, 0.04482662582397461, 0.04328755187988281, 0.04283596801757812, 0.04347391891479492, 0.042966014862060545, 0.04275404739379883, 0.042665985107421874, 0.04401049423217773, 0.0444026870727539, 0.04302336120605469, 
0.04289023971557617, 0.04290457534790039]",tokens/s,23.114892729704863,,,,,,,, -4bit-awq-gemv-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,s,s,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 304, in hf_raise_for_status - response.raise_for_status() - File ""/usr/local/lib/python3.10/dist-packages/requests/models.py"", line 1024, in raise_for_status - raise HTTPError(http_error_msg, response=self) -requests.exceptions.HTTPError: 404 Client Error: Not Found for url: https://huggingface.co/s/resolve/main/config.json - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1221, in hf_hub_download - return _hf_hub_download_to_cache_dir( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1325, in _hf_hub_download_to_cache_dir - _raise_on_head_call_error(head_call_error, force_download, local_files_only) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1823, in _raise_on_head_call_error - raise head_call_error - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1722, in _get_metadata_or_catch_error - metadata = get_hf_file_metadata(url=url, proxies=proxies, timeout=etag_timeout, headers=headers) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1645, in get_hf_file_metadata - r = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 372, in _request_wrapper - response = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 396, in _request_wrapper - hf_raise_for_status(response) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status - raise RepositoryNotFoundError(message, response) from e 
-huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-66948cd2-4d6c6b664524e1e5746d86dd;e5c7161f-f625-4c7e-b538-0425a6e939d7) - -Repository Not Found for url: https://huggingface.co/s/resolve/main/config.json. -Please make sure you specified the correct `repo_id` and `repo_type`. -If you are trying to access a private or gated repo, make sure you are authenticated. - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 425, in cached_file - raise EnvironmentError( -OSError: s is not a local folder and is not a valid model identifier listed on 'https://huggingface.co/models' -If this is a private repository, make sure to pass a token having permission to this repo either by logging in with `huggingface-cli login` or by passing `token=` - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemv-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,EleutherAI/pythia-12b,EleutherAI/pythia-12b,cuda,0,42,,,True,,,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,gpt_neox,MB,6655.565824,7732.723712,0.0,7147.094016,7138.9184,s,1,10.7392001953125,10.7392001953125,0.0,10.7392001953125,10.7392001953125,10.7392001953125,10.7392001953125,[10.7392001953125],,kWh,4.337964110555327e-05,2.3760041904830296e-05,6.179643832604498e-05,0.00012893612133642855,,MB,1580.945408,8464.62976,0.0,7818.182656,7724.300288,s,10,1.893032470703125,0.18930324707031249,0.00019315291339763006,0.18927529907226562,0.1894895782470703,0.189630615234375,0.18974344482421876,"[0.1897716522216797, 0.18945823669433592, 0.18937660217285157, 
0.18932086181640626, 0.18920040893554688, 0.18914300537109374, 0.18916773986816407, 0.18928643798828124, 0.18904336547851563, 0.18926416015625]",tokens/s,1352.3275694521737,kWh,2.236331012434473e-06,1.2253947662784007e-06,1.2493731168981967e-05,1.595545694769484e-05,tokens/kWh,16044667.4037114,MB,1622.749184,8611.4304,0.0,7964.983296,7904.605696,s,10,21.011601684570312,2.101160168457031,0.03237982667971443,2.1055194091796876,2.1332814208984376,2.138290856933594,2.1422984057617187,"[2.081607421875, 2.124879150390625, 2.132168212890625, 2.058447509765625, 2.0397393798828123, 2.093465576171875, 2.12909228515625, 2.09132861328125, 2.1175732421875, 2.14330029296875]",tokens/s,29.983435316244123,kWh,2.448323828346892e-05,1.3417574273009846e-05,8.52306525660164e-05,0.0001231314651224952,tokens/kWh,511648.2609650226,,s,630,21.009244092941273,0.0333480064967322,0.001023252844351724,0.03386470413208008,0.034110667037963865,0.03432135696411133,0.03550397464752198,"[0.03222828674316406, 0.03194367980957031, 0.03243833541870117, 0.033866687774658205, 0.031936511993408204, 0.03184230422973633, 0.03189452743530274, 0.031749120712280275, 0.031936511993408204, 0.03191500854492187, 0.031922176361083986, 0.03186380767822266, 0.03196416091918945, 0.031476736068725586, 0.031388671875, 0.03148492813110351, 0.031644672393798826, 0.031854591369628905, 0.03343462371826172, 0.034062335968017575, 0.0339046401977539, 0.0339128303527832, 0.03438387298583984, 0.03401318359375, 0.03403571319580078, 0.03411763381958008, 0.034065406799316404, 0.03425484848022461, 0.03431628799438476, 0.03380223846435547, 0.03405209732055664, 0.03402956771850586, 0.03400191879272461, 0.03379513549804687, 0.03381856155395508, 0.033808383941650394, 0.03383910369873047, 0.033808383941650394, 0.03351551818847656, 0.0338524169921875, 0.033888256072998044, 0.03390156936645508, 0.03395891189575195, 0.03381862258911133, 0.03389440155029297, 0.03378073501586914, 0.033914878845214845, 0.033791999816894534, 0.03426611328125, 0.03376844787597656, 0.03391385650634766, 0.0318525447845459, 0.031916032791137694, 0.03373567962646484, 0.03406438446044922, 0.03189043235778809, 0.0318525447845459, 0.03187404823303223, 0.031888383865356446, 0.03170816040039062, 0.03184537506103516, 0.03172761535644531, 0.031954944610595705, 0.034418689727783204, 0.03392102432250976, 0.033890304565429685, 0.03376537704467773, 0.03384633636474609, 0.03395475387573242, 0.03381350326538086, 0.03373567962646484, 0.031835136413574217, 0.0317890567779541, 0.03402751922607422, 0.03389440155029297, 0.03382988739013672, 0.033825790405273434, 0.03383910369873047, 0.03385958480834961, 0.034187263488769534, 0.03391385650634766, 0.033919998168945316, 0.03397836685180664, 0.033925121307373046, 0.03381657409667969, 0.03386470413208008, 0.03387494277954101, 0.03403673553466797, 0.03365273666381836, 0.03383091354370117, 0.03384832000732422, 0.03391795349121094, 0.03380428695678711, 0.03402751922607422, 0.033964031219482424, 0.03400396728515625, 0.03386470413208008, 0.03383091354370117, 0.033800193786621094, 0.033808383941650394, 0.033732608795166014, 0.032835582733154296, 0.03397017669677734, 0.033562625885009766, 0.03395686340332031, 0.033544193267822264, 0.03177471923828125, 0.03184025573730469, 0.031731712341308595, 0.03419340896606445, 0.03386880111694336, 0.03516723251342774, 0.03419750213623047, 0.033865726470947266, 0.03386675262451172, 0.03391795349121094, 0.03374796676635742, 0.033966079711914066, 0.03399679946899414, 0.03385139083862305, 0.03386982345581055, 0.03343155288696289, 
0.033842174530029294, 0.03390054321289063, 0.03389952087402344, 0.03399168014526367, 0.03283251190185547, 0.03402751922607422, 0.03381043243408203, 0.03435520172119141, 0.03400908660888672, 0.03395891189575195, 0.03408588790893555, 0.03192934417724609, 0.03292160034179688, 0.03441766357421875, 0.03592601776123047, 0.03434393692016602, 0.03435520172119141, 0.03406950378417969, 0.03372748947143555, 0.03376230239868164, 0.03397119903564453, 0.03404185485839844, 0.03386368179321289, 0.03467673492431641, 0.033949695587158206, 0.03179622459411621, 0.03186380767822266, 0.033546241760253906, 0.03430092620849609, 0.03406028747558594, 0.03451596832275391, 0.032492542266845705, 0.03388108825683594, 0.03382476806640625, 0.03339468765258789, 0.0338524169921875, 0.03387596893310547, 0.0341104621887207, 0.03403776168823242, 0.033933311462402346, 0.03407769775390625, 0.03390771102905273, 0.0339947509765625, 0.03410432052612305, 0.03400294494628906, 0.033895423889160156, 0.03403059387207031, 0.03386982345581055, 0.03394559860229492, 0.033873920440673826, 0.03394867324829102, 0.03379507064819336, 0.03408998489379883, 0.033926143646240234, 0.03398860931396484, 0.03386368179321289, 0.03403673553466797, 0.03391897583007813, 0.03397324752807617, 0.033838081359863284, 0.033827838897705076, 0.03396710586547851, 0.03394867324829102, 0.032639999389648434, 0.034129920959472655, 0.0338524169921875, 0.033966079711914066, 0.032048126220703126, 0.03177369689941406, 0.03174399948120117, 0.031749120712280275, 0.031716352462768556, 0.03175833511352539, 0.03176140785217285, 0.03173990440368652, 0.03183718490600586, 0.03195084762573242, 0.03199180793762207, 0.03380633544921875, 0.03424460983276367, 0.033939456939697264, 0.03376537704467773, 0.03361382293701172, 0.033734657287597655, 0.03378176116943359, 0.03380326461791992, 0.03374393463134766, 0.03375302505493164, 0.034051071166992186, 0.03376639938354492, 0.03384320068359375, 0.03363532638549805, 0.03170816040039062, 0.0317573127746582, 0.031734783172607424, 0.03184230422973633, 0.03176243209838867, 0.031732736587524416, 0.03175935935974121, 0.031752191543579104, 0.031784959793090824, 0.03179110336303711, 0.03179936027526856, 0.03174393653869629, 0.031893503189086916, 0.03173785591125488, 0.03171225547790527, 0.03172352027893066, 0.03171737670898438, 0.03407462310791016, 0.03379916763305664, 0.03383193588256836, 0.03387801742553711, 0.03386368179321289, 0.03390259170532227, 0.033708030700683594, 0.03381760025024414, 0.03378483200073242, 0.034108417510986325, 0.0339159049987793, 0.0318156795501709, 0.03176345634460449, 0.03172352027893066, 0.0318474235534668, 0.03177676773071289, 0.03180031967163086, 0.031833087921142575, 0.03181465530395508, 0.03370700836181641, 0.03394047927856445, 0.03170508766174317, 0.031710208892822264, 0.03178700828552246, 0.031659072875976565, 0.03596486282348633, 0.03418009567260742, 0.03398963165283203, 0.034081790924072264, 0.0338974723815918, 0.03384729766845703, 0.033767425537109375, 0.03369267272949219, 0.03383705520629883, 0.03402956771850586, 0.03176345634460449, 0.031732736587524416, 0.03184435272216797, 0.03179315185546875, 0.031855615615844726, 0.03179315185546875, 0.031870975494384765, 0.03177779197692871, 0.03184127998352051, 0.03179520034790039, 0.03186380767822266, 0.03179827117919922, 0.0318474235534668, 0.03175526428222656, 0.03183616065979004, 0.03177881622314453, 0.03179315185546875, 0.03182899284362793, 0.03179007911682129, 0.03177267265319824, 0.031882240295410154, 0.031718399047851564, 0.031904767990112305, 0.03182489585876465, 
0.03183923149108887, 0.0317890567779541, 0.03182899284362793, 0.031817728042602536, 0.03178188705444336, 0.03179929542541504, 0.031959039688110355, 0.031850496292114255, 0.0319180793762207, 0.03178291130065918, 0.031817728042602536, 0.031851520538330076, 0.031904767990112305, 0.03181158447265625, 0.03181260871887207, 0.03175628852844238, 0.03386982345581055, 0.03406950378417969, 0.03387289428710937, 0.03387187194824219, 0.03362201690673828, 0.033833984375, 0.03181977653503418, 0.031768575668334964, 0.03184435272216797, 0.03172352027893066, 0.031704063415527346, 0.031699968338012696, 0.03166720008850098, 0.03168358421325684, 0.03177471923828125, 0.032161792755126956, 0.033928192138671875, 0.033942527770996093, 0.03375001525878906, 0.03373056030273437, 0.033650688171386715, 0.033805313110351565, 0.033724414825439454, 0.031736831665039066, 0.03179929542541504, 0.03182796859741211, 0.03172249603271484, 0.03179520034790039, 0.03163852882385254, 0.03170099258422852, 0.03185663986206055, 0.031297536849975584, 0.03165593528747559, 0.03177779197692871, 0.03290828704833984, 0.03507814407348633, 0.0343111686706543, 0.03394867324829102, 0.03402751922607422, 0.03357900619506836, 0.033947647094726564, 0.03385036849975586, 0.03394047927856445, 0.03386880111694336, 0.03179315185546875, 0.03386777496337891, 0.03411251068115234, 0.03192831993103027, 0.03380428695678711, 0.03383500671386719, 0.03387596893310547, 0.033888256072998044, 0.033923072814941405, 0.03400191879272461, 0.03390156936645508, 0.03458969497680664, 0.034141185760498044, 0.03395993423461914, 0.03400089645385742, 0.03391385650634766, 0.03394867324829102, 0.033947647094726564, 0.033909759521484374, 0.03391385650634766, 0.03389952087402344, 0.03370086288452148, 0.0339865608215332, 0.03379302215576172, 0.03394662475585938, 0.033893375396728515, 0.03389440155029297, 0.03364147186279297, 0.031927295684814457, 0.03181260871887207, 0.03192422485351563, 0.03392204666137695, 0.033791999816894534, 0.03394047927856445, 0.034097152709960936, 0.035573760986328126, 0.03403987121582031, 0.033874881744384765, 0.03387494277954101, 0.03243417739868164, 0.03633561706542969, 0.03462144088745117, 0.03393740844726562, 0.03433779144287109, 0.03413913726806641, 0.03369881439208984, 0.031851520538330076, 0.03196416091918945, 0.03401523208618164, 0.03385343933105469, 0.033898494720458985, 0.0337520637512207, 0.03393638229370117, 0.0337786865234375, 0.03383705520629883, 0.03387088012695313, 0.03382780838012695, 0.03378688049316406, 0.033844223022460936, 0.03165081596374512, 0.03178598403930664, 0.03168358421325684, 0.03419955062866211, 0.033805313110351565, 0.0340469741821289, 0.035737598419189456, 0.03432550430297852, 0.033906688690185545, 0.03384832000732422, 0.033821697235107424, 0.033903617858886716, 0.03518259048461914, 0.03402342224121094, 0.0339947509765625, 0.03404390335083008, 0.03356159973144531, 0.03392921447753906, 0.03383603286743164, 0.03393740844726562, 0.033890304565429685, 0.03395481491088867, 0.0338155517578125, 0.034051071166992186, 0.03412582397460937, 0.03417292785644531, 0.03449139022827148, 0.034385921478271485, 0.033931262969970705, 0.03422208023071289, 0.03405619049072266, 0.034034687042236327, 0.03223244857788086, 0.031925247192382815, 0.03227545547485351, 0.03182489585876465, 0.031834112167358396, 0.03177779197692871, 0.03186278343200684, 0.03179315185546875, 0.031921152114868165, 0.03181363105773926, 0.031848447799682614, 0.03182694435119629, 0.03177164840698242, 0.03155558395385742, 0.03174297523498535, 0.03181260871887207, 
0.03182796859741211, 0.03186073684692383, 0.031920127868652344, 0.03206246566772461, 0.03194470405578613, 0.03366195297241211, 0.0339159049987793, 0.03390771102905273, 0.03393843078613281, 0.03391897583007813, 0.03400089645385742, 0.034070526123046875, 0.03444326400756836, 0.03399884796142578, 0.03389440155029297, 0.03395174407958984, 0.03392409515380859, 0.033876991271972655, 0.03386470413208008, 0.033919998168945316, 0.03379097747802735, 0.033903617858886716, 0.03434598541259765, 0.036722686767578124, 0.03423027038574219, 0.0340684814453125, 0.033974273681640625, 0.03387801742553711, 0.03381350326538086, 0.033919998168945316, 0.03394559860229492, 0.03333222579956055, 0.03387187194824219, 0.03364044952392578, 0.033873920440673826, 0.03195187187194824, 0.03182284736633301, 0.03216998291015625, 0.033983486175537106, 0.033906688690185545, 0.03400089645385742, 0.03450777435302734, 0.03435520172119141, 0.03397017669677734, 0.033865726470947266, 0.03391692733764649, 0.03260211181640625, 0.03192831993103027, 0.03198566436767578, 0.031936511993408204, 0.03203379058837891, 0.03197952079772949, 0.03187507247924805, 0.03386982345581055, 0.032024574279785153, 0.031980575561523436, 0.03311407852172851, 0.03427942276000977, 0.033979393005371096, 0.03399679946899414, 0.03399168014526367, 0.03389235305786133, 0.03390566253662109, 0.03406131362915039, 0.03400703811645508, 0.034065406799316404, 0.03448012924194336, 0.034181121826171876, 0.033925121307373046, 0.033980415344238284, 0.03381248092651367, 0.034127872467041014, 0.03193036842346191, 0.033964031219482424, 0.03386982345581055, 0.03400703811645508, 0.033931262969970705, 0.03407155227661133, 0.03436441421508789, 0.03596799850463867, 0.03419955062866211, 0.03394355010986328, 0.03228979110717774, 0.033890304565429685, 0.03381760025024414, 0.03228057479858398, 0.03400294494628906, 0.034044929504394535, 0.03377664184570312, 0.03399884796142578, 0.0338155517578125, 0.03392102432250976, 0.03181056022644043, 0.03344076919555664, 0.033964031219482424, 0.034080768585205076, 0.033949695587158206, 0.03386265563964844, 0.03385139083862305, 0.03397017669677734, 0.033919998168945316, 0.033923072814941405, 0.03380326461791992, 0.03399168014526367, 0.03379609680175781, 0.03392102432250976, 0.03371110534667969, 0.033942527770996093, 0.03383500671386719, 0.03405516815185547, 0.03392204666137695, 0.034095104217529294, 0.03397119903564453, 0.03406643295288086, 0.034049022674560545, 0.0339343376159668, 0.03396710586547851, 0.03414323043823242, 0.03404390335083008, 0.03426816177368164, 0.034203647613525394, 0.03388313674926758, 0.033919998168945316, 0.03386470413208008, 0.03397529602050781, 0.03402751922607422, 0.03386470413208008, 0.03410227203369141, 0.03386675262451172, 0.033896446228027344, 0.03388620758056641, 0.03393228912353516, 0.03416166305541992, 0.034083839416503905, 0.03385548782348633, 0.033947647094726564, 0.0338155517578125, 0.033933311462402346, 0.03395276641845703, 0.03388518524169922, 0.03417599868774414, 0.03392409515380859, 0.03403878402709961, 0.03398860931396484, 0.033979393005371096, 0.033890304565429685, 0.03391897583007813, 0.03415859222412109, 0.033893375396728515, 0.034088958740234376, 0.033925121307373046, 0.0339046401977539, 0.033896446228027344, 0.03384729766845703, 0.0339128303527832, 0.03389952087402344, 0.03408486557006836, 0.033957889556884766, 0.03527782440185547, 0.0353331184387207, 0.034151424407958986, 0.034097152709960936, 0.03393024063110352, 0.03384729766845703, 0.03396915054321289, 0.033955841064453124, 0.03384832000732422, 
0.03388518524169922, 0.03395891189575195, 0.033995777130126956, 0.03406335830688476, 0.03388620758056641, 0.03385343933105469]",tokens/s,29.986799963529776,,,,,,,, -4bit-awq-gemv-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,2,2,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 304, in hf_raise_for_status - response.raise_for_status() - File ""/usr/local/lib/python3.10/dist-packages/requests/models.py"", line 1024, in raise_for_status - raise HTTPError(http_error_msg, response=self) -requests.exceptions.HTTPError: 404 Client Error: Not Found for url: https://huggingface.co/2/resolve/main/config.json - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1221, in hf_hub_download - return _hf_hub_download_to_cache_dir( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1325, in _hf_hub_download_to_cache_dir - _raise_on_head_call_error(head_call_error, force_download, local_files_only) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1823, in _raise_on_head_call_error - raise head_call_error - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1722, in _get_metadata_or_catch_error - metadata = get_hf_file_metadata(url=url, proxies=proxies, timeout=etag_timeout, headers=headers) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1645, in get_hf_file_metadata - r = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 372, in _request_wrapper - response = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 396, in _request_wrapper - hf_raise_for_status(response) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in 
hf_raise_for_status - raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-66949353-6dbed5c34f1fb2443b523cf0;50b4a2e8-f415-4f7f-a4f5-d7dd784d2130) - -Repository Not Found for url: https://huggingface.co/2/resolve/main/config.json. -Please make sure you specified the correct `repo_id` and `repo_type`. -If you are trying to access a private or gated repo, make sure you are authenticated. - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 425, in cached_file - raise EnvironmentError( -OSError: 2 is not a local folder and is not a valid model identifier listed on 'https://huggingface.co/models' -If this is a private repository, make sure to pass a token having permission to this repo either by logging in with `huggingface-cli login` or by passing `token=` - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemv-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,tiiuae/falcon-40b,tiiuae/falcon-40b,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) 
-ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run - report = scenario.run(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 105, in run - _ = backend.generate(self.inputs, self.config.generate_kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context - return func(*args, **kwargs) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 400, in generate - return self.pretrained_model.generate(**inputs, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context - return func(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 1914, in generate - result = self._sample( - File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2651, in _sample - outputs = self( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/root/.cache/huggingface/modules/transformers_modules/tiiuae/falcon-40b/4a70170c215b36a3cce4b4253f6d0612bb7d4146/modeling_falcon.py"", line 900, in forward - transformer_outputs = self.transformer( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/root/.cache/huggingface/modules/transformers_modules/tiiuae/falcon-40b/4a70170c215b36a3cce4b4253f6d0612bb7d4146/modeling_falcon.py"", line 797, in forward - outputs = block( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/root/.cache/huggingface/modules/transformers_modules/tiiuae/falcon-40b/4a70170c215b36a3cce4b4253f6d0612bb7d4146/modeling_falcon.py"", line 453, in forward - attn_outputs = self.self_attention( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/root/.cache/huggingface/modules/transformers_modules/tiiuae/falcon-40b/4a70170c215b36a3cce4b4253f6d0612bb7d4146/modeling_falcon.py"", line 291, in forward - fused_qkv = self.query_key_value(hidden_states) # [batch_size, seq_length, 3 x hidden_size] - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 
115, in decorate_context - return func(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/gemv.py"", line 162, in forward - assert AWQ_INSTALLED, ( -AssertionError: AWQ kernels could not be loaded. Please install them from https://github.com/casper-hansen/AutoAWQ_kernels - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemv-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,EleutherAI/pythia-160m,EleutherAI/pythia-160m,cuda,0,42,,,True,,,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,gpt_neox,MB,917.991424,845.676544,0.0,260.046848,253.883392,s,1,7.2373349609375,7.2373349609375,0.0,7.2373349609375,7.2373349609375,7.2373349609375,7.2373349609375,[7.2373349609375],,kWh,5.202897611796845e-06,2.835741321418227e-06,6.784449871954923e-06,1.4823088805169995e-05,,MB,1519.480832,981.991424,0.0,335.54432,313.01632,s,14,0.1805254716873169,0.012894676549094064,0.00027989824268468056,0.012738895893096923,0.013144368171691895,0.013350678730010987,0.013647569522857667,"[0.012694016456604004, 0.012720383644104005, 0.0127324800491333, 0.012900639533996582, 0.012745311737060547, 0.012865728378295898, 0.012691295623779297, 0.013050784111022949, 0.01267903995513916, 0.013150848388671876, 0.012721343994140625, 0.013721792221069336, 0.013129247665405273, 0.012722559928894043]",tokens/s,19853.15405356063,kWh,1.5381531420173292e-07,8.428303452941536e-08,3.909005586692279e-07,6.289989074003762e-07,tokens/kWh,406995937.49381274,MB,1565.462528,1007.157248,0.0,360.710144,313.805312,s,14,9.88284216308594,0.7059172973632813,0.0026585923152524205,0.7058397216796874,0.7091084167480468,0.7104883178710937,0.7115158764648437,"[0.7069877319335938, 0.7022655029296875, 0.7055581665039062, 0.705840087890625, 0.7006841430664063, 0.70583935546875, 0.7042123413085938, 0.7063041381835937, 0.7045219116210938, 0.7097966918945312, 0.7056138916015625, 0.7117727661132812, 0.70750244140625, 0.7059429931640625]",tokens/s,89.24558193334472,kWh,8.271233728159949e-06,4.532266729095672e-06,1.3251277021731903e-05,2.6054777478987517e-05,tokens/kWh,2417982.65407593,,s,882,9.876868084907532,0.011198263134815796,0.0002295521086725433,0.011118080139160156,0.011466649627685547,0.011575860500335693,0.01216022527694702,"[0.011113471984863281, 0.011094016075134277, 0.011103232383728028, 0.011077631950378418, 0.011073535919189453, 0.011110400199890137, 0.011131903648376466, 0.011068415641784669, 0.011123711585998536, 0.011101183891296386, 0.011182080268859864, 0.011273247718811035, 0.011241439819335938, 0.01144217586517334, 0.01123635196685791, 0.011075584411621094, 0.011143168449401856, 0.01113804817199707, 0.011088895797729491, 0.010958847999572753, 0.011098112106323242, 0.011068415641784669, 0.011102208137512207, 0.011103232383728028, 0.011223039627075194, 0.011097087860107421, 0.011111424446105958, 0.011147263526916504, 0.011074560165405273, 0.011116543769836425, 0.011068415641784669, 0.01104793643951416, 0.011076607704162598, 0.011102208137512207, 0.011085824012756347, 
0.011119615554809571, 0.011070464134216309, 0.011082752227783203, 0.011111424446105958, 0.011143168449401856, 0.01125376033782959, 0.011539456367492675, 0.011365376472473144, 0.011886591911315919, 0.012194815635681153, 0.011504639625549316, 0.011556863784790039, 0.011359231948852539, 0.011249664306640626, 0.011276288032531738, 0.011276320457458496, 0.011144160270690918, 0.0112424955368042, 0.011182080268859864, 0.01115340805053711, 0.011156543731689453, 0.011109312057495118, 0.011482111930847168, 0.011859968185424804, 0.011389951705932617, 0.011258879661560058, 0.011189248085021973, 0.011095040321350098, 0.010971136093139648, 0.011141119956970215, 0.01115443229675293, 0.011116543769836425, 0.011086848258972168, 0.011091967582702637, 0.011096063613891602, 0.011119615554809571, 0.011073535919189453, 0.011060223579406739, 0.011069439888000488, 0.0112424955368042, 0.01123635196685791, 0.011215871810913085, 0.011139072418212891, 0.011117568016052246, 0.011473919868469238, 0.011086848258972168, 0.011103232383728028, 0.011058176040649414, 0.011116543769836425, 0.011124735832214355, 0.011011072158813476, 0.011062272071838379, 0.011101183891296386, 0.011088895797729491, 0.011108351707458495, 0.011090944290161133, 0.011380736351013183, 0.011436032295227052, 0.01141759967803955, 0.011396096229553223, 0.011455488204956055, 0.01131827163696289, 0.011066399574279785, 0.011115488052368163, 0.011114496231079102, 0.011091967582702637, 0.0111595516204834, 0.011067392349243164, 0.011091967582702637, 0.011133952140808106, 0.011115519523620606, 0.011053055763244628, 0.011062272071838379, 0.0109619197845459, 0.011099136352539063, 0.011058176040649414, 0.011094016075134277, 0.011113471984863281, 0.011123711585998536, 0.011125760078430176, 0.011092991828918456, 0.011124735832214355, 0.01113804817199707, 0.01115340805053711, 0.011076607704162598, 0.011144191741943359, 0.011106304168701172, 0.011111424446105958, 0.01112883186340332, 0.011062272071838379, 0.011302911758422851, 0.011113471984863281, 0.011155455589294434, 0.011106304168701172, 0.011088895797729491, 0.011424768447875976, 0.011531295776367188, 0.011295712471008301, 0.01146675205230713, 0.011449343681335449, 0.011512831687927246, 0.011291647911071777, 0.01112063980102539, 0.011175935745239257, 0.01116262435913086, 0.011168767929077148, 0.011204607963562012, 0.011139072418212891, 0.01115135955810547, 0.011148287773132324, 0.01117081642150879, 0.011085824012756347, 0.011502592086791993, 0.011698176383972168, 0.011395071983337402, 0.01114521598815918, 0.011058176040649414, 0.010962944030761718, 0.011168767929077148, 0.011201536178588867, 0.011399168014526367, 0.01112166404724121, 0.011060223579406739, 0.011098112106323242, 0.011140095710754394, 0.011126784324645997, 0.011072511672973634, 0.011068415641784669, 0.011092991828918456, 0.011099136352539063, 0.011148287773132324, 0.01121177577972412, 0.011476991653442382, 0.01113804817199707, 0.011080703735351562, 0.01115443229675293, 0.011057151794433593, 0.011110400199890137, 0.011142144203186035, 0.010974207878112792, 0.0110632963180542, 0.011080703735351562, 0.011094016075134277, 0.011099136352539063, 0.011096063613891602, 0.011065343856811523, 0.011111424446105958, 0.011111424446105958, 0.011398143768310547, 0.011436032295227052, 0.011053055763244628, 0.011115519523620606, 0.011413503646850585, 0.01113804817199707, 0.01112063980102539, 0.01113702392578125, 0.011059200286865235, 0.01112883186340332, 0.011135999679565429, 0.011149312019348144, 0.011043840408325196, 0.011077631950378418, 0.011333632469177245, 
0.011125760078430176, 0.011110400199890137, 0.011083776473999024, 0.011109375953674316, 0.01111244773864746, 0.011122688293457032, 0.011123711585998536, 0.01112063980102539, 0.011123711585998536, 0.011208703994750976, 0.01112063980102539, 0.011102239608764648, 0.011145183563232422, 0.011139072418212891, 0.011117568016052246, 0.011122688293457032, 0.011103232383728028, 0.011144191741943359, 0.011116543769836425, 0.011171839714050292, 0.011254783630371093, 0.011328512191772461, 0.011157504081726074, 0.011131903648376466, 0.011158528327941895, 0.011167743682861327, 0.011103232383728028, 0.011173888206481934, 0.011146240234375, 0.011124799728393555, 0.011066304206848145, 0.011133952140808106, 0.011163647651672364, 0.011143168449401856, 0.011109375953674316, 0.011080703735351562, 0.0111278076171875, 0.01113804817199707, 0.011118592262268067, 0.01113804817199707, 0.011109375953674316, 0.011544575691223144, 0.01163263988494873, 0.011212800025939941, 0.011439104080200196, 0.011453439712524414, 0.01141759967803955, 0.011473919868469238, 0.011419648170471192, 0.011455488204956055, 0.011427840232849122, 0.011460607528686523, 0.011473919868469238, 0.011207679748535156, 0.011041791915893554, 0.011534336090087891, 0.011119615554809571, 0.011086848258972168, 0.011082752227783203, 0.011062272071838379, 0.01112063980102539, 0.011104255676269532, 0.01107148838043213, 0.010971136093139648, 0.010898431777954102, 0.010942463874816894, 0.010997759819030761, 0.011016192436218262, 0.010969087600708008, 0.010972160339355469, 0.010958847999572753, 0.010920991897583008, 0.01108886432647705, 0.010974207878112792, 0.011518976211547852, 0.011600895881652832, 0.0115098237991333, 0.01144108772277832, 0.011287551879882812, 0.011161600112915039, 0.011094016075134277, 0.011068415641784669, 0.01107148838043213, 0.01112883186340332, 0.01111244773864746, 0.011141119956970215, 0.010935296058654785, 0.011082752227783203, 0.011066368103027344, 0.011085824012756347, 0.01112166404724121, 0.011040767669677735, 0.011060223579406739, 0.011077631950378418, 0.011092991828918456, 0.01103667163848877, 0.011096063613891602, 0.01120358371734619, 0.011106304168701172, 0.011068415641784669, 0.011041791915893554, 0.011052032470703126, 0.011074560165405273, 0.011077631950378418, 0.011001855850219726, 0.011066368103027344, 0.011106304168701172, 0.011045887947082519, 0.01100499153137207, 0.01095468807220459, 0.011124735832214355, 0.011267071723937988, 0.011041791915893554, 0.011097087860107421, 0.011097087860107421, 0.011585536003112793, 0.011482111930847168, 0.011072511672973634, 0.01107148838043213, 0.011082752227783203, 0.011080703735351562, 0.011125760078430176, 0.011080703735351562, 0.011361280441284179, 0.011143168449401856, 0.011060223579406739, 0.011103232383728028, 0.011099136352539063, 0.011067392349243164, 0.011193344116210938, 0.011048959732055665, 0.011609087944030762, 0.011873279571533203, 0.01137664031982422, 0.011041791915893554, 0.011001952171325683, 0.011071392059326172, 0.011150336265563965, 0.011089920043945312, 0.011059200286865235, 0.011156479835510253, 0.011415552139282227, 0.011393024444580077, 0.011936767578125, 0.011339776039123535, 0.011051008224487305, 0.011320320129394532, 0.011185215950012208, 0.011122624397277832, 0.011214847564697266, 0.0111278076171875, 0.010938367843627929, 0.011035648345947266, 0.011139072418212891, 0.011257856369018555, 0.011497471809387207, 0.01123737621307373, 0.011097087860107421, 0.01112166404724121, 0.011122688293457032, 0.01121177577972412, 0.011088895797729491, 0.011084799766540527, 
0.011089920043945312, 0.011109375953674316, 0.011116543769836425, 0.011084799766540527, 0.011299839973449707, 0.011453439712524414, 0.011069439888000488, 0.011087871551513672, 0.01092915153503418, 0.010972160339355469, 0.010968128204345702, 0.011346879959106446, 0.01112985610961914, 0.011440128326416015, 0.011486207962036133, 0.01143398380279541, 0.011405311584472656, 0.011099136352539063, 0.011010047912597656, 0.011130880355834961, 0.010933247566223145, 0.01093939208984375, 0.010933247566223145, 0.01094758415222168, 0.011048959732055665, 0.010889216423034668, 0.011011072158813476, 0.011073535919189453, 0.011418656349182129, 0.011074527740478516, 0.011065343856811523, 0.011146240234375, 0.011080703735351562, 0.011009023666381837, 0.011033599853515624, 0.01103872013092041, 0.011101183891296386, 0.011169792175292969, 0.011479040145874024, 0.011517951965332032, 0.011304960250854493, 0.011249664306640626, 0.011156479835510253, 0.011075584411621094, 0.011083776473999024, 0.011083807945251465, 0.01108784008026123, 0.011085824012756347, 0.011131903648376466, 0.011072511672973634, 0.011072511672973634, 0.011125760078430176, 0.011460607528686523, 0.011097087860107421, 0.011042816162109375, 0.011113471984863281, 0.011012096405029297, 0.011220992088317871, 0.011346943855285644, 0.01154355239868164, 0.011681792259216308, 0.011337727546691894, 0.011193344116210938, 0.011194368362426758, 0.011152383804321288, 0.01124454402923584, 0.011672575950622559, 0.01145036792755127, 0.01111244773864746, 0.011564031600952148, 0.01186508846282959, 0.011414527893066406, 0.011155455589294434, 0.011041791915893554, 0.01116262435913086, 0.011147263526916504, 0.01104691219329834, 0.011006976127624512, 0.010818559646606446, 0.011026432037353515, 0.011104255676269532, 0.011080703735351562, 0.01103872013092041, 0.011274239540100098, 0.010928128242492676, 0.010946559906005859, 0.010902527809143067, 0.01091481590270996, 0.011552767753601074, 0.011420672416687011, 0.011418623924255371, 0.011150336265563965, 0.011001855850219726, 0.011339776039123535, 0.01105510425567627, 0.011147263526916504, 0.011058176040649414, 0.01107968044281006, 0.011087871551513672, 0.010973183631896973, 0.011013119697570802, 0.011103232383728028, 0.011077631950378418, 0.0110632963180542, 0.01107148838043213, 0.011065343856811523, 0.011041791915893554, 0.011210751533508301, 0.011083776473999024, 0.011017215728759766, 0.011140095710754394, 0.011088895797729491, 0.0110448637008667, 0.011075584411621094, 0.0110632963180542, 0.011150336265563965, 0.012613632202148438, 0.011708415985107423, 0.01153331184387207, 0.01255628776550293, 0.011728896141052245, 0.011509759902954102, 0.01145241641998291, 0.011161600112915039, 0.01112883186340332, 0.011567135810852051, 0.0113919677734375, 0.011234304428100587, 0.011123711585998536, 0.011054080009460449, 0.011067392349243164, 0.011408384323120118, 0.011431936264038087, 0.011108351707458495, 0.011176959991455078, 0.011174912452697755, 0.011085824012756347, 0.01107968044281006, 0.011046976089477538, 0.010912704467773437, 0.01093222427368164, 0.010954751968383789, 0.010962944030761718, 0.010818559646606446, 0.01103052806854248, 0.011080703735351562, 0.011081727981567382, 0.011352095603942871, 0.011445216178894043, 0.011554816246032714, 0.011482111930847168, 0.011204607963562012, 0.011126784324645997, 0.011522047996520996, 0.011370495796203613, 0.011042816162109375, 0.011576319694519043, 0.011851776123046874, 0.011132927894592285, 0.011097087860107421, 0.01117081642150879, 0.011392000198364258, 0.011051008224487305, 
0.011018239974975585, 0.011065343856811523, 0.010911744117736816, 0.011124735832214355, 0.011065343856811523, 0.011072511672973634, 0.011073535919189453, 0.011072511672973634, 0.01107151985168457, 0.011360223770141602, 0.011068415641784669, 0.011046976089477538, 0.01121788787841797, 0.0113919677734375, 0.011126784324645997, 0.011057151794433593, 0.011027520179748534, 0.011102144241333007, 0.011103232383728028, 0.011056127548217774, 0.01102233600616455, 0.011113471984863281, 0.01112063980102539, 0.011076607704162598, 0.011107328414916993, 0.010992639541625977, 0.011231231689453124, 0.01140940761566162, 0.011295743942260742, 0.011095040321350098, 0.011076607704162598, 0.01115340805053711, 0.011110400199890137, 0.011113471984863281, 0.011068415641784669, 0.011113504409790038, 0.011084768295288086, 0.01116262435913086, 0.011100159645080567, 0.01111244773864746, 0.011290623664855956, 0.01143295955657959, 0.01140121555328369, 0.011424768447875976, 0.011407360076904297, 0.011397120475769042, 0.011308032035827637, 0.01112063980102539, 0.01111244773864746, 0.011111424446105958, 0.012191743850708007, 0.011603967666625976, 0.011265055656433105, 0.011476960182189942, 0.011413503646850585, 0.011225088119506836, 0.011089920043945312, 0.011096063613891602, 0.011086848258972168, 0.011051008224487305, 0.01107148838043213, 0.011080703735351562, 0.011087871551513672, 0.011049983978271484, 0.011110400199890137, 0.011126784324645997, 0.011094016075134277, 0.011230208396911622, 0.011166751861572266, 0.011174880027770995, 0.01112883186340332, 0.011111424446105958, 0.011049983978271484, 0.011084799766540527, 0.011074560165405273, 0.011111424446105958, 0.011061247825622558, 0.011397120475769042, 0.01165721607208252, 0.011380736351013183, 0.011218943595886231, 0.01176371192932129, 0.011509759902954102, 0.011463680267333985, 0.011396096229553223, 0.011204607963562012, 0.011058176040649414, 0.011082816123962402, 0.011145152091979981, 0.011084799766540527, 0.011328512191772461, 0.0111278076171875, 0.011355135917663574, 0.011090944290161133, 0.011098112106323242, 0.011539456367492675, 0.011467776298522948, 0.01143398380279541, 0.01124454402923584, 0.011108351707458495, 0.011034624099731445, 0.012030976295471191, 0.01143295955657959, 0.011188223838806152, 0.011200511932373047, 0.011144191741943359, 0.011106304168701172, 0.011243519783020019, 0.011146240234375, 0.011102208137512207, 0.011078656196594238, 0.01105510425567627, 0.01215283203125, 0.013003775596618652, 0.011920384407043457, 0.011386879920959473, 0.01121177577972412, 0.011105279922485351, 0.011139072418212891, 0.011085824012756347, 0.011092991828918456, 0.011083776473999024, 0.011173888206481934, 0.011061247825622558, 0.011067392349243164, 0.011070464134216309, 0.011052032470703126, 0.011085824012756347, 0.011065343856811523, 0.011080703735351562, 0.011053055763244628, 0.011098112106323242, 0.01107968044281006, 0.011068415641784669, 0.010951680183410644, 0.01113916778564453, 0.01111030387878418, 0.011091967582702637, 0.01104793643951416, 0.011204607963562012, 0.011165696144104004, 0.01115340805053711, 0.011096063613891602, 0.011108351707458495, 0.011126784324645997, 0.011089920043945312, 0.011085824012756347, 0.010926079750061036, 0.010921983718872071, 0.011188223838806152, 0.01115443229675293, 0.011087871551513672, 0.011045887947082519, 0.012353535652160644, 0.0113438720703125, 0.011108384132385254, 0.010998751640319824, 0.011084799766540527, 0.011020319938659668, 0.01109603214263916, 0.01102233600616455, 0.011051008224487305, 0.011139072418212891, 
0.011191295623779298, 0.011116543769836425, 0.011056127548217774, 0.0110632963180542, 0.011130880355834961, 0.011779071807861329, 0.01154150390625, 0.011231231689453124, 0.011086848258972168, 0.011066368103027344, 0.011066368103027344, 0.011062272071838379, 0.011051008224487305, 0.011470848083496094, 0.011373567581176757, 0.011390975952148438, 0.011231231689453124, 0.011074560165405273, 0.011089920043945312, 0.011146240234375, 0.011142144203186035, 0.011123711585998536, 0.011062272071838379, 0.011287551879882812, 0.011710463523864746, 0.012401663780212402, 0.011870207786560059, 0.012767231941223145, 0.011701248168945312, 0.011585536003112793, 0.011469823837280273, 0.011447296142578126, 0.011469823837280273, 0.011465727806091308, 0.011426815986633301, 0.011168767929077148, 0.011033599853515624, 0.01112063980102539, 0.01112063980102539, 0.011035648345947266, 0.011103232383728028, 0.011091967582702637, 0.011113471984863281, 0.011035648345947266, 0.0111278076171875, 0.011108351707458495, 0.011106304168701172, 0.011092991828918456, 0.011101216316223144, 0.011111392021179198, 0.011076607704162598, 0.011110400199890137, 0.01101414394378662, 0.011106304168701172, 0.011171839714050292, 0.011479040145874024, 0.011163647651672364, 0.012489727973937988, 0.011556863784790039, 0.01114527988433838, 0.011066304206848145, 0.011132927894592285, 0.011366399765014648, 0.011155455589294434, 0.01112985610961914, 0.011271167755126953, 0.011150336265563965, 0.011140095710754394, 0.011337727546691894, 0.011455488204956055, 0.011296768188476563, 0.011066368103027344, 0.011192319869995117, 0.011467776298522948, 0.011440128326416015, 0.01143500804901123, 0.011383808135986329, 0.011266048431396485, 0.011130880355834961, 0.011086848258972168, 0.010967040061950683, 0.011163647651672364, 0.011122688293457032, 0.011111424446105958, 0.01111244773864746, 0.011147263526916504, 0.011059200286865235, 0.011065343856811523, 0.011100159645080567, 0.011126784324645997, 0.011155455589294434, 0.011169792175292969, 0.011119615554809571, 0.011731967926025391, 0.011816960334777832, 0.012122112274169922, 0.011695103645324707, 0.011471872329711913, 0.011400192260742188, 0.011194368362426758, 0.011069439888000488, 0.01142579174041748, 0.011266048431396485, 0.011091967582702637, 0.011084799766540527, 0.011287551879882812, 0.011107392311096191, 0.01105299186706543, 0.011097087860107421, 0.011102208137512207, 0.011074560165405273, 0.011106304168701172, 0.011596799850463867, 0.011314175605773925, 0.011147263526916504, 0.011498496055603028, 0.01131827163696289, 0.011115519523620606, 0.011124735832214355, 0.011110400199890137, 0.011105279922485351, 0.011107328414916993, 0.011045887947082519, 0.011165696144104004, 0.010999808311462403, 0.01103872013092041, 0.011164671897888183, 0.01112063980102539, 0.010998784065246582, 0.0110632963180542, 0.011109375953674316, 0.011109375953674316, 0.011041791915893554, 0.01107148838043213, 0.011060223579406739, 0.011090944290161133, 0.011173888206481934, 0.01115135955810547, 0.011065343856811523, 0.011283455848693847, 0.011098112106323242, 0.011097087860107421, 0.011086848258972168, 0.011049983978271484, 0.011101183891296386, 0.011125791549682617, 0.010976223945617676, 0.011057151794433593, 0.011078656196594238, 0.011374591827392578, 0.011523072242736816, 0.01135308837890625, 0.011087871551513672, 0.011109375953674316, 0.011190272331237794, 0.011122688293457032, 0.01112063980102539, 0.011104255676269532, 0.011113535881042481, 0.011125696182250977, 0.01107968044281006, 0.011601920127868653, 0.01144217586517334, 
0.011396096229553223, 0.01127731227874756, 0.011076607704162598, 0.01113702392578125, 0.011106304168701172, 0.010982399940490722, 0.011017215728759766, 0.011100159645080567, 0.011097087860107421, 0.011040767669677735, 0.01107148838043213, 0.011056127548217774, 0.011095040321350098, 0.011051008224487305, 0.011060223579406739, 0.011199487686157226, 0.011157535552978515, 0.011122655868530274, 0.011873279571533203, 0.011408384323120118, 0.011439104080200196, 0.011429887771606445, 0.011400192260742188, 0.011689984321594238, 0.011454463958740235, 0.011175968170166016, 0.011122655868530274, 0.011073535919189453, 0.01104793643951416, 0.011611136436462402, 0.011412480354309081, 0.011358207702636718]",tokens/s,89.29956261618507,,,,,,,, -4bit-awq-gemv-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,meta-llama/Meta-Llama-3-70B,meta-llama/Meta-Llama-3-70B,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - self.load_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3904, in from_pretrained - dispatch_model(model, **device_map_kwargs) - File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 489, in dispatch_model - model.to(device) - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 2796, in to - return super().to(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1173, in to - return self._apply(convert) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 779, in _apply - module._apply(fn) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 779, 
in _apply - module._apply(fn) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 779, in _apply - module._apply(fn) - [Previous line repeated 2 more times] - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 853, in _apply - self._buffers[key] = fn(buf) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1159, in convert - return t.to( -torch.cuda.OutOfMemoryError: CUDA out of memory. Tried to allocate 20.00 MiB. GPU - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemv-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,Qwen/Qwen1.5-14B,Qwen/Qwen1.5-14B,cuda,0,42,,,True,,,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,qwen2,MB,7855.341568,11287.396352,0.0,10701.766656,10468.923392,s,1,11.66769140625,11.66769140625,0.0,11.66769140625,11.66769140625,11.66769140625,11.66769140625,[11.66769140625],,kWh,5.717754347711737e-05,3.132206343202097e-05,7.965450816793673e-05,0.00016815411507707507,,MB,3558.596608,11809.5872,0.0,11163.140096,10923.3664,s,10,2.0471322021484375,0.20471322021484376,3.6044173934397446e-05,0.20472217559814454,0.20475722961425782,0.20475755920410157,0.20475782287597658,"[0.20473283386230468, 0.20468838500976563, 0.20464614868164063, 0.204743896484375, 0.20475715637207031, 0.20475788879394533, 0.2046834259033203, 0.20467706298828126, 0.20473388671875, 0.20471151733398438]",tokens/s,1250.529886303051,kWh,2.4205035323842506e-06,1.3263197566007916e-06,1.3261195416205e-05,1.700801870519004e-05,tokens/kWh,15051723.80377739,MB,3562.860544,11811.684352,0.0,11165.237248,10923.36896,s,10,24.805058105468753,2.480505810546875,0.005273209555665597,2.4812991943359375,2.4861370849609377,2.4865133666992185,2.4868143920898436,"[2.474960205078125, 2.481760498046875, 2.48439013671875, 2.48337744140625, 2.4868896484375, 2.48041064453125, 2.468515380859375, 2.486053466796875, 2.47786279296875, 2.480837890625]",tokens/s,25.398045726049094,kWh,2.9560663218377086e-05,1.620055591782138e-05,9.903678331098883e-05,0.0001447980024471873,tokens/kWh,435088.8750898219,,s,630,24.803042343139666,0.03936990848117405,0.0005729008105310292,0.0392412166595459,0.03989401435852051,0.04025103282928467,0.041684418983459474,"[0.03884646224975586, 0.03902771377563476, 0.03882905578613281, 0.038817790985107424, 0.03926015853881836, 0.039378944396972655, 0.03932364654541016, 0.03930419158935547, 0.039354366302490236, 0.03931545639038086, 0.040785919189453124, 0.04037222290039062, 0.039585792541503906, 0.03973734283447266, 0.03931340789794922, 0.03933184051513672, 0.0392437744140625, 0.03933695983886719, 0.03890176010131836, 0.03885772705078125, 0.03935641479492188, 0.039684097290039064, 0.039946239471435545, 0.03934822463989258, 0.039242752075195314, 0.03934105682373047, 0.039244800567626956, 0.0403322868347168, 0.03949260711669922, 0.03953561782836914, 0.0397946891784668, 0.03926732635498047, 0.038796287536621094, 0.038765567779541016, 0.03930828857421875, 0.03915673446655273, 
0.039397377014160156, 0.03878809738159179, 0.03945881652832031, 0.03978137588500977, 0.0391833610534668, 0.03929087829589844, 0.03864678573608398, 0.039158782958984374, 0.039206912994384766, 0.03935129547119141, 0.039570430755615234, 0.03989299011230469, 0.038787071228027346, 0.03865190505981445, 0.03861913681030273, 0.03868057632446289, 0.03932876968383789, 0.039367679595947266, 0.039196670532226564, 0.0392355842590332, 0.038778881072998046, 0.03901542282104492, 0.038661121368408206, 0.038768638610839845, 0.0396943359375, 0.03925196838378906, 0.03946086502075195, 0.03874611282348633, 0.03904716873168945, 0.039367679595947266, 0.039828479766845705, 0.03987558364868164, 0.03903385543823242, 0.03892428970336914, 0.03871641540527344, 0.03867136001586914, 0.03931238555908203, 0.040809471130371096, 0.04120576095581055, 0.0397496337890625, 0.04004761505126953, 0.039261184692382815, 0.039229438781738284, 0.039444480895996094, 0.03926323318481445, 0.04005990219116211, 0.03971481704711914, 0.039196670532226564, 0.03969740676879883, 0.03919974517822265, 0.03935641479492188, 0.03917926406860352, 0.03940556716918946, 0.03921100616455078, 0.03934003067016602, 0.03932160186767578, 0.03959296035766602, 0.04025241470336914, 0.03951411056518555, 0.03877068710327149, 0.03900928115844727, 0.03919462585449219, 0.039981056213378906, 0.039293952941894535, 0.039204864501953124, 0.03918950271606445, 0.03946700668334961, 0.03872665786743164, 0.03864473724365235, 0.0413829116821289, 0.039478271484375, 0.03940966415405273, 0.03941580963134766, 0.03887820816040039, 0.03935846328735351, 0.03875942230224609, 0.03910553741455078, 0.03915059280395508, 0.038809600830078124, 0.03908607864379883, 0.03909939193725586, 0.03991961669921875, 0.04015923309326172, 0.03971686553955078, 0.03927449417114258, 0.03892428970336914, 0.039011329650878904, 0.039056385040283206, 0.03988684844970703, 0.03862220764160156, 0.03998617553710938, 0.04002304077148437, 0.041565185546875, 0.03993088150024414, 0.04297830581665039, 0.03964211273193359, 0.038898689270019535, 0.03905945587158203, 0.03880550384521484, 0.039011329650878904, 0.03919974517822265, 0.03943731307983398, 0.03991551971435547, 0.03922739028930664, 0.03887411117553711, 0.03915980911254883, 0.039119873046875, 0.03928575897216797, 0.040005630493164065, 0.03918438339233398, 0.03909632110595703, 0.0396759033203125, 0.0390748176574707, 0.03920588684082031, 0.03956531143188476, 0.0393809928894043, 0.039142398834228515, 0.039311359405517575, 0.039139328002929685, 0.03947315216064453, 0.03910041427612305, 0.03917004776000976, 0.03908403015136719, 0.03924991989135742, 0.039152641296386716, 0.039626750946044925, 0.039196670532226564, 0.039934974670410156, 0.039430145263671876, 0.039191551208496093, 0.03902668762207031, 0.039223297119140625, 0.03915673446655273, 0.039841793060302735, 0.03970560073852539, 0.03927449417114258, 0.03911270523071289, 0.039397377014160156, 0.03935129547119141, 0.039185409545898435, 0.04005580902099609, 0.03925503921508789, 0.03918643188476562, 0.03911475372314453, 0.03923763275146484, 0.03953868865966797, 0.039021568298339845, 0.039416831970214845, 0.039201793670654295, 0.03910553741455078, 0.039152641296386716, 0.039241729736328126, 0.03987865447998047, 0.03930112075805664, 0.03941888046264649, 0.03925094223022461, 0.03934515380859375, 0.03905331039428711, 0.038833152770996096, 0.03916185760498047, 0.039384063720703126, 0.03914956665039063, 0.038988800048828126, 0.03851264190673828, 0.03924582290649414, 0.03919462585449219, 0.03921612930297851, 
0.0391464958190918, 0.03921920013427734, 0.039806976318359374, 0.03924889755249023, 0.0391014404296875, 0.039504894256591795, 0.03990323257446289, 0.039777278900146484, 0.03911884689331055, 0.03907891082763672, 0.0393779182434082, 0.03968204879760742, 0.04086272048950195, 0.039411712646484375, 0.039008255004882815, 0.03926323318481445, 0.0391014404296875, 0.03911782455444336, 0.039300094604492186, 0.03934207916259766, 0.039005184173583986, 0.03902873611450195, 0.03891404724121094, 0.039172096252441405, 0.03915059280395508, 0.03924582290649414, 0.03919974517822265, 0.03928678512573242, 0.038932479858398435, 0.039018497467041016, 0.03911167907714844, 0.039929855346679685, 0.038948863983154294, 0.039392257690429686, 0.03942707061767578, 0.039261184692382815, 0.03903180694580078, 0.03969126510620117, 0.03927347183227539, 0.039256065368652344, 0.03924684906005859, 0.039070720672607424, 0.03917926406860352, 0.039142398834228515, 0.039221248626708984, 0.04063641738891602, 0.045744129180908207, 0.04066918563842774, 0.03956224060058594, 0.038989822387695314, 0.03929190444946289, 0.039654399871826174, 0.03950694274902344, 0.039206912994384766, 0.03939328002929687, 0.03924582290649414, 0.03927040100097656, 0.039934974670410156, 0.04043775939941406, 0.03979673767089844, 0.03968102264404297, 0.03928166580200195, 0.0392540168762207, 0.039155712127685545, 0.03930419158935547, 0.03928166580200195, 0.040374271392822264, 0.040359935760498046, 0.03937484741210937, 0.03916697692871094, 0.03885567855834961, 0.03919769668579102, 0.03942707061767578, 0.039299072265625, 0.0392540168762207, 0.039465984344482424, 0.03942195129394531, 0.03976806259155274, 0.039244800567626956, 0.039175167083740234, 0.03916799926757813, 0.03928166580200195, 0.03931545639038086, 0.03935539245605469, 0.03914035034179687, 0.0391536636352539, 0.03918131256103516, 0.03978854370117187, 0.04184371185302734, 0.03971788787841797, 0.03964211273193359, 0.04000460815429688, 0.039373825073242184, 0.03924582290649414, 0.03887308883666992, 0.039201793670654295, 0.039465984344482424, 0.03864473724365235, 0.0388853759765625, 0.038970367431640625, 0.039311359405517575, 0.03945779037475586, 0.03996160125732422, 0.039244800567626956, 0.039239681243896485, 0.03897958374023437, 0.039158782958984374, 0.03910451126098633, 0.040615936279296876, 0.04144537734985351, 0.0396759033203125, 0.03915980911254883, 0.039016448974609375, 0.03914342498779297, 0.03917107009887695, 0.03926425552368164, 0.03863347244262695, 0.039209983825683595, 0.039398399353027344, 0.03931340789794922, 0.03936153411865234, 0.03932364654541016, 0.03968307113647461, 0.03943731307983398, 0.03917311859130859, 0.03874303817749023, 0.03873689651489258, 0.03875635147094727, 0.039087104797363284, 0.03921612930297851, 0.03912707138061523, 0.03940143966674805, 0.03916185760498047, 0.039272449493408204, 0.03911782455444336, 0.039926784515380856, 0.039449600219726565, 0.038948863983154294, 0.039177215576171875, 0.03916697692871094, 0.03914035034179687, 0.03918438339233398, 0.039152641296386716, 0.040002559661865236, 0.03888844680786133, 0.039360511779785154, 0.03922534561157227, 0.039880702972412106, 0.03970355224609375, 0.03963187026977539, 0.040242176055908206, 0.038599681854248044, 0.039065601348876954, 0.039298046112060545, 0.03956326293945313, 0.039465984344482424, 0.039106559753417966, 0.03890585708618164, 0.039136257171630856, 0.03893862533569336, 0.03917824172973633, 0.039218177795410154, 0.03973836898803711, 0.04150067138671875, 0.03988991928100586, 0.03970457458496094, 0.04218163299560547, 
0.04012851333618164, 0.039242752075195314, 0.039155712127685545, 0.03899596786499023, 0.03929292678833008, 0.039229438781738284, 0.039602176666259765, 0.03923353576660156, 0.03957452774047852, 0.0393072624206543, 0.03923660659790039, 0.039011329650878904, 0.03927142333984375, 0.03934003067016602, 0.039057407379150394, 0.0387512321472168, 0.0393072624206543, 0.03860275268554687, 0.03879116821289062, 0.03871334457397461, 0.03888332748413086, 0.03922739028930664, 0.039175167083740234, 0.03910451126098633, 0.039139328002929685, 0.0392806396484375, 0.039188480377197264, 0.03975884628295898, 0.039122943878173826, 0.039180286407470705, 0.0390041618347168, 0.039175167083740234, 0.03999641418457031, 0.03966259384155273, 0.03910758590698242, 0.039479297637939455, 0.03907276916503906, 0.039201793670654295, 0.03915673446655273, 0.039741439819335936, 0.03914342498779297, 0.039196670532226564, 0.039144447326660156, 0.03924787139892578, 0.039357440948486325, 0.03923455810546875, 0.039272449493408204, 0.03923660659790039, 0.03920076751708984, 0.039163902282714845, 0.0389826545715332, 0.03922022247314453, 0.03968511962890625, 0.039242752075195314, 0.0391014404296875, 0.03879116821289062, 0.03913318252563477, 0.03922022247314453, 0.03914137649536133, 0.03919974517822265, 0.039067649841308595, 0.0391464958190918, 0.039032833099365234, 0.03902975845336914, 0.039041023254394534, 0.0390748176574707, 0.038986751556396484, 0.03918745422363281, 0.039051265716552735, 0.03920281600952148, 0.03896115112304688, 0.039430145263671876, 0.039702529907226565, 0.039397377014160156, 0.039229438781738284, 0.03913216018676758, 0.03953049468994141, 0.0391833610534668, 0.039638015747070314, 0.03918950271606445, 0.039144447326660156, 0.0391014404296875, 0.03910041427612305, 0.03923353576660156, 0.03926528167724609, 0.039373825073242184, 0.03929292678833008, 0.039206912994384766, 0.03941785430908203, 0.03914342498779297, 0.03921408081054688, 0.03923353576660156, 0.03928678512573242, 0.039261184692382815, 0.03905331039428711, 0.03888947296142578, 0.039411712646484375, 0.03928985595703125, 0.039877632141113284, 0.03996876907348633, 0.03969126510620117, 0.03889254379272461, 0.039411712646484375, 0.039193599700927735, 0.03946188735961914, 0.03931238555908203, 0.03938508987426758, 0.03924684906005859, 0.03914342498779297, 0.039244800567626956, 0.03934105682373047, 0.04015513610839844, 0.04221952056884765, 0.040643585205078124, 0.04020326232910156, 0.04071219253540039, 0.039651329040527344, 0.039354366302490236, 0.039359489440917966, 0.03907788848876953, 0.03893350219726562, 0.0400076789855957, 0.03925708770751953, 0.04020019149780273, 0.03947724914550781, 0.03976396942138672, 0.03936972808837891, 0.03915980911254883, 0.0390830078125, 0.0402606086730957, 0.03932160186767578, 0.03902054214477539, 0.039175167083740234, 0.038916095733642575, 0.039185409545898435, 0.03926835250854492, 0.039182334899902346, 0.03913420867919922, 0.03899289703369141, 0.0391649284362793, 0.03922227096557617, 0.039228416442871096, 0.0392540168762207, 0.03987353515625, 0.039242752075195314, 0.03915059280395508, 0.03908812713623047, 0.0389939193725586, 0.03923763275146484, 0.04003123092651367, 0.03915776062011719, 0.0391649284362793, 0.0391383056640625, 0.03907993698120117, 0.0391464958190918, 0.03887513732910156, 0.03898777770996094, 0.03915776062011719, 0.03915468978881836, 0.039024639129638675, 0.038509567260742186, 0.03897753524780274, 0.03863449478149414, 0.03860684967041016, 0.03901542282104492, 0.03914137649536133, 0.03906867218017578, 0.039160831451416016, 
0.039482368469238284, 0.03947212982177734, 0.039182334899902346, 0.03919462585449219, 0.03859763336181641, 0.038735870361328126, 0.03982438278198242, 0.041245697021484375, 0.04086579132080078, 0.03949977493286133, 0.03911577606201172, 0.039754753112792966, 0.03918950271606445, 0.03969023895263672, 0.03928268814086914, 0.039218177795410154, 0.03968102264404297, 0.0393994255065918, 0.03937996673583984, 0.03973529434204102, 0.04043366241455078, 0.03968000030517578, 0.03914956665039063, 0.03966054534912109, 0.03950899124145508, 0.039206912994384766, 0.04024934387207031, 0.03983871841430664, 0.03913420867919922, 0.03972608184814453, 0.03932160186767578, 0.03870207977294922, 0.038781951904296875, 0.03913420867919922, 0.03960934448242188, 0.039261184692382815, 0.039261184692382815, 0.0391649284362793, 0.03916185760498047, 0.03924070358276367, 0.03885158538818359, 0.03859763336181641, 0.03888127899169922, 0.03921612930297851, 0.039218177795410154, 0.03869696044921875, 0.03957350540161133, 0.039226367950439454, 0.03935129547119141, 0.039193599700927735, 0.039293952941894535, 0.03902361679077149, 0.03957657623291016, 0.03920076751708984, 0.03921612930297851, 0.039337982177734376, 0.03926630401611328, 0.03907583999633789, 0.03947417449951172, 0.03948441696166992, 0.0391464958190918, 0.04173311996459961, 0.040888320922851565, 0.03945471954345703, 0.03926732635498047, 0.04192972946166992, 0.04020019149780273, 0.039556095123291016, 0.03866828918457031, 0.03896012878417969, 0.03969740676879883, 0.03947212982177734, 0.039354366302490236, 0.03920588684082031, 0.03916287994384766, 0.03914137649536133, 0.03925299072265625, 0.03970969772338867, 0.03940966415405273, 0.03908095932006836, 0.03916697692871094, 0.039468032836914066, 0.03905843353271484, 0.039070720672607424, 0.03917107009887695, 0.03914035034179687, 0.03915673446655273, 0.040716289520263675, 0.039863296508789066, 0.03901542282104492, 0.03936358261108398, 0.03902054214477539]",tokens/s,25.400109844760784,,,,,,,, -4bit-awq-gemv-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,mistralai/Mistral-7B-v0.1,mistralai/Mistral-7B-v0.1,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run - report = scenario.run(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 105, in run - _ = backend.generate(self.inputs, 
self.config.generate_kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context - return func(*args, **kwargs) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 400, in generate - return self.pretrained_model.generate(**inputs, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context - return func(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 1914, in generate - result = self._sample( - File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2651, in _sample - outputs = self( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mistral/modeling_mistral.py"", line 1200, in forward - outputs = self.model( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mistral/modeling_mistral.py"", line 976, in forward - layer_outputs = decoder_layer( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mistral/modeling_mistral.py"", line 718, in forward - hidden_states, self_attn_weights, present_key_value = self.self_attn( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mistral/modeling_mistral.py"", line 242, in forward - query_states = self.q_proj(hidden_states) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context - return func(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/gemv.py"", line 162, in forward - assert AWQ_INSTALLED, ( -AssertionError: AWQ kernels could not be loaded. 
Please install them from https://github.com/casper-hansen/AutoAWQ_kernels - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemv-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,google/gemma-7b,google/gemma-7b,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 304, in hf_raise_for_status - response.raise_for_status() - File ""/usr/local/lib/python3.10/dist-packages/requests/models.py"", line 1024, in raise_for_status - raise HTTPError(http_error_msg, response=self) -requests.exceptions.HTTPError: 403 Client Error: Forbidden for url: https://huggingface.co/google/gemma-7b/resolve/main/config.json - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1722, in _get_metadata_or_catch_error - metadata = get_hf_file_metadata(url=url, proxies=proxies, timeout=etag_timeout, headers=headers) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1645, in get_hf_file_metadata - r = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 372, in _request_wrapper - response = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 396, in _request_wrapper - hf_raise_for_status(response) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 367, in hf_raise_for_status - raise HfHubHTTPError(message, response=response) from e -huggingface_hub.utils._errors.HfHubHTTPError: (Request ID: Root=1-6694814e-5675fa9451fdcf132e3bcca0;cfb71b90-400f-470d-8819-592c449d1da7) - -403 Forbidden: Please enable access to public gated repositories in your fine-grained token settings to view this repository.. -Cannot access content at: https://huggingface.co/google/gemma-7b/resolve/main/config.json. -If you are trying to create or update content,make sure you have a token with the `write` role. 
- -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1221, in hf_hub_download - return _hf_hub_download_to_cache_dir( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1325, in _hf_hub_download_to_cache_dir - _raise_on_head_call_error(head_call_error, force_download, local_files_only) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1826, in _raise_on_head_call_error - raise LocalEntryNotFoundError( -huggingface_hub.utils._errors.LocalEntryNotFoundError: An error happened while trying to locate the file on the Hub and we cannot find the requested files in the local cache. Please check your connection and try again or make sure your Internet connection is on. - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 445, in cached_file - raise EnvironmentError( -OSError: We couldn't connect to 'https://huggingface.co' to load this file, couldn't find it in the cached files and it looks like google/gemma-7b is not the path to a directory containing a file named config.json. -Checkout your internet connection or see how to run the library in offline mode at 'https://huggingface.co/docs/transformers/installation#offline-mode'. 
- -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemv-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,Qwen/Qwen1.5-1.8B,Qwen/Qwen1.5-1.8B,cuda,0,42,,,True,,,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,qwen2,MB,1873.760256,2926.051328,0.0,2340.421632,2285.568,s,1,8.7185517578125,8.7185517578125,0.0,8.7185517578125,8.7185517578125,8.7185517578125,8.7185517578125,[8.7185517578125],,kWh,2.172347315070056e-05,1.1890000631956035e-05,3.107391374801427e-05,6.468738753067087e-05,,MB,1899.610112,3326.60736,0.0,2680.160256,2578.857984,s,10,0.34021232223510744,0.03402123222351074,9.705367990477013e-05,0.03399543952941894,0.034097178268432615,0.03419362964630127,0.034270790748596194,"[0.03429008102416992, 0.033954433441162106, 0.03400515365600586, 0.03399177551269531, 0.03407574462890625, 0.03395904159545898, 0.03402060699462891, 0.03398108673095703, 0.033935295104980466, 0.03399910354614258]",tokens/s,7524.712753440142,kWh,4.0115326896895853e-07,2.1981231009454724e-07,1.9052086804743811e-06,2.526174259537887e-06,tokens/kWh,101339010.57436556,MB,1906.515968,3326.60736,0.0,2680.160256,2578.860544,s,10,14.553461303710938,1.455346130371094,0.0150556009710558,1.4502889404296875,1.4745070678710936,1.4793602233886718,1.4832427478027344,"[1.48421337890625, 1.4734285888671874, 1.4723291015625, 1.4498572998046875, 1.4519647216796876, 1.448634765625, 1.4507205810546875, 1.4387630615234375, 1.43600244140625, 1.44754736328125]",tokens/s,43.28867111766452,kWh,1.7330376186586464e-05,9.496960680085777e-06,3.485773739653254e-05,6.168507426320477e-05,tokens/kWh,1021316.756970812,,s,630,14.551711746215824,0.023097955152723525,0.0005359271759577272,0.022953983306884765,0.023832678985595702,0.023937894630432128,0.02441878589630127,"[0.0230645751953125, 0.02310041618347168, 0.023508991241455078, 0.023425024032592775, 0.02390118408203125, 0.023942144393920898, 0.02391859245300293, 0.02394419288635254, 0.023831552505493164, 0.023842815399169923, 0.023044095993041993, 0.022673408508300782, 0.02285260772705078, 0.02291814422607422, 0.023186431884765626, 0.02305638313293457, 0.023018495559692383, 0.023208959579467774, 0.02294169616699219, 0.02293350410461426, 0.022785024642944338, 0.022615039825439453, 0.022932479858398438, 0.02306355285644531, 0.022981632232666017, 0.023835647583007814, 0.023960575103759766, 0.02430975914001465, 0.023941120147705077, 0.02388991928100586, 0.023841791152954102, 0.023953407287597657, 0.023787519454956055, 0.023833599090576172, 0.02253004837036133, 0.023642112731933593, 0.02327756881713867, 0.022768640518188478, 0.023631872177124022, 0.024041471481323243, 0.02425753593444824, 0.02404249572753906, 0.023780351638793946, 0.022968320846557616, 0.022970367431640625, 0.024167423248291017, 0.02457804870605469, 0.024160255432128908, 0.02391449546813965, 0.0259368953704834, 0.0244715518951416, 0.02390323257446289, 0.02388275146484375, 0.023789567947387694, 0.023818239212036133, 0.023766016006469725, 0.023995391845703123, 0.02384486389160156, 0.02385408020019531, 
0.022760448455810548, 0.02265907287597656, 0.02305536079406738, 0.023529472351074218, 0.02305536079406738, 0.022923263549804687, 0.02325299263000488, 0.02391859245300293, 0.02391551971435547, 0.02453811264038086, 0.024031232833862305, 0.0230031681060791, 0.022983648300170897, 0.02291814422607422, 0.022936576843261718, 0.022976512908935546, 0.022765567779541016, 0.022974464416503908, 0.022939647674560547, 0.022990848541259764, 0.02309119987487793, 0.023794687271118165, 0.02349056053161621, 0.022984703063964843, 0.022626304626464845, 0.02309939193725586, 0.023833599090576172, 0.02371379280090332, 0.023828479766845705, 0.024435712814331056, 0.023611391067504883, 0.0228853759765625, 0.02287820816040039, 0.023011327743530274, 0.022922239303588866, 0.023017471313476562, 0.02306252861022949, 0.022624256134033204, 0.022798336029052735, 0.023832576751708984, 0.022965248107910157, 0.023802879333496094, 0.023444480895996093, 0.022658048629760744, 0.022757375717163086, 0.02262118339538574, 0.02352025604248047, 0.02390425682067871, 0.0238919677734375, 0.02386944007873535, 0.023984128952026368, 0.023763967514038087, 0.023833599090576172, 0.02394316864013672, 0.023797760009765623, 0.023924736022949217, 0.023759872436523437, 0.02387046432495117, 0.02289356803894043, 0.022887424468994142, 0.02289459228515625, 0.022840320587158205, 0.02364313507080078, 0.023867391586303712, 0.0239237117767334, 0.024377344131469726, 0.0239237117767334, 0.0241582088470459, 0.023611391067504883, 0.02392985534667969, 0.02385408020019531, 0.024197120666503907, 0.024267776489257813, 0.023958528518676758, 0.02364825630187988, 0.023980031967163085, 0.023609344482421874, 0.02389094352722168, 0.023658496856689453, 0.023793664932250977, 0.024065023422241212, 0.023771135330200196, 0.022793216705322264, 0.02289971160888672, 0.022931455612182617, 0.022952959060668944, 0.02289254379272461, 0.02287001609802246, 0.022610944747924806, 0.023205888748168944, 0.02389504051208496, 0.023795711517333985, 0.023731199264526368, 0.030712831497192384, 0.023394304275512694, 0.023809024810791016, 0.0231014404296875, 0.022965248107910157, 0.02309734344482422, 0.02284236717224121, 0.022995967864990235, 0.022938623428344726, 0.022958080291748048, 0.022871040344238282, 0.02289971160888672, 0.023030784606933592, 0.02307072067260742, 0.022923263549804687, 0.02290278434753418, 0.022937599182128905, 0.02287615966796875, 0.023468032836914062, 0.02287308883666992, 0.022898687362670898, 0.02281881523132324, 0.022751232147216797, 0.022754304885864256, 0.022605823516845702, 0.022642688751220705, 0.02292531204223633, 0.02309734344482422, 0.024071168899536134, 0.02367897605895996, 0.02305023956298828, 0.02290892791748047, 0.022931455612182617, 0.02294169616699219, 0.02290790367126465, 0.02281881523132324, 0.02271334457397461, 0.022785024642944338, 0.02285055923461914, 0.022969343185424804, 0.022974464416503908, 0.022921215057373046, 0.02309734344482422, 0.023004159927368165, 0.022739967346191405, 0.023018495559692383, 0.023027711868286133, 0.022971391677856445, 0.022928384780883788, 0.022944768905639647, 0.023014400482177736, 0.022921215057373046, 0.022996992111206056, 0.022912000656127928, 0.023021568298339845, 0.02270515251159668, 0.022595584869384764, 0.022684671401977538, 0.022675455093383787, 0.02264371109008789, 0.022599679946899414, 0.022709247589111328, 0.02290892791748047, 0.022965248107910157, 0.022915071487426757, 0.022799360275268556, 0.02264473533630371, 0.022749183654785156, 0.022956031799316406, 0.022929407119750975, 0.022964223861694336, 
0.02288332748413086, 0.023740415573120118, 0.023362560272216795, 0.023798784255981444, 0.02346700859069824, 0.022977535247802734, 0.022872064590454103, 0.022584320068359375, 0.022700031280517577, 0.02308608055114746, 0.02365132713317871, 0.023621631622314454, 0.023599103927612306, 0.02269491195678711, 0.02294169616699219, 0.02293452835083008, 0.023302143096923827, 0.02348748779296875, 0.023426048278808592, 0.023229440689086913, 0.02288025665283203, 0.022635520935058592, 0.022793216705322264, 0.023387136459350585, 0.02291097640991211, 0.02267750358581543, 0.02290176010131836, 0.02365951919555664, 0.023933952331542968, 0.02307481575012207, 0.022832128524780275, 0.023521280288696288, 0.023189504623413085, 0.02270412826538086, 0.022996992111206056, 0.02309836769104004, 0.02307174491882324, 0.02289356803894043, 0.023037952423095705, 0.02294169616699219, 0.02292736053466797, 0.022905855178833007, 0.022944768905639647, 0.023152639389038086, 0.022882303237915038, 0.02271129608154297, 0.022983680725097655, 0.022819839477539062, 0.022732799530029296, 0.02285772705078125, 0.023541759490966797, 0.023352319717407227, 0.023026687622070312, 0.02285260772705078, 0.022938623428344726, 0.02288640022277832, 0.023350271224975586, 0.023628799438476563, 0.023568384170532225, 0.023561216354370116, 0.02373017692565918, 0.02366771125793457, 0.023714815139770508, 0.02353561592102051, 0.02291302490234375, 0.022714368820190428, 0.022988800048828126, 0.02287820816040039, 0.022684671401977538, 0.022948863983154297, 0.023448575973510744, 0.023259136199951173, 0.022773759841918945, 0.022987775802612305, 0.022862848281860353, 0.022957056045532227, 0.02305433654785156, 0.022890495300292968, 0.023015424728393553, 0.022944768905639647, 0.02289459228515625, 0.022914047241210937, 0.02287513542175293, 0.022632448196411133, 0.022992895126342772, 0.02289356803894043, 0.023212032318115236, 0.022998016357421876, 0.022938623428344726, 0.022932479858398438, 0.022940671920776368, 0.022602752685546876, 0.022985727310180663, 0.02288128089904785, 0.022890495300292968, 0.022801408767700194, 0.02290073585510254, 0.02287718391418457, 0.023924736022949217, 0.02306252861022949, 0.02290073585510254, 0.02304204750061035, 0.022841344833374022, 0.023856128692626953, 0.02367897605895996, 0.023776256561279296, 0.022989824295043947, 0.022972415924072266, 0.023002111434936523, 0.023385087966918947, 0.023236608505249022, 0.02347520065307617, 0.023145471572875977, 0.022968320846557616, 0.023258111953735353, 0.02287001609802246, 0.022543359756469726, 0.022578176498413087, 0.02291097640991211, 0.022582271575927734, 0.022812671661376953, 0.022946815490722656, 0.022617088317871094, 0.02265907287597656, 0.02267238426208496, 0.02348543930053711, 0.024803327560424804, 0.02350387191772461, 0.022866943359375, 0.022906879425048828, 0.02292531204223633, 0.023012351989746094, 0.022625280380249024, 0.02264473533630371, 0.02266214370727539, 0.022767616271972657, 0.022979583740234375, 0.022991872787475585, 0.022766592025756836, 0.022998016357421876, 0.022872064590454103, 0.02291097640991211, 0.022990848541259764, 0.022943744659423827, 0.02308915138244629, 0.022953983306884765, 0.022863872528076173, 0.022964223861694336, 0.02294988822937012, 0.022578176498413087, 0.022766592025756836, 0.022807552337646485, 0.022545408248901368, 0.02257004737854004, 0.022666175842285155, 0.023017471313476562, 0.02288844871520996, 0.022990848541259764, 0.022953983306884765, 0.022800384521484376, 0.023204864501953124, 0.023141376495361327, 0.023532543182373047, 0.022887424468994142, 
0.022742015838623047, 0.02294988822937012, 0.022743040084838868, 0.022955007553100586, 0.023038976669311522, 0.023391231536865235, 0.024227840423583984, 0.02348953628540039, 0.02310758399963379, 0.022951936721801756, 0.02304102325439453, 0.023026687622070312, 0.02309222412109375, 0.02294272041320801, 0.023143423080444335, 0.023028736114501954, 0.02262118339538574, 0.022641664505004884, 0.022813695907592774, 0.023355392456054686, 0.023415807723999024, 0.02325606346130371, 0.02330112075805664, 0.023553024291992186, 0.023043071746826172, 0.022758399963378906, 0.022962175369262695, 0.022847488403320314, 0.023038976669311522, 0.022946815490722656, 0.023006208419799806, 0.0231147518157959, 0.02370867156982422, 0.02351923179626465, 0.023233535766601563, 0.022798336029052735, 0.022599679946899414, 0.022722560882568358, 0.02291916847229004, 0.02264473533630371, 0.022788095474243163, 0.022790143966674805, 0.022991872787475585, 0.022964223861694336, 0.022905855178833007, 0.022898687362670898, 0.02290790367126465, 0.022969343185424804, 0.022994943618774414, 0.02287615966796875, 0.02305433654785156, 0.022922239303588866, 0.022599679946899414, 0.022776832580566408, 0.022978559494018554, 0.02269491195678711, 0.022974464416503908, 0.022749183654785156, 0.022592512130737305, 0.02247475242614746, 0.02262118339538574, 0.02267238426208496, 0.022963199615478515, 0.022982656478881838, 0.023007232666015624, 0.02329292869567871, 0.022944768905639647, 0.023126016616821288, 0.02291097640991211, 0.022992895126342772, 0.022808576583862306, 0.022984703063964843, 0.022971391677856445, 0.022982656478881838, 0.02305843162536621, 0.02311884880065918, 0.022962175369262695, 0.0228351993560791, 0.02270515251159668, 0.02265088081359863, 0.02294169616699219, 0.022811647415161132, 0.022906879425048828, 0.02287820816040039, 0.022874111175537108, 0.022707199096679686, 0.022560768127441407, 0.022607872009277344, 0.022639616012573242, 0.022616064071655274, 0.022597631454467772, 0.022590463638305663, 0.022597631454467772, 0.02269900894165039, 0.022776832580566408, 0.022978559494018554, 0.022905855178833007, 0.023154687881469727, 0.022879232406616212, 0.02286899185180664, 0.022923263549804687, 0.023015424728393553, 0.022906879425048828, 0.02284339141845703, 0.022882303237915038, 0.022976512908935546, 0.022772735595703125, 0.02264678382873535, 0.022673408508300782, 0.023121919631958008, 0.022692863464355468, 0.02264371109008789, 0.022722560882568358, 0.022591487884521484, 0.022552576065063477, 0.022977535247802734, 0.022995967864990235, 0.02304819107055664, 0.02267033576965332, 0.022717439651489257, 0.022874111175537108, 0.022559743881225586, 0.022649856567382814, 0.022755327224731444, 0.023034879684448242, 0.02285670471191406, 0.022978559494018554, 0.023010303497314453, 0.022936576843261718, 0.022975488662719725, 0.023010303497314453, 0.02303385543823242, 0.023026687622070312, 0.022905855178833007, 0.022778879165649413, 0.02290995216369629, 0.023000064849853515, 0.022952959060668944, 0.022749183654785156, 0.022822912216186524, 0.02269593620300293, 0.022578176498413087, 0.022615039825439453, 0.02288844871520996, 0.022936576843261718, 0.022569984436035157, 0.022527999877929687, 0.02264678382873535, 0.02272051239013672, 0.022626304626464845, 0.022675455093383787, 0.02328780746459961, 0.022887424468994142, 0.02290073585510254, 0.02302566337585449, 0.022968320846557616, 0.023012351989746094, 0.02268569564819336, 0.022634496688842775, 0.02255564880371094, 0.022634496688842775, 0.02272153663635254, 0.022823936462402345, 
0.022723583221435546, 0.022687744140625, 0.022674432754516603, 0.022641664505004884, 0.022618112564086915, 0.022541311264038084, 0.02249625587463379, 0.022586368560791017, 0.02253926467895508, 0.022797311782836914, 0.022975488662719725, 0.02264473533630371, 0.022730752944946288, 0.022991872787475585, 0.02267033576965332, 0.022602752685546876, 0.023036928176879884, 0.02308915138244629, 0.023037952423095705, 0.022979583740234375, 0.02290278434753418, 0.022879232406616212, 0.022995967864990235, 0.022992895126342772, 0.023007232666015624, 0.02267750358581543, 0.022635520935058592, 0.022641664505004884, 0.02288332748413086, 0.02285772705078125, 0.02291302490234375, 0.023254016876220703, 0.022960128784179686, 0.02290790367126465, 0.022962175369262695, 0.02290483283996582, 0.022658048629760744, 0.022584320068359375, 0.022477823257446287, 0.022656000137329102, 0.02330316734313965, 0.023136255264282226, 0.02306355285644531, 0.022755327224731444, 0.022627328872680662, 0.023166976928710937, 0.023004159927368165, 0.0230328311920166, 0.022840320587158205, 0.02291302490234375, 0.023026687622070312, 0.02292531204223633, 0.023018495559692383, 0.022953983306884765, 0.023358463287353515, 0.024358911514282225, 0.023825408935546875, 0.023508991241455078, 0.023772159576416017, 0.02268057632446289, 0.022535167694091796, 0.022936576843261718, 0.022939647674560547, 0.02289254379272461, 0.02304819107055664, 0.022961151123046874, 0.022938623428344726, 0.02291097640991211, 0.02287001609802246, 0.022938623428344726, 0.023026687622070312, 0.022564863204956053, 0.022952959060668944, 0.022960128784179686, 0.02330112075805664, 0.022920192718505858, 0.022841344833374022, 0.022951936721801756, 0.02287615966796875, 0.022879232406616212]",tokens/s,43.29387573003786,,,,,,,, -4bit-awq-gemv-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,meta-llama/Meta-Llama-3-8B,meta-llama/Meta-Llama-3-8B,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run - report = scenario.run(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 105, in run - _ = backend.generate(self.inputs, self.config.generate_kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context - return func(*args, **kwargs) - File 
""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 400, in generate - return self.pretrained_model.generate(**inputs, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context - return func(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 1914, in generate - result = self._sample( - File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2651, in _sample - outputs = self( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1174, in forward - outputs = self.model( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 978, in forward - layer_outputs = decoder_layer( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 718, in forward - hidden_states, self_attn_weights, present_key_value = self.self_attn( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 326, in forward - query_states = self.q_proj(hidden_states) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context - return func(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/gemv.py"", line 162, in forward - assert AWQ_INSTALLED, ( -AssertionError: AWQ kernels could not be loaded. 
Please install them from https://github.com/casper-hansen/AutoAWQ_kernels - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemv-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,microsoft/phi-1_5,microsoft/phi-1_5,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run - report = scenario.run(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 105, in run - _ = backend.generate(self.inputs, self.config.generate_kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context - return func(*args, **kwargs) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 400, in generate - return self.pretrained_model.generate(**inputs, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context - return func(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 1914, in generate - result = self._sample( - File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2651, in _sample - outputs = self( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/phi/modeling_phi.py"", line 1268, in forward - outputs = self.model( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/phi/modeling_phi.py"", line 1064, in forward - layer_outputs = decoder_layer( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 
1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/phi/modeling_phi.py"", line 804, in forward - attn_outputs, self_attn_weights, present_key_value = self.self_attn( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/phi/modeling_phi.py"", line 313, in forward - query_states = self.q_proj(hidden_states) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context - return func(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/gemv.py"", line 162, in forward - assert AWQ_INSTALLED, ( -AssertionError: AWQ kernels could not be loaded. Please install them from https://github.com/casper-hansen/AutoAWQ_kernels - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemv-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,togethercomputer/RedPajama-INCITE-Base-3B-v1,togethercomputer/RedPajama-INCITE-Base-3B-v1,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run - report = scenario.run(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 105, in run - _ = backend.generate(self.inputs, self.config.generate_kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context - return func(*args, **kwargs) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 400, in generate - return self.pretrained_model.generate(**inputs, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context - return func(*args, **kwargs) - File 
""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 1914, in generate - result = self._sample( - File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2651, in _sample - outputs = self( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1097, in forward - outputs = self.gpt_neox( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 988, in forward - outputs = layer( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 753, in forward - attention_layer_outputs = self.attention( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 170, in forward - query, key, value, present = self._attn_projections_and_rope( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 224, in _attn_projections_and_rope - qkv = self.query_key_value(hidden_states) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context - return func(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/gemv.py"", line 162, in forward - assert AWQ_INSTALLED, ( -AssertionError: AWQ kernels could not be loaded. 
Please install them from https://github.com/casper-hansen/AutoAWQ_kernels - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemv-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,Qwen/Qwen2-beta-72B,Qwen/Qwen2-beta-72B,cuda,0,42,,,True,,,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run - report = scenario.run(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run - self.run_model_loading_tracking(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking - backend.load() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load - self.load_transformers_model() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model - self.load_transformers_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights - self.load_transformers_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained - self.pretrained_model = self.automodel_loader.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4034, in from_pretrained - dispatch_model(model, **device_map_kwargs) - File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 494, in dispatch_model - model.to(device) - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 2905, in to - return super().to(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1174, in to - return self._apply(convert) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply - module._apply(fn) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply - module._apply(fn) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply - module._apply(fn) - 
[Previous line repeated 2 more times] - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 854, in _apply - self._buffers[key] = fn(buf) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1160, in convert - return t.to( -torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 96.00 MiB. GPU 0 has a total capacity of 22.18 GiB of which 68.50 MiB is free. Process 123204 has 22.11 GiB memory in use. Of the allocated memory 21.86 GiB is allocated by PyTorch, and 10.74 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) - -",qwen2,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemv-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,meta-llama/Llama-2-70b-hf,meta-llama/Llama-2-70b-hf,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - self.load_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3904, in from_pretrained - dispatch_model(model, **device_map_kwargs) - File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 489, in dispatch_model - model.to(device) - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 2796, in to - return super().to(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1173, in to - return self._apply(convert) - File 
""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 779, in _apply - module._apply(fn) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 779, in _apply - module._apply(fn) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 779, in _apply - module._apply(fn) - [Previous line repeated 2 more times] - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 853, in _apply - self._buffers[key] = fn(buf) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1159, in convert - return t.to( -torch.cuda.OutOfMemoryError: CUDA out of memory. Tried to allocate 112.00 MiB. GPU - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemv-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,meta-llama/Llama-2-7b-hf,meta-llama/Llama-2-7b-hf,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run - report = scenario.run(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 105, in run - _ = backend.generate(self.inputs, self.config.generate_kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context - return func(*args, **kwargs) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 400, in generate - return self.pretrained_model.generate(**inputs, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context - return func(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 1914, in generate - result = self._sample( - File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2651, in _sample - outputs = self( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1174, in forward - outputs = self.model( - File 
""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 978, in forward - layer_outputs = decoder_layer( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 718, in forward - hidden_states, self_attn_weights, present_key_value = self.self_attn( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 326, in forward - query_states = self.q_proj(hidden_states) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context - return func(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/gemv.py"", line 162, in forward - assert AWQ_INSTALLED, ( -AssertionError: AWQ kernels could not be loaded. 
Please install them from https://github.com/casper-hansen/AutoAWQ_kernels - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemv-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,r,r,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 304, in hf_raise_for_status - response.raise_for_status() - File ""/usr/local/lib/python3.10/dist-packages/requests/models.py"", line 1024, in raise_for_status - raise HTTPError(http_error_msg, response=self) -requests.exceptions.HTTPError: 404 Client Error: Not Found for url: https://huggingface.co/r/resolve/main/config.json - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1221, in hf_hub_download - return _hf_hub_download_to_cache_dir( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1325, in _hf_hub_download_to_cache_dir - _raise_on_head_call_error(head_call_error, force_download, local_files_only) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1823, in _raise_on_head_call_error - raise head_call_error - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1722, in _get_metadata_or_catch_error - metadata = get_hf_file_metadata(url=url, proxies=proxies, timeout=etag_timeout, headers=headers) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1645, in get_hf_file_metadata - r = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 372, in _request_wrapper - response = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 396, in _request_wrapper - hf_raise_for_status(response) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in 
hf_raise_for_status - raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-669490fd-3f2a747c7c64b2cc25b371b8;a3c05cd9-19aa-49de-a101-169897cabdb7) - -Repository Not Found for url: https://huggingface.co/r/resolve/main/config.json. -Please make sure you specified the correct `repo_id` and `repo_type`. -If you are trying to access a private or gated repo, make sure you are authenticated. - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 425, in cached_file - raise EnvironmentError( -OSError: r is not a local folder and is not a valid model identifier listed on 'https://huggingface.co/models' -If this is a private repository, make sure to pass a token having permission to this repo either by logging in with `huggingface-cli login` or by passing `token=` - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemv-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,google/recurrentgemma-7b,google/recurrentgemma-7b,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise 
ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 304, in hf_raise_for_status - response.raise_for_status() - File ""/usr/local/lib/python3.10/dist-packages/requests/models.py"", line 1024, in raise_for_status - raise HTTPError(http_error_msg, response=self) -requests.exceptions.HTTPError: 404 Client Error: Not Found for url: https://huggingface.co/google/recurrentgemma-7b/resolve/main/config.json - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1221, in hf_hub_download - return _hf_hub_download_to_cache_dir( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1325, in _hf_hub_download_to_cache_dir - _raise_on_head_call_error(head_call_error, force_download, local_files_only) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1823, in _raise_on_head_call_error - raise head_call_error - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1722, in _get_metadata_or_catch_error - metadata = get_hf_file_metadata(url=url, proxies=proxies, timeout=etag_timeout, headers=headers) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1645, in get_hf_file_metadata - r = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 372, in _request_wrapper - response = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 396, in _request_wrapper - hf_raise_for_status(response) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status - raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-66948221-1bf0f7923d277e3521096580;412f76c7-1f76-41e9-8f60-855126f5a1e6) - -Repository Not Found for url: https://huggingface.co/google/recurrentgemma-7b/resolve/main/config.json. -Please make sure you specified the correct `repo_id` and `repo_type`. -If you are trying to access a private or gated repo, make sure you are authenticated. 
- -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 425, in cached_file - raise EnvironmentError( -OSError: google/recurrentgemma-7b is not a local folder and is not a valid model identifier listed on 'https://huggingface.co/models' -If this is a private repository, make sure to pass a token having permission to this repo either by logging in with `huggingface-cli login` or by passing `token=` - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemv-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,huggyllama/llama-13b,huggyllama/llama-13b,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run - report = scenario.run(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 105, in run - _ = backend.generate(self.inputs, self.config.generate_kwargs) - File 
""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context - return func(*args, **kwargs) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 400, in generate - return self.pretrained_model.generate(**inputs, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context - return func(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 1914, in generate - result = self._sample( - File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2651, in _sample - outputs = self( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1174, in forward - outputs = self.model( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 978, in forward - layer_outputs = decoder_layer( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 718, in forward - hidden_states, self_attn_weights, present_key_value = self.self_attn( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 326, in forward - query_states = self.q_proj(hidden_states) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context - return func(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/gemv.py"", line 162, in forward - assert AWQ_INSTALLED, ( -AssertionError: AWQ kernels could not be loaded. 
Please install them from https://github.com/casper-hansen/AutoAWQ_kernels - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemv-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,Deci/DeciCoder-1b,Deci/DeciCoder-1b,cuda,0,42,,,True,,,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run - report = scenario.run(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run - self.run_model_loading_tracking(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking - backend.load() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load - self.load_transformers_model() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model - self.load_transformers_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights - self.load_transformers_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained - self.pretrained_model = self.automodel_loader.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 559, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained - model = cls(config, *model_args, **model_kwargs) - File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 248, in __init__ - self.model = DeciCoderModel(config) - File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 215, in __init__ - self.layers = nn.ModuleList([DeciCoderDecoderLayer(config) for _ in range(config.num_hidden_layers)]) - File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 215, in - self.layers = nn.ModuleList([DeciCoderDecoderLayer(config) for _ in range(config.num_hidden_layers)]) - File 
""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 181, in __init__ - self.self_attn = DeciCoderAttention(config=config) - File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 54, in __init__ - self._init_rope() - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1729, in __getattr__ - raise AttributeError(f""'{type(self).__name__}' object has no attribute '{name}'"") -AttributeError: 'DeciCoderAttention' object has no attribute '_init_rope' - -",llama,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemv-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,google/recurrentgemma-2b,google/recurrentgemma-2b,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 304, in hf_raise_for_status - response.raise_for_status() - File ""/usr/local/lib/python3.10/dist-packages/requests/models.py"", line 1024, in raise_for_status - raise HTTPError(http_error_msg, response=self) -requests.exceptions.HTTPError: 403 Client Error: Forbidden for url: https://huggingface.co/google/recurrentgemma-2b/resolve/main/config.json - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1722, in _get_metadata_or_catch_error - metadata = get_hf_file_metadata(url=url, proxies=proxies, timeout=etag_timeout, headers=headers) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1645, in get_hf_file_metadata - r = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 372, in _request_wrapper - response = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 396, in _request_wrapper - hf_raise_for_status(response) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 367, in hf_raise_for_status - raise HfHubHTTPError(message, response=response) from e 
-huggingface_hub.utils._errors.HfHubHTTPError: (Request ID: Root=1-669481b3-270490a33b4dcdad4ec189ca;d450b818-373e-4673-8006-318abea629fb) - -403 Forbidden: Please enable access to public gated repositories in your fine-grained token settings to view this repository.. -Cannot access content at: https://huggingface.co/google/recurrentgemma-2b/resolve/main/config.json. -If you are trying to create or update content,make sure you have a token with the `write` role. - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1221, in hf_hub_download - return _hf_hub_download_to_cache_dir( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1325, in _hf_hub_download_to_cache_dir - _raise_on_head_call_error(head_call_error, force_download, local_files_only) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1826, in _raise_on_head_call_error - raise LocalEntryNotFoundError( -huggingface_hub.utils._errors.LocalEntryNotFoundError: An error happened while trying to locate the file on the Hub and we cannot find the requested files in the local cache. Please check your connection and try again or make sure your Internet connection is on. - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 445, in cached_file - raise EnvironmentError( -OSError: We couldn't connect to 'https://huggingface.co' to load this file, couldn't find it in the cached files and it looks like google/recurrentgemma-2b is not the path to a directory containing a file named config.json. 
-Checkout your internet connection or see how to run the library in offline mode at 'https://huggingface.co/docs/transformers/installation#offline-mode'. - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemv-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,v,v,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 304, in hf_raise_for_status - response.raise_for_status() - File ""/usr/local/lib/python3.10/dist-packages/requests/models.py"", line 1024, in raise_for_status - raise HTTPError(http_error_msg, response=self) -requests.exceptions.HTTPError: 404 Client Error: Not Found for url: https://huggingface.co/v/resolve/main/config.json - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1221, in hf_hub_download - return _hf_hub_download_to_cache_dir( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1325, in _hf_hub_download_to_cache_dir - _raise_on_head_call_error(head_call_error, force_download, local_files_only) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1823, in _raise_on_head_call_error - raise head_call_error - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1722, in _get_metadata_or_catch_error - metadata = get_hf_file_metadata(url=url, proxies=proxies, timeout=etag_timeout, headers=headers) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1645, in get_hf_file_metadata - r = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 372, in _request_wrapper - response = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 396, in _request_wrapper - hf_raise_for_status(response) - File 
""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status - raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-6694946b-22a890fe488c5642487892bb;ba366b92-4f46-44a6-8f4c-32484422f882) - -Repository Not Found for url: https://huggingface.co/v/resolve/main/config.json. -Please make sure you specified the correct `repo_id` and `repo_type`. -If you are trying to access a private or gated repo, make sure you are authenticated. - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 425, in cached_file - raise EnvironmentError( -OSError: v is not a local folder and is not a valid model identifier listed on 'https://huggingface.co/models' -If this is a private repository, make sure to pass a token having permission to this repo either by logging in with `huggingface-cli login` or by passing `token=` - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemv-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,EleutherAI/pythia-2.7b,EleutherAI/pythia-2.7b,cuda,0,42,,,True,,,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA 
A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,gpt_neox,MB,2220.445696,2693.267456,0.0,2107.63776,1984.899072,s,1,8.0358681640625,8.0358681640625,0.0,8.0358681640625,8.0358681640625,8.0358681640625,8.0358681640625,[8.0358681640625],,kWh,1.439787143610829e-05,7.865565474106388e-06,1.934112658402709e-05,4.160456349424177e-05,,MB,2348.736512,3062.366208,0.0,2415.919104,2256.887808,s,10,0.591340160369873,0.0591340160369873,7.731423047302354e-05,0.05913092803955078,0.05924469757080078,0.059254173278808595,0.05926175384521484,"[0.05911872100830078, 0.0590645751953125, 0.05920390319824219, 0.05926364898681641, 0.05915091323852539, 0.059041313171386715, 0.05914313507080078, 0.05924259185791016, 0.059057342529296876, 0.05905401611328125]",tokens/s,4329.149568327584,kWh,6.980188465278541e-07,3.8247658833592594e-07,3.4174194005998736e-06,4.4979148354636534e-06,tokens/kWh,56915261.70784224,MB,2362.118144,3106.4064,0.0,2457.862144,2339.93984,s,10,18.062030883789063,1.8062030883789064,0.016072961729553175,1.80634765625,1.8298086547851564,1.8303840637207032,1.8308443908691405,"[1.7896077880859376, 1.8152529296875, 1.83095947265625, 1.816201904296875, 1.809857177734375, 1.7874505615234375, 1.802838134765625, 1.8296807861328126, 1.79585791015625, 1.78432421875]",tokens/s,34.8797986258253,kWh,2.1860433322570748e-05,1.1979930446833323e-05,4.4410479972798394e-05,7.825084374220247e-05,tokens/kWh,805103.1399425366,,s,630,18.058653572082537,0.028664529479496062,0.0006548777463018216,0.02892953586578369,0.029231820678710937,0.029389927005767823,0.030267106189727788,"[0.027833343505859375, 0.027704320907592773, 0.027588607788085938, 0.027659263610839844, 0.027656192779541015, 0.027773952484130858, 0.027696128845214843, 0.02852556800842285, 0.02775654411315918, 0.02772684860229492, 0.027580415725708008, 0.02770227241516113, 0.027624448776245116, 0.028310527801513673, 0.028935167312622072, 0.028812288284301758, 0.027459583282470702, 0.027618303298950195, 0.027670528411865233, 0.027634687423706054, 0.027643903732299805, 0.027701248168945314, 0.027571199417114257, 0.027825151443481445, 0.027563007354736328, 0.027518976211547853, 0.027599872589111327, 0.02760704040527344, 0.02894643211364746, 0.02932326316833496, 0.029009920120239258, 0.029087743759155273, 0.029051904678344728, 0.028998720169067384, 0.02869343948364258, 0.028976127624511717, 0.028886016845703126, 0.029034496307373047, 0.02915635108947754, 0.02933452796936035, 0.028000255584716797, 0.027858943939208985, 0.027568128585815428, 0.027715583801269532, 0.029000703811645507, 0.029050880432128907, 0.028898303985595702, 0.029071359634399413, 0.029090816497802735, 0.029137920379638672, 0.029090816497802735, 0.028864511489868162, 0.028981311798095703, 0.029246400833129883, 0.029024255752563476, 0.02914201545715332, 0.028907520294189453, 0.0289300479888916, 0.02902016067504883, 0.02890444755554199, 0.028867584228515625, 0.02898841667175293, 0.029064191818237304, 0.02775040054321289, 0.02771046447753906, 0.02772684860229492, 0.027619327545166016, 0.027636735916137696, 0.027830272674560546, 0.027695104598999022, 0.02777190399169922, 0.027599872589111327, 0.027851776123046876, 0.02833305549621582, 0.028980224609375, 0.02874367904663086, 0.02897920036315918, 0.028869632720947266, 0.028940288543701172, 0.028884992599487305, 0.02894745635986328, 0.029068288803100587, 0.03098521614074707, 0.02955366325378418, 0.028901376724243165, 0.028834815979003905, 0.029182975769042968, 0.028907520294189453, 0.02897100830078125, 0.02876313591003418, 
0.0289751033782959, 0.028869632720947266, 0.029065216064453125, 0.02878976058959961, 0.02897715187072754, 0.028824575424194337, 0.029668352127075196, 0.02916864013671875, 0.029253631591796874, 0.029096960067749023, 0.029249536514282228, 0.028778495788574218, 0.029033472061157226, 0.029214719772338867, 0.028893184661865235, 0.028903423309326173, 0.02916147232055664, 0.029045759201049806, 0.02915225601196289, 0.029014015197753908, 0.029046783447265623, 0.02934681510925293, 0.0289751033782959, 0.028932096481323243, 0.02892185592651367, 0.029238271713256835, 0.029284351348876952, 0.02899456024169922, 0.02896281623840332, 0.028878847122192384, 0.02915839958190918, 0.029080575942993164, 0.02914508819580078, 0.028943359375, 0.028144639968872072, 0.027801599502563477, 0.027667392730712892, 0.02772684860229492, 0.02775040054321289, 0.029634559631347656, 0.029050880432128907, 0.029073408126831055, 0.02894745635986328, 0.029121536254882813, 0.0287825927734375, 0.029259775161743166, 0.029189119338989256, 0.029042688369750977, 0.02891263961791992, 0.0291409912109375, 0.02915123176574707, 0.02918809509277344, 0.02880102348327637, 0.029304832458496095, 0.029107200622558595, 0.030499839782714845, 0.029928447723388672, 0.029510656356811524, 0.02915328025817871, 0.029314048767089845, 0.02895564842224121, 0.029050880432128907, 0.02893414306640625, 0.029165567398071288, 0.029053951263427736, 0.029085695266723634, 0.029033472061157226, 0.0292044792175293, 0.0289300479888916, 0.029089792251586914, 0.029114368438720704, 0.030483455657958985, 0.029455360412597657, 0.029327360153198243, 0.02890547180175781, 0.0292096004486084, 0.028893184661865235, 0.028898303985595702, 0.02875187110900879, 0.029159423828125, 0.029443071365356444, 0.029014015197753908, 0.02914303970336914, 0.02919628715515137, 0.029024255752563476, 0.02896384048461914, 0.028993535995483398, 0.028931072235107422, 0.029087743759155273, 0.02896691131591797, 0.028902399063110353, 0.028014591217041016, 0.028656639099121094, 0.029524991989135742, 0.028914688110351562, 0.028880895614624022, 0.028887039184570314, 0.0291409912109375, 0.029029375076293946, 0.027634687423706054, 0.027852800369262694, 0.027583488464355467, 0.027669504165649415, 0.027623424530029295, 0.027616256713867186, 0.027671552658081053, 0.02796134376525879, 0.02770636749267578, 0.027842559814453126, 0.02896076774597168, 0.0289751033782959, 0.027683839797973633, 0.027886592864990234, 0.028132352828979492, 0.028067840576171874, 0.028483583450317384, 0.029190143585205077, 0.029071359634399413, 0.029084672927856447, 0.02891779136657715, 0.028879840850830077, 0.02917683219909668, 0.029249536514282228, 0.02894233512878418, 0.02902016067504883, 0.029072383880615234, 0.02956185531616211, 0.02917068862915039, 0.02896281623840332, 0.028846080780029298, 0.029100032806396486, 0.028876800537109375, 0.029075456619262696, 0.028853248596191407, 0.0305930233001709, 0.02997964859008789, 0.029115392684936525, 0.02883072090148926, 0.029212671279907225, 0.028926975250244142, 0.029082624435424805, 0.029063167572021483, 0.02896588706970215, 0.028898303985595702, 0.029091840744018556, 0.029062143325805666, 0.029112319946289062, 0.029900800704956054, 0.029361152648925783, 0.029097984313964844, 0.029077503204345705, 0.029066240310668946, 0.029249536514282228, 0.029037567138671876, 0.02892185592651367, 0.02917580795288086, 0.02920140838623047, 0.028881919860839843, 0.02896895980834961, 0.028916736602783204, 0.029070335388183592, 0.02937343978881836, 0.028171199798583985, 0.029096960067749023, 0.02898841667175293, 
0.028872703552246092, 0.02897715187072754, 0.029068288803100587, 0.02889727973937988, 0.028985408782958983, 0.028800960540771484, 0.02895052719116211, 0.028839935302734376, 0.02894233512878418, 0.029265920639038087, 0.02955264091491699, 0.028980224609375, 0.02915123176574707, 0.028943359375, 0.029257728576660157, 0.029182975769042968, 0.029155328750610353, 0.02896281623840332, 0.029070335388183592, 0.029067264556884766, 0.02918809509277344, 0.029259775161743166, 0.02913484764099121, 0.029045759201049806, 0.02902016067504883, 0.028932096481323243, 0.028980224609375, 0.02916044807434082, 0.02940006446838379, 0.029123584747314454, 0.02895359992980957, 0.028837888717651368, 0.02909388732910156, 0.029083648681640626, 0.029041664123535156, 0.028882944107055664, 0.029238271713256835, 0.02897305679321289, 0.02778112030029297, 0.027610111236572265, 0.027806720733642577, 0.027458560943603515, 0.02755891227722168, 0.02752614402770996, 0.027460607528686523, 0.0273756160736084, 0.027794431686401368, 0.027699199676513672, 0.027616256713867186, 0.027896831512451172, 0.02798080062866211, 0.027966463088989257, 0.029611007690429687, 0.02915328025817871, 0.029268991470336913, 0.029084672927856447, 0.028390432357788088, 0.02781692886352539, 0.02924236869812012, 0.028943359375, 0.027662336349487306, 0.02795724868774414, 0.027650047302246093, 0.02753228759765625, 0.027618303298950195, 0.02793267250061035, 0.02772377586364746, 0.02795724868774414, 0.02772787284851074, 0.027807743072509765, 0.0285347843170166, 0.029055999755859374, 0.030306304931640625, 0.029273088455200196, 0.029027328491210938, 0.02917580795288086, 0.029042688369750977, 0.02898227119445801, 0.0285614070892334, 0.0289617919921875, 0.028910591125488282, 0.028874752044677734, 0.029147136688232423, 0.029443071365356444, 0.028872703552246092, 0.02916659164428711, 0.029071359634399413, 0.02895257568359375, 0.02901094436645508, 0.02895359992980957, 0.027650047302246093, 0.027907072067260744, 0.027765760421752928, 0.027752447128295898, 0.02774527931213379, 0.027923456192016603, 0.027823104858398437, 0.027860992431640624, 0.027614208221435548, 0.02775040054321289, 0.0277391357421875, 0.027741184234619142, 0.027894784927368164, 0.02776268768310547, 0.02774630355834961, 0.028457984924316407, 0.029043712615966798, 0.02915225601196289, 0.028862464904785157, 0.029085695266723634, 0.029008895874023437, 0.029147136688232423, 0.028580863952636718, 0.02776268768310547, 0.027907072067260744, 0.02792959976196289, 0.027765760421752928, 0.027648000717163085, 0.027864063262939453, 0.02795417594909668, 0.027979776382446288, 0.028314624786376953, 0.02912665557861328, 0.027599872589111327, 0.028055551528930665, 0.027794431686401368, 0.027834367752075196, 0.027847679138183593, 0.02774835205078125, 0.028811264038085937, 0.029254655838012695, 0.02914201545715332, 0.029092863082885743, 0.027833343505859375, 0.02769817543029785, 0.027668479919433595, 0.027686912536621092, 0.027497472763061522, 0.02809753608703613, 0.028868671417236327, 0.02890847969055176, 0.028848127365112306, 0.029001728057861328, 0.028925952911376954, 0.029037567138671876, 0.02895257568359375, 0.02911846351623535, 0.029101055145263673, 0.02778828811645508, 0.027585535049438475, 0.02757427215576172, 0.02777497673034668, 0.027797504425048827, 0.02796544075012207, 0.029297664642333986, 0.029022207260131837, 0.02919424057006836, 0.028879871368408205, 0.028983295440673826, 0.028863487243652345, 0.029026336669921875, 0.028882911682128906, 0.029106176376342774, 0.02889727973937988, 0.030688255310058594, 
0.03101900863647461, 0.02952191925048828, 0.02832896041870117, 0.027848703384399414, 0.02774630355834961, 0.02795929527282715, 0.028922880172729492, 0.028984319686889647, 0.02911027145385742, 0.029049856185913086, 0.028445695877075194, 0.027709440231323244, 0.027995136260986327, 0.029240320205688477, 0.029048831939697265, 0.029062143325805666, 0.029007871627807616, 0.029055999755859374, 0.028900352478027344, 0.02897203254699707, 0.02889625549316406, 0.028681215286254884, 0.029377536773681642, 0.029473791122436522, 0.02902016067504883, 0.0289617919921875, 0.02896076774597168, 0.028925952911376954, 0.02915839958190918, 0.028841983795166014, 0.028903423309326173, 0.02879078483581543, 0.028940288543701172, 0.02877542304992676, 0.028874752044677734, 0.028712959289550782, 0.029076480865478517, 0.02903654479980469, 0.028956672668457032, 0.02876620864868164, 0.02895462417602539, 0.029030399322509767, 0.028949504852294923, 0.028824575424194337, 0.028892160415649414, 0.0287825927734375, 0.029026304244995117, 0.02901094436645508, 0.02897920036315918, 0.028882944107055664, 0.029096960067749023, 0.02901094436645508, 0.029016063690185546, 0.028786687850952147, 0.028894208908081056, 0.02904473686218262, 0.029061119079589845, 0.02880614471435547, 0.028983295440673826, 0.02899660873413086, 0.029132799148559572, 0.028873727798461913, 0.029130752563476563, 0.02896076774597168, 0.030058496475219725, 0.02974515151977539, 0.029917184829711913, 0.02916147232055664, 0.02910207939147949, 0.029019136428833008, 0.02898124885559082, 0.029015039443969725, 0.029035520553588868, 0.029091840744018556, 0.029008895874023437, 0.029131776809692384, 0.029078527450561522, 0.028684288024902343, 0.02995814323425293, 0.0301711368560791, 0.029247488021850586, 0.0291409912109375, 0.028462080001831053, 0.027898880004882814, 0.027701248168945314, 0.02771046447753906, 0.02757427215576172, 0.027639808654785155, 0.02879283142089844, 0.029123584747314454, 0.029288448333740235, 0.02958847999572754, 0.028480512619018555, 0.027691007614135742, 0.028188671112060547, 0.029084672927856447, 0.02855423927307129, 0.027704320907592773, 0.02755072021484375, 0.02794495964050293, 0.02774220848083496, 0.02770534324645996, 0.027666431427001953, 0.027782144546508788, 0.02773708724975586, 0.028035072326660155, 0.027611135482788086, 0.028063743591308594, 0.02831974411010742, 0.02900275230407715, 0.028863487243652345, 0.028985343933105468, 0.02875289535522461, 0.028951551437377928, 0.028993535995483398, 0.029092863082885743, 0.028890111923217773, 0.028366847991943358, 0.028087295532226563, 0.028956672668457032, 0.028883968353271484, 0.02899046325683594, 0.02874470329284668, 0.02951475143432617, 0.029231103897094726, 0.029240320205688477, 0.029345792770385744, 0.029074432373046875, 0.028452863693237306, 0.028216320037841795, 0.028664831161499024, 0.029162496566772462, 0.02899558448791504, 0.02909491157531738, 0.029438976287841798, 0.02902016067504883, 0.02882252883911133, 0.02809343910217285, 0.027813888549804686, 0.027915264129638673, 0.027874336242675782, 0.027747295379638673, 0.02795315170288086, 0.02857472038269043, 0.028456960678100586, 0.028926975250244142, 0.02895871925354004, 0.02779955291748047, 0.027778047561645508, 0.027685888290405275, 0.027672576904296874, 0.027856895446777344, 0.027354112625122072, 0.027661312103271486, 0.02774527931213379, 0.027656192779541015, 0.02856038475036621, 0.028217344284057616, 0.02775449562072754, 0.029198335647583007, 0.029148160934448244, 0.02900275230407715, 0.028915712356567383, 0.028886016845703126, 
0.02895359992980957, 0.02837811279296875, 0.02920857620239258, 0.0287457275390625, 0.027794431686401368, 0.027701248168945314, 0.027813888549804686, 0.027641855239868163, 0.027641855239868163, 0.02772889518737793, 0.027732992172241212, 0.027666431427001953, 0.02811801528930664, 0.02897203254699707, 0.029031423568725585, 0.02878976058959961, 0.028850175857543944, 0.028875776290893555, 0.029106176376342774, 0.028857343673706053, 0.02897305679321289, 0.02895257568359375, 0.02905292892456055, 0.029034496307373047, 0.028931072235107422, 0.02892902374267578, 0.028992511749267577, 0.029041664123535156, 0.029139968872070314, 0.028985343933105468, 0.02913484764099121, 0.02893824005126953, 0.028460031509399415, 0.02749951934814453, 0.027808767318725586, 0.028003328323364256, 0.027354112625122072, 0.027797504425048827, 0.028634111404418947, 0.028033023834228517, 0.027601919174194335, 0.027634687423706054, 0.027460607528686523, 0.02750771141052246, 0.027864063262939453, 0.02779648017883301]",tokens/s,34.88632181160716,,,,,,,, -4bit-awq-gemv-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,Qwen/Qwen-7B,Qwen/Qwen-7B,cuda,0,42,,,True,,,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run - report = scenario.run(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run - self.run_model_loading_tracking(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking - backend.load() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load - self.load_transformers_model() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model - self.load_transformers_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights - self.load_transformers_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained - self.pretrained_model = self.automodel_loader.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 551, in from_pretrained - model_class = get_class_from_dynamic_module( - File 
""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 502, in get_class_from_dynamic_module - final_module = get_cached_module_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 327, in get_cached_module_file - modules_needed = check_imports(resolved_module_file) - File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 182, in check_imports - raise ImportError( -ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. Run `pip install transformers_stream_generator` - -",qwen,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemv-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,microsoft/rho-math-1b-v0.1,microsoft/rho-math-1b-v0.1,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run - report = scenario.run(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 105, in run - _ = backend.generate(self.inputs, self.config.generate_kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context - return func(*args, **kwargs) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 400, in generate - return self.pretrained_model.generate(**inputs, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context - return func(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 1914, in generate - result = self._sample( - File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2651, in _sample - outputs = self( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1174, in forward - outputs = self.model( - File 
""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 978, in forward - layer_outputs = decoder_layer( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 718, in forward - hidden_states, self_attn_weights, present_key_value = self.self_attn( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 326, in forward - query_states = self.q_proj(hidden_states) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context - return func(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/gemv.py"", line 162, in forward - assert AWQ_INSTALLED, ( -AssertionError: AWQ kernels could not be loaded. 
Please install them from https://github.com/casper-hansen/AutoAWQ_kernels - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemv-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,01-ai/Yi-6B,01-ai/Yi-6B,cuda,0,42,,,True,,,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,llama,MB,3572.097024,4698.144768,0.0,4112.515072,3976.487424,s,1,9.7618046875,9.7618046875,0.0,9.7618046875,9.7618046875,9.7618046875,9.7618046875,[9.7618046875],,kWh,3.5068982135416767e-05,1.9187330361455525e-05,4.7629760326000064e-05,0.00010188607282287236,,MB,1646.030848,4928.831488,0.0,4282.384384,4102.201856,s,10,0.9187590713500976,0.09187590713500976,5.788447408542122e-05,0.09186855697631835,0.09191582107543946,0.09197320747375488,0.09201911659240723,"[0.09190306854248047, 0.09182720184326172, 0.09182498931884765, 0.09182838439941406, 0.09188019561767578, 0.09188800048828125, 0.09186534118652344, 0.09203059387207031, 0.09183952331542969, 0.09187177276611327]",tokens/s,2786.3670464098195,kWh,1.087057450184764e-06,5.955972681583612e-07,5.872785024422019e-06,7.555439742765144e-06,tokens/kWh,33882872.30338085,MB,1704.701952,4937.220096,0.0,4290.772992,4102.204416,s,10,18.4486845703125,1.8448684570312501,0.009306056788317373,1.8436890258789063,1.8577589477539063,1.8594960510253906,1.8608857336425781,"[1.861233154296875, 1.83599658203125, 1.8573729248046875, 1.841143798828125, 1.839673583984375, 1.8307066650390624, 1.83662744140625, 1.8462342529296876, 1.84876123046875, 1.8509349365234375]",tokens/s,34.14877616877855,kWh,2.1712336881759754e-05,1.1896900940278869e-05,5.540618066537796e-05,8.90154184874166e-05,tokens/kWh,707742.5582053048,,s,630,18.44670977783202,0.029280491710844493,0.0005472436529037357,0.029065728187561035,0.03014123592376709,0.030334515285491942,0.031179611511230484,"[0.029069311141967775, 0.02896895980834961, 0.029071359634399413, 0.028993535995483398, 0.028919807434082033, 0.029101055145263673, 0.029228031158447267, 0.029165567398071288, 0.0297891845703125, 0.030139392852783203, 0.029006847381591795, 0.02877440071105957, 0.028916736602783204, 0.02909388732910156, 0.029050880432128907, 0.029289472579956056, 0.03003392028808594, 0.030310400009155275, 0.030078975677490235, 0.030333951950073244, 0.030126079559326172, 0.030680063247680665, 0.030490623474121094, 0.030176256179809572, 0.03012505531311035, 0.03016294479370117, 0.02980352020263672, 0.030877695083618165, 0.030688255310058594, 0.03038515281677246, 0.030224384307861327, 0.030509056091308592, 0.030176256179809572, 0.03019980812072754, 0.030271488189697264, 0.03038924789428711, 0.02893926429748535, 0.02902016067504883, 0.02900480079650879, 0.02934272003173828, 0.029038591384887694, 0.028980224609375, 0.02899660873413086, 0.02895359992980957, 0.028767232894897462, 0.028931072235107422, 0.029076480865478517, 0.029146112442016602, 0.029040672302246093, 0.029291488647460937, 0.030103551864624024, 0.028737535476684572, 0.02893414306640625, 0.028903423309326173, 0.02916966438293457, 0.030099456787109374, 
0.030060543060302734, 0.030005247116088866, 0.029905920028686524, 0.028935167312622072, 0.02893926429748535, 0.02891366386413574, 0.029180927276611326, 0.02857164764404297, 0.02873036766052246, 0.02892083168029785, 0.02893414306640625, 0.030208000183105467, 0.030093311309814453, 0.029253631591796874, 0.0294021110534668, 0.029014015197753908, 0.029001728057861328, 0.029025279998779296, 0.02895052719116211, 0.02896281623840332, 0.0289617919921875, 0.02952191925048828, 0.03018854331970215, 0.03017830467224121, 0.03037696075439453, 0.030279680252075194, 0.030369792938232422, 0.030096384048461915, 0.030063615798950196, 0.030069759368896484, 0.028997631072998048, 0.028670976638793946, 0.02871500778198242, 0.028824575424194337, 0.028835840225219726, 0.028612607955932616, 0.028620800018310546, 0.02855833625793457, 0.02875699234008789, 0.028628992080688476, 0.028646400451660156, 0.02860851287841797, 0.028673023223876954, 0.028701696395874023, 0.029008895874023437, 0.029059072494506837, 0.029107200622558595, 0.02902835273742676, 0.02897920036315918, 0.028860416412353516, 0.028824575424194337, 0.028464128494262695, 0.028750848770141602, 0.028907520294189453, 0.029008895874023437, 0.02897715187072754, 0.029027328491210938, 0.029989887237548828, 0.030126079559326172, 0.03033497619628906, 0.028888063430786134, 0.02892083168029785, 0.029047807693481444, 0.02876416015625, 0.028489728927612305, 0.028505088806152344, 0.029046783447265623, 0.02912563133239746, 0.028580863952636718, 0.028956672668457032, 0.028660736083984374, 0.028767232894897462, 0.028850175857543944, 0.028844032287597656, 0.02877542304992676, 0.028867584228515625, 0.029369344711303712, 0.030087167739868165, 0.030026752471923827, 0.030098432540893554, 0.030093311309814453, 0.030043136596679686, 0.0301711368560791, 0.030055423736572266, 0.029989887237548828, 0.029641727447509765, 0.028866559982299804, 0.0289617919921875, 0.028906496047973632, 0.029262847900390625, 0.02996940803527832, 0.03019980812072754, 0.030013439178466796, 0.030139392852783203, 0.030021631240844726, 0.02997760009765625, 0.030015487670898438, 0.030041088104248048, 0.02997145652770996, 0.030121984481811522, 0.029989887237548828, 0.03016908836364746, 0.030091264724731445, 0.030073856353759764, 0.030079999923706056, 0.029480991363525392, 0.028941280364990236, 0.02898227119445801, 0.02902118492126465, 0.029180927276611326, 0.029121536254882813, 0.028956672668457032, 0.028935167312622072, 0.028887039184570314, 0.029091840744018556, 0.02896895980834961, 0.028949504852294923, 0.030139392852783203, 0.0301977596282959, 0.029019136428833008, 0.028919807434082033, 0.028505088806152344, 0.028848127365112306, 0.028901376724243165, 0.030325759887695314, 0.030432256698608398, 0.02958950424194336, 0.030277631759643556, 0.029248512268066407, 0.028642303466796876, 0.02897715187072754, 0.028606464385986328, 0.028820480346679687, 0.02999295997619629, 0.029826047897338868, 0.030272512435913085, 0.030885887145996094, 0.028922880172729492, 0.028993535995483398, 0.030104576110839845, 0.030266368865966797, 0.02897305679321289, 0.0290119686126709, 0.029049856185913086, 0.029871103286743163, 0.030224384307861327, 0.029039615631103514, 0.029270015716552734, 0.02911948776245117, 0.028957696914672853, 0.028882944107055664, 0.028882944107055664, 0.028917760848999025, 0.029062143325805666, 0.02900275230407715, 0.028856319427490236, 0.029107200622558595, 0.028999679565429686, 0.02897817611694336, 0.02882252883911133, 0.028948480606079102, 0.02894540786743164, 0.02857881546020508, 0.02900377655029297, 
0.0291778564453125, 0.029023231506347655, 0.029075456619262696, 0.02894540786743164, 0.029058048248291016, 0.029064191818237304, 0.02894643211364746, 0.03036057662963867, 0.030785535812377928, 0.030313472747802734, 0.029460479736328125, 0.02897715187072754, 0.028855295181274415, 0.029451263427734374, 0.029249536514282228, 0.029029375076293946, 0.029027328491210938, 0.02892799949645996, 0.02898739242553711, 0.028906496047973632, 0.029121536254882813, 0.028841983795166014, 0.02883072090148926, 0.028876800537109375, 0.02923417663574219, 0.02909388732910156, 0.029080575942993164, 0.02889625549316406, 0.028617727279663087, 0.028909568786621095, 0.029054975509643553, 0.029000703811645507, 0.02877132797241211, 0.028612607955932616, 0.028924928665161134, 0.02892902374267578, 0.028877824783325196, 0.028917760848999025, 0.028924928665161134, 0.02918707275390625, 0.029031423568725585, 0.02913587188720703, 0.029053951263427736, 0.028680192947387696, 0.028997631072998048, 0.029009920120239258, 0.029090816497802735, 0.0291778564453125, 0.028959743499755858, 0.02899660873413086, 0.029035520553588868, 0.02905292892456055, 0.029069311141967775, 0.02896998405456543, 0.029029375076293946, 0.030204927444458008, 0.03136614418029785, 0.030315519332885742, 0.029502464294433595, 0.02983526420593262, 0.030285823822021486, 0.029646848678588866, 0.029077503204345705, 0.029322240829467775, 0.029092863082885743, 0.03177369689941406, 0.030483455657958985, 0.029396991729736328, 0.02918400001525879, 0.029022207260131837, 0.02902016067504883, 0.02909491157531738, 0.029027328491210938, 0.029005823135375978, 0.02916147232055664, 0.02897100830078125, 0.028829696655273438, 0.028627967834472655, 0.028997631072998048, 0.029243392944335936, 0.02922700881958008, 0.029054975509643553, 0.029016063690185546, 0.02900275230407715, 0.02893619155883789, 0.02898636817932129, 0.02900480079650879, 0.029139968872070314, 0.028907520294189453, 0.029034496307373047, 0.029257728576660157, 0.028481536865234375, 0.028848127365112306, 0.02872831916809082, 0.028919807434082033, 0.02871500778198242, 0.028873727798461913, 0.028881919860839843, 0.028907520294189453, 0.028917760848999025, 0.028883968353271484, 0.028853248596191407, 0.029062143325805666, 0.028926975250244142, 0.028890111923217773, 0.02877952003479004, 0.028613632202148437, 0.029008895874023437, 0.028742656707763672, 0.028718080520629883, 0.029070335388183592, 0.029008895874023437, 0.02905292892456055, 0.029042688369750977, 0.029001728057861328, 0.02895871925354004, 0.029104127883911132, 0.02899865531921387, 0.02914508819580078, 0.02899046325683594, 0.029279232025146484, 0.02918809509277344, 0.029083648681640626, 0.029014015197753908, 0.029063167572021483, 0.028999679565429686, 0.029309951782226562, 0.029155328750610353, 0.02936729621887207, 0.028983295440673826, 0.029039615631103514, 0.029057024002075195, 0.029634559631347656, 0.03129958343505859, 0.029508607864379883, 0.028943359375, 0.02895052719116211, 0.029165567398071288, 0.028883968353271484, 0.02895257568359375, 0.028883968353271484, 0.02910310363769531, 0.029066240310668946, 0.02898227119445801, 0.029293567657470702, 0.029026304244995117, 0.029073408126831055, 0.029070335388183592, 0.029046783447265623, 0.02898739242553711, 0.029197311401367186, 0.029009920120239258, 0.02873958396911621, 0.028882944107055664, 0.029042688369750977, 0.029254655838012695, 0.02874163246154785, 0.029083648681640626, 0.029240320205688477, 0.029092863082885743, 0.0291276798248291, 0.029017087936401367, 0.028948480606079102, 0.028984319686889647, 
0.029501440048217774, 0.02912563133239746, 0.02909491157531738, 0.029005823135375978, 0.02899865531921387, 0.028991487503051756, 0.02899046325683594, 0.028967935562133788, 0.02897305679321289, 0.0291276798248291, 0.028553216934204102, 0.029138944625854493, 0.02911948776245117, 0.029025279998779296, 0.028993535995483398, 0.029009920120239258, 0.028914688110351562, 0.02910207939147949, 0.0289617919921875, 0.028991487503051756, 0.0285665283203125, 0.029033472061157226, 0.02942361640930176, 0.029273088455200196, 0.029296640396118165, 0.029451263427734374, 0.029050880432128907, 0.029120512008666992, 0.029009920120239258, 0.029108224868774416, 0.029075456619262696, 0.029122560501098634, 0.03035545539855957, 0.03140403175354004, 0.02979430389404297, 0.029122560501098634, 0.029046783447265623, 0.02895462417602539, 0.02893414306640625, 0.02898944091796875, 0.029395967483520507, 0.029247488021850586, 0.02960383987426758, 0.029088768005371093, 0.02904473686218262, 0.028999679565429686, 0.029017087936401367, 0.02896384048461914, 0.02895257568359375, 0.02912665557861328, 0.028980224609375, 0.02889727973937988, 0.02892083168029785, 0.029051904678344728, 0.029104127883911132, 0.02894745635986328, 0.029949951171875, 0.029655040740966795, 0.029660160064697266, 0.029087743759155273, 0.029043712615966798, 0.02889625549316406, 0.02914201545715332, 0.028883968353271484, 0.029080575942993164, 0.028899328231811523, 0.029114368438720704, 0.029146112442016602, 0.029609983444213867, 0.028867584228515625, 0.02893926429748535, 0.028646400451660156, 0.028816383361816408, 0.029405183792114258, 0.029511680603027345, 0.02883072090148926, 0.02893824005126953, 0.02899558448791504, 0.029075456619262696, 0.03017215919494629, 0.029379583358764647, 0.029293567657470702, 0.02915225601196289, 0.029042688369750977, 0.029236223220825194, 0.029115392684936525, 0.0291409912109375, 0.029702144622802733, 0.03059097671508789, 0.029428735733032226, 0.029255680084228516, 0.029228031158447267, 0.029254655838012695, 0.029139968872070314, 0.029558784484863283, 0.029264896392822266, 0.029253631591796874, 0.029303808212280274, 0.029222911834716796, 0.029115392684936525, 0.03058380889892578, 0.029913087844848633, 0.02904473686218262, 0.029034496307373047, 0.029090816497802735, 0.029018112182617187, 0.028888063430786134, 0.029327360153198243, 0.030107648849487304, 0.030159872055053712, 0.02898227119445801, 0.029611007690429687, 0.029517824172973633, 0.02892185592651367, 0.029100032806396486, 0.029100032806396486, 0.028694528579711914, 0.029656063079833983, 0.03013734436035156, 0.03002060890197754, 0.02916966438293457, 0.02891366386413574, 0.028941312789916993, 0.028851200103759765, 0.028862464904785157, 0.02921062469482422, 0.028914688110351562, 0.029000703811645507, 0.0299182071685791, 0.030627840042114256, 0.030664703369140626, 0.029305856704711915, 0.029876224517822264, 0.02959872055053711, 0.02912563133239746, 0.030361600875854492, 0.029281280517578126, 0.02898841667175293, 0.029730815887451172, 0.03017523193359375, 0.02960383987426758, 0.029318143844604492, 0.02911027145385742, 0.02913484764099121, 0.029059072494506837, 0.02902118492126465, 0.029314048767089845, 0.028664831161499024, 0.0289617919921875, 0.029042688369750977, 0.029041664123535156, 0.029075456619262696, 0.029883392333984377, 0.029303808212280274, 0.029061119079589845, 0.02898227119445801, 0.02917683219909668, 0.029029375076293946, 0.02933145523071289, 0.030095359802246095, 0.02980659294128418, 0.02917683219909668, 0.02960588836669922, 0.030697471618652345, 
0.029083648681640626, 0.02915123176574707, 0.029129728317260743, 0.030701568603515625, 0.02933247947692871, 0.029032447814941405, 0.0291276798248291, 0.02911027145385742, 0.029261823654174804, 0.030110719680786133, 0.02895257568359375, 0.029207551956176758, 0.029148160934448244, 0.029026304244995117, 0.02903654479980469, 0.02898636817932129, 0.029047807693481444, 0.029051904678344728, 0.029055999755859374, 0.02980659294128418, 0.03159347152709961, 0.030249984741210937, 0.028997631072998048, 0.02914508819580078, 0.029047807693481444, 0.029241344451904298, 0.029067264556884766, 0.029449216842651366, 0.029211648941040037, 0.028834815979003905, 0.029005823135375978, 0.02970419120788574, 0.029870080947875976, 0.029024255752563476, 0.02930892753601074, 0.02896588706970215, 0.028916736602783204, 0.02898841667175293, 0.02895257568359375, 0.028888063430786134, 0.02916147232055664, 0.029043712615966798, 0.029000703811645507, 0.02894643211364746, 0.029082624435424805, 0.028767232894897462, 0.029059072494506837, 0.029076480865478517, 0.029121536254882813, 0.029132799148559572, 0.029229055404663085, 0.029089792251586914, 0.02916864013671875, 0.030119935989379884, 0.0313384952545166, 0.03078348731994629, 0.030204927444458008, 0.029066240310668946, 0.029283327102661134, 0.029338623046875, 0.03015782356262207, 0.029174783706665038, 0.029076480865478517, 0.029034496307373047, 0.028951551437377928, 0.03141939163208008, 0.030851072311401367, 0.02937548828125, 0.02915328025817871, 0.029043712615966798, 0.029037567138671876, 0.029008895874023437, 0.029065216064453125, 0.02899456024169922, 0.02914406394958496, 0.029075456619262696, 0.029074432373046875, 0.028915712356567383, 0.029041664123535156, 0.029832191467285156, 0.029070335388183592, 0.02995712089538574]",tokens/s,34.15243192892263,,,,,,,, -4bit-awq-gemv-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,EleutherAI/gpt-neo-2.7B,EleutherAI/gpt-neo-2.7B,cuda,0,42,,,True,,,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,gpt_neo,MB,2201.800704,2405.957632,0.0,1820.327936,1730.89792,s,1,8.8347353515625,8.8347353515625,0.0,8.8347353515625,8.8347353515625,8.8347353515625,8.8347353515625,[8.8347353515625],,kWh,2.4470479031947438e-05,1.339559805997913e-05,3.3853638193998004e-05,7.171971528592458e-05,,MB,2311.237632,2582.1184,0.0,1935.671296,1893.8368,s,10,0.5981350021362305,0.05981350021362305,0.0002393327107253519,0.05983438301086426,0.06010357246398926,0.060112153816223146,0.06011901889801026,"[0.059250526428222657, 0.059764511108398435, 0.06010166549682617, 0.05993859100341797, 0.05978047943115234, 0.05961808013916016, 0.05988828659057617, 0.05992118453979492, 0.05975094223022461, 0.060120735168457035]",tokens/s,4279.970225546068,kWh,6.975492870833478e-07,3.8222388822776225e-07,3.285067987529262e-06,4.364841162840372e-06,tokens/kWh,58650473.2816923,MB,2323.181568,2728.91904,0.0,2082.471936,1953.434112,s,10,14.86483703613281,1.4864837036132812,0.016408756532317027,1.4975529785156252,1.50154306640625,1.5020247070312498,1.50241001953125,"[1.49774853515625, 1.4684833984375, 1.49790087890625, 
1.47410986328125, 1.45597119140625, 1.4701373291015625, 1.50250634765625, 1.50143603515625, 1.497357421875, 1.49918603515625]",tokens/s,42.381897525591626,kWh,1.7627579550277803e-05,9.657050097356572e-06,3.804243729667067e-05,6.532706694430502e-05,tokens/kWh,964378.2117711029,,s,630,14.861542396545417,0.023589749835786364,0.0005018264753177855,0.02368870449066162,0.024019660568237303,0.024194969558715817,0.025172695674896238,"[0.022692863464355468, 0.02365132713317871, 0.023758848190307616, 0.023991296768188477, 0.023793664932250977, 0.023727104187011717, 0.02369331169128418, 0.02389606475830078, 0.02370355224609375, 0.02367692756652832, 0.023635967254638672, 0.023744512557983398, 0.023622655868530275, 0.023616512298583983, 0.02410188865661621, 0.023788543701171876, 0.02367283248901367, 0.023624704360961913, 0.023710720062255858, 0.023622655868530275, 0.02365951919555664, 0.023582719802856447, 0.023851007461547852, 0.023871488571166992, 0.023805952072143553, 0.023665664672851562, 0.023829504013061522, 0.02370355224609375, 0.02369945526123047, 0.023638015747070314, 0.024000511169433594, 0.02407526397705078, 0.023751680374145507, 0.023669759750366212, 0.023791616439819335, 0.02371583938598633, 0.02365951919555664, 0.02365132713317871, 0.023762943267822266, 0.023811071395874024, 0.02371379280090332, 0.023641088485717773, 0.023657472610473632, 0.02471116828918457, 0.024035327911376952, 0.023816192626953125, 0.02367897605895996, 0.023848960876464844, 0.023619583129882812, 0.023521280288696288, 0.023611391067504883, 0.023574527740478517, 0.02366054344177246, 0.023624704360961913, 0.02346598434448242, 0.02368716812133789, 0.02424115180969238, 0.025191423416137695, 0.024293376922607423, 0.02370150375366211, 0.023803903579711915, 0.023757823944091795, 0.023630847930908205, 0.022622207641601562, 0.022681600570678712, 0.02264575958251953, 0.02288844871520996, 0.0229171199798584, 0.02268057632446289, 0.022674432754516603, 0.02287513542175293, 0.02274508857727051, 0.022633472442626954, 0.022598655700683593, 0.022701055526733398, 0.022855680465698244, 0.022924287796020508, 0.022648832321166993, 0.02287718391418457, 0.02267033576965332, 0.022599679946899414, 0.022627328872680662, 0.022560768127441407, 0.023174144744873046, 0.02366464042663574, 0.023542783737182618, 0.02384486389160156, 0.023798784255981444, 0.023781375885009767, 0.023435264587402343, 0.023397375106811523, 0.0234833927154541, 0.023456768035888673, 0.023476224899291992, 0.023529472351074218, 0.02329395294189453, 0.023397375106811523, 0.02345779228210449, 0.023744512557983398, 0.02281881523132324, 0.02261299133300781, 0.022725631713867187, 0.023855104446411132, 0.0237076473236084, 0.023710720062255858, 0.023617536544799804, 0.023391231536865235, 0.02390937614440918, 0.02347315216064453, 0.02368409538269043, 0.02372915267944336, 0.023434240341186522, 0.023432191848754884, 0.023421951293945312, 0.02429439926147461, 0.023626752853393555, 0.024574975967407226, 0.023630847930908205, 0.02388172721862793, 0.023604223251342774, 0.0237260799407959, 0.02366771125793457, 0.023634944915771484, 0.023669759750366212, 0.02370867156982422, 0.023638015747070314, 0.02389606475830078, 0.023739391326904297, 0.02371174430847168, 0.023008256912231444, 0.02347724723815918, 0.02452275276184082, 0.026205184936523438, 0.024057855606079103, 0.023775232315063476, 0.023602176666259765, 0.023847936630249023, 0.023569408416748046, 0.023818239212036133, 0.023867391586303712, 0.023727104187011717, 0.023833599090576172, 0.02349260711669922, 0.023649343490600584, 
0.023520191192626952, 0.023540735244750977, 0.023623680114746092, 0.023587839126586914, 0.023480319976806642, 0.023795711517333985, 0.02348543930053711, 0.024846336364746095, 0.02365235137939453, 0.025196544647216795, 0.023644159317016602, 0.02372403144836426, 0.023394304275512694, 0.02373017692565918, 0.023423999786376954, 0.02385408020019531, 0.023402496337890624, 0.022755327224731444, 0.024026111602783205, 0.02347520065307617, 0.023601152420043944, 0.02408038330078125, 0.025172992706298827, 0.024017919540405275, 0.024203264236450195, 0.02330316734313965, 0.023835647583007814, 0.02349158477783203, 0.023601152420043944, 0.023508991241455078, 0.023658496856689453, 0.023434240341186522, 0.02349465560913086, 0.023924736022949217, 0.023666688919067383, 0.023669759750366212, 0.023584768295288085, 0.023779327392578126, 0.023738367080688477, 0.02372915267944336, 0.023557119369506836, 0.023714815139770508, 0.02350387191772461, 0.023706623077392578, 0.023546880722045898, 0.023598079681396485, 0.02312499237060547, 0.023341056823730468, 0.022586368560791017, 0.023422975540161133, 0.02392678451538086, 0.023628799438476563, 0.023574527740478517, 0.02368409538269043, 0.023500799179077148, 0.023563264846801758, 0.02310553550720215, 0.02271027183532715, 0.023027711868286133, 0.02348441505432129, 0.023592960357666014, 0.02393497657775879, 0.02388582420349121, 0.023333887100219726, 0.023597055435180665, 0.02265395164489746, 0.022996992111206056, 0.023577600479125976, 0.023578624725341796, 0.02347110366821289, 0.02347724723815918, 0.02347724723815918, 0.023387136459350585, 0.023419904708862304, 0.023544832229614256, 0.023555072784423828, 0.023585792541503905, 0.023456768035888673, 0.02352639961242676, 0.023856128692626953, 0.02345881652832031, 0.023624704360961913, 0.023411712646484374, 0.02348134422302246, 0.023560192108154295, 0.023463935852050782, 0.023448575973510744, 0.023727104187011717, 0.023608320236206053, 0.023438335418701172, 0.023546880722045898, 0.023524351119995117, 0.023641088485717773, 0.02350796890258789, 0.023762943267822266, 0.023448575973510744, 0.023645183563232423, 0.023500799179077148, 0.023588863372802735, 0.02272051239013672, 0.02287820816040039, 0.022786048889160155, 0.02267955207824707, 0.022607872009277344, 0.023037952423095705, 0.02292633628845215, 0.022552576065063477, 0.023973888397216796, 0.022684671401977538, 0.022618112564086915, 0.022708223342895507, 0.022631423950195313, 0.022998016357421876, 0.02371379280090332, 0.023548927307128906, 0.023823360443115234, 0.023985151290893555, 0.025584640502929686, 0.024259584426879883, 0.024018943786621092, 0.023988224029541014, 0.023752704620361328, 0.023670783996582033, 0.023880704879760743, 0.023703712463378906, 0.023712608337402345, 0.023794687271118165, 0.02387353515625, 0.024054784774780274, 0.023731199264526368, 0.02368409538269043, 0.023784448623657226, 0.023670783996582033, 0.0236759033203125, 0.024062976837158204, 0.02351411247253418, 0.023640064239501952, 0.022689792633056642, 0.02263039970397949, 0.022756351470947265, 0.022708223342895507, 0.022771711349487304, 0.022656000137329102, 0.022714368820190428, 0.0227061767578125, 0.022639616012573242, 0.022603776931762694, 0.02274406433105469, 0.022649856567382814, 0.02244915199279785, 0.02233344078063965, 0.022587392807006838, 0.022614015579223632, 0.022607872009277344, 0.02254643249511719, 0.022708223342895507, 0.02264575958251953, 0.02244095993041992, 0.022603776931762694, 0.022700031280517577, 0.02269491195678711, 0.02268262481689453, 0.022633472442626954, 
0.022740991592407226, 0.022676479339599608, 0.02262118339538574, 0.02271232032775879, 0.022432767868041992, 0.022336511611938475, 0.022331392288208008, 0.022567935943603516, 0.0228351993560791, 0.022724607467651366, 0.022676479339599608, 0.022623231887817383, 0.023175167083740233, 0.0227061767578125, 0.022649856567382814, 0.022771711349487304, 0.022632448196411133, 0.022609920501708985, 0.022591487884521484, 0.022658048629760744, 0.022594560623168947, 0.022552576065063477, 0.022640640258789063, 0.022755327224731444, 0.02388275146484375, 0.023616512298583983, 0.02270207977294922, 0.02265190315246582, 0.02267955207824707, 0.022614015579223632, 0.02264473533630371, 0.022639616012573242, 0.022648832321166993, 0.022716415405273437, 0.022689792633056642, 0.022649856567382814, 0.022524927139282228, 0.02313113594055176, 0.025392127990722657, 0.024681472778320314, 0.02392985534667969, 0.02375372886657715, 0.02371686363220215, 0.023794687271118165, 0.023826431274414063, 0.023572479248046875, 0.023770111083984375, 0.023792640686035156, 0.02384588813781738, 0.02371686363220215, 0.023758848190307616, 0.02372812843322754, 0.02374143981933594, 0.023746559143066406, 0.02367897605895996, 0.023764991760253908, 0.024123392105102538, 0.023521280288696288, 0.02367692756652832, 0.02371583938598633, 0.023814144134521483, 0.02369331169128418, 0.023792640686035156, 0.02386534309387207, 0.02370457649230957, 0.023794687271118165, 0.02393600082397461, 0.023802879333496094, 0.02373017692565918, 0.023808000564575195, 0.02366361618041992, 0.023710720062255858, 0.023714815139770508, 0.023718912124633788, 0.023780351638793946, 0.023778303146362305, 0.023610368728637695, 0.0237076473236084, 0.023963647842407225, 0.023819263458251954, 0.02371686363220215, 0.023795711517333985, 0.023767040252685546, 0.023739391326904297, 0.02373324775695801, 0.02374143981933594, 0.023839744567871093, 0.023718912124633788, 0.02368716812133789, 0.023636991500854493, 0.023910400390625, 0.023827455520629884, 0.023954431533813478, 0.02390630340576172, 0.02386124801635742, 0.024174591064453126, 0.02386227226257324, 0.02370867156982422, 0.023822336196899413, 0.02386636734008789, 0.023973888397216796, 0.02393087959289551, 0.024040447235107423, 0.023757823944091795, 0.024159231185913087, 0.02408857536315918, 0.024182783126831055, 0.024216575622558592, 0.02395955276489258, 0.024230911254882814, 0.023819263458251954, 0.02390630340576172, 0.02367283248901367, 0.023819263458251954, 0.0237127685546875, 0.02404351997375488, 0.02368921661376953, 0.023735296249389647, 0.023666688919067383, 0.023838720321655273, 0.02369024085998535, 0.023742464065551756, 0.02372915267944336, 0.02369843292236328, 0.023798784255981444, 0.024246271133422852, 0.02391347122192383, 0.023991296768188477, 0.023972864151000976, 0.023748607635498048, 0.024010751724243166, 0.023661567687988282, 0.023771135330200196, 0.023710720062255858, 0.02430259132385254, 0.023769088745117187, 0.023665664672851562, 0.023626752853393555, 0.02385817527770996, 0.023629823684692384, 0.023815168380737304, 0.02385305595397949, 0.02366361618041992, 0.023355392456054686, 0.02370355224609375, 0.02370969581604004, 0.023710720062255858, 0.023863296508789062, 0.023816192626953125, 0.023781375885009767, 0.023734272003173826, 0.02371686363220215, 0.02369638442993164, 0.02367180824279785, 0.02370457649230957, 0.0236810245513916, 0.02368000030517578, 0.024159231185913087, 0.025363456726074218, 0.024507392883300783, 0.023956480026245116, 0.024253440856933595, 0.023781375885009767, 0.023948287963867186, 
0.023754751205444336, 0.023987199783325194, 0.023723007202148438, 0.023769088745117187, 0.023581695556640626, 0.02390732765197754, 0.023639039993286134, 0.023797760009765623, 0.02368921661376953, 0.024037376403808593, 0.02406399917602539, 0.023792640686035156, 0.024142847061157227, 0.02373222351074219, 0.023658496856689453, 0.02349158477783203, 0.02371788787841797, 0.02372915267944336, 0.02369740867614746, 0.023618560791015625, 0.023608320236206053, 0.023629823684692384, 0.024230911254882814, 0.024123392105102538, 0.023748607635498048, 0.02409984016418457, 0.023798784255981444, 0.02368819236755371, 0.023910400390625, 0.02370969581604004, 0.023787519454956055, 0.02369228744506836, 0.023604223251342774, 0.02367180824279785, 0.023817216873168946, 0.023743488311767577, 0.023953407287597657, 0.023991296768188477, 0.02414182472229004, 0.02386534309387207, 0.02438041687011719, 0.024482816696166993, 0.02413363265991211, 0.024049663543701173, 0.023560192108154295, 0.023843839645385743, 0.02412646484375, 0.023521280288696288, 0.023564287185668945, 0.023801855087280274, 0.02328985595703125, 0.023172096252441408, 0.024184831619262694, 0.023859199523925782, 0.023207935333251953, 0.02333286476135254, 0.023791616439819335, 0.023635967254638672, 0.023799808502197265, 0.02368000030517578, 0.023650367736816405, 0.02420832061767578, 0.024002559661865236, 0.023570432662963867, 0.023792640686035156, 0.02364825630187988, 0.02352230453491211, 0.023661567687988282, 0.023746559143066406, 0.02369024085998535, 0.02367283248901367, 0.02370560073852539, 0.02333286476135254, 0.023774208068847655, 0.022627328872680662, 0.023387136459350585, 0.023076864242553712, 0.023625728607177734, 0.023636991500854493, 0.02373324775695801, 0.023746559143066406, 0.0236943359375, 0.02365132713317871, 0.023739391326904297, 0.023636991500854493, 0.0237076473236084, 0.023786495208740235, 0.023562240600585937, 0.023857152938842774, 0.02507776069641113, 0.024054784774780274, 0.023956480026245116, 0.02410188865661621, 0.023820287704467775, 0.024175615310668946, 0.023788543701171876, 0.022722560882568358, 0.02350284767150879, 0.023766016006469725, 0.023740415573120118, 0.023875583648681642, 0.024556543350219725, 0.02517196846008301, 0.024253440856933595, 0.024236032485961914, 0.02392268753051758, 0.023998464584350586, 0.024016895294189454, 0.023863296508789062, 0.023742464065551756, 0.02404249572753906, 0.023738367080688477, 0.02368511962890625, 0.02367180824279785, 0.02368511962890625, 0.02364825630187988, 0.023961599349975587, 0.023635967254638672, 0.02388479995727539, 0.023829504013061522, 0.023747583389282227, 0.023888896942138672, 0.0236810245513916, 0.023779327392578126, 0.023780351638793946, 0.0236759033203125, 0.023562240600585937, 0.023742464065551756, 0.023657472610473632, 0.023731199264526368, 0.0237127685546875, 0.023770111083984375, 0.02369024085998535, 0.024011775970458983, 0.023799808502197265, 0.02388991928100586, 0.023640064239501952, 0.023521280288696288, 0.023627775192260742, 0.02366361618041992, 0.023808000564575195, 0.02367180824279785, 0.023805952072143553, 0.0241530876159668, 0.02372915267944336, 0.02371583938598633, 0.023763967514038087, 0.023677951812744142, 0.023654399871826173, 0.023755775451660157, 0.023763967514038087, 0.023818239212036133, 0.023779327392578126, 0.023727104187011717, 0.0236943359375, 0.023673856735229492, 0.02366054344177246, 0.02372403144836426, 0.023585792541503905]",tokens/s,42.39129312354851,,,,,,,, 
-4bit-awq-gemv-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,facebook/opt-66b,facebook/opt-66b,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - self.load_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3904, in from_pretrained - dispatch_model(model, **device_map_kwargs) - File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 489, in dispatch_model - model.to(device) - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 2796, in to - return super().to(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1173, in to - return self._apply(convert) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 779, in _apply - module._apply(fn) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 779, in _apply - module._apply(fn) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 779, in _apply - module._apply(fn) - [Previous line repeated 2 more times] - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 853, in _apply - self._buffers[key] = fn(buf) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1159, in convert - return t.to( -torch.cuda.OutOfMemoryError: CUDA out of memory. Tried to allocate 162.00 MiB. 
GPU - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemv-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,0,0,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 304, in hf_raise_for_status - response.raise_for_status() - File ""/usr/local/lib/python3.10/dist-packages/requests/models.py"", line 1024, in raise_for_status - raise HTTPError(http_error_msg, response=self) -requests.exceptions.HTTPError: 404 Client Error: Not Found for url: https://huggingface.co/0/resolve/main/config.json - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1221, in hf_hub_download - return _hf_hub_download_to_cache_dir( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1325, in _hf_hub_download_to_cache_dir - _raise_on_head_call_error(head_call_error, force_download, local_files_only) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1823, in _raise_on_head_call_error - raise head_call_error - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1722, in _get_metadata_or_catch_error - metadata = get_hf_file_metadata(url=url, proxies=proxies, timeout=etag_timeout, headers=headers) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1645, in get_hf_file_metadata - r = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 372, in _request_wrapper - response = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 396, in _request_wrapper - hf_raise_for_status(response) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status - raise RepositoryNotFoundError(message, response) 
from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-669494cc-70275b937cc5b7317485c356;6524d7bd-443b-4276-ae45-e563395c40fa) - -Repository Not Found for url: https://huggingface.co/0/resolve/main/config.json. -Please make sure you specified the correct `repo_id` and `repo_type`. -If you are trying to access a private or gated repo, make sure you are authenticated. - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 425, in cached_file - raise EnvironmentError( -OSError: 0 is not a local folder and is not a valid model identifier listed on 'https://huggingface.co/models' -If this is a private repository, make sure to pass a token having permission to this repo either by logging in with `huggingface-cli login` or by passing `token=` - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemv-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,facebook/xglm-4.5B,facebook/xglm-4.5B,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File 
""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run - report = scenario.run(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 105, in run - _ = backend.generate(self.inputs, self.config.generate_kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context - return func(*args, **kwargs) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 400, in generate - return self.pretrained_model.generate(**inputs, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context - return func(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 1914, in generate - result = self._sample( - File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2651, in _sample - outputs = self( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/xglm/modeling_xglm.py"", line 760, in forward - outputs = self.model( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/xglm/modeling_xglm.py"", line 646, in forward - layer_outputs = decoder_layer( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/xglm/modeling_xglm.py"", line 413, in forward - hidden_states, self_attn_weights, present_key_value = self.self_attn( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/xglm/modeling_xglm.py"", line 243, in forward - query_states = self.q_proj(hidden_states) * self.scaling - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context - return func(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/gemv.py"", line 162, in forward - assert AWQ_INSTALLED, ( -AssertionError: AWQ kernels could not be loaded. 
Please install them from https://github.com/casper-hansen/AutoAWQ_kernels - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemv-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,TencentARC/Mistral_Pro_8B_v0.1,TencentARC/Mistral_Pro_8B_v0.1,cuda,0,42,,,True,,,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,,mistral,MB,5169.487872,5651.300352,0.0,5058.330624,5057.441792,s,1,11.1882578125,11.1882578125,0.0,11.1882578125,11.1882578125,11.1882578125,11.1882578125,[11.1882578125],,kWh,5.189868549722721e-05,2.842584186395593e-05,7.428783720800358e-05,0.00015461236456918673,,MB,1795.842048,5869.40416,0.0,5215.617024,5189.838848,s,10,1.3773021240234373,0.13773021240234376,6.401024066507488e-05,0.13774081420898437,0.13781373138427733,0.13781892929077147,0.1378230876159668,"[0.1376494140625, 0.1376913604736328, 0.13776188659667968, 0.13767478942871095, 0.13775222778320312, 0.1378241271972656, 0.13772940063476563, 0.13777836608886718, 0.1378125762939453, 0.13762797546386718]",tokens/s,1858.7062020362036,kWh,1.628488425371019e-06,8.922866030188737e-07,9.151380989287923e-06,1.1672156017677816e-05,tokens/kWh,21932537.537390746,MB,1808.818176,5888.278528,0.0,5234.491392,5189.841408,s,10,26.982919677734372,2.698291967773437,0.013055487294218408,2.7006922607421875,2.7159890625,2.716798974609375,2.717446904296875,"[2.71760888671875, 2.704180419921875, 2.67853173828125, 2.67783056640625, 2.71580908203125, 2.69308837890625, 2.703909423828125, 2.69747509765625, 2.704961669921875, 2.6895244140625]",tokens/s,23.348103449303903,kWh,3.1321727412893504e-05,1.7165388932594134e-05,8.279122034251073e-05,0.0001312783366879984,tokens/kWh,479896.39105291566,,s,630,26.980491291046118,0.042826176652454195,0.0008773766412497071,0.04311603164672852,0.04367021598815918,0.04389857444763184,0.045045646591186525,"[0.04191231918334961, 0.042444801330566405, 0.04377702331542969, 0.043361278533935545, 0.043000831604003906, 0.04372684860229492, 0.0437841911315918, 0.043535358428955076, 0.04361523056030273, 0.043594753265380856, 0.04388249588012695, 0.04346879959106445, 0.04343296051025391, 0.04345548629760742, 0.043394046783447264, 0.04402380752563476, 0.043453441619873044, 0.04313497543334961, 0.04273971176147461, 0.042518527984619144, 0.04310835266113281, 0.043053054809570314, 0.043684864044189455, 0.043529216766357424, 0.04335513687133789, 0.04307558441162109, 0.043169792175292966, 0.043286529541015625, 0.04366745758056641, 0.04331110382080078, 0.043324417114257815, 0.04359167861938477, 0.043399166107177735, 0.043079681396484375, 0.04327731323242188, 0.04318719863891601, 0.04291788864135742, 0.04319027328491211, 0.04349235153198242, 0.043020286560058595, 0.04297830581665039, 0.04236800003051758, 0.04296499252319336, 0.043284481048583984, 0.04329369735717774, 0.04331622314453125, 0.04283699035644531, 0.04371660614013672, 0.04334694290161133, 0.043020286560058595, 0.04320153427124023, 0.043698177337646485, 0.04370534515380859, 0.042621952056884765, 0.042877952575683595, 0.042177536010742187, 
0.04178230285644531, 0.04153033447265625, 0.04184883117675781, 0.04339203262329101, 0.04148323059082031, 0.04246015930175781, 0.04348108673095703, 0.04179455947875976, 0.04173004913330078, 0.04213555145263672, 0.043394046783447264, 0.04268134307861328, 0.04184985733032227, 0.04296499252319336, 0.04341350555419922, 0.04373299026489258, 0.04349235153198242, 0.04319539260864258, 0.041952255249023435, 0.04101836776733398, 0.041384960174560545, 0.041455615997314454, 0.04123852920532227, 0.041194496154785154, 0.04306124877929687, 0.04175462341308594, 0.04183859252929688, 0.041444351196289066, 0.04119859313964844, 0.04147814559936523, 0.04178739166259766, 0.0425082893371582, 0.0438579216003418, 0.043817985534667966, 0.04366950225830078, 0.043703296661376956, 0.04364799880981445, 0.04344319915771484, 0.0440893440246582, 0.04357529449462891, 0.04338790512084961, 0.04302950286865234, 0.0425164794921875, 0.043292671203613284, 0.04343091201782227, 0.04402687835693359, 0.04379545593261719, 0.042933246612548825, 0.043235328674316405, 0.04303769683837891, 0.043289600372314455, 0.042891265869140625, 0.04324249649047852, 0.04316774368286133, 0.04308889770507812, 0.04340019226074219, 0.04307865524291992, 0.043908096313476565, 0.04309196853637695, 0.04312268829345703, 0.04477030563354492, 0.04506316757202149, 0.04374016189575195, 0.04308070373535156, 0.04295065689086914, 0.04333363342285156, 0.04332748794555664, 0.043133953094482425, 0.04315750503540039, 0.04293632125854492, 0.04185804748535156, 0.041573375701904294, 0.041632766723632815, 0.04154470443725586, 0.041393150329589845, 0.041613311767578126, 0.041111553192138675, 0.04178124618530273, 0.04316569519042969, 0.04336844635009766, 0.0431912956237793, 0.04288409423828125, 0.04336947250366211, 0.043328510284423825, 0.043227134704589845, 0.04346879959106445, 0.044818431854248046, 0.043848705291748044, 0.04283699035644531, 0.042913791656494144, 0.043084800720214846, 0.04325580978393555, 0.04304076766967774, 0.042848255157470705, 0.04156313705444336, 0.041798656463623046, 0.041675777435302735, 0.041760768890380856, 0.04182527923583984, 0.04178124618530273, 0.04195942306518555, 0.04585472106933594, 0.042627071380615236, 0.04175360107421875, 0.04164505767822266, 0.04207206344604492, 0.04193280029296875, 0.04164710235595703, 0.04152217483520508, 0.04177305603027344, 0.04148428726196289, 0.041078784942626956, 0.04143513488769531, 0.04165222549438476, 0.04108902359008789, 0.041588737487792966, 0.04355379104614258, 0.04361011123657227, 0.043177982330322266, 0.0427509765625, 0.04280012893676758, 0.04171059036254883, 0.04170342254638672, 0.04166041564941406, 0.04426137542724609, 0.04386611175537109, 0.04327936172485351, 0.043186176300048826, 0.04292812728881836, 0.043589630126953126, 0.04349747085571289, 0.043268096923828124, 0.04280831909179687, 0.04168092727661133, 0.0430939826965332, 0.043109375, 0.043041793823242185, 0.04291584014892578, 0.04321484756469727, 0.043227134704589845, 0.04330188751220703, 0.04330086517333984, 0.04331827163696289, 0.04314316940307617, 0.04338790512084961, 0.04203007888793946, 0.04169728088378906, 0.0417710075378418, 0.04132044982910156, 0.04406377410888672, 0.04203414535522461, 0.04132556915283203, 0.041616382598876955, 0.04151398468017578, 0.041442302703857424, 0.041414657592773435, 0.04146995162963867, 0.04171571350097656, 0.04147507095336914, 0.041417728424072264, 0.041915393829345705, 0.04171673583984375, 0.041635841369628904, 0.0413040657043457, 0.04122009658813477, 0.041611263275146484, 0.0412149772644043, 0.04145459365844727, 
0.0410880012512207, 0.04142899322509765, 0.04166451263427735, 0.04142899322509765, 0.041695232391357424, 0.04209151840209961, 0.04338790512084961, 0.043041793823242185, 0.04295987319946289, 0.043170814514160154, 0.04323942565917969, 0.04333363342285156, 0.04338175964355469, 0.04333465576171875, 0.043243518829345705, 0.04348416137695312, 0.043276287078857424, 0.04364492797851562, 0.04317491149902344, 0.043238399505615234, 0.04342784118652344, 0.04333158493041992, 0.043646976470947264, 0.04335103988647461, 0.04345139312744141, 0.043518974304199216, 0.043302913665771485, 0.04314316940307617, 0.04304793548583984, 0.04320460891723633, 0.04327423858642578, 0.04293836975097656, 0.04372377777099609, 0.043222015380859374, 0.043322368621826174, 0.043340801239013675, 0.043399166107177735, 0.043222015380859374, 0.04365926361083984, 0.043055103302001956, 0.04307763290405273, 0.042363903045654294, 0.04283391952514649, 0.04294451141357422, 0.044270591735839845, 0.043270145416259766, 0.043044864654541014, 0.043218944549560545, 0.04329779052734375, 0.04395008087158203, 0.04319641494750977, 0.04300799942016602, 0.04268236923217773, 0.04306124877929687, 0.04279500961303711, 0.042277889251708986, 0.04232396697998047, 0.0410880012512207, 0.04209971237182617, 0.04328550338745117, 0.043154430389404294, 0.043068416595458986, 0.04309811019897461, 0.042621952056884765, 0.042979328155517575, 0.04302643203735351, 0.043186176300048826, 0.04337356948852539, 0.04295270538330078, 0.04292812728881836, 0.0429854736328125, 0.0420208625793457, 0.04192665481567383, 0.04168294525146484, 0.04290662384033203, 0.04359065628051758, 0.043235328674316405, 0.04326092910766602, 0.04320665740966797, 0.04350054550170898, 0.043363327026367186, 0.0432803840637207, 0.04353126525878906, 0.043205631256103515, 0.04322918319702149, 0.04329676818847656, 0.043245567321777346, 0.04426444625854492, 0.045175807952880856, 0.04331622314453125, 0.04337152099609375, 0.04241100692749023, 0.0439818229675293, 0.04359270477294922, 0.04376579284667969, 0.043676639556884764, 0.043291648864746096, 0.04330188751220703, 0.04327526473999024, 0.04329983901977539, 0.043450366973876955, 0.04322611236572266, 0.04331008148193359, 0.04318310546875, 0.043128833770751954, 0.04277862548828125, 0.04289535903930664, 0.04310835266113281, 0.042894336700439455, 0.04308582305908203, 0.043273216247558595, 0.043216896057128903, 0.04389068984985352, 0.04441190338134766, 0.043619327545166016, 0.04337868881225586, 0.041896961212158204, 0.041450496673583984, 0.04151603317260742, 0.04137267303466797, 0.04179046249389649, 0.04192870330810547, 0.04196556854248047, 0.04181404876708984, 0.04138595199584961, 0.04190003204345703, 0.04178636932373047, 0.041728000640869144, 0.04179251098632813, 0.0412303352355957, 0.04164505767822266, 0.041237503051757815, 0.04101939010620117, 0.04145663833618164, 0.04169625473022461, 0.041675777435302735, 0.042359809875488284, 0.042627071380615236, 0.04333567810058594, 0.04345446395874023, 0.04355276870727539, 0.04296499252319336, 0.04314521789550781, 0.043399166107177735, 0.04325273513793945, 0.04297011184692383, 0.043515903472900394, 0.04321791839599609, 0.043453441619873044, 0.04345651245117187, 0.04273152160644531, 0.04337152099609375, 0.04330086517333984, 0.043020286560058595, 0.042498046875, 0.04549017715454102, 0.04358553695678711, 0.04295167922973633, 0.04352716827392578, 0.04346777725219726, 0.04325068664550781, 0.04368998336791992, 0.043053054809570314, 0.04329574584960937, 0.043232257843017576, 0.0428328971862793, 0.04330086517333984, 
0.04374528121948242, 0.043947006225585936, 0.043717632293701174, 0.043483135223388675, 0.043270145416259766, 0.04295065689086914, 0.04283494567871094, 0.04286975860595703, 0.04124979019165039, 0.04172902297973633, 0.04205673599243164, 0.0417474250793457, 0.04167270278930664, 0.041624576568603515, 0.042191871643066405, 0.04349235153198242, 0.04292095947265625, 0.04264448165893555, 0.04338995361328125, 0.04319539260864258, 0.04244172668457031, 0.04100198364257813, 0.04112998580932617, 0.041624576568603515, 0.04328755187988281, 0.04365107345581055, 0.043302913665771485, 0.04311142349243164, 0.04321484756469727, 0.04320870590209961, 0.04395008087158203, 0.043758590698242186, 0.043256832122802735, 0.04332339096069336, 0.04356403350830078, 0.043325439453125, 0.04317695999145508, 0.04329574584960937, 0.04436479949951172, 0.043491329193115234, 0.043052032470703126, 0.043068416595458986, 0.0422553596496582, 0.04146380615234375, 0.042092544555664066, 0.04150067138671875, 0.041880577087402344, 0.04167782211303711, 0.04280319976806641, 0.043431934356689454, 0.042021888732910156, 0.0420945930480957, 0.0419788818359375, 0.043028480529785154, 0.04305100631713867, 0.043224063873291016, 0.04332748794555664, 0.04345548629760742, 0.04324761581420898, 0.04350873565673828, 0.04258816146850586, 0.0415467529296875, 0.04105011367797851, 0.04167782211303711, 0.041210880279541014, 0.041442302703857424, 0.04109209442138672, 0.04111052703857422, 0.04182015991210938, 0.04677427291870117, 0.04354355239868164, 0.043361278533935545, 0.04247347259521484, 0.0416993293762207, 0.04193280029296875, 0.04176486587524414, 0.04164812850952149, 0.04166963195800781, 0.04313910293579101, 0.04293833541870117, 0.04301926422119141, 0.043146240234375, 0.04328755187988281, 0.04559667205810547, 0.04353843307495117, 0.043150337219238284, 0.04323430252075195, 0.04333567810058594, 0.0432988166809082, 0.043396095275878906, 0.043237377166748046, 0.043184127807617184, 0.04335308837890625, 0.04335615921020508, 0.043207679748535156, 0.04296192169189453, 0.043154430389404294, 0.043496448516845705, 0.043081729888916016, 0.04345446395874023, 0.04312678527832031, 0.04342169570922851, 0.04342476654052734, 0.04328140640258789, 0.043156478881835936, 0.04374425506591797, 0.0441712646484375, 0.043087871551513675, 0.04245196914672852, 0.04151398468017578, 0.04103782272338867, 0.04155392074584961, 0.042365951538085936, 0.043200511932373044, 0.04328243255615234, 0.04309811019897461, 0.043199489593505856, 0.04277964782714844, 0.0430571517944336, 0.04312063980102539, 0.043379711151123046, 0.04341964721679688, 0.04335513687133789, 0.043055103302001956, 0.04318105697631836, 0.04349542236328125, 0.04316262435913086, 0.041850879669189454, 0.04201065444946289, 0.04156310272216797, 0.04171059036254883, 0.04162047958374023, 0.041680896759033206, 0.041940990447998046, 0.04337356948852539, 0.04330188751220703, 0.04478464126586914, 0.04351078414916992, 0.04324147033691406, 0.04315750503540039, 0.04369919967651367, 0.04500275039672851, 0.04430131149291992, 0.043905025482177736, 0.0436940803527832, 0.04225228881835937, 0.04183859252929688, 0.04166860961914062, 0.04177203369140625, 0.041796607971191405, 0.04197478485107422, 0.041366527557373044, 0.041804798126220705, 0.041643009185791016, 0.04156825637817383, 0.041836544036865236, 0.041692161560058595, 0.043253761291503906, 0.04359167861938477, 0.043493377685546876, 0.04370739364624023, 0.04295475387573242, 0.04314316940307617, 0.04325888061523438, 0.04347187042236328, 0.04369715118408203, 0.04337868881225586, 
0.04317491149902344, 0.04352000045776367, 0.04395929718017578, 0.044442623138427735, 0.04355379104614258, 0.04348518371582031, 0.04309503936767578, 0.043066368103027344, 0.04215398406982422, 0.042164222717285156, 0.04169728088378906, 0.04206284713745117, 0.04334182357788086, 0.04280627059936523, 0.04316159820556641, 0.04372480010986328, 0.04220108795166016, 0.04175360107421875, 0.04203007888793946, 0.041734142303466795, 0.041611263275146484, 0.041923583984375, 0.041973758697509765, 0.04171878433227539, 0.04179148864746094, 0.041659393310546876, 0.04161740875244141, 0.041783294677734374, 0.04179763031005859, 0.04170444869995117, 0.04215500640869141, 0.04175667190551758, 0.041804798126220705, 0.042060832977294925, 0.043439071655273435, 0.043595775604248044, 0.04354457473754883, 0.043224063873291016, 0.04323123168945313, 0.04315750503540039, 0.04332339096069336, 0.04295475387573242, 0.04326092910766602, 0.04151705551147461, 0.041714687347412106, 0.042447872161865234, 0.04549324798583984, 0.043431934356689454, 0.043676673889160154, 0.043207679748535156, 0.043466751098632815, 0.043253761291503906, 0.04345446395874023, 0.043363327026367186, 0.04293836975097656, 0.043087871551513675, 0.04337664031982422, 0.04310835266113281, 0.044184574127197264, 0.043514881134033206, 0.043905025482177736, 0.043396095275878906, 0.04167782211303711, 0.04153958511352539, 0.04196454238891602, 0.041398273468017575, 0.04293632125854492, 0.04338790512084961, 0.043448318481445314, 0.043665409088134766, 0.04228505706787109, 0.042643455505371096]",tokens/s,23.35020490190534,,,,,,,, -4bit-awq-gemv-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,Salesforce/codegen-6B-nl,Salesforce/codegen-6B-nl,cuda,0,42,,,True,,,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,,codegen,MB,4285.759488,5028.446208,0.0,4435.47648,4430.934016,s,1,9.54695703125,9.54695703125,0.0,9.54695703125,9.54695703125,9.54695703125,9.54695703125,[9.54695703125],,kWh,3.232308283610716e-05,1.7665426344811913e-05,4.767781591996645e-05,9.766632510088553e-05,,MB,1626.46016,5384.962048,0.0,4731.174912,4679.06816,s,10,1.1703365936279297,0.11703365936279297,0.00024215781753867297,0.11699436950683594,0.11729552307128906,0.11738134689331055,0.11745000595092774,"[0.1168235855102539, 0.11689756774902343, 0.11673951721191406, 0.11746717071533203, 0.11718396759033203, 0.11687657928466796, 0.11672950744628906, 0.11725107574462891, 0.11709117126464844, 0.11727645111083984]",tokens/s,2187.404900383614,kWh,1.3829196111273357e-06,7.574442775466175e-07,7.486998883651205e-06,9.627362772325158e-06,tokens/kWh,26590874.993918195,MB,1659.67872,5510.791168,0.0,4857.004032,4807.839232,s,10,24.233937255859377,2.423393725585938,0.021310944544071262,2.4319962158203126,2.441043920898437,2.4412316772460936,2.441381882324219,"[2.37485205078125, 2.416015869140625, 2.426403564453125, 2.4375888671875, 2.43819677734375, 2.425048828125, 2.394637451171875, 2.44141943359375, 2.438772216796875, 
2.441002197265625]",tokens/s,25.99660110317716,kWh,2.787514146984493e-05,1.5276612048823967e-05,7.149336985614654e-05,0.00011464512337481543,tokens/kWh,549521.8474669064,,s,630,24.23156020355227,0.038462793973892455,0.0007209435567183956,0.03861401557922363,0.03905239028930664,0.03931361274719238,0.04017582046508789,"[0.03716403198242187, 0.037272575378417966, 0.03725516891479492, 0.03728486251831055, 0.03732275390625, 0.03720601654052735, 0.03707289505004883, 0.037048320770263675, 0.03704422378540039, 0.037059585571289064, 0.03712614440917969, 0.037526527404785154, 0.03712716674804688, 0.03720294570922852, 0.037026817321777344, 0.03711795043945312, 0.037220352172851565, 0.03708927917480469, 0.03690086364746094, 0.03749478530883789, 0.03700326538085937, 0.03701760101318359, 0.03707494354248047, 0.03699302291870117, 0.036969470977783206, 0.038217727661132815, 0.03707494354248047, 0.038397953033447264, 0.03861708831787109, 0.03852492904663086, 0.03866316986083984, 0.03964518356323242, 0.038697982788085936, 0.0385689582824707, 0.03712307357788086, 0.03736883163452148, 0.037644287109375, 0.03851366424560547, 0.03867852783203125, 0.038957054138183594, 0.03741798400878906, 0.03714048004150391, 0.03807743835449219, 0.03909427261352539, 0.037098495483398435, 0.03750502395629883, 0.038166526794433595, 0.038542335510253906, 0.03850137710571289, 0.038556671142578124, 0.03865190505981445, 0.03869388961791992, 0.03849728012084961, 0.03857408142089844, 0.038572032928466796, 0.038629375457763675, 0.037103614807128905, 0.036913150787353514, 0.036929534912109374, 0.03719987106323242, 0.03703705596923828, 0.037341182708740234, 0.03705855941772461, 0.03846758270263672, 0.038545406341552735, 0.038591487884521485, 0.03711795043945312, 0.0370247688293457, 0.037389312744140625, 0.03697663879394531, 0.03709030532836914, 0.037012481689453126, 0.03693363189697266, 0.03750809478759766, 0.03899289703369141, 0.039005184173583986, 0.03894681549072266, 0.038470657348632815, 0.03879935836791992, 0.03872665786743164, 0.03851984024047852, 0.040555488586425784, 0.03868467330932617, 0.03845939254760742, 0.038561790466308594, 0.038575103759765625, 0.03902361679077149, 0.038629375457763675, 0.038545406341552735, 0.04016128158569336, 0.04018175888061523, 0.03955712127685547, 0.03853209686279297, 0.038555648803710936, 0.03857408142089844, 0.0388218879699707, 0.03896831893920898, 0.03859251022338867, 0.03842559814453125, 0.038198272705078126, 0.03717324829101563, 0.03737702560424805, 0.038537216186523435, 0.038547454833984376, 0.037495807647705076, 0.0380313606262207, 0.03712614440917969, 0.038661121368408206, 0.03851161575317383, 0.03849728012084961, 0.03863449478149414, 0.038545406341552735, 0.0387665901184082, 0.0425082893371582, 0.039126014709472655, 0.0389119987487793, 0.03965235137939453, 0.03717529678344727, 0.03884339141845703, 0.03710771179199219, 0.03728691101074219, 0.03717836761474609, 0.0370964469909668, 0.03709542465209961, 0.03703603363037109, 0.03712819290161133, 0.03704012680053711, 0.03697459030151367, 0.03703398513793945, 0.03711385726928711, 0.03761459350585938, 0.038749183654785156, 0.038653953552246094, 0.03853414535522461, 0.03873177719116211, 0.03879731369018555, 0.03889561462402344, 0.03879423904418945, 0.0387512321472168, 0.038694911956787106, 0.03926220703125, 0.03888127899169922, 0.038768638610839845, 0.03862636947631836, 0.038661056518554685, 0.0385986557006836, 0.038609920501708986, 0.03861606216430664, 0.03871027374267578, 0.0371486701965332, 0.03827199935913086, 0.03706060791015625, 
0.03704115295410156, 0.036985855102539066, 0.036944896697998046, 0.03729715347290039, 0.03991142272949219, 0.04007424163818359, 0.0388935661315918, 0.038629375457763675, 0.03880857467651367, 0.03881369781494141, 0.03870003128051758, 0.03863347244262695, 0.038860801696777345, 0.03885772705078125, 0.03868467330932617, 0.03915980911254883, 0.03879219055175781, 0.038593536376953126, 0.03866316986083984, 0.038540287017822264, 0.038865921020507815, 0.03872051239013672, 0.03870105743408203, 0.03863049697875977, 0.03877264022827148, 0.03850342559814453, 0.038621185302734375, 0.038397953033447264, 0.03875635147094727, 0.03874303817749023, 0.03851980972290039, 0.038537216186523435, 0.03861094284057617, 0.038487041473388675, 0.03858432006835937, 0.03940454483032227, 0.04082175827026367, 0.037032958984375, 0.03828326416015625, 0.039311359405517575, 0.03886899185180664, 0.03848089599609375, 0.03858432006835937, 0.038484992980957033, 0.03853107070922852, 0.03864371109008789, 0.03851264190673828, 0.038642688751220705, 0.038637569427490234, 0.03860172653198242, 0.038591487884521485, 0.03860275268554687, 0.038545406341552735, 0.03864780807495117, 0.03797094345092773, 0.038703102111816406, 0.03880550384521484, 0.038594558715820314, 0.03897958374023437, 0.03887104034423828, 0.03917824172973633, 0.0391014404296875, 0.03872665786743164, 0.03858943939208984, 0.038714366912841795, 0.0391833610534668, 0.039486495971679685, 0.038637535095214844, 0.038725631713867184, 0.03865497589111328, 0.03865702438354492, 0.03871641540527344, 0.03894374465942383, 0.038588417053222655, 0.03868569564819336, 0.03869081497192383, 0.03814710235595703, 0.03868975830078125, 0.03856281661987305, 0.03874508666992187, 0.03864166259765625, 0.0385873908996582, 0.03878604888916016, 0.03854336166381836, 0.039172096252441405, 0.038776832580566405, 0.038942718505859376, 0.03875430297851563, 0.03854131317138672, 0.03861503982543945, 0.038645759582519534, 0.03865702438354492, 0.03949260711669922, 0.038811649322509766, 0.03864883041381836, 0.03870207977294922, 0.038607872009277344, 0.03856486511230469, 0.03885670471191406, 0.03865190505981445, 0.03932262420654297, 0.03872051239013672, 0.0393891830444336, 0.038662143707275394, 0.038735870361328126, 0.03923865509033203, 0.03856281661987305, 0.03888844680786133, 0.03954278564453125, 0.03922227096557617, 0.038787071228027346, 0.038697982788085936, 0.0387061767578125, 0.03873689651489258, 0.038580223083496096, 0.03905228805541992, 0.039054336547851565, 0.03850137710571289, 0.03852799987792969, 0.038491134643554685, 0.03856793594360351, 0.038665214538574216, 0.03856281661987305, 0.03868467330932617, 0.03861606216430664, 0.038419456481933595, 0.03852492904663086, 0.03855257415771484, 0.03853414535522461, 0.03845939254760742, 0.03848089599609375, 0.03852799987792969, 0.038624256134033204, 0.03855462265014648, 0.03875532913208008, 0.038801406860351564, 0.0385904655456543, 0.03855052947998047, 0.03868364715576172, 0.03855052947998047, 0.03858227157592774, 0.038537216186523435, 0.039147518157958985, 0.038795265197753906, 0.03847270584106445, 0.03850649642944336, 0.0387061767578125, 0.03865599822998047, 0.03856281661987305, 0.038449153900146485, 0.038561790466308594, 0.03850649642944336, 0.038624256134033204, 0.03846451187133789, 0.03859251022338867, 0.03851571273803711, 0.03859763336181641, 0.03854848098754883, 0.03929087829589844, 0.038697982788085936, 0.03854131317138672, 0.0388853759765625, 0.03859251022338867, 0.03725414276123047, 0.03873689651489258, 0.03845939254760742, 0.039228416442871096, 
0.037367809295654295, 0.037064704895019535, 0.03766988754272461, 0.038662143707275394, 0.0386324462890625, 0.03861708831787109, 0.03856486511230469, 0.038691841125488284, 0.03862835311889649, 0.0385904655456543, 0.03854643249511719, 0.03882393646240234, 0.0386324462890625, 0.03865497589111328, 0.03871027374267578, 0.038785022735595705, 0.03881267166137695, 0.038978561401367184, 0.0386693115234375, 0.03867136001586914, 0.03869081497192383, 0.03862732696533203, 0.03860070419311523, 0.038537216186523435, 0.038814720153808595, 0.0385873908996582, 0.039272449493408204, 0.03907891082763672, 0.03872665786743164, 0.0387512321472168, 0.038555648803710936, 0.038621185302734375, 0.03861913681030273, 0.038580223083496096, 0.03854643249511719, 0.03875020980834961, 0.03862527847290039, 0.03887104034423828, 0.03877171325683594, 0.03868057632446289, 0.038964225769042966, 0.03991244888305664, 0.041150463104248046, 0.03951308822631836, 0.039564289093017575, 0.03867340850830078, 0.03860684967041016, 0.03890892791748047, 0.03846144104003906, 0.038785022735595705, 0.03712204742431641, 0.03705548858642578, 0.037082111358642575, 0.037816383361816405, 0.03716499328613281, 0.037177345275878904, 0.03714252853393555, 0.0369879035949707, 0.03695820617675781, 0.03697868728637695, 0.03703091049194336, 0.037002239227294925, 0.036959232330322264, 0.036994049072265625, 0.03703603363037109, 0.03707596969604492, 0.03847782516479492, 0.037528575897216795, 0.03702783966064453, 0.03735039901733399, 0.03847788619995117, 0.03794425582885742, 0.03688345718383789, 0.0366561279296875, 0.03789209747314453, 0.037026817321777344, 0.037063743591308596, 0.038784961700439456, 0.03881881713867188, 0.038316032409667966, 0.037459968566894535, 0.036664321899414064, 0.037988353729248046, 0.039261184692382815, 0.0395335693359375, 0.038950912475585936, 0.03851468658447266, 0.0382740478515625, 0.03727360153198242, 0.037303295135498044, 0.03772825622558594, 0.03709132766723633, 0.03704115295410156, 0.037466110229492186, 0.03709952163696289, 0.03704217529296875, 0.037048320770263675, 0.037010433197021485, 0.03723161697387695, 0.03743129730224609, 0.038286334991455076, 0.03867136001586914, 0.038580223083496096, 0.038599681854248044, 0.03888332748413086, 0.039229438781738284, 0.038752254486083985, 0.03855769729614258, 0.03849728012084961, 0.03845632171630859, 0.038539264678955076, 0.03848908615112305, 0.03851878356933594, 0.03875328063964844, 0.03867852783203125, 0.03859763336181641, 0.039462913513183595, 0.03973734283447266, 0.039347198486328124, 0.03929292678833008, 0.03839487838745117, 0.03934105682373047, 0.038545406341552735, 0.038803455352783206, 0.03862124633789062, 0.03922323226928711, 0.03870003128051758, 0.03857001495361328, 0.03928163146972656, 0.03905331039428711, 0.03867852783203125, 0.03850035095214844, 0.03864883041381836, 0.038662143707275394, 0.03855462265014648, 0.03869286346435547, 0.03855155181884766, 0.03854438400268555, 0.038662143707275394, 0.03879935836791992, 0.03868057632446289, 0.04156927871704102, 0.03931545639038086, 0.03869286346435547, 0.03856588745117188, 0.03845536041259766, 0.03843270492553711, 0.03851366424560547, 0.03855257415771484, 0.03844812774658203, 0.03858432006835937, 0.03848601531982422, 0.0387665901184082, 0.03863654327392578, 0.03882495880126953, 0.03926015853881836, 0.03877273559570313, 0.038520832061767575, 0.03858943939208984, 0.03846656036376953, 0.038665214538574216, 0.03863859176635742, 0.03903180694580078, 0.03870105743408203, 0.038697982788085936, 0.038572032928466796, 0.03859763336181641, 
0.03905023956298828, 0.03867647933959961, 0.038578174591064454, 0.03851776123046875, 0.038697982788085936, 0.038642688751220705, 0.03841740798950195, 0.03846656036376953, 0.038449153900146485, 0.03854950332641602, 0.038607872009277344, 0.03851161575317383, 0.03890995025634766, 0.03857408142089844, 0.03869900894165039, 0.039398399353027344, 0.038487041473388675, 0.03981414413452149, 0.03858432006835937, 0.03869081497192383, 0.03843174362182617, 0.03870003128051758, 0.03875635147094727, 0.039119873046875, 0.03870207977294922, 0.03891302490234375, 0.03872870254516601, 0.038575103759765625, 0.03849728012084961, 0.03894579315185547, 0.038749183654785156, 0.038588417053222655, 0.038454273223876956, 0.03879116821289062, 0.03860889434814453, 0.038588417053222655, 0.038569984436035154, 0.03902259063720703, 0.0389304313659668, 0.03908403015136719, 0.038558719635009765, 0.0387327995300293, 0.03861913681030273, 0.03864371109008789, 0.038623233795166016, 0.038591487884521485, 0.03875635147094727, 0.03890790557861328, 0.038626304626464845, 0.03908812713623047, 0.038588417053222655, 0.03854438400268555, 0.0387583999633789, 0.03883212661743164, 0.038507518768310545, 0.03859763336181641, 0.03848601531982422, 0.03882393646240234, 0.03858432006835937, 0.03858534240722656, 0.03867238235473633, 0.038744064331054685, 0.038670337677001954, 0.038593536376953126, 0.038558719635009765, 0.03913017654418945, 0.03901433563232422, 0.03870412826538086, 0.03853004837036133, 0.038763519287109374, 0.03872665786743164, 0.03849625778198242, 0.03880448150634765, 0.038970367431640625, 0.038604801177978515, 0.03908198547363281, 0.03859763336181641, 0.03870207977294922, 0.03858124923706055, 0.038588417053222655, 0.03851161575317383, 0.0370247688293457, 0.0384983024597168, 0.038816768646240236, 0.038612991333007815, 0.0385904655456543, 0.03890176010131836, 0.03860172653198242, 0.03873177719116211, 0.03940454483032227, 0.03897139358520508, 0.03863347244262695, 0.03855769729614258, 0.03853107070922852, 0.03878911972045898, 0.03903078460693359, 0.03898777770996094, 0.038870014190673825, 0.038991870880126955, 0.03862527847290039, 0.038588417053222655, 0.03865292739868164, 0.03884646224975586, 0.03862015914916992, 0.03863449478149414, 0.03865804672241211, 0.038687744140625, 0.03864883041381836, 0.03863552093505859, 0.038740993499755856, 0.03863040161132812, 0.0385689582824707, 0.038607872009277344, 0.038540287017822264, 0.03863449478149414, 0.03830374526977539, 0.03956326293945313, 0.03882700729370117, 0.03863552093505859, 0.03869388961791992, 0.03885158538818359, 0.038942718505859376, 0.038749183654785156, 0.04076236724853516, 0.03910451126098633, 0.03883724975585937, 0.03863859176635742, 0.03883724975585937, 0.03890073776245117, 0.03870515060424805, 0.03863961410522461, 0.038661121368408206, 0.038621185302734375, 0.038882305145263675, 0.038749183654785156, 0.038811649322509766, 0.03874611282348633, 0.038735870361328126, 0.03862531280517578, 0.03792483139038086, 0.03840716934204102, 0.03872358322143555, 0.03916185760498047, 0.03885260772705078]",tokens/s,25.99915130135304,,,,,,,, 
-4bit-awq-gemv-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,facebook/opt-125m,facebook/opt-125m,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run - report = scenario.run(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 105, in run - _ = backend.generate(self.inputs, self.config.generate_kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context - return func(*args, **kwargs) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 400, in generate - return self.pretrained_model.generate(**inputs, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context - return func(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 1914, in generate - result = self._sample( - File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2651, in _sample - outputs = self( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 1118, in forward - outputs = self.model.decoder( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 884, in forward - layer_outputs = decoder_layer( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 525, in 
forward - hidden_states, self_attn_weights, present_key_value = self.self_attn( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 155, in forward - query_states = self.q_proj(hidden_states) * self.scaling - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context - return func(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/gemv.py"", line 162, in forward - assert AWQ_INSTALLED, ( -AssertionError: AWQ kernels could not be loaded. Please install them from https://github.com/casper-hansen/AutoAWQ_kernels - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemv-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,Qwen/Qwen1.5-110B,Qwen/Qwen1.5-110B,cuda,0,42,,,True,,,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run - report = scenario.run(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run - self.run_model_loading_tracking(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking - backend.load() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load - self.load_transformers_model() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model - self.load_transformers_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights - self.load_transformers_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in 
load_transformers_model_from_pretrained - self.pretrained_model = self.automodel_loader.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4034, in from_pretrained - dispatch_model(model, **device_map_kwargs) - File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 494, in dispatch_model - model.to(device) - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 2905, in to - return super().to(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1174, in to - return self._apply(convert) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply - module._apply(fn) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply - module._apply(fn) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply - module._apply(fn) - [Previous line repeated 2 more times] - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 854, in _apply - self._buffers[key] = fn(buf) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1160, in convert - return t.to( -torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 32.00 MiB. GPU 0 has a total capacity of 22.18 GiB of which 8.50 MiB is free. Process 85038 has 22.17 GiB memory in use. Of the allocated memory 21.91 GiB is allocated by PyTorch, and 17.87 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) - -",qwen2,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemv-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,B,B,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 304, in hf_raise_for_status - response.raise_for_status() - File ""/usr/local/lib/python3.10/dist-packages/requests/models.py"", line 1024, in raise_for_status - raise HTTPError(http_error_msg, response=self) -requests.exceptions.HTTPError: 404 Client Error: Not Found for url: https://huggingface.co/B/resolve/main/config.json - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1221, in hf_hub_download - return _hf_hub_download_to_cache_dir( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1325, in _hf_hub_download_to_cache_dir - _raise_on_head_call_error(head_call_error, force_download, local_files_only) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1823, in _raise_on_head_call_error - raise head_call_error - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1722, in _get_metadata_or_catch_error - metadata = get_hf_file_metadata(url=url, proxies=proxies, timeout=etag_timeout, headers=headers) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1645, in get_hf_file_metadata - r = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 372, in _request_wrapper - response = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 396, in _request_wrapper - hf_raise_for_status(response) - File 
""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status - raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-669493b5-372aadfd1c4a48c5729717bb;7f946987-14ca-4b71-b3f4-0991b37851e6) - -Repository Not Found for url: https://huggingface.co/B/resolve/main/config.json. -Please make sure you specified the correct `repo_id` and `repo_type`. -If you are trying to access a private or gated repo, make sure you are authenticated. - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 425, in cached_file - raise EnvironmentError( -OSError: B is not a local folder and is not a valid model identifier listed on 'https://huggingface.co/models' -If this is a private repository, make sure to pass a token having permission to this repo either by logging in with `huggingface-cli login` or by passing `token=` - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemv-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,t,t,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, 
in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 304, in hf_raise_for_status - response.raise_for_status() - File ""/usr/local/lib/python3.10/dist-packages/requests/models.py"", line 1024, in raise_for_status - raise HTTPError(http_error_msg, response=self) -requests.exceptions.HTTPError: 404 Client Error: Not Found for url: https://huggingface.co/t/resolve/main/config.json - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1221, in hf_hub_download - return _hf_hub_download_to_cache_dir( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1325, in _hf_hub_download_to_cache_dir - _raise_on_head_call_error(head_call_error, force_download, local_files_only) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1823, in _raise_on_head_call_error - raise head_call_error - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1722, in _get_metadata_or_catch_error - metadata = get_hf_file_metadata(url=url, proxies=proxies, timeout=etag_timeout, headers=headers) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1645, in get_hf_file_metadata - r = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 372, in _request_wrapper - response = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 396, in _request_wrapper - hf_raise_for_status(response) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status - raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-669490ac-350d4b3736bdc7876956a189;ab2f9c35-cdab-4c0c-ab62-30f03550ed69) - -Repository Not Found for url: https://huggingface.co/t/resolve/main/config.json. -Please make sure you specified the correct `repo_id` and `repo_type`. -If you are trying to access a private or gated repo, make sure you are authenticated. 
- -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 425, in cached_file - raise EnvironmentError( -OSError: t is not a local folder and is not a valid model identifier listed on 'https://huggingface.co/models' -If this is a private repository, make sure to pass a token having permission to this repo either by logging in with `huggingface-cli login` or by passing `token=` - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemv-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,stabilityai/stablelm-base-alpha-3b,stabilityai/stablelm-base-alpha-3b,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run - report = scenario.run(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 105, in run - _ = backend.generate(self.inputs, self.config.generate_kwargs) - File 
""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context - return func(*args, **kwargs) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 400, in generate - return self.pretrained_model.generate(**inputs, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context - return func(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 1914, in generate - result = self._sample( - File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2651, in _sample - outputs = self( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1097, in forward - outputs = self.gpt_neox( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 988, in forward - outputs = layer( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 753, in forward - attention_layer_outputs = self.attention( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 170, in forward - query, key, value, present = self._attn_projections_and_rope( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 224, in _attn_projections_and_rope - qkv = self.query_key_value(hidden_states) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context - return func(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/gemv.py"", line 162, in forward - assert AWQ_INSTALLED, ( -AssertionError: AWQ kernels could not be loaded. 
Please install them from https://github.com/casper-hansen/AutoAWQ_kernels - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-gemv-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,Salesforce/codegen-16B-nl,Salesforce/codegen-16B-nl,cuda,0,42,,,True,,,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,,codegen,MB,8795.222016,10030.153728,0.0,9437.184,9411.072,s,1,10.97068359375,10.97068359375,0.0,10.97068359375,10.97068359375,10.97068359375,10.97068359375,[10.97068359375],,kWh,4.8469541169448703e-05,2.654629442639807e-05,6.944672222400361e-05,0.00014446255781985038,,MB,1877.250048,10608.96768,0.0,9955.180544,9777.695232,s,10,2.637535003662109,0.2637535003662109,0.0003188414010928411,0.2635480346679687,0.2642257965087891,0.26427453460693356,0.2643135250854492,"[0.2642149658203125, 0.2643232727050781, 0.2635287170410156, 0.2635159912109375, 0.26351239013671873, 0.26415252685546875, 0.2636955871582031, 0.2634954833984375, 0.26356134033203127, 0.26353472900390623]",tokens/s,970.6032323535213,kWh,3.116471359594178e-06,1.7074396233429656e-06,1.745551104042079e-05,2.227942202335794e-05,tokens/kWh,11490423.752088694,MB,1892.581376,10734.7968,0.0,10081.009664,9978.154496,s,10,24.53856103515625,2.4538561035156254,0.014433812406254229,2.4491400146484374,2.477028442382813,2.478650573730469,2.479948278808594,"[2.461951416015625, 2.4425673828125, 2.43628173828125, 2.446909912109375, 2.4513701171875, 2.44056298828125, 2.4433818359375, 2.458594970703125, 2.47666796875, 2.480272705078125]",tokens/s,25.673877090730898,kWh,2.8901583715961244e-05,1.583932594952591e-05,0.00010699733852178025,0.00015173824818726743,tokens/kWh,415188.66042428993,,s,630,24.536079368591313,0.03894615772792271,0.000702224460558318,0.03859199905395508,0.039840049743652343,0.040015973472595216,0.04095839088439942,"[0.038438911437988284, 0.03957657623291016, 0.038171646118164065, 0.03959807968139648, 0.03974246215820312, 0.03979776000976563, 0.0396759033203125, 0.03913420867919922, 0.03865190505981445, 0.03960319900512695, 0.039610366821289066, 0.03976499176025391, 0.039801856994628904, 0.03892428970336914, 0.03967795181274414, 0.03950080108642578, 0.0395786247253418, 0.03918643188476562, 0.03985919952392578, 0.039656448364257815, 0.0395786247253418, 0.03965951919555664, 0.03960627365112305, 0.03977523040771484, 0.039600128173828124, 0.039651329040527344, 0.03817574310302734, 0.03822079849243164, 0.03831808090209961, 0.03821363067626953, 0.03948953628540039, 0.03981107330322266, 0.038258689880371094, 0.038255615234375, 0.03828326416015625, 0.03891712188720703, 0.038695934295654294, 0.03830988693237305, 0.03841535949707031, 0.03844710540771484, 0.03834470367431641, 0.03828736114501953, 0.03875532913208008, 0.03846758270263672, 0.03832627105712891, 0.038416385650634766, 0.03840512084960938, 0.03846656036376953, 0.03827097702026367, 0.03831193542480469, 0.038391807556152346, 0.03831193542480469, 0.03842969512939453, 0.03803443145751953, 0.04006707382202149, 0.04129075241088867, 0.03973017501831055, 
0.03908095932006836, 0.039874561309814455, 0.03963904190063477, 0.03991654586791992, 0.039997440338134765, 0.039242752075195314, 0.04032716751098633, 0.03839590454101562, 0.038416385650634766, 0.03910860824584961, 0.0395417594909668, 0.0384901123046875, 0.039608318328857424, 0.03969228744506836, 0.0397762565612793, 0.03854950332641602, 0.038406143188476564, 0.039907329559326174, 0.03957555389404297, 0.03968102264404297, 0.039608318328857424, 0.03981619262695312, 0.041411582946777346, 0.04038246536254883, 0.040032257080078126, 0.03961548614501953, 0.03840716934204102, 0.03865702438354492, 0.038354942321777344, 0.03846041488647461, 0.038779903411865234, 0.03841740798950195, 0.038452224731445314, 0.03832524871826172, 0.03832627105712891, 0.03845017623901367, 0.0383559684753418, 0.03854336166381836, 0.037972991943359374, 0.0382105598449707, 0.03836928176879883, 0.03792486572265625, 0.03798220825195313, 0.038212608337402344, 0.03829043197631836, 0.038406143188476564, 0.03833651351928711, 0.03782963180541992, 0.03866419219970703, 0.038346752166748044, 0.04085452651977539, 0.03995443344116211, 0.03838054275512695, 0.038381568908691405, 0.038231040954589846, 0.03834163284301758, 0.03887104034423828, 0.038338558197021484, 0.03829862213134766, 0.03839078521728516, 0.038381568908691405, 0.03854643249511719, 0.038280193328857424, 0.03827609634399414, 0.03830988693237305, 0.03839692687988281, 0.037917697906494144, 0.03785830307006836, 0.037884929656982425, 0.0397209587097168, 0.039913471221923826, 0.03967692947387695, 0.039798782348632815, 0.03970764923095703, 0.03995647811889649, 0.039537662506103514, 0.03962879943847656, 0.038362110137939456, 0.03834470367431641, 0.03841740798950195, 0.038345729827880856, 0.03836006546020508, 0.03837235260009766, 0.038389759063720705, 0.038437889099121096, 0.03834265518188477, 0.038406143188476564, 0.03889664077758789, 0.03963187026977539, 0.03985203170776367, 0.03911577606201172, 0.039478271484375, 0.03960319900512695, 0.039354366302490236, 0.03825151824951172, 0.038365184783935545, 0.03834470367431641, 0.0384634895324707, 0.03786137771606445, 0.037920768737792966, 0.03889049530029297, 0.03830886459350586, 0.03828224182128906, 0.03829555130004883, 0.03825254440307617, 0.03821875381469726, 0.03806105422973633, 0.03787776184082031, 0.03839897537231445, 0.038438911437988284, 0.03842047882080078, 0.03923763275146484, 0.03962265777587891, 0.03853209686279297, 0.03836415863037109, 0.03840204620361328, 0.03847782516479492, 0.038188030242919925, 0.038346752166748044, 0.03826278305053711, 0.038487041473388675, 0.03842867279052734, 0.03834470367431641, 0.03827814483642578, 0.038422527313232424, 0.038265857696533206, 0.03835903930664063, 0.03859763336181641, 0.03831500625610352, 0.038201343536376955, 0.03829862213134766, 0.03830476760864258, 0.03830169677734375, 0.03822489547729492, 0.04022784042358398, 0.040089599609375, 0.039629825592041014, 0.038330368041992184, 0.038345729827880856, 0.03827507019042969, 0.0387583999633789, 0.038371326446533204, 0.03950899124145508, 0.03969228744506836, 0.039613441467285154, 0.039452671051025394, 0.03953561782836914, 0.039572479248046875, 0.039172096252441405, 0.03829555130004883, 0.039782398223876955, 0.03958272171020508, 0.03964825439453125, 0.03971072006225586, 0.039572479248046875, 0.03839487838745117, 0.03830681610107422, 0.039049217224121094, 0.038714366912841795, 0.03942911911010742, 0.03978956985473633, 0.03858227157592774, 0.03832217788696289, 0.03847987365722656, 0.03829555130004883, 0.038591487884521485, 0.038623233795166016, 
0.03837952041625976, 0.03864883041381836, 0.03965235137939453, 0.03968307113647461, 0.03893964767456055, 0.03829145431518555, 0.03792281723022461, 0.03830886459350586, 0.0384266242980957, 0.038330368041992184, 0.03831193542480469, 0.03840921783447265, 0.038816768646240236, 0.03833139038085937, 0.03849625778198242, 0.03830579376220703, 0.0383631362915039, 0.03835903930664063, 0.03839590454101562, 0.038422527313232424, 0.03851264190673828, 0.03830476760864258, 0.03838054275512695, 0.03830681610107422, 0.038296577453613284, 0.03990323257446289, 0.039629825592041014, 0.03824025726318359, 0.0383559684753418, 0.03834265518188477, 0.03854131317138672, 0.03838054275512695, 0.038345729827880856, 0.03827097702026367, 0.03824332809448242, 0.0384532470703125, 0.03828940963745117, 0.03820236968994141, 0.03862015914916992, 0.03835903930664063, 0.038386688232421876, 0.03833958435058594, 0.038330368041992184, 0.038653953552246094, 0.03856486511230469, 0.03831500625610352, 0.03891712188720703, 0.03926937484741211, 0.03848089599609375, 0.03926835250854492, 0.03841024017333984, 0.03868569564819336, 0.03863961410522461, 0.038317054748535154, 0.038299648284912106, 0.03829145431518555, 0.03840512084960938, 0.038525951385498046, 0.038430721282958984, 0.03847679901123047, 0.038712318420410154, 0.04016025543212891, 0.040997886657714845, 0.039894016265869144, 0.039927806854248044, 0.038539264678955076, 0.03869388961791992, 0.03988787078857422, 0.0395786247253418, 0.03936870574951172, 0.03950592041015625, 0.038714366912841795, 0.03962265777587891, 0.039608318328857424, 0.03968102264404297, 0.040048641204833986, 0.0408616943359375, 0.03885363388061523, 0.038507518768310545, 0.03961958312988281, 0.039640064239501956, 0.03971686553955078, 0.03963391876220703, 0.03943116760253906, 0.038819839477539066, 0.038212608337402344, 0.038356990814208985, 0.03841331100463867, 0.03955916976928711, 0.03853823852539062, 0.0386058235168457, 0.03856076812744141, 0.03835289764404297, 0.038831104278564454, 0.038540287017822264, 0.03883212661743164, 0.03863654327392578, 0.03831398391723633, 0.0385167350769043, 0.03839078521728516, 0.03839897537231445, 0.038441982269287106, 0.03843174362182617, 0.03835084915161133, 0.038386688232421876, 0.038386688232421876, 0.03848089599609375, 0.03835084915161133, 0.03835801696777344, 0.03851878356933594, 0.03838566589355469, 0.03833446502685547, 0.038076416015625, 0.038340606689453126, 0.03867340850830078, 0.03839385604858398, 0.03882291030883789, 0.03952537536621094, 0.03833651351928711, 0.038779903411865234, 0.03847987365722656, 0.03876761627197266, 0.03857305526733398, 0.03835903930664063, 0.038365184783935545, 0.03828838348388672, 0.03827507019042969, 0.038391807556152346, 0.0384901123046875, 0.03830169677734375, 0.038340606689453126, 0.038340606689453126, 0.03846656036376953, 0.038408191680908206, 0.03908915328979492, 0.039951358795166016, 0.03973529434204102, 0.03951308822631836, 0.03945574569702148, 0.03968102264404297, 0.03950080108642578, 0.03827302551269531, 0.03902259063720703, 0.03958476638793945, 0.03955507278442383, 0.03994009780883789, 0.039927806854248044, 0.03914137649536133, 0.03841228866577148, 0.03819417572021484, 0.03949158477783203, 0.0383744010925293, 0.03942195129394531, 0.03946086502075195, 0.03849318313598633, 0.03827199935913086, 0.03812351989746094, 0.03804467010498047, 0.03827609634399414, 0.03947212982177734, 0.038335487365722655, 0.038468608856201174, 0.03831500625610352, 0.038250495910644534, 0.03890892791748047, 0.03840409469604492, 0.038414337158203124, 
0.038441982269287106, 0.03837952041625976, 0.03851366424560547, 0.038441982269287106, 0.038432769775390625, 0.038430721282958984, 0.03839385604858398, 0.03838259124755859, 0.038319103240966795, 0.03860889434814453, 0.03867852783203125, 0.03834777450561523, 0.038578174591064454, 0.03868467330932617, 0.038335487365722655, 0.038432769775390625, 0.038441982269287106, 0.0388403205871582, 0.039923713684082034, 0.03971788787841797, 0.03974348831176758, 0.03971583938598633, 0.03849625778198242, 0.03918438339233398, 0.03837952041625976, 0.039411712646484375, 0.03851366424560547, 0.03845119857788086, 0.03846963119506836, 0.038351871490478515, 0.038487041473388675, 0.03851059341430664, 0.0384266242980957, 0.039414783477783204, 0.0397209587097168, 0.03876147079467773, 0.03966566467285156, 0.03975065612792969, 0.03990528106689453, 0.039384063720703126, 0.03859251022338867, 0.0384450569152832, 0.03850137710571289, 0.0387512321472168, 0.03983871841430664, 0.039618560791015625, 0.03966259384155273, 0.039539710998535156, 0.03887513732910156, 0.03846656036376953, 0.03885363388061523, 0.038508544921875, 0.03945062255859375, 0.03951923370361328, 0.03843993759155274, 0.03833139038085937, 0.03943423843383789, 0.04014387130737305, 0.03966156768798828, 0.038403072357177735, 0.03854643249511719, 0.03846041488647461, 0.03959603118896484, 0.04021145629882812, 0.040220672607421876, 0.03986841583251953, 0.039591934204101564, 0.03964211273193359, 0.040035327911376956, 0.03971481704711914, 0.03972710418701172, 0.03958476638793945, 0.03977318572998047, 0.03982131195068359, 0.03960115051269531, 0.03840512084960938, 0.038242305755615234, 0.038280193328857424, 0.03823001480102539, 0.0383559684753418, 0.03830988693237305, 0.03846041488647461, 0.0382371826171875, 0.03834368133544922, 0.03827097702026367, 0.039190528869628906, 0.03833446502685547, 0.038370304107666016, 0.038484992980957033, 0.03837952041625976, 0.03830886459350586, 0.038307838439941407, 0.03822182464599609, 0.038763519287109374, 0.038215679168701173, 0.03832627105712891, 0.03828736114501953, 0.03847270584106445, 0.03827199935913086, 0.03838873672485352, 0.038422527313232424, 0.039550975799560545, 0.03959296035766602, 0.039626750946044925, 0.03900723266601563, 0.03986022567749024, 0.03967488098144531, 0.03965542221069336, 0.03916185760498047, 0.040016895294189454, 0.03972198486328125, 0.039801856994628904, 0.03966259384155273, 0.03968819046020508, 0.03971481704711914, 0.038951934814453124, 0.038231040954589846, 0.038556671142578124, 0.03835903930664063, 0.040008705139160154, 0.03877273559570313, 0.038424575805664066, 0.039531520843505856, 0.04003430557250977, 0.04069683074951172, 0.04116070556640625, 0.03995238494873047, 0.04024627304077148, 0.03970560073852539, 0.03977011108398437, 0.03911577606201172, 0.03949363327026367, 0.03958476638793945, 0.038547454833984376, 0.03836108779907227, 0.03830681610107422, 0.03839692687988281, 0.03871334457397461, 0.038267902374267575, 0.038319103240966795, 0.041232383728027344, 0.04008038330078125, 0.039877632141113284, 0.03979980850219727, 0.03970457458496094, 0.039585792541503906, 0.039669761657714846, 0.03885567855834961, 0.040525825500488284, 0.03848294448852539, 0.04014796829223633, 0.03969331359863281, 0.03987046432495117, 0.039894016265869144, 0.03968204879760742, 0.04115558242797852, 0.040363006591796875, 0.03977830505371094, 0.03976806259155274, 0.0395786247253418, 0.03959091186523438, 0.03783168029785156, 0.038302719116210936, 0.03876249694824219, 0.038362110137939456, 0.038368255615234374, 0.03850035095214844, 
0.03841228866577148, 0.03988991928100586, 0.03970355224609375, 0.0397127685546875, 0.03896012878417969, 0.03830681610107422, 0.03825766372680664, 0.03836620712280273, 0.03843174362182617, 0.03891507339477539, 0.04003942489624023, 0.03968102264404297, 0.038727680206298826, 0.038345729827880856, 0.040005630493164065, 0.04003123092651367, 0.03926835250854492, 0.03828326416015625, 0.03830374526977539, 0.03845017623901367, 0.038836223602294925, 0.0397762565612793, 0.039653377532958986, 0.03952640151977539, 0.03954995346069336, 0.039521278381347655, 0.03949363327026367, 0.03951411056518555, 0.039640064239501956, 0.03848294448852539, 0.0385873908996582, 0.03851161575317383, 0.03850035095214844, 0.03832524871826172, 0.03948953628540039, 0.039367679595947266, 0.03963289642333984, 0.039793663024902344, 0.039591934204101564, 0.039809024810791016, 0.04001484680175781, 0.038681598663330076, 0.03832320022583008, 0.038882305145263675, 0.03955199813842773, 0.039672832489013675, 0.039994369506835936, 0.03970355224609375, 0.03969228744506836, 0.03962060928344727, 0.03847577667236328, 0.03942707061767578, 0.039605247497558595, 0.040495105743408207, 0.03966156768798828, 0.03939328002929687, 0.03991142272949219, 0.03948134231567383, 0.03975167846679688, 0.039593982696533206, 0.041164798736572264, 0.03978342437744141, 0.039828479766845705, 0.03974246215820312, 0.038599681854248044, 0.038665214538574216, 0.03962879943847656, 0.038776832580566405, 0.03917004776000976, 0.03952025604248047, 0.03966668701171875, 0.03988479995727539]",tokens/s,25.67647383821494,,,,,,,, -4bit-awq-gemv-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,facebook/opt-6.7b,facebook/opt-6.7b,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemv,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run - report = scenario.run(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 105, in run - _ = backend.generate(self.inputs, self.config.generate_kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context - return func(*args, **kwargs) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 400, in generate - return self.pretrained_model.generate(**inputs, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context - return 
func(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 1914, in generate - result = self._sample( - File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2651, in _sample - outputs = self( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 1118, in forward - outputs = self.model.decoder( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 884, in forward - layer_outputs = decoder_layer( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 525, in forward - hidden_states, self_attn_weights, present_key_value = self.self_attn( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 155, in forward - query_states = self.q_proj(hidden_states) * self.scaling - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl - return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl - return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context - return func(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/gemv.py"", line 162, in forward - assert AWQ_INSTALLED, ( -AssertionError: AWQ kernels could not be loaded. 
Please install them from https://github.com/casper-hansen/AutoAWQ_kernels - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-awq-exllama-v2-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,.,.,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 373, in cached_file - raise EnvironmentError( -OSError: . does not appear to have a file named config.json. Checkout 'https://huggingface.co/./tree/None' for available files. 
- -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,2,64,1,,, -4bit-awq-exllama-v2-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,l,l,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 304, in hf_raise_for_status - response.raise_for_status() - File ""/usr/local/lib/python3.10/dist-packages/requests/models.py"", line 1024, in raise_for_status - raise HTTPError(http_error_msg, response=self) -requests.exceptions.HTTPError: 404 Client Error: Not Found for url: https://huggingface.co/l/resolve/main/config.json - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1221, in hf_hub_download - return _hf_hub_download_to_cache_dir( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1325, in _hf_hub_download_to_cache_dir - _raise_on_head_call_error(head_call_error, force_download, local_files_only) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1823, in _raise_on_head_call_error - raise head_call_error - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1722, in _get_metadata_or_catch_error - metadata = get_hf_file_metadata(url=url, proxies=proxies, timeout=etag_timeout, headers=headers) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1645, in get_hf_file_metadata - r = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 372, in _request_wrapper - response = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 396, in _request_wrapper - hf_raise_for_status(response) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status - raise RepositoryNotFoundError(message, 
response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-669491b0-327a16341b2f51a514e18177;e075165f-03de-49cf-b752-40507cae6236) - -Repository Not Found for url: https://huggingface.co/l/resolve/main/config.json. -Please make sure you specified the correct `repo_id` and `repo_type`. -If you are trying to access a private or gated repo, make sure you are authenticated. - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 425, in cached_file - raise EnvironmentError( -OSError: l is not a local folder and is not a valid model identifier listed on 'https://huggingface.co/models' -If this is a private repository, make sure to pass a token having permission to this repo either by logging in with `huggingface-cli login` or by passing `token=` - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,2,64,1,,, -4bit-awq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,Qwen/Qwen1.5-32B,Qwen/Qwen1.5-32B,cuda,0,42,,,True,,,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,qwen2,MB,17845.592064,24073.732096,0.0,23444.062208,22019.972096,s,1,18.30278515625,18.30278515625,0.0,18.30278515625,18.30278515625,18.30278515625,18.30278515625,[18.30278515625],,kWh,0.00013539926711595552,7.419471216508945e-05,0.0002966507928759965,0.0005062447721570415,,MB,4439.38816,24145.035264,0.0,23496.491008,20927.465984,s,10,4.8092201232910154,0.48092201232910153,7.940632690276601e-05,0.4809117889404297,0.4809989929199219,0.48104788208007815,0.4810869934082031,"[0.48109677124023437, 0.48090194702148437, 
0.4808143920898437, 0.480920166015625, 0.4809034118652344, 0.48095892333984375, 0.480946533203125, 0.480876220703125, 0.4809881286621094, 0.4808136291503906]",tokens/s,532.3108392568558,kWh,5.685029517427553e-06,3.1151111570075464e-06,2.461777101695325e-05,3.341791169138835e-05,tokens/kWh,7660562.4661450675,MB,4447.977472,22997.89312,0.0,22347.251712,20927.468544,s,10,39.970671874999994,3.9970671874999995,0.07263700592770361,4.005329223632812,4.065270776367187,4.096067761230469,4.120705349121094,"[4.052052734375, 4.04518994140625, 4.058427001953125, 3.885306640625, 3.92515673828125, 3.914036376953125, 4.01465087890625, 4.12686474609375, 3.996007568359375, 3.952979248046875]",tokens/s,15.761556422423787,kWh,4.670348497570004e-05,2.559431230603086e-05,0.00015487662257844973,0.0002271744198601806,tokens/kWh,277319.955471988,,s,630,39.96800815963748,0.06344128279307532,0.002185252898918491,0.062209022521972655,0.06664110412597656,0.06703447036743164,0.068138588180542,"[0.06469318389892578, 0.06754713439941407, 0.06694707489013672, 0.06734028625488281, 0.06714060974121094, 0.06292889785766602, 0.062002174377441405, 0.06645247650146484, 0.06760243225097656, 0.06651904296875, 0.06579609680175781, 0.061851646423339846, 0.06652108764648437, 0.06675762939453125, 0.06158848190307617, 0.06626509094238281, 0.06525132751464843, 0.06767820739746094, 0.06668697357177734, 0.06631629180908204, 0.06170111846923828, 0.06551551818847656, 0.06633574676513672, 0.06627839660644531, 0.06670950317382812, 0.06632243347167968, 0.06207897567749023, 0.06449561309814453, 0.06434918212890625, 0.062104576110839846, 0.06393344116210938, 0.0669849624633789, 0.06731059265136718, 0.06710476684570313, 0.06168371200561523, 0.06521548461914063, 0.0663367691040039, 0.061470718383789064, 0.06167552185058594, 0.06140415954589844, 0.06133145523071289, 0.06163558578491211, 0.06174720001220703, 0.06175129699707031, 0.06503321838378906, 0.0619417610168457, 0.06173798370361328, 0.06327910232543946, 0.06668492889404297, 0.06669414520263672, 0.06622105407714844, 0.061617153167724606, 0.06169702529907226, 0.06157004928588867, 0.06171648025512695, 0.061515777587890626, 0.0616099853515625, 0.06119731140136719, 0.06148710250854492, 0.06148812866210938, 0.06532915496826172, 0.0662282257080078, 0.06538240051269531, 0.06658048248291015, 0.06649549102783203, 0.06421504211425781, 0.061963264465332034, 0.0662282257080078, 0.06702489471435547, 0.06236057662963867, 0.06610739135742187, 0.06646886444091797, 0.06638182067871094, 0.06704230499267579, 0.06278758239746093, 0.06345830535888672, 0.06624050903320312, 0.06708121490478515, 0.06760140991210938, 0.06596607971191407, 0.0666982421875, 0.06628147125244141, 0.0646328353881836, 0.06619647979736328, 0.06626201629638671, 0.061243392944335937, 0.0620871696472168, 0.06616883087158203, 0.06619545745849609, 0.06737715148925781, 0.06657536315917968, 0.06626713562011718, 0.066408447265625, 0.06555136108398438, 0.06458367919921874, 0.06642790222167969, 0.06174515151977539, 0.061456382751464846, 0.062238719940185545, 0.06215475082397461, 0.06140108871459961, 0.06139801788330078, 0.06458163452148437, 0.06653030395507813, 0.0664801254272461, 0.06248755264282226, 0.06177996826171875, 0.06146047973632812, 0.061461505889892576, 0.06147174453735352, 0.06102937698364258, 0.06137139129638672, 0.06134988784790039, 0.06124851226806641, 0.0617891845703125, 0.06618521881103516, 0.061608959197998046, 0.06162432098388672, 0.06165708923339844, 0.06165708923339844, 0.06728704071044922, 0.06686822509765625, 
0.06678118133544922, 0.0615813102722168, 0.061661182403564455, 0.0616099853515625, 0.06569369506835937, 0.06265651321411132, 0.062266368865966794, 0.06179123306274414, 0.062296062469482424, 0.062209022521972655, 0.06156902313232422, 0.06150656127929687, 0.06158950424194336, 0.06379315185546874, 0.06649549102783203, 0.0662282257080078, 0.062170112609863284, 0.06663372802734376, 0.06669209289550782, 0.0616806411743164, 0.061795326232910154, 0.06176768112182617, 0.06179635238647461, 0.06365900802612305, 0.06668800354003906, 0.06237286376953125, 0.062048255920410154, 0.06956646728515625, 0.06710066986083985, 0.06624050903320312, 0.06211379241943359, 0.06652722930908203, 0.065364990234375, 0.06657331085205079, 0.06178303909301758, 0.06343065643310547, 0.06666035461425782, 0.06669312286376954, 0.0666644515991211, 0.06604287719726562, 0.06440345764160156, 0.066302978515625, 0.06172876739501953, 0.06530355072021485, 0.06487654113769531, 0.06563021087646484, 0.06163763046264648, 0.06285823822021484, 0.06669721221923829, 0.0619233283996582, 0.06515609741210937, 0.0659978256225586, 0.06326988983154297, 0.06625484466552735, 0.06649651336669922, 0.06644838714599609, 0.06773452758789063, 0.06515711975097656, 0.061740032196044924, 0.06646886444091797, 0.0666275863647461, 0.0617441291809082, 0.06526464080810547, 0.06654054260253907, 0.06360268783569335, 0.06627225494384766, 0.061862911224365234, 0.062440448760986325, 0.06177791976928711, 0.06170828628540039, 0.06163455963134765, 0.06138675308227539, 0.061467647552490234, 0.06136627197265625, 0.0615464973449707, 0.061541374206542966, 0.0615096321105957, 0.0612782096862793, 0.06135295867919922, 0.06133248138427734, 0.06103859329223633, 0.061502464294433595, 0.06199808120727539, 0.061459457397460934, 0.06133760070800781, 0.06127718353271484, 0.06130380630493164, 0.06131097412109375, 0.061454334259033204, 0.06154035186767578, 0.06148710250854492, 0.06159872055053711, 0.06168371200561523, 0.06156902313232422, 0.06167244720458984, 0.061557758331298826, 0.061623294830322264, 0.06313881683349609, 0.06449664306640625, 0.0626800651550293, 0.06197248077392578, 0.06160281753540039, 0.06179840087890625, 0.06173593521118164, 0.061677566528320314, 0.06154035186767578, 0.06142668914794922, 0.06169497680664063, 0.06150860977172851, 0.06169702529907226, 0.06160076904296875, 0.0615731201171875, 0.061638656616210936, 0.06163558578491211, 0.0615813102722168, 0.06235033416748047, 0.06175027084350586, 0.06162227249145508, 0.06146867370605469, 0.061618175506591794, 0.061674495697021485, 0.06138163375854492, 0.061608959197998046, 0.06144307327270508, 0.06176358413696289, 0.06200831985473633, 0.061644798278808595, 0.061298686981201174, 0.06122700881958008, 0.0614205436706543, 0.06243123245239258, 0.061873153686523436, 0.06191308975219727, 0.06390886306762696, 0.06339174270629883, 0.06185062408447266, 0.06147993469238281, 0.061346817016601565, 0.06215577697753906, 0.06186700820922852, 0.06198067092895508, 0.06547148895263671, 0.06391296005249024, 0.06155980682373047, 0.06201753616333008, 0.06230835342407227, 0.06194278335571289, 0.06260121536254883, 0.06333542251586914, 0.0629483528137207, 0.0628039665222168, 0.06288281631469726, 0.0629227523803711, 0.06166016006469727, 0.06170214462280273, 0.06292582321166992, 0.062202880859375, 0.06541004943847656, 0.06263091278076172, 0.06276812744140625, 0.0629299201965332, 0.06176358413696289, 0.062036991119384766, 0.06136524963378906, 0.062105598449707033, 0.062042110443115236, 0.061489151000976565, 0.061434879302978515, 0.06267801666259766, 
0.06134783935546875, 0.061499393463134766, 0.06386175918579101, 0.06234931182861328, 0.06261759948730469, 0.062281726837158206, 0.061835262298583986, 0.06204313659667969, 0.062063617706298826, 0.06369792175292968, 0.06136524963378906, 0.06181478500366211, 0.06190387344360351, 0.06159257507324219, 0.061693950653076174, 0.06160281753540039, 0.0616734733581543, 0.06165708923339844, 0.06224486541748047, 0.06181478500366211, 0.06145024108886719, 0.062296062469482424, 0.062453758239746096, 0.061677566528320314, 0.06235238265991211, 0.06168166351318359, 0.061628414154052735, 0.0615280647277832, 0.06177587127685547, 0.06200934219360352, 0.06227763366699219, 0.062129150390625, 0.06205952072143555, 0.06167244720458984, 0.061770751953125, 0.06167244720458984, 0.06199808120727539, 0.06192435073852539, 0.06199193572998047, 0.06131097412109375, 0.06163148880004883, 0.061692928314208986, 0.06158643341064453, 0.06158233642578125, 0.06152908706665039, 0.06219468688964844, 0.06160486221313476, 0.06316646575927734, 0.06207078552246094, 0.06183833694458008, 0.06304460906982422, 0.06257049560546875, 0.06215577697753906, 0.06263808059692383, 0.06171955108642578, 0.062258174896240234, 0.06182400131225586, 0.061856769561767576, 0.06223052978515625, 0.061396991729736325, 0.06178815841674805, 0.06151270294189453, 0.061574142456054685, 0.0627701759338379, 0.062491649627685546, 0.06277119827270508, 0.062081024169921874, 0.06140313720703125, 0.06250188827514648, 0.06255513763427735, 0.06174310302734375, 0.06269440078735351, 0.061917182922363284, 0.06154035186767578, 0.06544588470458984, 0.06323814392089844, 0.06338252639770507, 0.06156902313232422, 0.06191001510620117, 0.06207692718505859, 0.06383308792114258, 0.062473217010498044, 0.06176051330566406, 0.06121062469482422, 0.0622479362487793, 0.061946880340576174, 0.06296268844604493, 0.06240870285034179, 0.06272000122070312, 0.0627701759338379, 0.062120960235595706, 0.061620223999023435, 0.06257664108276367, 0.0632985610961914, 0.06218239974975586, 0.06183833694458008, 0.06468300628662109, 0.06580429077148438, 0.06625177764892579, 0.06183321762084961, 0.0616703987121582, 0.06164377593994141, 0.06176768112182617, 0.06176051330566406, 0.06163251113891602, 0.06231347274780273, 0.06219776153564453, 0.06177382278442383, 0.06196223831176758, 0.06268518447875976, 0.06773862457275391, 0.0620759048461914, 0.061758464813232425, 0.06195711898803711, 0.06190284729003906, 0.06191206359863281, 0.06213119888305664, 0.061699073791503904, 0.06657331085205079, 0.062268417358398435, 0.06195097732543945, 0.062249984741210934, 0.0628848648071289, 0.06201139068603516, 0.06428467559814453, 0.0669276123046875, 0.06627327728271484, 0.06154444885253906, 0.06168985748291016, 0.06171340942382812, 0.06218035125732422, 0.06548582458496094, 0.06662348937988281, 0.06811750030517578, 0.062287872314453124, 0.06452326202392578, 0.06180454254150391, 0.06601728057861328, 0.06737612915039062, 0.06701363372802735, 0.06697984313964844, 0.06413414764404297, 0.06648524475097656, 0.06641868591308593, 0.06593023681640625, 0.06525746917724609, 0.06814720153808594, 0.06635826873779296, 0.06619545745849609, 0.06598655700683594, 0.06253260803222656, 0.06578482818603516, 0.06346752166748047, 0.06581145477294922, 0.0662630386352539, 0.06600908660888671, 0.06619033813476563, 0.0654233627319336, 0.06647808074951173, 0.06610636901855468, 0.06634291076660156, 0.06652210998535156, 0.06708019256591796, 0.06704434967041016, 0.06678937530517579, 0.06192025756835937, 0.06437580871582031, 0.06618521881103516, 
0.06446489715576172, 0.06646886444091797, 0.06600192260742188, 0.06682112121582032, 0.0660848617553711, 0.0664463348388672, 0.06645145416259765, 0.06346956634521485, 0.06603366088867188, 0.06664089965820312, 0.0678092803955078, 0.06718156433105468, 0.06599577331542969, 0.06637773132324219, 0.06620877075195312, 0.0662630386352539, 0.06291763305664062, 0.06612889862060548, 0.06541004943847656, 0.06608076477050781, 0.0662824935913086, 0.06613811492919922, 0.06512640380859375, 0.06638182067871094, 0.06654463958740234, 0.06744166564941406, 0.06706073760986328, 0.062105598449707033, 0.06295040130615234, 0.06639103698730468, 0.06405939483642578, 0.06645247650146484, 0.06655897521972656, 0.06624870300292969, 0.061591552734375, 0.06571417236328125, 0.066408447265625, 0.06217523193359375, 0.06535167694091797, 0.06308454513549805, 0.0666429443359375, 0.06837760162353515, 0.062483455657958986, 0.061835262298583986, 0.0635975685119629, 0.06238617706298828, 0.06215884780883789, 0.062013439178466793, 0.061927425384521485, 0.06213017654418945, 0.0625797119140625, 0.06178815841674805, 0.06176665496826172, 0.06181273651123047, 0.06182195281982422, 0.06188032150268555, 0.061966335296630856, 0.062159870147705076, 0.0629043197631836, 0.06195404815673828, 0.06150348663330078, 0.06199705505371094, 0.06183116912841797, 0.061742080688476565, 0.06151065444946289, 0.061704193115234375, 0.06223462295532227, 0.06256639862060547, 0.0617891845703125, 0.06535065460205078, 0.06605209350585937, 0.06625177764892579, 0.06638489532470702, 0.06166425704956055, 0.06198886489868164, 0.061467647552490234, 0.061608959197998046, 0.061825023651123044, 0.06183833694458008, 0.061530113220214844, 0.0616519660949707, 0.06151679992675781, 0.06891519927978515, 0.0626319351196289, 0.06237696075439453, 0.06679756927490234, 0.0651704330444336, 0.0668375015258789, 0.06627532958984375, 0.06649958038330078, 0.06384947204589844, 0.07516365051269532, 0.06713549041748047, 0.06672383880615235, 0.06699417877197265, 0.06204415893554688, 0.062371841430664064, 0.06462770843505859, 0.06674124908447265, 0.06264524841308594, 0.06215884780883789, 0.061808639526367185, 0.062148609161376954, 0.061644798278808595, 0.061297664642333986, 0.0686714859008789, 0.062209022521972655, 0.06472601318359375, 0.06576640319824219, 0.062094337463378904, 0.0669665298461914, 0.06904115295410156, 0.06235443115234375, 0.0663367691040039, 0.06570393371582031, 0.06549094390869141, 0.062415870666503906, 0.06479052734375, 0.06192025756835937, 0.06188544082641602, 0.062050304412841796, 0.062034942626953124, 0.06179840087890625, 0.06219878387451172, 0.06262579345703125, 0.06188544082641602, 0.061548545837402345, 0.06182400131225586, 0.061859840393066405, 0.06182195281982422, 0.06191308975219727, 0.06186700820922852, 0.06239744186401367, 0.06339276885986328, 0.0616806411743164, 0.06202982330322265, 0.061778942108154294, 0.062399486541748046, 0.06199398422241211, 0.0620656623840332, 0.06670642852783203, 0.06790860748291015, 0.06204723358154297, 0.06183116912841797, 0.06162227249145508, 0.06174105453491211, 0.06174105453491211, 0.06164684677124024, 0.06205440139770508, 0.061769729614257814, 0.061835262298583986, 0.061871105194091794, 0.06254899215698242, 0.06189875030517578, 0.06177382278442383, 0.0617625617980957, 0.06176051330566406, 0.06308556747436524, 0.06183628845214844, 0.061655040740966796, 0.06166732788085937, 0.06156697463989258, 0.061644798278808595, 0.06147891235351562, 0.061655040740966796, 0.06181990432739258, 0.061908992767333984, 0.06116659164428711, 0.0620052490234375, 
0.06614015960693359, 0.0666429443359375]",tokens/s,15.76260687006712,,,2,64,1,,, -4bit-awq-exllama-v2-eager,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,openai-community/gpt2,openai-community/gpt2,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.0,,0.30.1,,,,1.19.2,,,,0.11.1,,,,,,,,,,,,,,,,,,,,,,,,,,,MB,1235.7632,1005.060096,0.0,358.612992,318.913024,s,20,0.1788675851821899,0.008943379259109496,0.00026419798811655123,0.00887070369720459,0.00916225633621216,0.009274708795547485,0.009786717424392698,"[0.009914719581604003, 0.008858528137207031, 0.008864447593688965, 0.008830816268920898, 0.008876959800720214, 0.009153504371643067, 0.008886079788208007, 0.008735072135925294, 0.009109472274780274, 0.008775872230529785, 0.008850208282470703, 0.00901251220703125, 0.00899942398071289, 0.008742527961730957, 0.009241024017333984, 0.008729375839233399, 0.00890329647064209, 0.008699104309082032, 0.008877599716186523, 0.008807040214538575]",tokens/s,28624.526879953683,kWh,1.0405706355691948e-07,5.701826505582485e-08,2.244287160088168e-07,3.8550404462156115e-07,tokens/kWh,664065665.643815,MB,1235.7632,1005.060096,0.0,358.612992,328.809472,s,20,10.08869351196289,0.5044346755981446,0.005335005585423021,0.503564468383789,0.5124426330566406,0.5136576141357422,0.5166526654052734,"[0.5010412292480468, 0.5011974487304688, 0.4979539489746094, 0.5063039855957031, 0.5047576599121094, 0.5012839050292969, 0.49773388671875, 0.5053358764648438, 0.5038833312988281, 0.51085546875, 0.5134605712890625, 0.5174014282226562, 0.50324560546875, 0.5123295288085937, 0.50199462890625, 0.5043636169433594, 0.4986883239746094, 0.5073371276855468, 0.49996554565429685, 0.49956039428710936]",tokens/s,124.89228644976944,kWh,6.0475178527528624e-06,3.3125900223381058e-06,1.029085747399049e-05,1.9650965349081454e-05,tokens/kWh,3205949.3709780937,,s,1259,10.248980564594259,0.008140572330892985,0.0011351723397017978,0.007914495944976807,0.00819527702331543,0.008268185138702392,0.017085111923217773,"[0.008740960121154785, 0.008759200096130371, 0.00870195198059082, 0.009260031700134277, 0.008202239990234375, 0.00787660789489746, 0.00785203218460083, 0.007820288181304931, 0.00780083179473877, 0.007929855823516846, 0.007827455997467042, 0.007792640209197998, 0.007823359966278077, 0.007840767860412597, 0.007830527782440186, 0.007824384212493896, 0.007812096118927002, 0.007836671829223632, 0.007806975841522217, 0.007794688224792481, 0.007799808025360107, 0.007849984169006348, 0.007786496162414551, 0.007824384212493896, 0.007945216178894043, 0.007836671829223632, 0.007806975841522217, 0.007813119888305664, 0.008041472434997558, 0.007838719844818116, 0.007780352115631104, 0.007794688224792481, 0.007814144134521485, 0.007823359966278077, 0.007830527782440186, 0.00779366397857666, 0.007840767860412597, 0.00790937614440918, 0.008236031532287597, 0.00806604766845703, 0.008050687789916992, 0.008067071914672852, 0.00800972843170166, 0.007974912166595459, 0.00786636781692505, 0.007839744091033935, 0.00782643222808838, 0.007836671829223632, 0.007819263935089112, 0.007831552028656007, 
0.007787519931793213, 0.007797760009765625, 0.007813119888305664, 0.007910399913787843, 0.007830527782440186, 0.00781824016571045, 0.007817215919494629, 0.007847936153411865, 0.007822336196899414, 0.00779366397857666, 0.007809088230133056, 0.007972799777984619, 0.017083391189575196, 0.007821311950683594, 0.007833600044250488, 0.007803904056549072, 0.008130559921264649, 0.008444928169250488, 0.008322048187255859, 0.008384511947631837, 0.008393728256225585, 0.008424448013305665, 0.00838144016265869, 0.008119296073913575, 0.008051712036132813, 0.007872511863708496, 0.007865344047546387, 0.007855103969573975, 0.007837696075439453, 0.007867392063140868, 0.007815167903900147, 0.007829504013061523, 0.007813119888305664, 0.007836671829223632, 0.007914495944976807, 0.007887872219085693, 0.00783564805984497, 0.00791756820678711, 0.007890944004058837, 0.007857151985168457, 0.007817215919494629, 0.007907328128814697, 0.007833600044250488, 0.00782643222808838, 0.007804927825927735, 0.007836671829223632, 0.007812096118927002, 0.00781824016571045, 0.007804927825927735, 0.007844863891601562, 0.007870463848114014, 0.00782643222808838, 0.00781004810333252, 0.008635392189025879, 0.008225791931152344, 0.008044544219970704, 0.007888895988464355, 0.007927807807922363, 0.00791756820678711, 0.007895040035247802, 0.007888895988464355, 0.007903232097625732, 0.007906303882598878, 0.007899136066436767, 0.008059904098510743, 0.007930880069732665, 0.00791756820678711, 0.007890944004058837, 0.0078919677734375, 0.007902207851409913, 0.007917632102966308, 0.007903168201446533, 0.007907328128814697, 0.007885824203491211, 0.007904255867004394, 0.017123327255249024, 0.007914495944976807, 0.007899136066436767, 0.007911424160003662, 0.00787660789489746, 0.007897088050842285, 0.007896063804626464, 0.007951392173767089, 0.007910367965698242, 0.00790937614440918, 0.007902207851409913, 0.007904255867004394, 0.007924736022949219, 0.00791756820678711, 0.007886847972869874, 0.007912447929382324, 0.007911424160003662, 0.007878655910491944, 0.007873536109924317, 0.007907328128814697, 0.007894015789031983, 0.00790118408203125, 0.00790118408203125, 0.007882751941680909, 0.007897088050842285, 0.00790937614440918, 0.00790015983581543, 0.007895040035247802, 0.007877632141113282, 0.007913472175598145, 0.007868415832519531, 0.007877632141113282, 0.007882751941680909, 0.007887872219085693, 0.007899136066436767, 0.007874559879302979, 0.00788479995727539, 0.007873536109924317, 0.007944191932678223, 0.007880703926086426, 0.007899136066436767, 0.007952383995056152, 0.007964672088623047, 0.007881728172302246, 0.007886847972869874, 0.007874559879302979, 0.00789299201965332, 0.007895040035247802, 0.00787660789489746, 0.007916543960571289, 0.007904255867004394, 0.007903232097625732, 0.007869440078735352, 0.007903232097625732, 0.007887872219085693, 0.00796675205230713, 0.007912415981292725, 0.00786636781692505, 0.00789299201965332, 0.007875584125518798, 0.007924767971038818, 0.007874527931213378, 0.0078919677734375, 0.017052671432495118, 0.007898111820220948, 0.007918591976165772, 0.007899136066436767, 0.00785923194885254, 0.007882719993591308, 0.007894015789031983, 0.007855103969573975, 0.00787769603729248, 0.007874495983123779, 0.007897088050842285, 0.007960576057434082, 0.007932928085327149, 0.007918591976165772, 0.007887872219085693, 0.007918591976165772, 0.008210432052612305, 0.007874559879302979, 0.007902207851409913, 0.00789299201965332, 0.007839776039123536, 0.007864287853240967, 0.007882751941680909, 0.007897088050842285, 0.007990272045135497, 
0.007927807807922363, 0.007896063804626464, 0.007938047885894776, 0.007918591976165772, 0.008033280372619628, 0.008353792190551757, 0.008071167945861817, 0.007939072132110595, 0.007919616222381591, 0.007890944004058837, 0.007920639991760254, 0.008637439727783204, 0.00832921600341797, 0.008156160354614257, 0.00827187156677246, 0.008194047927856446, 0.008177663803100586, 0.008159232139587403, 0.008148032188415527, 0.00825443172454834, 0.008152031898498535, 0.008161279678344726, 0.008157183647155761, 0.008122367858886719, 0.008179712295532226, 0.008135680198669434, 0.008156160354614257, 0.008168448448181152, 0.008136704444885253, 0.008164352416992187, 0.008143872261047362, 0.008163328170776368, 0.008181759834289551, 0.0082227201461792, 0.008193023681640625, 0.007928832054138184, 0.00785920000076294, 0.007887872219085693, 0.017110015869140623, 0.007940095901489258, 0.007927807807922363, 0.007938047885894776, 0.007880703926086426, 0.007928832054138184, 0.007878655910491944, 0.007922688007354736, 0.007928832054138184, 0.007921664237976075, 0.007896063804626464, 0.007921728134155273, 0.007894976139068604, 0.007971839904785156, 0.007922688007354736, 0.007927807807922363, 0.007964672088623047, 0.00813158416748047, 0.008223744392395019, 0.008168448448181152, 0.008158207893371582, 0.008228863716125488, 0.008165375709533691, 0.008120320320129394, 0.00812342357635498, 0.008126432418823243, 0.008164352416992187, 0.008128512382507324, 0.008146944046020508, 0.00789299201965332, 0.007964672088623047, 0.007932928085327149, 0.00788479995727539, 0.007954432010650634, 0.007886847972869874, 0.007877632141113282, 0.007887872219085693, 0.007885824203491211, 0.00790835189819336, 0.00788479995727539, 0.00787660789489746, 0.007916543960571289, 0.007880703926086426, 0.007913472175598145, 0.007856128215789794, 0.007871488094329833, 0.007870463848114014, 0.007887872219085693, 0.007919616222381591, 0.007886847972869874, 0.007913472175598145, 0.008102911949157715, 0.008297472000122071, 0.008225824356079102, 0.008167391777038574, 0.008201215744018555, 0.008158207893371582, 0.008196096420288086, 0.008138751983642578, 0.008178751945495605, 0.008141759872436524, 0.008168448448181152, 0.008168448448181152, 0.017675264358520508, 0.008162303924560547, 0.008164352416992187, 0.008168448448181152, 0.007974912166595459, 0.007903232097625732, 0.007895040035247802, 0.007900191783905029, 0.007922656059265136, 0.007906303882598878, 0.00795136022567749, 0.007872511863708496, 0.008233983993530274, 0.008203264236450195, 0.008174592018127442, 0.007968768119812012, 0.007970816135406494, 0.00800767993927002, 0.007923711776733398, 0.00791756820678711, 0.007895040035247802, 0.007906303882598878, 0.007885824203491211, 0.007902207851409913, 0.007950335979461669, 0.007906303882598878, 0.00800153636932373, 0.008085503578186035, 0.007935999870300293, 0.00807423973083496, 0.007925759792327881, 0.007998464107513427, 0.007912447929382324, 0.007880703926086426, 0.007910431861877442, 0.007928800106048585, 0.007922688007354736, 0.00797388792037964, 0.007903232097625732, 0.007905280113220215, 0.007947264194488525, 0.007898111820220948, 0.007899136066436767, 0.00788479995727539, 0.007897088050842285, 0.007875584125518798, 0.007867392063140868, 0.00787660789489746, 0.007890944004058837, 0.007881728172302246, 0.007879680156707763, 0.007906303882598878, 0.007890944004058837, 0.007894015789031983, 0.0078919677734375, 0.00790835189819336, 0.007898111820220948, 0.007895040035247802, 0.007961631774902344, 0.007899104118347168, 0.00791756820678711, 
0.007910399913787843, 0.008005632400512695, 0.017169408798217774, 0.0079267840385437, 0.00794316816329956, 0.007887872219085693, 0.007921664237976075, 0.007984127998352051, 0.00790015983581543, 0.007987199783325195, 0.007899136066436767, 0.007883776187896728, 0.007895040035247802, 0.007885824203491211, 0.007896063804626464, 0.007905344009399413, 0.007891903877258301, 0.007874559879302979, 0.007882751941680909, 0.007895040035247802, 0.007906303882598878, 0.007896063804626464, 0.007887872219085693, 0.007865344047546387, 0.007896063804626464, 0.007923711776733398, 0.007896063804626464, 0.007872511863708496, 0.007886847972869874, 0.007873536109924317, 0.00788479995727539, 0.00788479995727539, 0.007883776187896728, 0.007895040035247802, 0.007899136066436767, 0.007889920234680176, 0.007888895988464355, 0.007883776187896728, 0.007887872219085693, 0.007912447929382324, 0.00789299201965332, 0.007896063804626464, 0.007897088050842285, 0.00789299201965332, 0.007904255867004394, 0.007873536109924317, 0.0078919677734375, 0.007883776187896728, 0.007934016227722168, 0.007873472213745117, 0.007871488094329833, 0.007897088050842285, 0.007870463848114014, 0.007869440078735352, 0.007874559879302979, 0.007904255867004394, 0.007865344047546387, 0.00790835189819336, 0.007881728172302246, 0.007898111820220948, 0.007878655910491944, 0.007904255867004394, 0.007894015789031983, 0.007880703926086426, 0.007881760120391846, 0.016962528228759766, 0.00789299201965332, 0.007874559879302979, 0.007890944004058837, 0.0078919677734375, 0.007897088050842285, 0.007905280113220215, 0.007899136066436767, 0.007896063804626464, 0.0078919677734375, 0.007912447929382324, 0.007894015789031983, 0.007956480026245117, 0.007898176193237304, 0.007912384033203126, 0.007885824203491211, 0.007932928085327149, 0.007882751941680909, 0.007877632141113282, 0.00787660789489746, 0.007870463848114014, 0.00789299201965332, 0.00787660789489746, 0.007877632141113282, 0.007886879920959473, 0.007880671977996826, 0.007911424160003662, 0.007899136066436767, 0.007886847972869874, 0.007935999870300293, 0.007873536109924317, 0.007868415832519531, 0.0078919677734375, 0.007889920234680176, 0.007879680156707763, 0.007983104228973388, 0.007993343830108643, 0.008654848098754882, 0.008453120231628418, 0.008224767684936523, 0.008190976142883302, 0.008196096420288086, 0.008163328170776368, 0.008144895553588867, 0.008160256385803222, 0.0081725435256958, 0.008152064323425292, 0.008176639556884765, 0.008184831619262695, 0.008145919799804687, 0.00812339210510254, 0.008157183647155761, 0.008146944046020508, 0.008195072174072266, 0.0081397762298584, 0.00821555233001709, 0.008168448448181152, 0.0081397762298584, 0.008179743766784668, 0.00813974380493164, 0.00812339210510254, 0.00800767993927002, 0.008005632400512695, 0.017589248657226563, 0.008153087615966797, 0.008118271827697754, 0.00813158416748047, 0.008112128257751466, 0.008121343612670898, 0.008136704444885253, 0.008134655952453614, 0.0081397762298584, 0.008102944374084473, 0.008115167617797852, 0.008144895553588867, 0.008137727737426758, 0.008156160354614257, 0.008138751983642578, 0.008175616264343261, 0.008162303924560547, 0.00818995189666748, 0.00819711971282959, 0.008226816177368163, 0.00820736026763916, 0.007906303882598878, 0.007905280113220215, 0.007881824016571046, 0.007879583835601807, 0.007870463848114014, 0.007897088050842285, 0.007874559879302979, 0.007897088050842285, 0.007871488094329833, 0.007896063804626464, 0.007914495944976807, 0.00790937614440918, 0.007870463848114014, 0.007983104228973388, 
0.008044544219970704, 0.008096768379211425, 0.007906303882598878, 0.008112128257751466, 0.007983104228973388, 0.007894015789031983, 0.007897088050842285, 0.007886847972869874, 0.007949312210083008, 0.008068096160888672, 0.007966720104217529, 0.007918591976165772, 0.007911424160003662, 0.007865344047546387, 0.00786742401123047, 0.007906271934509277, 0.007863296031951903, 0.007878687858581543, 0.007874527931213378, 0.007888895988464355, 0.007899136066436767, 0.00789299201965332, 0.007874559879302979, 0.007891007900238037, 0.007904191970825196, 0.007889920234680176, 0.007860223770141601, 0.007889920234680176, 0.016955392837524414, 0.007903232097625732, 0.007902207851409913, 0.007949312210083008, 0.008045568466186523, 0.008096768379211425, 0.008201215744018555, 0.008014847755432129, 0.007928832054138184, 0.007928832054138184, 0.007914495944976807, 0.007942143917083741, 0.00790937614440918, 0.00790835189819336, 0.007902207851409913, 0.007897088050842285, 0.007897119998931884, 0.007896031856536865, 0.00787660789489746, 0.007882751941680909, 0.007858176231384278, 0.008070143699645996, 0.007881728172302246, 0.007886847972869874, 0.008017919540405273, 0.008667136192321777, 0.008648703575134278, 0.009057279586791991, 0.008278016090393067, 0.008357888221740722, 0.008216575622558593, 0.008090656280517578, 0.008072159767150878, 0.008176639556884765, 0.008013824462890624, 0.008032256126403809, 0.008014847755432129, 0.008168448448181152, 0.008040448188781739, 0.008073216438293456, 0.008193023681640625, 0.008168448448181152, 0.008135680198669434, 0.008099840164184571, 0.008014847755432129, 0.008209407806396484, 0.008148991584777832, 0.00819916820526123, 0.008255488395690918, 0.008286208152770995, 0.008129535675048828, 0.008140800476074218, 0.0081397762298584, 0.008138751983642578, 0.008297472000122071, 0.008203264236450195, 0.008134655952453614, 0.008274944305419921, 0.008238080024719239, 0.008150015830993652, 0.008124416351318359, 0.008136768341064454, 0.008249279975891113, 0.017087488174438475, 0.007910399913787843, 0.007920639991760254, 0.008250368118286134, 0.008164352416992187, 0.008233983993530274, 0.008186880111694337, 0.008132608413696289, 0.008143872261047362, 0.008156160354614257, 0.008376352310180664, 0.008221664428710938, 0.008168448448181152, 0.008158207893371582, 0.008151040077209473, 0.008147968292236327, 0.008152064323425292, 0.008318976402282715, 0.008307711601257324, 0.008219648361206054, 0.00800153636932373, 0.008033280372619628, 0.008261631965637208, 0.00818380832672119, 0.008162303924560547, 0.008141823768615723, 0.007873536109924317, 0.007918591976165772, 0.007895040035247802, 0.007880703926086426, 0.008042495727539062, 0.008142848014831543, 0.008126463890075684, 0.008128512382507324, 0.008168448448181152, 0.008210432052612305, 0.008141823768615723, 0.00820736026763916, 0.008134655952453614, 0.00812339210510254, 0.008116224288940429, 0.00813158416748047, 0.008230912208557128, 0.008254464149475099, 0.008152064323425292, 0.008153087615966797, 0.008138751983642578, 0.008230912208557128, 0.008318976402282715, 0.008128512382507324, 0.008023039817810058, 0.008267775535583496, 0.008164352416992187, 0.008136704444885253, 0.008127488136291505, 0.00820736026763916, 0.008120320320129394, 0.008137727737426758, 0.00810905647277832, 0.008242176055908204, 0.00839782428741455, 0.008219648361206054, 0.0081397762298584, 0.017508352279663086, 0.008938495635986327, 0.008742912292480469, 0.00971059226989746, 0.008355839729309082, 0.008208383560180664, 0.008152064323425292, 0.008168448448181152, 
0.008190976142883302, 0.008128512382507324, 0.008148991584777832, 0.00819200038909912, 0.008155136108398438, 0.008122367858886719, 0.008112128257751466, 0.00818892765045166, 0.008202239990234375, 0.008152064323425292, 0.008154111862182617, 0.00818995189666748, 0.008156160354614257, 0.008177663803100586, 0.008173567771911621, 0.008204287528991699, 0.008194047927856446, 0.008203264236450195, 0.008143872261047362, 0.00818995189666748, 0.008177663803100586, 0.00820633602142334, 0.008037376403808593, 0.00800972843170166, 0.008042495727539062, 0.008101887702941894, 0.008113151550292968, 0.008145919799804687, 0.008156160354614257, 0.008130559921264649, 0.0081397762298584, 0.008161279678344726, 0.008170495986938477, 0.008151040077209473, 0.008163328170776368, 0.008194047927856446, 0.008196096420288086, 0.008153087615966797, 0.008142848014831543, 0.008136704444885253, 0.008156160354614257, 0.008150015830993652, 0.008145919799804687, 0.008147968292236327, 0.008154111862182617, 0.008148991584777832, 0.008158207893371582, 0.00813158416748047, 0.008259584426879883, 0.008144895553588867, 0.008218624114990235, 0.00830463981628418, 0.008120320320129394, 0.008150015830993652, 0.008175616264343261, 0.017464319229125978, 0.008178688049316407, 0.008190976142883302, 0.0081725435256958, 0.008135680198669434, 0.008138751983642578, 0.0081397762298584, 0.008038399696350097, 0.007894015789031983, 0.007888895988464355, 0.007879680156707763, 0.00788479995727539, 0.007920639991760254, 0.008181759834289551, 0.008130559921264649, 0.008185855865478516, 0.008140800476074218, 0.008155136108398438, 0.00813158416748047, 0.008122367858886719, 0.007890944004058837, 0.007875584125518798, 0.007867392063140868, 0.00787660789489746, 0.007881728172302246, 0.008043519973754883, 0.008003583908081055, 0.00809062385559082, 0.008116224288940429, 0.008218624114990235, 0.008130559921264649, 0.008170495986938477, 0.00818995189666748, 0.008052736282348634, 0.007889920234680176, 0.007963647842407226, 0.00790015983581543, 0.007899136066436767, 0.007959551811218261, 0.00790015983581543, 0.007911424160003662, 0.007858176231384278, 0.0078919677734375, 0.007912447929382324, 0.007858208179473877, 0.007885791778564454, 0.00790118408203125, 0.0079267840385437, 0.007885824203491211, 0.007836671829223632, 0.00790841579437256, 0.00794924783706665, 0.007862271785736084, 0.007860223770141601, 0.007871488094329833, 0.007906303882598878, 0.007912447929382324, 0.007873536109924317, 0.007865344047546387, 0.007861248016357422, 0.007913472175598145, 0.007860223770141601, 0.007880703926086426, 0.01722265625, 0.008181759834289551, 0.008146944046020508, 0.008179712295532226, 0.008160256385803222, 0.008201215744018555, 0.008342592239379883, 0.008229824066162109, 0.008120320320129394, 0.008130559921264649, 0.008120320320129394, 0.00821555233001709, 0.008196096420288086, 0.008161279678344726, 0.008143872261047362, 0.008142848014831543, 0.008159232139587403, 0.008179712295532226, 0.008135680198669434, 0.008193023681640625, 0.008152064323425292, 0.008148991584777832, 0.008143872261047362, 0.00818380832672119, 0.008122367858886719, 0.008177663803100586, 0.00818995189666748, 0.008096768379211425, 0.008077312469482421, 0.008087552070617676, 0.008060928344726562, 0.007846975803375244, 0.007794623851776123, 0.007839744091033935, 0.007877632141113282, 0.007870463848114014, 0.00796569585800171, 0.00821555233001709, 0.008161279678344726, 0.008137727737426758, 0.0081397762298584, 0.008158207893371582, 0.008185855865478516, 0.008094719886779785, 0.008176639556884765, 
0.008166399955749512, 0.008152064323425292, 0.008129535675048828, 0.008145919799804687, 0.008144895553588867, 0.008102911949157715, 0.008136704444885253, 0.008160256385803222, 0.008142848014831543, 0.008163328170776368, 0.008125503540039063, 0.00813152027130127, 0.008148991584777832, 0.00818995189666748, 0.008140800476074218, 0.008144895553588867, 0.008143872261047362, 0.008157183647155761, 0.017770496368408203, 0.008144895553588867, 0.008167424201965333, 0.008121343612670898, 0.008127488136291505, 0.008167424201965333, 0.008127488136291505, 0.00813366413116455, 0.008134655952453614, 0.008162272453308106, 0.008150015830993652, 0.008126463890075684, 0.00828006362915039, 0.00813158416748047, 0.00809267234802246, 0.00790118408203125, 0.00790015983581543, 0.007904255867004394, 0.007929855823516846, 0.007919616222381591, 0.007922688007354736, 0.007886847972869874, 0.007990272045135497, 0.007903232097625732, 0.00789299201965332, 0.008048640251159669, 0.007890944004058837, 0.007895040035247802, 0.007911424160003662, 0.007920639991760254, 0.007906303882598878, 0.007864319801330566, 0.007913472175598145, 0.00788479995727539, 0.007877632141113282, 0.007867392063140868, 0.00788479995727539, 0.007886847972869874, 0.00790118408203125, 0.00790118408203125, 0.00790937614440918, 0.007898111820220948, 0.007895040035247802, 0.007885824203491211, 0.007911424160003662, 0.007966720104217529, 0.007956480026245117, 0.007877632141113282, 0.007889920234680176, 0.007912447929382324, 0.007882751941680909, 0.007911424160003662, 0.007864319801330566, 0.007896063804626464, 0.007914527893066407, 0.007879648208618164, 0.00789299201965332, 0.007902207851409913, 0.007905280113220215, 0.007902207851409913, 0.007896063804626464, 0.007899136066436767, 0.00789299201965332, 0.016945152282714843, 0.007976960182189942, 0.007987199783325195, 0.007897088050842285, 0.007890944004058837, 0.007878655910491944, 0.007885824203491211, 0.008117247581481933, 0.008261631965637208, 0.008130592346191406, 0.008142815589904786, 0.00818073558807373, 0.008161279678344726, 0.008145919799804687, 0.008124447822570801, 0.0082390718460083, 0.008175616264343261, 0.008152064323425292, 0.008142848014831543, 0.008135680198669434, 0.008121343612670898, 0.008034303665161132, 0.008308735847473145, 0.008354816436767578, 0.008210432052612305, 0.00818892765045166, 0.00840499210357666, 0.008446975708007813, 0.008169471740722656, 0.00808448028564453, 0.007904255867004394, 0.007879680156707763, 0.007803904056549072, 0.007822336196899414, 0.007821311950683594, 0.007830527782440186, 0.00781824016571045, 0.00783564805984497, 0.007887872219085693, 0.00790937614440918, 0.007906303882598878, 0.007878655910491944, 0.007903232097625732, 0.00790118408203125, 0.007885824203491211, 0.007912447929382324, 0.007875584125518798, 0.007875584125518798, 0.00785920000076294, 0.007907328128814697, 0.007918591976165772, 0.007895040035247802, 0.007977983951568603, 0.007904255867004394, 0.007905280113220215, 0.007886911869049072, 0.007864255905151368, 0.00790118408203125, 0.007961599826812745, 0.007873536109924317, 0.007895040035247802, 0.007902207851409913, 0.007896063804626464, 0.017140735626220704, 0.007910399913787843, 0.007895040035247802, 0.007890944004058837, 0.007916543960571289, 0.007902207851409913, 0.00790937614440918, 0.007895040035247802, 0.007900191783905029, 0.007898079872131347, 0.007911424160003662, 0.007941120147705078, 0.007872511863708496, 0.007905280113220215, 0.007915520191192627, 0.00790835189819336, 0.007896063804626464, 0.007886847972869874, 
0.007879744052886963, 0.007878592014312744, 0.007881728172302246, 0.007902207851409913, 0.00787660789489746, 0.007873536109924317, 0.007888895988464355, 0.00790835189819336, 0.007879680156707763, 0.00790118408203125, 0.007902207851409913, 0.00790015983581543, 0.007922688007354736, 0.007886847972869874, 0.007983104228973388, 0.007858176231384278, 0.0077916159629821775, 0.007862271785736084, 0.00781824016571045, 0.007829504013061523, 0.0078438401222229, 0.007886847972869874, 0.00790835189819336, 0.007917600154876709, 0.007915487766265869, 0.007886847972869874, 0.007902207851409913, 0.008055808067321778, 0.008141823768615723, 0.008117247581481933, 0.008110079765319824, 0.008225791931152344, 0.007896063804626464, 0.007907328128814697, 0.007890944004058837, 0.007872511863708496, 0.0078919677734375, 0.007872511863708496, 0.007890944004058837, 0.007888895988464355, 0.00785920000076294, 0.007879680156707763, 0.007883776187896728, 0.007872511863708496, 0.007987199783325195, 0.01689091110229492, 0.007906271934509277, 0.007910399913787843, 0.007885824203491211, 0.007910399913787843, 0.008279040336608886, 0.008184831619262695, 0.008133631706237793, 0.008155136108398438, 0.008173567771911621, 0.008164352416992187, 0.008150015830993652, 0.008140864372253418, 0.008307647705078126, 0.008159232139587403, 0.008154111862182617, 0.00818380832672119, 0.008154111862182617, 0.008138751983642578, 0.008128512382507324, 0.008130559921264649, 0.008145919799804687, 0.008111104011535645, 0.008121343612670898, 0.008159328460693359, 0.008150943756103516, 0.008162303924560547, 0.008224767684936523, 0.008142848014831543, 0.008118271827697754, 0.008174592018127442, 0.008132608413696289, 0.008121343612670898, 0.008136704444885253, 0.008122367858886719, 0.008136735916137696, 0.008140768051147461, 0.008134655952453614, 0.008135680198669434, 0.008136704444885253, 0.008128512382507324, 0.008117247581481933, 0.007903232097625732, 0.007888895988464355, 0.007879680156707763, 0.007862271785736084, 0.007904255867004394, 0.007907328128814697, 0.007888895988464355, 0.007863296031951903, 0.00788479995727539, 0.007880703926086426, 0.007897088050842285, 0.00787660789489746, 0.007877632141113282, 0.007864319801330566, 0.007898111820220948, 0.007869440078735352, 0.008171520233154296, 0.007862271785736084, 0.007905280113220215, 0.00789299201965332, 0.007885824203491211, 0.017105920791625977, 0.007873536109924317, 0.007889920234680176, 0.007930880069732665, 0.00790015983581543, 0.007897088050842285, 0.0078919677734375, 0.007902207851409913, 0.007905280113220215, 0.007921664237976075, 0.007916543960571289, 0.00790015983581543, 0.0078919677734375, 0.007914495944976807, 0.007897088050842285, 0.007927807807922363, 0.007922688007354736, 0.007899136066436767, 0.007882751941680909, 0.007895040035247802, 0.007905280113220215, 0.007902207851409913, 0.007878655910491944, 0.007896063804626464, 0.007888895988464355, 0.007855103969573975, 0.007883776187896728, 0.007883776187896728, 0.007855103969573975, 0.007872511863708496, 0.007853055953979492, 0.007902207851409913, 0.007882751941680909, 0.007883776187896728, 0.007874559879302979, 0.007872511863708496, 0.007898111820220948, 0.007904255867004394, 0.008046591758728027, 0.007898111820220948, 0.007877632141113282, 0.007856192111968995, 0.007872447967529297, 0.00787660789489746, 0.007890944004058837, 0.007873536109924317, 0.007888927936553955, 0.007879648208618164, 0.007868415832519531, 0.007879680156707763, 0.0078919677734375, 0.007882751941680909, 0.007896063804626464, 0.007927807807922363, 
0.008242176055908204, 0.008209440231323242, 0.008128479957580566, 0.008130559921264649, 0.008132608413696289, 0.008170495986938477, 0.008165375709533691, 0.008122367858886719, 0.008122367858886719, 0.017102848052978514, 0.007897088050842285, 0.008235008239746093, 0.008123456001281738, 0.00811308765411377, 0.008124416351318359, 0.008163328170776368, 0.008134655952453614, 0.008124416351318359, 0.007883776187896728, 0.007894015789031983, 0.00789299201965332, 0.007903232097625732, 0.007879680156707763, 0.007888895988464355, 0.007906303882598878, 0.007946239948272706, 0.007930880069732665, 0.007925759792327881, 0.00787052822113037, 0.007870399951934814, 0.007874559879302979, 0.007879680156707763, 0.00790118408203125, 0.007874559879302979, 0.007903232097625732, 0.007858176231384278, 0.007882751941680909, 0.00785920000076294, 0.007887872219085693, 0.007886847972869874, 0.007887872219085693, 0.007857151985168457, 0.007988224029541016, 0.007899136066436767, 0.007855103969573975, 0.007875584125518798, 0.007875584125518798, 0.007878655910491944, 0.007942143917083741, 0.007910399913787843, 0.007874591827392578, 0.007892000198364257, 0.007928768157958984, 0.007923711776733398, 0.007873536109924317, 0.007880703926086426, 0.007872511863708496, 0.007953407764434815, 0.007883776187896728, 0.007881728172302246, 0.007890944004058837, 0.007899136066436767, 0.007879680156707763, 0.007898111820220948, 0.007873568058013916, 0.007885791778564454, 0.007894015789031983, 0.00795750379562378, 0.00790835189819336, 0.007937024116516114, 0.007947264194488525, 0.00787667179107666]",tokens/s,122.84148575218228,,,2,64,1,,, -4bit-awq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,Qwen/Qwen1.5-4B,Qwen/Qwen1.5-4B,cuda,0,42,,,True,,,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,qwen2,MB,3172.175872,4874.305536,0.0,4244.635648,4125.520384,s,1,10.5434638671875,10.5434638671875,0.0,10.5434638671875,10.5434638671875,10.5434638671875,10.5434638671875,[10.5434638671875],,kWh,4.399285863609698e-05,2.4069950144039357e-05,7.155589057794565e-05,0.000139618699358082,,MB,3228.495872,5052.563456,0.0,4404.0192,4310.79936,s,10,0.6262004165649414,0.06262004165649414,6.818142333567537e-05,0.06261390304565428,0.06270176773071288,0.06270755615234375,0.06271218688964844,"[0.0626957778930664, 0.06256307220458984, 0.06252703857421875, 0.06257932662963867, 0.06255065536499023, 0.06266697692871094, 0.06271334457397461, 0.06270048141479492, 0.06255526351928711, 0.06264847946166992]",tokens/s,4088.1480310138218,kWh,7.407208955815432e-07,4.057045997673698e-07,3.163412252950304e-06,4.309837748299217e-06,tokens/kWh,59398987.83916513,MB,3232.821248,5056.75776,0.0,4406.116352,4310.80192,s,10,22.735632080078123,2.273563208007812,0.02716169792340399,2.2645362548828123,2.305752783203125,2.320775927734375,2.3327944433593752,"[2.2768828125, 2.24996240234375, 2.25773486328125, 2.302414306640625, 2.289696044921875, 2.335799072265625, 2.271337646484375, 2.248560302734375, 2.2513369140625, 
2.25190771484375]",tokens/s,27.70980801330047,kWh,2.6669330425598824e-05,1.4615757504066243e-05,5.274957692184972e-05,9.40346648515148e-05,tokens/kWh,669965.6993458741,,s,630,22.733485092163093,0.03608489697168744,0.0008023194011336012,0.03562649536132813,0.0371559440612793,0.037353419494628906,0.03820140590667725,"[0.03721830368041992, 0.037133312225341795, 0.036830303192138675, 0.03677993774414062, 0.03646464157104492, 0.03532799911499023, 0.035451904296875, 0.03582361602783203, 0.034947071075439456, 0.036539390563964845, 0.03672063827514648, 0.03695820617675781, 0.03703398513793945, 0.03571507263183594, 0.03658342361450195, 0.03696844863891602, 0.03666636657714844, 0.0368721923828125, 0.03549593734741211, 0.03561369705200195, 0.03594854354858398, 0.03551641464233399, 0.03549491119384766, 0.03663564682006836, 0.03695206451416016, 0.03696640014648438, 0.03568947219848633, 0.037028865814208986, 0.036951038360595705, 0.036931583404541016, 0.03697971343994141, 0.035542015075683595, 0.035631103515625, 0.03552460861206055, 0.03540377426147461, 0.03591884613037109, 0.038437889099121096, 0.03701145553588867, 0.03709235382080078, 0.03712102508544922, 0.035917823791503906, 0.035342334747314456, 0.03575603103637695, 0.03546623992919922, 0.0353966064453125, 0.03556966400146484, 0.0356577262878418, 0.03557785415649414, 0.035850238800048825, 0.03544780731201172, 0.03554099273681641, 0.03527679824829102, 0.035604480743408204, 0.03552870559692383, 0.03549388885498047, 0.03614720153808594, 0.036999168395996096, 0.036838401794433595, 0.03539251327514648, 0.035574783325195314, 0.035504127502441404, 0.0353966064453125, 0.03548364639282227, 0.03522048187255859, 0.0353259506225586, 0.035345409393310545, 0.03544678497314453, 0.03807436752319336, 0.03730022430419922, 0.03546931076049804, 0.035386367797851564, 0.035501056671142575, 0.03550515365600586, 0.035576831817626955, 0.03545702362060547, 0.035748863220214845, 0.03544780731201172, 0.03546828842163086, 0.0355860481262207, 0.035520511627197264, 0.035776512145996094, 0.03572633743286133, 0.035514366149902346, 0.03544780731201172, 0.03546214294433594, 0.03539763259887695, 0.03527372741699219, 0.03540991973876953, 0.03525836944580078, 0.03557273483276367, 0.03551232147216797, 0.035335166931152344, 0.03510784149169922, 0.035571712493896485, 0.03562700653076172, 0.0353361930847168, 0.03553996658325195, 0.03547340774536133, 0.035043327331542966, 0.03522252655029297, 0.035419136047363284, 0.03549184036254883, 0.03559731292724609, 0.035798015594482424, 0.03522662353515625, 0.03679129409790039, 0.03647590255737305, 0.0369172477722168, 0.036160511016845705, 0.037220352172851565, 0.03665203094482422, 0.03562188720703125, 0.0355563850402832, 0.035475425720214844, 0.03558915328979492, 0.035701728820800784, 0.03540889739990234, 0.03685478210449219, 0.037064704895019535, 0.035520511627197264, 0.03546316909790039, 0.03561881637573242, 0.03554099273681641, 0.035448833465576174, 0.035560447692871096, 0.03560243225097656, 0.03544473648071289, 0.03542118453979492, 0.03544166564941406, 0.03593318557739258, 0.035334144592285156, 0.035460094451904296, 0.03548876953125, 0.03557580947875977, 0.03556966400146484, 0.03562803268432617, 0.035527679443359376, 0.03618099212646484, 0.03608473587036133, 0.03536281585693359, 0.03546419143676758, 0.03559628677368164, 0.03542118453979492, 0.03550207901000976, 0.035490814208984374, 0.03559628677368164, 0.03528704071044922, 0.03550207901000976, 0.03538534545898438, 0.035625984191894534, 0.0353966064453125, 0.03654246520996094, 
0.03685171127319336, 0.03664588928222656, 0.036967422485351564, 0.03530137634277344, 0.035757057189941405, 0.03554816055297851, 0.035214336395263675, 0.0369172477722168, 0.03546316909790039, 0.035547134399414065, 0.03530137634277344, 0.035538944244384765, 0.03548876953125, 0.035547134399414065, 0.03544985580444336, 0.03525734329223633, 0.03553177642822265, 0.03544473648071289, 0.03526553726196289, 0.03537100982666016, 0.03525734329223633, 0.038422527313232424, 0.037288959503173826, 0.036999168395996096, 0.036752384185791014, 0.03548364639282227, 0.03563827133178711, 0.03549184036254883, 0.03544473648071289, 0.03544678497314453, 0.03606118392944336, 0.03692544174194336, 0.03685887908935547, 0.035784702301025394, 0.036310016632080076, 0.036890625, 0.036805633544921876, 0.03748556900024414, 0.036997119903564454, 0.03710464096069336, 0.035507198333740234, 0.035490848541259765, 0.035581920623779295, 0.035576831817626955, 0.035678207397460936, 0.035535873413085936, 0.03553484725952148, 0.035448833465576174, 0.0353546257019043, 0.03521535873413086, 0.03542937469482422, 0.03585331344604492, 0.03632537460327148, 0.03689984130859375, 0.03691929626464844, 0.03537408065795898, 0.036536319732666016, 0.037087230682373046, 0.036942848205566405, 0.037425151824951174, 0.03565363311767578, 0.03704729461669922, 0.035334144592285156, 0.03552870559692383, 0.037302272796630856, 0.03723161697387695, 0.03702374267578125, 0.03629568099975586, 0.035591167449951173, 0.03684659194946289, 0.037026817321777344, 0.03909632110595703, 0.03877478408813476, 0.03705142211914063, 0.03697558212280273, 0.03731455993652344, 0.03732992172241211, 0.037108734130859376, 0.03620044708251953, 0.037108734130859376, 0.036961280822753906, 0.036959232330322264, 0.03653529739379883, 0.03570278549194336, 0.03748556900024414, 0.03751116943359375, 0.0375623664855957, 0.03702579116821289, 0.0370780143737793, 0.03681075286865235, 0.03566284942626953, 0.0356126708984375, 0.03580416107177734, 0.0366295051574707, 0.03693875122070313, 0.03694899368286133, 0.036803585052490234, 0.03691929626464844, 0.036999168395996096, 0.035122177124023435, 0.03689779281616211, 0.03810611343383789, 0.03742822265625, 0.03694899368286133, 0.036083713531494144, 0.03546726226806641, 0.035517440795898435, 0.03537408065795898, 0.03528499221801758, 0.03650867080688477, 0.03694182586669922, 0.037195777893066405, 0.03704012680053711, 0.0354068489074707, 0.035372032165527346, 0.035544063568115236, 0.035520511627197264, 0.03582361602783203, 0.03569049453735352, 0.03514265441894531, 0.03543247985839844, 0.0354662094116211, 0.03525529479980469, 0.035912704467773435, 0.03715584182739258, 0.03547340774536133, 0.037013504028320314, 0.036915199279785156, 0.036465663909912106, 0.036395008087158204, 0.03534438323974609, 0.03584204864501953, 0.03754598236083984, 0.037028865814208986, 0.035490814208984374, 0.036195327758789066, 0.03674009704589844, 0.03713433456420898, 0.036934654235839845, 0.03741593551635742, 0.03543040084838867, 0.03701657485961914, 0.036928512573242187, 0.036890625, 0.03699507141113281, 0.037375999450683595, 0.035388416290283206, 0.03570073699951172, 0.03516416168212891, 0.03540070343017578, 0.035342334747314456, 0.03546316909790039, 0.03537715148925781, 0.03538739013671875, 0.0377446403503418, 0.03813888168334961, 0.03711897659301758, 0.03711180877685547, 0.0367646713256836, 0.037176319122314457, 0.0373125114440918, 0.037085182189941404, 0.036732929229736325, 0.0374015998840332, 0.037005313873291014, 0.03733708953857422, 0.036375553131103515, 0.037108734130859376, 
0.037147647857666014, 0.036805633544921876, 0.03735756683349609, 0.0374128646850586, 0.03709132766723633, 0.03662745666503906, 0.03708927917480469, 0.03703091049194336, 0.03693772888183594, 0.03683942413330078, 0.03732582473754883, 0.03705344009399414, 0.037187583923339845, 0.036590591430664066, 0.03695206451416016, 0.03822694396972656, 0.037651454925537106, 0.03722137451171875, 0.03668377685546875, 0.03713228988647461, 0.037168128967285156, 0.03707596969604492, 0.03715686416625977, 0.03764223861694336, 0.03718963241577149, 0.03710259246826172, 0.036967422485351564, 0.03640627288818359, 0.03662847900390625, 0.03747840118408203, 0.037623809814453124, 0.03696230316162109, 0.037236736297607424, 0.03666636657714844, 0.03669311904907226, 0.03618905639648438, 0.036466686248779294, 0.03697151947021484, 0.037070846557617186, 0.03712102508544922, 0.03722649765014648, 0.037174270629882815, 0.03653222274780273, 0.03652608108520508, 0.037166080474853515, 0.03712409591674805, 0.037269504547119144, 0.03729817581176758, 0.037064704895019535, 0.037220352172851565, 0.03724492645263672, 0.03715071868896484, 0.03713945770263672, 0.0370513916015625, 0.03635507202148437, 0.03701760101318359, 0.03704012680053711, 0.03855769729614258, 0.03559936141967773, 0.035372032165527346, 0.0354856948852539, 0.0355860481262207, 0.03551334381103516, 0.03515596771240234, 0.03525632095336914, 0.03736166381835938, 0.0368353271484375, 0.03672576141357422, 0.036819969177246094, 0.036811775207519534, 0.03645337677001953, 0.03689984130859375, 0.03687936019897461, 0.03564441680908203, 0.03644518280029297, 0.036982784271240236, 0.03559423828125, 0.035675136566162106, 0.03561983871459961, 0.03556249618530274, 0.03551129531860352, 0.03545804977416992, 0.03566899108886719, 0.03562496185302735, 0.03681689453125, 0.037256191253662106, 0.037154815673828126, 0.03667865753173828, 0.036822017669677735, 0.036734977722167966, 0.035388416290283206, 0.035432449340820314, 0.03540172958374024, 0.035571712493896485, 0.03563008117675781, 0.03531161499023437, 0.03540377426147461, 0.03539865493774414, 0.03543552017211914, 0.03512831878662109, 0.03550003051757813, 0.03549184036254883, 0.03671039962768555, 0.037012481689453126, 0.036749313354492184, 0.035661823272705076, 0.035299327850341795, 0.035988479614257815, 0.036939777374267575, 0.03684044647216797, 0.03704627227783203, 0.036819969177246094, 0.03553177642822265, 0.035692543029785154, 0.035659774780273434, 0.035576831817626955, 0.035590145111083986, 0.03565260696411133, 0.03561164855957031, 0.035899391174316404, 0.03771084976196289, 0.03563417434692383, 0.035542015075683595, 0.03648614501953125, 0.03683635330200195, 0.03647999954223633, 0.03708313751220703, 0.03689164733886719, 0.036471839904785155, 0.03532080078125, 0.035879936218261715, 0.03552972793579102, 0.03547443389892578, 0.035533824920654294, 0.0355676155090332, 0.0354703369140625, 0.03600896072387695, 0.035448833465576174, 0.03550822448730469, 0.035588096618652344, 0.0354785270690918, 0.03540889739990234, 0.03546419143676758, 0.035544063568115236, 0.03704012680053711, 0.03550310516357422, 0.035533824920654294, 0.03540991973876953, 0.03636019134521484, 0.03683430480957031, 0.03660291290283203, 0.034979808807373045, 0.03546214294433594, 0.036506622314453126, 0.03556249618530274, 0.03553792190551758, 0.03535871887207031, 0.03546316909790039, 0.035896320343017575, 0.035369983673095705, 0.035460094451904296, 0.03509657669067383, 0.03533926391601563, 0.03538739013671875, 0.03562496185302735, 0.035490814208984374, 0.035465217590332034, 
0.03547750473022461, 0.03560345458984375, 0.0354785270690918, 0.03507097625732422, 0.03544166564941406, 0.03540172958374024, 0.03555430221557617, 0.03534646224975586, 0.035378143310546874, 0.03563008117675781, 0.035487743377685545, 0.03542835235595703, 0.035369983673095705, 0.03518054580688477, 0.03563520050048828, 0.03541196823120117, 0.03551129531860352, 0.03570892715454101, 0.03581644821166992, 0.03533926391601563, 0.03526144027709961, 0.03542835235595703, 0.03538739013671875, 0.03662031936645508, 0.03663459014892578, 0.03528396987915039, 0.03616153717041016, 0.036982784271240236, 0.03548876953125, 0.037397502899169925, 0.03695820617675781, 0.03689267349243164, 0.03725414276123047, 0.035757057189941405, 0.035730430603027344, 0.03574272155761719, 0.03782656097412109, 0.03730739212036133, 0.035922943115234376, 0.03520409774780273, 0.035356670379638674, 0.03518259048461914, 0.03557580947875977, 0.03546726226806641, 0.035535873413085936, 0.03587583923339844, 0.03555327987670898, 0.03563520050048828, 0.03559526443481445, 0.03530752182006836, 0.035209217071533204, 0.03563724899291992, 0.03555635070800781, 0.0356126708984375, 0.03554611206054688, 0.03556147384643555, 0.03566387176513672, 0.03540582275390625, 0.03554816055297851, 0.03557785415649414, 0.0354856948852539, 0.03546316909790039, 0.03546931076049804, 0.03558911895751953, 0.035579902648925785, 0.03506073760986328, 0.035310592651367184, 0.035250175476074216, 0.03551232147216797, 0.035517440795898435, 0.03518771362304687, 0.03545804977416992, 0.03557785415649414, 0.03589427185058594, 0.03544166564941406, 0.03528396987915039, 0.03500236892700195, 0.0354856948852539, 0.0353546257019043, 0.035620864868164064, 0.035552257537841796, 0.035402751922607424, 0.03550310516357422, 0.03579497528076172, 0.036813793182373045, 0.03679334259033203, 0.03548672103881836, 0.038470657348632815, 0.037348350524902346, 0.036896766662597655, 0.036395008087158204, 0.03631206512451172, 0.03544985580444336, 0.03668073654174805, 0.0352470703125, 0.03557580947875977, 0.0354150390625, 0.0353966064453125, 0.035410945892333984, 0.03576115036010742, 0.035286014556884765, 0.035302398681640625, 0.03532799911499023, 0.03541401672363281, 0.03500236892700195, 0.03530035018920898, 0.03543756866455078, 0.035326976776123044, 0.03539046478271484, 0.035434497833251956, 0.03589120101928711, 0.03546419143676758, 0.03524198532104492, 0.035492862701416016, 0.03540172958374024, 0.034991104125976565, 0.035504127502441404, 0.03543654251098633, 0.03561164855957031, 0.03546112060546875, 0.03544473648071289, 0.03544473648071289, 0.035133438110351564, 0.035092479705810545, 0.03491328048706055, 0.03510988616943359, 0.03533824157714844, 0.03558399963378906, 0.0358809585571289, 0.03503923034667969, 0.03551334381103516, 0.03561676788330078, 0.03564441680908203, 0.0354252815246582, 0.03557068634033203, 0.035659809112548825, 0.035600353240966796, 0.03555942535400391, 0.03561676788330078, 0.037920768737792966, 0.037256191253662106, 0.03695718383789062, 0.036959232330322264]",tokens/s,27.712424973379022,,,2,64,1,,, -4bit-awq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,Qwen/Qwen1.5-0.5B,Qwen/Qwen1.5-0.5B,cuda,0,42,,,True,,,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 
7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,qwen2,MB,1396.957184,1382.547456,0.0,752.877568,710.554112,s,1,8.0405,8.0405,0.0,8.0405,8.0405,8.0405,8.0405,[8.0405],,kWh,1.4595303509719947e-05,7.983026508601335e-06,2.2439184618083807e-05,4.501751463640509e-05,,MB,1674.645504,1667.760128,0.0,1019.215872,949.099008,s,10,0.26860819053649904,0.026860819053649905,0.000620844668490922,0.026610367774963378,0.02762796745300293,0.02806702404022217,0.02841826930999756,"[0.026616224288940428, 0.027530399322509766, 0.02654323196411133, 0.026627552032470702, 0.026663295745849608, 0.026522367477416993, 0.026480640411376953, 0.026604511260986327, 0.028506080627441407, 0.02651388740539551]",tokens/s,9530.610346940042,kWh,3.1751197976033565e-07,1.739803472516945e-07,7.733317118550267e-07,1.264824038867057e-06,tokens/kWh,202399695.24085528,MB,1716.6336,1678.245888,0.0,1027.60448,949.101568,s,10,13.55119873046875,1.355119873046875,0.01089726979812518,1.3531354980468748,1.3695959594726563,1.3710599426269532,1.3722311291503906,"[1.3680068359375, 1.3567930908203125, 1.351474853515625, 1.354796142578125, 1.341810546875, 1.34539208984375, 1.34067919921875, 1.350451416015625, 1.37252392578125, 1.3692706298828126]",tokens/s,46.49035207368755,kWh,1.607957432544739e-05,8.811457984009892e-06,2.6656745787746663e-05,5.154777809720394e-05,tokens/kWh,1222167.1297102377,,s,630,13.5492167663574,0.021506693279932414,0.0004128071999748629,0.0212992000579834,0.022094029235839845,0.022190657711029054,0.022612838115692143,"[0.021287935256958008, 0.021582847595214845, 0.021441535949707033, 0.02106265640258789, 0.02129100799560547, 0.021336063385009766, 0.02125619125366211, 0.02123673629760742, 0.02132275199890137, 0.021312511444091797, 0.02124083137512207, 0.02128895950317383, 0.021947391510009767, 0.022007808685302735, 0.022037504196166992, 0.02170572853088379, 0.02209587287902832, 0.02244095993041992, 0.022352895736694335, 0.02212761688232422, 0.02205695915222168, 0.021603328704833984, 0.022112255096435548, 0.02209382438659668, 0.02220953559875488, 0.02210918426513672, 0.021963775634765623, 0.022072320938110353, 0.022124544143676757, 0.022185983657836913, 0.02209689521789551, 0.022076416015625, 0.022037504196166992, 0.021993471145629884, 0.022114303588867186, 0.022007808685302735, 0.02211020851135254, 0.02191974449157715, 0.02206617546081543, 0.021598207473754884, 0.021209087371826172, 0.021285888671875, 0.021223424911499023, 0.021355520248413085, 0.021090303421020508, 0.021361663818359376, 0.022385663986206054, 0.022404096603393556, 0.022009855270385743, 0.021533695220947266, 0.021390335083007812, 0.021355520248413085, 0.02104012870788574, 0.020916223526000977, 0.02122444725036621, 0.02124185562133789, 0.021195775985717775, 0.021118976593017577, 0.021963775634765623, 0.02192076873779297, 0.02187980842590332, 0.021781503677368166, 0.022012928009033202, 0.02230886459350586, 0.022184959411621095, 0.021926912307739257, 0.021993471145629884, 0.022017023086547852, 0.021969919204711915, 0.021626880645751953, 0.022008832931518556, 0.02186649513244629, 0.021932031631469725, 0.0220446720123291, 0.021800960540771484, 0.021582847595214845, 0.02191564750671387, 0.021950464248657226, 0.021932031631469725, 0.02211123275756836, 0.021908479690551756, 0.021934080123901366, 0.02187059211730957, 0.022022144317626953, 0.021948415756225585, 0.021992448806762696, 0.02201190376281738, 0.021785600662231445, 
0.021615615844726564, 0.02105241584777832, 0.02125823974609375, 0.021161983489990235, 0.021179391860961915, 0.021366783142089844, 0.021123071670532227, 0.02125312042236328, 0.0212541446685791, 0.021037055969238282, 0.021128192901611328, 0.021283840179443358, 0.02124595260620117, 0.021255168914794922, 0.021282848358154298, 0.021161951065063477, 0.02125209617614746, 0.02122035217285156, 0.020998144149780275, 0.021019647598266602, 0.021191680908203125, 0.021157888412475585, 0.02128691291809082, 0.02172211265563965, 0.021763071060180664, 0.021194751739501954, 0.021392383575439454, 0.021218303680419923, 0.02123776054382324, 0.021132287979125978, 0.021123071670532227, 0.021198848724365234, 0.021378047943115236, 0.021485567092895508, 0.021182464599609374, 0.021151744842529296, 0.021146623611450196, 0.021819391250610352, 0.02125721549987793, 0.022311935424804686, 0.02185625648498535, 0.02145996856689453, 0.02125721549987793, 0.02111692810058594, 0.021168127059936523, 0.021563392639160156, 0.021432319641113282, 0.021350400924682617, 0.02108723258972168, 0.02119987106323242, 0.02251468849182129, 0.022169599533081053, 0.021768192291259765, 0.021226495742797852, 0.021465087890625, 0.021929983139038087, 0.021570560455322265, 0.021223424911499023, 0.021202943801879884, 0.021552127838134767, 0.021533695220947266, 0.02124083137512207, 0.021315584182739256, 0.021348352432250976, 0.021518335342407227, 0.02127462387084961, 0.02123776054382324, 0.02122035217285156, 0.02185318374633789, 0.02221772766113281, 0.021758975982666014, 0.022055936813354493, 0.02125312042236328, 0.021932031631469725, 0.021779455184936524, 0.02149068832397461, 0.021744640350341796, 0.022246400833129884, 0.021795839309692384, 0.021015552520751952, 0.021223424911499023, 0.021218303680419923, 0.021385215759277345, 0.02184499168395996, 0.021219327926635743, 0.021207040786743164, 0.02106675148010254, 0.0214517765045166, 0.021506048202514647, 0.02090188789367676, 0.02086297607421875, 0.020930559158325195, 0.02109337615966797, 0.021194751739501954, 0.021331968307495116, 0.02126643180847168, 0.021285888671875, 0.021190656661987304, 0.02104217529296875, 0.02126233673095703, 0.02128691291809082, 0.021572608947753907, 0.02127462387084961, 0.021215232849121093, 0.02120806312561035, 0.02184601593017578, 0.02146406364440918, 0.021222400665283202, 0.021498880386352538, 0.021287935256958008, 0.021908479690551756, 0.02141798400878906, 0.021301248550415038, 0.02128998374938965, 0.02167398452758789, 0.02169753646850586, 0.021595136642456055, 0.021901311874389647, 0.02166169548034668, 0.021590015411376954, 0.021415935516357423, 0.02125619125366211, 0.02126438331604004, 0.022032384872436524, 0.02208051109313965, 0.021721088409423828, 0.02184499168395996, 0.02151628875732422, 0.021989376068115234, 0.021597183227539063, 0.021702655792236326, 0.02106368064880371, 0.02119987106323242, 0.021839872360229492, 0.021362688064575194, 0.02120806312561035, 0.02123776054382324, 0.0215285758972168, 0.021586944580078125, 0.021608448028564452, 0.02172313690185547, 0.02126233673095703, 0.022366207122802736, 0.02184601593017578, 0.021251071929931642, 0.021203968048095705, 0.021215232849121093, 0.021777408599853516, 0.02124492835998535, 0.021234687805175782, 0.02124595260620117, 0.02151219177246094, 0.021943296432495117, 0.02125619125366211, 0.021549055099487305, 0.02163711929321289, 0.021956607818603514, 0.021315584182739256, 0.021226495742797852, 0.02125823974609375, 0.021251071929931642, 0.021238784790039062, 0.021212160110473634, 0.021207040786743164, 
0.021817344665527344, 0.02123776054382324, 0.02127667236328125, 0.02123263931274414, 0.021998592376708984, 0.021361663818359376, 0.021130239486694336, 0.021142528533935546, 0.021171199798583985, 0.021161983489990235, 0.02119987106323242, 0.021180416107177736, 0.021151744842529296, 0.021008384704589843, 0.021210111618041993, 0.021202943801879884, 0.021243904113769533, 0.021181440353393553, 0.021140480041503908, 0.021328895568847657, 0.022009855270385743, 0.022063104629516602, 0.021791744232177734, 0.021304319381713867, 0.021367807388305664, 0.021142528533935546, 0.021201919555664063, 0.021326847076416015, 0.021015552520751952, 0.020948991775512696, 0.020997119903564454, 0.021223424911499023, 0.0212490234375, 0.021275648117065428, 0.021132287979125978, 0.020916223526000977, 0.021134336471557616, 0.021210111618041993, 0.02128486442565918, 0.0212674560546875, 0.0212541446685791, 0.02126131248474121, 0.02129408073425293, 0.021227519989013673, 0.021294111251831054, 0.021252063751220702, 0.02122854423522949, 0.021321727752685548, 0.021456895828247072, 0.021604352951049805, 0.02127462387084961, 0.022246400833129884, 0.021738496780395508, 0.021187583923339845, 0.02122137641906738, 0.02126438331604004, 0.021363744735717772, 0.021248992919921876, 0.021210111618041993, 0.021165056228637694, 0.021166080474853514, 0.021180416107177736, 0.020921344757080077, 0.02126233673095703, 0.021166080474853514, 0.02106879997253418, 0.020898815155029296, 0.02087936019897461, 0.02099404716491699, 0.021359615325927735, 0.021292032241821288, 0.02106265640258789, 0.021353471755981446, 0.021140480041503908, 0.021177343368530274, 0.021110784530639647, 0.021007360458374022, 0.02104319953918457, 0.021110784530639647, 0.02110361671447754, 0.020951040267944337, 0.021109760284423826, 0.020971519470214844, 0.02106368064880371, 0.021227519989013673, 0.021178367614746094, 0.021157888412475585, 0.021169151306152344, 0.021199935913085936, 0.021187519073486327, 0.021642240524291992, 0.022961151123046874, 0.02230169677734375, 0.021651456832885742, 0.021154815673828126, 0.021313535690307618, 0.022183935165405275, 0.02265292739868164, 0.021400575637817384, 0.021140480041503908, 0.0212541446685791, 0.020954111099243163, 0.021159936904907226, 0.02122547149658203, 0.021028863906860353, 0.021363712310791014, 0.022821887969970703, 0.02250547218322754, 0.021562368392944335, 0.02127462387084961, 0.021393407821655275, 0.021230592727661132, 0.021758975982666014, 0.021750783920288085, 0.021337087631225587, 0.02126233673095703, 0.021612543106079102, 0.021207040786743164, 0.021113855361938477, 0.021235712051391603, 0.02124595260620117, 0.02125823974609375, 0.0212674560546875, 0.021194751739501954, 0.021214208602905273, 0.021324800491333007, 0.021352447509765626, 0.021317632675170898, 0.021061632156372072, 0.02124595260620117, 0.021196800231933592, 0.021230592727661132, 0.021203968048095705, 0.02124595260620117, 0.02129100799560547, 0.02125004768371582, 0.0212490234375, 0.02127667236328125, 0.02126131248474121, 0.021275648117065428, 0.02110873603820801, 0.02131455993652344, 0.02204876708984375, 0.02211327934265137, 0.022658048629760744, 0.021547008514404296, 0.021416959762573243, 0.021003263473510742, 0.021230592727661132, 0.021303295135498047, 0.02124595260620117, 0.021315584182739256, 0.021140480041503908, 0.02128998374938965, 0.021222400665283202, 0.021204992294311522, 0.02126131248474121, 0.02126643180847168, 0.02125312042236328, 0.02103398323059082, 0.021029888153076173, 0.02129817581176758, 0.021601280212402343, 0.021161983489990235, 
0.02124083137512207, 0.021227519989013673, 0.02122444725036621, 0.021176319122314453, 0.02119987106323242, 0.021189632415771483, 0.021363712310791014, 0.021246976852416992, 0.02124185562133789, 0.02123161506652832, 0.02123980712890625, 0.021174272537231444, 0.021285888671875, 0.02165247917175293, 0.021172224044799806, 0.021138431549072266, 0.021111808776855468, 0.02128691291809082, 0.02103500747680664, 0.020898815155029296, 0.020930591583251952, 0.021041120529174805, 0.0214517765045166, 0.021045248031616212, 0.021159936904907226, 0.021271551132202148, 0.021555200576782226, 0.021605375289916993, 0.021146623611450196, 0.021192703247070312, 0.021144575119018554, 0.021403648376464843, 0.021242879867553712, 0.021150720596313476, 0.02101148796081543, 0.0211680965423584, 0.02110361671447754, 0.021157888412475585, 0.02107187271118164, 0.020824064254760744, 0.020898815155029296, 0.02105958366394043, 0.021164031982421876, 0.021181440353393553, 0.021204992294311522, 0.021141504287719725, 0.021173248291015623, 0.021212160110473634, 0.021128192901611328, 0.021191680908203125, 0.021150720596313476, 0.021181440353393553, 0.021207040786743164, 0.021191680908203125, 0.021154815673828126, 0.02104115104675293, 0.02109337615966797, 0.021051424026489258, 0.021246944427490234, 0.021209087371826172, 0.02122956848144531, 0.021195775985717775, 0.021204992294311522, 0.02132275199890137, 0.021255168914794922, 0.02108415985107422, 0.021121023178100586, 0.02128895950317383, 0.021242879867553712, 0.021118976593017577, 0.02126643180847168, 0.021407743453979493, 0.021313535690307618, 0.021415935516357423, 0.02230271911621094, 0.022701055526733398, 0.022359039306640623, 0.02203647994995117, 0.02209587287902832, 0.021991424560546875, 0.022131711959838866, 0.021967872619628907, 0.022185983657836913, 0.022007808685302735, 0.022383615493774413, 0.022146047592163084, 0.022039552688598633, 0.023383039474487305, 0.02225049591064453, 0.022389759063720704, 0.02219011116027832, 0.0217108154296875, 0.021965824127197265, 0.021922815322875978, 0.02190336036682129, 0.021328895568847657, 0.02126643180847168, 0.02212761688232422, 0.021925888061523437, 0.021983232498168945, 0.0219289608001709, 0.0219289608001709, 0.02181427192687988, 0.02191974449157715, 0.02203647994995117, 0.021925888061523437, 0.02186956787109375, 0.021966848373413086, 0.021955583572387697, 0.021908479690551756, 0.02191155242919922, 0.021994495391845705, 0.02162073516845703, 0.022042623519897463, 0.021884927749633788, 0.02234880065917969, 0.022187007904052734, 0.021617664337158202, 0.02192076873779297, 0.02193715286254883, 0.02167500877380371, 0.021796863555908205, 0.021391359329223633, 0.021123071670532227, 0.02106265640258789, 0.02148659133911133, 0.021954559326171876, 0.021037055969238282, 0.021202943801879884, 0.021195775985717775, 0.021159936904907226, 0.02122444725036621, 0.0212541446685791, 0.02202828788757324, 0.021605375289916993, 0.022297599792480468, 0.022005760192871093, 0.021194751739501954, 0.02121625518798828, 0.021211135864257814, 0.022128639221191407, 0.022013952255249023, 0.021969919204711915, 0.022099967956542968, 0.021942272186279296, 0.021981184005737304, 0.021964799880981444, 0.022215679168701173, 0.021876735687255858, 0.022114303588867186, 0.02123161506652832, 0.02126438331604004, 0.021285888671875, 0.022082559585571288, 0.021591039657592775, 0.0212541446685791, 0.02130636787414551, 0.021279743194580078, 0.02126438331604004, 0.021195775985717775, 0.021725183486938478, 0.02207846450805664, 0.02167091178894043, 0.022010879516601564, 
0.021999616622924805, 0.02211123275756836, 0.02222489547729492, 0.022771711349487304, 0.022125568389892578, 0.02187468719482422, 0.02184601593017578, 0.022107135772705077, 0.022018047332763673, 0.021758975982666014, 0.02185932731628418, 0.022191104888916017, 0.021719039916992186, 0.02126540756225586, 0.021251071929931642, 0.021202943801879884, 0.022271999359130858, 0.02207846450805664, 0.021564416885375977, 0.022017023086547852, 0.02207027244567871, 0.021725183486938478, 0.020978687286376953, 0.021153791427612305, 0.021300224304199217, 0.021109760284423826, 0.021219327926635743, 0.021331968307495116, 0.02205695915222168, 0.021497856140136717, 0.021779455184936524, 0.022034431457519533, 0.021823488235473632, 0.021201919555664063, 0.021153791427612305, 0.021222400665283202, 0.021767168045043944, 0.0221265926361084, 0.021798912048339843, 0.02127359962463379, 0.022055936813354493, 0.022175743103027345, 0.022173696517944336, 0.021792768478393554, 0.02188083267211914, 0.021983232498168945, 0.021909503936767577, 0.022012928009033202, 0.022090751647949217, 0.02211737632751465]",tokens/s,46.497152629832016,,,2,64,1,,, -4bit-awq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,EleutherAI/pythia-1.3b,EleutherAI/pythia-1.3b,cuda,0,42,,,True,,,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,gpt_neox,MB,1508.888576,2095.579136,0.0,1465.909248,1358.169088,s,1,8.30442578125,8.30442578125,0.0,8.30442578125,8.30442578125,8.30442578125,8.30442578125,[8.30442578125],,kWh,1.795790325000376e-05,9.826584760515469e-06,2.771002216805929e-05,5.5494510178578516e-05,,MB,1688.539136,2122.842112,0.0,1472.200704,1356.544512,s,10,0.30266713523864747,0.030266713523864747,0.0001276513538817822,0.03023400020599365,0.030331279373168943,0.030480391311645507,0.03059968086242676,"[0.03026383972167969, 0.030213632583618165, 0.03062950325012207, 0.030248416900634765, 0.030227039337158205, 0.03023788833618164, 0.030145952224731445, 0.030230112075805664, 0.03029814338684082, 0.030172607421875]",tokens/s,8458.136685311034,kWh,3.5755048948894516e-07,1.9583496249262825e-07,1.3042600229305703e-06,1.8576454749121437e-06,tokens/kWh,137808857.21055433,MB,1724.309504,2122.842112,0.0,1472.200704,1409.728,s,10,12.773408569335938,1.2773408569335938,0.0074791853297365525,1.274037109375,1.287133874511719,1.2874731872558594,1.2877446374511718,"[1.2700616455078124, 1.2878125, 1.282693359375, 1.27657666015625, 1.27045068359375, 1.26907666015625, 1.2714407958984375, 1.286740234375, 1.2870584716796876, 1.27149755859375]",tokens/s,49.32121262545291,kWh,1.5243365400787743e-05,8.353068272559133e-06,2.710634603886556e-05,5.070277971221244e-05,tokens/kWh,1242535.426215017,,s,630,12.769751064300545,0.020269446133810376,0.00039494337923515964,0.020133888244628906,0.020775526237487792,0.021028761291503904,0.021790239009857184,"[0.01987583923339844, 0.020113407135009767, 0.020074495315551756, 0.020107263565063475, 0.02005299186706543, 0.01988915252685547, 0.02007961654663086, 0.020230144500732423, 0.02008678436279297, 0.02015951919555664, 0.02011849594116211, 
0.020126720428466797, 0.02011238479614258, 0.020082687377929686, 0.020100095748901366, 0.020199424743652345, 0.020101119995117187, 0.020033536911010744, 0.020121599197387697, 0.020082687377929686, 0.02011955261230469, 0.02011136054992676, 0.020290559768676757, 0.021114879608154297, 0.02144358444213867, 0.020533248901367186, 0.020190208435058594, 0.020113407135009767, 0.020183040618896485, 0.020273151397705077, 0.020142080307006836, 0.020215808868408205, 0.020150272369384766, 0.020118528366088868, 0.020178943634033202, 0.020161535263061522, 0.020148223876953125, 0.020067327499389647, 0.020280319213867186, 0.020188159942626953, 0.020109312057495117, 0.02003046417236328, 0.020076543807983398, 0.020013120651245116, 0.020002752304077148, 0.020051967620849608, 0.02003865623474121, 0.020135936737060548, 0.020063232421875, 0.020090879440307616, 0.020106239318847655, 0.020134912490844727, 0.020161535263061522, 0.02007040023803711, 0.020099071502685546, 0.02016972732543945, 0.020067327499389647, 0.02006220817565918, 0.02006937599182129, 0.02008576011657715, 0.020113407135009767, 0.020116479873657226, 0.02007040023803711, 0.019907583236694337, 0.02003660774230957, 0.02000383949279785, 0.02003455924987793, 0.02004275131225586, 0.020076543807983398, 0.020099071502685546, 0.02011238479614258, 0.02007046318054199, 0.02378848075866699, 0.02163199996948242, 0.02061516761779785, 0.020161535263061522, 0.02009702491760254, 0.020109312057495117, 0.020191232681274415, 0.020105215072631837, 0.02009702491760254, 0.020943872451782225, 0.02080460739135742, 0.02010316848754883, 0.020130815505981444, 0.020167680740356447, 0.02209689521789551, 0.02291097640991211, 0.021072895050048827, 0.020587520599365236, 0.020273151397705077, 0.021389312744140625, 0.02101862335205078, 0.020477951049804686, 0.020517887115478514, 0.020580352783203124, 0.020092927932739257, 0.02040115165710449, 0.020782079696655274, 0.020888576507568358, 0.020797439575195312, 0.020774911880493165, 0.020155391693115234, 0.019843072891235353, 0.02003763198852539, 0.02012774467468262, 0.019958784103393554, 0.01987174415588379, 0.020084735870361328, 0.020093952178955078, 0.020141056060791016, 0.020247552871704103, 0.02006220817565918, 0.02006220817565918, 0.020139007568359374, 0.020130815505981444, 0.02021990394592285, 0.020189184188842774, 0.020163583755493163, 0.020159488677978517, 0.02001408004760742, 0.020147199630737304, 0.020110336303710938, 0.020157440185546875, 0.020936704635620116, 0.020364288330078126, 0.0214517765045166, 0.020377599716186523, 0.02206105613708496, 0.02026700782775879, 0.020136959075927736, 0.020099071502685546, 0.020752384185791017, 0.020730880737304686, 0.020141056060791016, 0.02007961654663086, 0.02012057685852051, 0.02009702491760254, 0.02011955261230469, 0.020191232681274415, 0.02021990394592285, 0.020188159942626953, 0.020140031814575195, 0.020126720428466797, 0.020134912490844727, 0.020183040618896485, 0.020099071502685546, 0.020121664047241212, 0.020072383880615233, 0.020166656494140626, 0.02008064079284668, 0.02012876892089844, 0.020110336303710938, 0.020126720428466797, 0.020130815505981444, 0.020162559509277343, 0.020161535263061522, 0.020254720687866212, 0.020191232681274415, 0.020147199630737304, 0.02006937599182129, 0.02046668815612793, 0.020238336563110353, 0.02009600067138672, 0.02067148780822754, 0.02126131248474121, 0.02108723258972168, 0.02062335968017578, 0.02017791938781738, 0.020075519561767577, 0.02009600067138672, 0.02008678436279297, 0.020690944671630858, 0.02168627166748047, 0.02119987106323242, 
0.021147647857666017, 0.020519935607910156, 0.02045952033996582, 0.020136959075927736, 0.020183040618896485, 0.02012985610961914, 0.02061408042907715, 0.020173824310302735, 0.020180992126464844, 0.020117504119873047, 0.020183040618896485, 0.020157440185546875, 0.020155391693115234, 0.02007756805419922, 0.02004991912841797, 0.019966976165771484, 0.020091903686523437, 0.020140031814575195, 0.02020966339111328, 0.020151296615600587, 0.020144128799438478, 0.020207616806030275, 0.02029363250732422, 0.020162559509277343, 0.02012371253967285, 0.02020243263244629, 0.020154367446899413, 0.02008064079284668, 0.02006937599182129, 0.020192256927490236, 0.02058137512207031, 0.020290559768676757, 0.02011136054992676, 0.02012774467468262, 0.02019327926635742, 0.020176895141601564, 0.020144128799438478, 0.020238336563110353, 0.021046272277832033, 0.02088140869140625, 0.020196352005004883, 0.02007347106933594, 0.020099071502685546, 0.02007142448425293, 0.020033536911010744, 0.020150272369384766, 0.020666368484497072, 0.0211015682220459, 0.02062848091125488, 0.02027008056640625, 0.020268064498901367, 0.020179935455322266, 0.02020966339111328, 0.020182016372680665, 0.020102144241333008, 0.02005401611328125, 0.020098047256469728, 0.02011238479614258, 0.020123647689819335, 0.02011136054992676, 0.020133888244628906, 0.02020147132873535, 0.020140031814575195, 0.020214784622192384, 0.021401599884033205, 0.021163007736206055, 0.02041753578186035, 0.020123647689819335, 0.020153343200683595, 0.02044108772277832, 0.020165632247924805, 0.02006937599182129, 0.020220928192138672, 0.0200898551940918, 0.020196352005004883, 0.020150272369384766, 0.020167680740356447, 0.019949567794799804, 0.020083711624145507, 0.02046463966369629, 0.020183040618896485, 0.020212736129760742, 0.02011238479614258, 0.020148223876953125, 0.0202106876373291, 0.020133888244628906, 0.020173824310302735, 0.020165632247924805, 0.020130815505981444, 0.020153343200683595, 0.020136959075927736, 0.02008064079284668, 0.02021379280090332, 0.02010006332397461, 0.02025164794921875, 0.020353023529052734, 0.02021683120727539, 0.020964351654052735, 0.02049945640563965, 0.020170751571655272, 0.020222976684570314, 0.020150272369384766, 0.020237312316894532, 0.02007859230041504, 0.020083711624145507, 0.020098047256469728, 0.020075519561767577, 0.020155391693115234, 0.020115488052368163, 0.020066272735595702, 0.02003455924987793, 0.020067327499389647, 0.020065280914306642, 0.020074495315551756, 0.020129791259765627, 0.02008780860900879, 0.020164608001708984, 0.020129791259765627, 0.02008576011657715, 0.020083711624145507, 0.020142080307006836, 0.020189184188842774, 0.02019327926635742, 0.020121599197387697, 0.020178943634033202, 0.02008780860900879, 0.02009600067138672, 0.020137983322143553, 0.020146175384521483, 0.020188159942626953, 0.020106239318847655, 0.020076543807983398, 0.020222976684570314, 0.02010316848754883, 0.020174848556518556, 0.020166656494140626, 0.020107263565063475, 0.02008780860900879, 0.020145151138305666, 0.020109312057495117, 0.01987583923339844, 0.02004275131225586, 0.02042265510559082, 0.020941823959350587, 0.020822015762329102, 0.020764671325683593, 0.020471807479858398, 0.020813823699951172, 0.020199424743652345, 0.020093952178955078, 0.020195327758789062, 0.02004275131225586, 0.020116479873657226, 0.02007347106933594, 0.020100095748901366, 0.02020966339111328, 0.02008678436279297, 0.020098047256469728, 0.020163583755493163, 0.020090879440307616, 0.02007859230041504, 0.019917823791503905, 0.01985536003112793, 0.020154367446899413, 
0.020084735870361328, 0.02007756805419922, 0.02021990394592285, 0.02030080032348633, 0.020117504119873047, 0.02007040023803711, 0.020100095748901366, 0.020123647689819335, 0.0198973445892334, 0.01985536003112793, 0.02000486373901367, 0.02006630325317383, 0.020051967620849608, 0.02005606460571289, 0.02003865623474121, 0.0200949764251709, 0.019803136825561524, 0.019753984451293945, 0.019973119735717772, 0.019776512145996093, 0.020057088851928712, 0.02004582405090332, 0.02006937599182129, 0.020147199630737304, 0.020002815246582033, 0.02004275131225586, 0.020023296356201172, 0.020076543807983398, 0.020048896789550782, 0.02008883285522461, 0.02008678436279297, 0.020160512924194338, 0.02011955261230469, 0.020076543807983398, 0.020943872451782225, 0.020387840270996094, 0.020107263565063475, 0.020082687377929686, 0.0200447998046875, 0.019933183670043944, 0.020068351745605468, 0.020069440841674804, 0.020043712615966797, 0.02002841567993164, 0.020092927932739257, 0.02007756805419922, 0.020155391693115234, 0.020114431381225584, 0.020143104553222657, 0.02012774467468262, 0.02011238479614258, 0.020102144241333008, 0.019848224639892578, 0.01982153511047363, 0.020131839752197265, 0.020102144241333008, 0.020075519561767577, 0.0200898551940918, 0.020133888244628906, 0.020032512664794923, 0.01987379264831543, 0.019941375732421874, 0.02017791938781738, 0.020118528366088868, 0.02010419273376465, 0.02006220817565918, 0.020256767272949217, 0.02032537651062012, 0.02007756805419922, 0.02007040023803711, 0.020142080307006836, 0.02007961654663086, 0.020108287811279296, 0.020133888244628906, 0.020165632247924805, 0.020093952178955078, 0.020686880111694336, 0.021008352279663085, 0.02082099151611328, 0.020513792037963868, 0.020717567443847656, 0.020723712921142577, 0.02067148780822754, 0.020403200149536133, 0.020753408432006838, 0.020144128799438478, 0.020143104553222657, 0.020143104553222657, 0.020068351745605468, 0.020098047256469728, 0.020093952178955078, 0.020148223876953125, 0.02010419273376465, 0.02008166313171387, 0.020205568313598633, 0.020083711624145507, 0.020057088851928712, 0.02007859230041504, 0.020068351745605468, 0.02009600067138672, 0.02004787254333496, 0.020090879440307616, 0.02040934371948242, 0.020618240356445314, 0.020714496612548827, 0.020503551483154296, 0.020319232940673827, 0.020150272369384766, 0.02072985649108887, 0.02067353630065918, 0.0200130558013916, 0.020060159683227538, 0.02008883285522461, 0.01988198471069336, 0.019886079788208007, 0.02003046417236328, 0.020538368225097657, 0.020185087203979494, 0.02004377555847168, 0.020117504119873047, 0.019878911972045898, 0.01982054328918457, 0.019862527847290038, 0.019825664520263672, 0.021611520767211914, 0.02043084716796875, 0.020131839752197265, 0.020388864517211915, 0.020978687286376953, 0.020997119903564454, 0.021357568740844726, 0.02082611274719238, 0.020904960632324218, 0.022030336380004883, 0.021073919296264648, 0.021276735305786134, 0.020851648330688477, 0.020108287811279296, 0.019805183410644533, 0.02027519989013672, 0.020556800842285155, 0.021832704544067383, 0.02092748832702637, 0.020839424133300782, 0.020361215591430663, 0.020134912490844727, 0.020107263565063475, 0.020154367446899413, 0.01989017677307129, 0.020121599197387697, 0.020098047256469728, 0.02012876892089844, 0.020068351745605468, 0.020107263565063475, 0.020123647689819335, 0.02005606460571289, 0.02011955261230469, 0.020166656494140626, 0.020899839401245117, 0.020876287460327148, 0.020587520599365236, 0.02010419273376465, 0.020099071502685546, 0.020143104553222657, 
0.02047488021850586, 0.0200263671875, 0.02009702491760254, 0.02005606460571289, 0.020060159683227538, 0.020131839752197265, 0.02012876892089844, 0.020593664169311524, 0.02106470489501953, 0.02084351921081543, 0.020742143630981445, 0.02048409652709961, 0.020131839752197265, 0.020121599197387697, 0.020165632247924805, 0.020110336303710938, 0.020246528625488282, 0.02047488021850586, 0.02067967987060547, 0.02062233543395996, 0.020472864151000976, 0.020847583770751955, 0.020555776596069338, 0.020144128799438478, 0.020163583755493163, 0.020143104553222657, 0.02008064079284668, 0.020142080307006836, 0.021215232849121093, 0.021238784790039062, 0.020781055450439453, 0.02018611145019531, 0.020182016372680665, 0.020083711624145507, 0.020091903686523437, 0.02012774467468262, 0.020116479873657226, 0.020172800064086914, 0.020160512924194338, 0.020105215072631837, 0.020230144500732423, 0.02024550437927246, 0.020880384445190428, 0.020170751571655272, 0.02012473678588867, 0.02016864013671875, 0.020168703079223634, 0.02012057685852051, 0.020571136474609376, 0.020221952438354493, 0.020153343200683595, 0.020101119995117187, 0.020170751571655272, 0.021082111358642578, 0.022872064590454103, 0.021114879608154297, 0.020972543716430665, 0.0208721923828125, 0.020727807998657227, 0.02082815933227539, 0.02065203285217285, 0.020122623443603514, 0.02019327926635742, 0.02012774467468262, 0.01990656089782715, 0.02009600067138672, 0.020137983322143553, 0.020603904724121092, 0.020511743545532226, 0.020155391693115234, 0.02010835266113281, 0.020230079650878908, 0.020098047256469728, 0.020157440185546875, 0.02012774467468262, 0.02008678436279297, 0.020150272369384766, 0.02007756805419922, 0.020065311431884766, 0.020115423202514648, 0.02009600067138672, 0.02004582405090332, 0.020124671936035156, 0.02003660774230957, 0.020076543807983398, 0.020116479873657226, 0.020106239318847655, 0.020136959075927736, 0.020192256927490236, 0.020152320861816408, 0.02007244873046875, 0.020008960723876954, 0.02011136054992676, 0.02008064079284668, 0.020099071502685546, 0.020110336303710938, 0.020135936737060548, 0.020205568313598633, 0.020107263565063475, 0.0200263671875, 0.02005606460571289, 0.020136959075927736, 0.020007936477661133, 0.02011136054992676, 0.020571136474609376, 0.021037055969238282, 0.020496383666992187, 0.020343807220458983, 0.02031001663208008, 0.02029465675354004, 0.020130815505981444, 0.02019430351257324, 0.020143104553222657, 0.020091903686523437, 0.02019126319885254, 0.02009084892272949, 0.020107263565063475, 0.02042163276672363, 0.020571136474609376, 0.02060492706298828, 0.020057088851928712, 0.020023296356201172, 0.020137983322143553, 0.020063232421875, 0.02008678436279297, 0.02007347106933594, 0.02008883285522461]",tokens/s,49.33533917988778,,,2,64,1,,, -4bit-awq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,Qwen/Qwen1.5-72B,Qwen/Qwen1.5-72B,cuda,0,42,,,True,,,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in 
benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run - report = scenario.run(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run - self.run_model_loading_tracking(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking - backend.load() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load - self.load_transformers_model() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model - self.load_transformers_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights - self.load_transformers_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained - self.pretrained_model = self.automodel_loader.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4034, in from_pretrained - dispatch_model(model, **device_map_kwargs) - File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 494, in dispatch_model - model.to(device) - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 2905, in to - return super().to(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1174, in to - return self._apply(convert) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply - module._apply(fn) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply - module._apply(fn) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply - module._apply(fn) - [Previous line repeated 2 more times] - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 854, in _apply - self._buffers[key] = fn(buf) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1160, in convert - return t.to( -torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 96.00 MiB. GPU 0 has a total capacity of 22.18 GiB of which 68.50 MiB is free. Process 67058 has 22.11 GiB memory in use. Of the allocated memory 21.86 GiB is allocated by PyTorch, and 10.74 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) - -",qwen2,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,2,64,1,,, -4bit-awq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,EleutherAI/pythia-1.4b,EleutherAI/pythia-1.4b,cuda,0,42,,,True,,,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,gpt_neox,MB,1541.107712,2095.579136,0.0,1465.909248,1358.169088,s,1,8.834560546875,8.834560546875,0.0,8.834560546875,8.834560546875,8.834560546875,8.834560546875,[8.834560546875],,kWh,1.7451490615976837e-05,9.54876888914364e-06,2.785252228199031e-05,5.485278178711079e-05,,MB,1724.57984,2122.842112,0.0,1472.200704,1356.544512,s,10,0.30314528083801273,0.030314528083801272,6.751127408441343e-05,0.0302924165725708,0.030419343948364257,0.030442312240600587,0.030460686874389648,"[0.030296064376831053, 0.030288768768310548, 0.030264127731323243, 0.030252511978149415, 0.030334335327148437, 0.03041423988342285, 0.030276639938354492, 0.030465280532836914, 0.03029692840576172, 0.030256383895874022]",tokens/s,8444.795818437793,kWh,3.581040597054074e-07,1.9622352885542422e-07,1.2872948008788106e-06,1.841622389439642e-06,tokens/kWh,139007866.90473184,MB,1760.411648,2122.842112,0.0,1472.200704,1409.728,s,10,12.847924682617187,1.284792468261719,0.004539804905255994,1.2851262817382811,1.2899037353515626,1.2913843627929686,1.2925688647460938,"[1.2868046875, 1.292864990234375, 1.2831141357421876, 1.28382763671875, 1.2868243408203126, 1.28957470703125, 1.2829921875, 1.2864249267578125, 1.277042236328125, 1.278454833984375]",tokens/s,49.035156693622966,kWh,1.5327562524391956e-05,8.397849097577284e-06,2.6895144411719632e-05,5.062055603368887e-05,tokens/kWh,1244553.6939197662,,s,630,12.844169200897204,0.02038757016015431,0.0002758337298371224,0.020313599586486816,0.020686028099060057,0.020910950565338135,0.021357854785919193,"[0.020153343200683595, 0.020374528884887694, 0.02028339195251465, 0.02026905632019043, 0.02027212715148926, 0.020387840270996094, 0.020335615158081053, 0.020389888763427736, 0.020380672454833985, 0.020988927841186524, 0.02102783966064453, 0.020366336822509764, 0.020299776077270508, 0.0202926082611084, 0.020703231811523438, 0.02045337677001953, 0.020331520080566406, 0.020314111709594726, 0.020339712142944336, 0.02032537651062012, 0.02042163276672363, 0.020353023529052734, 0.020345855712890625, 0.020454399108886717, 0.020358144760131838, 0.020299776077270508, 0.020385791778564453, 0.020369407653808593, 0.02045849609375, 0.02032640075683594, 0.020389888763427736, 0.02045030403137207, 0.020365312576293947, 0.02036735916137695, 0.020374528884887694, 0.020354047775268554, 0.020337663650512695, 0.020288511276245116, 0.020382720947265624, 0.020358144760131838, 0.020361215591430663, 0.020290559768676757, 0.020313087463378905, 0.020296703338623046, 0.020306943893432617, 0.020346879959106445, 0.020271104812622072, 0.020436992645263673, 0.020592639923095703, 0.020726783752441406, 0.02044211196899414, 0.020348928451538087, 
0.020363264083862305, 0.02031718444824219, 0.02027622413635254, 0.020137983322143553, 0.020348928451538087, 0.020319232940673827, 0.020847616195678712, 0.021072895050048827, 0.020941823959350587, 0.020668415069580077, 0.02064588737487793, 0.020158464431762696, 0.02022604751586914, 0.020256767272949217, 0.020298751831054687, 0.020311040878295897, 0.020340736389160157, 0.02037555122375488, 0.020368383407592772, 0.020853759765625, 0.021138431549072266, 0.021105663299560547, 0.020945920944213867, 0.021308416366577147, 0.020303871154785155, 0.020328447341918944, 0.020428800582885744, 0.020324352264404297, 0.020336639404296874, 0.02043187141418457, 0.02039910316467285, 0.020360191345214843, 0.020385791778564453, 0.023167999267578124, 0.02124492835998535, 0.020413440704345705, 0.020281343460083007, 0.020503551483154296, 0.020337663650512695, 0.020336639404296874, 0.020331520080566406, 0.020321279525756835, 0.020413440704345705, 0.020287488937377928, 0.020319232940673827, 0.020295679092407228, 0.020477951049804686, 0.020513792037963868, 0.02026393508911133, 0.020303871154785155, 0.020358144760131838, 0.020304895401000975, 0.020320255279541014, 0.020350976943969725, 0.020356096267700196, 0.020350976943969725, 0.020256767272949217, 0.020684799194335936, 0.020353023529052734, 0.020265983581542968, 0.020303871154785155, 0.02028544044494629, 0.020341760635375978, 0.02032537651062012, 0.02031001663208008, 0.020341760635375978, 0.020360191345214843, 0.020320255279541014, 0.021378047943115236, 0.022188032150268554, 0.02085785675048828, 0.020372480392456056, 0.020330495834350586, 0.020381696701049806, 0.02010316848754883, 0.020281343460083007, 0.020320255279541014, 0.020256767272949217, 0.02033459281921387, 0.02042367935180664, 0.02037555122375488, 0.020410367965698242, 0.020348928451538087, 0.020254720687866212, 0.020323328018188477, 0.02024550437927246, 0.02030284881591797, 0.020324352264404297, 0.0204769287109375, 0.020534271240234374, 0.020330495834350586, 0.020290559768676757, 0.020419584274291993, 0.020333568572998048, 0.020291584014892578, 0.02032640075683594, 0.02028339195251465, 0.020395008087158203, 0.020289535522460937, 0.020304895401000975, 0.020273151397705077, 0.020291584014892578, 0.02030899238586426, 0.02025574493408203, 0.020702207565307617, 0.020370431900024414, 0.020290559768676757, 0.020355072021484375, 0.02024140739440918, 0.020288511276245116, 0.020360191345214843, 0.020319232940673827, 0.020331520080566406, 0.020393983840942383, 0.020343807220458983, 0.020349952697753908, 0.02128281593322754, 0.021151744842529296, 0.02052403259277344, 0.020305919647216796, 0.020257791519165038, 0.020322303771972656, 0.02022809600830078, 0.02027008056640625, 0.020248575210571287, 0.0202926082611084, 0.02025164794921875, 0.020418560028076172, 0.020460544586181642, 0.020518911361694335, 0.020486143112182616, 0.020374528884887694, 0.020343807220458983, 0.02022809600830078, 0.02025267219543457, 0.02026803207397461, 0.020214784622192384, 0.020009983062744142, 0.020350976943969725, 0.020274175643920898, 0.02025267219543457, 0.02029465675354004, 0.020311040878295897, 0.02009702491760254, 0.02039091110229492, 0.020307968139648438, 0.02032640075683594, 0.020282367706298828, 0.020319232940673827, 0.020381696701049806, 0.020304895401000975, 0.02022604751586914, 0.020387840270996094, 0.020740095138549804, 0.020346879959106445, 0.02039910316467285, 0.020291584014892578, 0.020336639404296874, 0.020339712142944336, 0.020445184707641603, 0.020420608520507814, 0.020320255279541014, 0.020237312316894532, 
0.020304895401000975, 0.020447231292724608, 0.020321279525756835, 0.02024345588684082, 0.020677631378173827, 0.020548608779907225, 0.020281343460083007, 0.02022707176208496, 0.020333568572998048, 0.020336639404296874, 0.020332544326782227, 0.020342784881591795, 0.020347904205322266, 0.02040729522705078, 0.02032537651062012, 0.020743167877197266, 0.020751359939575196, 0.021072895050048827, 0.020752384185791017, 0.020329471588134765, 0.020339712142944336, 0.020405248641967775, 0.020356096267700196, 0.020312063217163084, 0.020281343460083007, 0.020299776077270508, 0.020364288330078126, 0.020291584014892578, 0.020368383407592772, 0.02064691162109375, 0.02042163276672363, 0.02024345588684082, 0.020336639404296874, 0.020265983581542968, 0.020355072021484375, 0.020335615158081053, 0.020321279525756835, 0.020410367965698242, 0.02024345588684082, 0.02025984001159668, 0.02024448013305664, 0.020282367706298828, 0.02022809600830078, 0.020343807220458983, 0.020353023529052734, 0.02031001663208008, 0.020385791778564453, 0.020556800842285155, 0.02127257537841797, 0.020996095657348633, 0.020915199279785156, 0.020907007217407226, 0.02088652801513672, 0.02085273551940918, 0.020914176940917968, 0.02102783966064453, 0.020419584274291993, 0.02031718444824219, 0.020319232940673827, 0.020307968139648438, 0.020316160202026368, 0.020328447341918944, 0.020360191345214843, 0.020357120513916017, 0.020297727584838866, 0.020360191345214843, 0.02026393508911133, 0.020361215591430663, 0.020347904205322266, 0.020572160720825194, 0.020462591171264647, 0.020411392211914063, 0.020180992126464844, 0.02026803207397461, 0.02024448013305664, 0.02029363250732422, 0.02028544044494629, 0.020351999282836913, 0.020256767272949217, 0.020249599456787108, 0.02028544044494629, 0.020373504638671876, 0.020497407913208008, 0.020368383407592772, 0.020332544326782227, 0.02026393508911133, 0.02065407943725586, 0.02025164794921875, 0.02028544044494629, 0.02041651153564453, 0.020163583755493163, 0.020298751831054687, 0.020314111709594726, 0.02020147132873535, 0.02025062370300293, 0.020282367706298828, 0.020395008087158203, 0.020699136734008788, 0.020512767791748047, 0.02025369644165039, 0.020168703079223634, 0.020230144500732423, 0.02022604751586914, 0.020199424743652345, 0.02023526382446289, 0.020315135955810547, 0.020330495834350586, 0.020370431900024414, 0.020669439315795898, 0.02025062370300293, 0.02031001663208008, 0.02020147132873535, 0.020342784881591795, 0.02025574493408203, 0.020262912750244142, 0.02043801689147949, 0.020525056838989256, 0.02027622413635254, 0.020282367706298828, 0.020273151397705077, 0.021151744842529296, 0.020824064254760744, 0.02089779281616211, 0.020544511795043945, 0.020273151397705077, 0.02027827262878418, 0.020264959335327147, 0.020384767532348632, 0.020899839401245117, 0.02087424087524414, 0.02043801689147949, 0.020339712142944336, 0.02024345588684082, 0.020711423873901368, 0.02023219108581543, 0.020323328018188477, 0.020281343460083007, 0.020271104812622072, 0.02027724838256836, 0.020364288330078126, 0.020273151397705077, 0.020280319213867186, 0.020279296875, 0.02025369644165039, 0.021301248550415038, 0.02149273681640625, 0.021611520767211914, 0.020818944931030273, 0.02027519989013672, 0.020337663650512695, 0.02025267219543457, 0.02027212715148926, 0.02044825553894043, 0.020555776596069338, 0.020336639404296874, 0.020288511276245116, 0.020246528625488282, 0.02026700782775879, 0.020273151397705077, 0.02024345588684082, 0.02027212715148926, 0.02171801567077637, 0.021014528274536134, 0.020107263565063475, 
0.02027622413635254, 0.020262912750244142, 0.02084249687194824, 0.020388864517211915, 0.020328447341918944, 0.02023526382446289, 0.020336639404296874, 0.02031718444824219, 0.020316160202026368, 0.0202608642578125, 0.02024550437927246, 0.020296703338623046, 0.02035916709899902, 0.02040729522705078, 0.021130239486694336, 0.020509695053100584, 0.02028441619873047, 0.02069708824157715, 0.020346879959106445, 0.021009408950805664, 0.02047590446472168, 0.02029363250732422, 0.020384767532348632, 0.020331520080566406, 0.020254720687866212, 0.020213760375976563, 0.020222976684570314, 0.02027827262878418, 0.02028544044494629, 0.02025881576538086, 0.020395008087158203, 0.020281343460083007, 0.02025062370300293, 0.020297727584838866, 0.020504575729370117, 0.02042163276672363, 0.02029363250732422, 0.020257791519165038, 0.020366336822509764, 0.02025267219543457, 0.020257791519165038, 0.020311040878295897, 0.020304895401000975, 0.02030899238586426, 0.020379648208618165, 0.02030182456970215, 0.020369407653808593, 0.02041651153564453, 0.020353023529052734, 0.02027212715148926, 0.02025984001159668, 0.020231168746948244, 0.020592639923095703, 0.020576255798339844, 0.020330495834350586, 0.020287488937377928, 0.020304895401000975, 0.020315135955810547, 0.02026803207397461, 0.02030899238586426, 0.02025369644165039, 0.020347904205322266, 0.020059135437011717, 0.020184064865112306, 0.02062233543395996, 0.02031820869445801, 0.02030284881591797, 0.020189184188842774, 0.02031718444824219, 0.020337663650512695, 0.020327423095703127, 0.020230144500732423, 0.020289535522460937, 0.020274175643920898, 0.020289535522460937, 0.02065715217590332, 0.02043391990661621, 0.020351999282836913, 0.022541311264038084, 0.021271551132202148, 0.020328447341918944, 0.02023526382446289, 0.020262912750244142, 0.02022502326965332, 0.020262912750244142, 0.020388864517211915, 0.02025267219543457, 0.02021785545349121, 0.02026803207397461, 0.02025164794921875, 0.02024140739440918, 0.02025369644165039, 0.02063667106628418, 0.02081177520751953, 0.020706304550170897, 0.02027622413635254, 0.02023423957824707, 0.020204544067382812, 0.020402175903320312, 0.020745216369628908, 0.02018611145019531, 0.02022604751586914, 0.020274175643920898, 0.02023526382446289, 0.020262912750244142, 0.02025164794921875, 0.020264959335327147, 0.02069708824157715, 0.020900863647460938, 0.02122444725036621, 0.02101862335205078, 0.02063667106628418, 0.02020249557495117, 0.0206561279296875, 0.020246528625488282, 0.02023628807067871, 0.02021887969970703, 0.02030182456970215, 0.02025267219543457, 0.02022809600830078, 0.020298751831054687, 0.020257791519165038, 0.020247552871704103, 0.02030182456970215, 0.020212736129760742, 0.020048896789550782, 0.020195327758789062, 0.020196352005004883, 0.020190208435058594, 0.019927040100097656, 0.020123647689819335, 0.020214784622192384, 0.020294719696044922, 0.020207551956176757, 0.020199424743652345, 0.020230144500732423, 0.020172800064086914, 0.02022707176208496, 0.020179967880249023, 0.020192256927490236, 0.02037555122375488, 0.02024140739440918, 0.02031718444824219, 0.020242431640625, 0.02021683120727539, 0.020184064865112306, 0.02024140739440918, 0.02022604751586914, 0.02039193534851074, 0.02021683120727539, 0.020197376251220703, 0.02022400093078613, 0.020230144500732423, 0.02026803207397461, 0.02020147132873535, 0.020248575210571287, 0.020288511276245116, 0.02021683120727539, 0.020329471588134765, 0.020212736129760742, 0.020612096786499022, 0.02022502326965332, 0.02021990394592285, 0.02025164794921875, 0.020392959594726562, 
0.020196352005004883, 0.020237312316894532, 0.02021990394592285, 0.020192256927490236, 0.020273151397705077, 0.02023219108581543, 0.020247552871704103, 0.020791296005249024, 0.020711423873901368, 0.0202608642578125, 0.020306943893432617, 0.020339712142944336, 0.020299776077270508, 0.020289535522460937, 0.020311040878295897, 0.020376575469970702, 0.020282367706298828, 0.020254720687866212, 0.02029363250732422, 0.020299776077270508, 0.020288511276245116, 0.020323328018188477, 0.020288511276245116, 0.019986431121826173, 0.020150272369384766, 0.020195327758789062, 0.020221952438354493, 0.020214784622192384, 0.020197376251220703, 0.020230144500732423, 0.02027519989013672, 0.02026803207397461, 0.020327423095703127, 0.020291584014892578, 0.02088755226135254, 0.020313087463378905, 0.02024550437927246, 0.02030182456970215, 0.020355072021484375, 0.020229120254516602, 0.02032640075683594, 0.020354047775268554, 0.02026803207397461, 0.02025984001159668, 0.020760576248168947, 0.020262912750244142, 0.020336639404296874, 0.020264959335327147, 0.020200447082519533, 0.02025369644165039, 0.020246528625488282, 0.02022604751586914, 0.020166656494140626, 0.02022707176208496, 0.020336639404296874, 0.020242431640625, 0.02023628807067871, 0.02026803207397461, 0.020336639404296874, 0.02030182456970215, 0.020287488937377928, 0.02026700782775879, 0.02043187141418457, 0.02027622413635254, 0.020321279525756835, 0.020206592559814454, 0.02028646469116211, 0.020271104812622072, 0.020254720687866212, 0.020264959335327147, 0.02031820869445801, 0.020299776077270508, 0.02026188850402832, 0.020320255279541014, 0.020321279525756835, 0.020321279525756835, 0.02027724838256836, 0.020273151397705077, 0.020471807479858398, 0.020143104553222657, 0.020230144500732423, 0.020248575210571287, 0.020273151397705077, 0.020331520080566406, 0.020296703338623046, 0.02027519989013672]",tokens/s,49.049493987979545,,,2,64,1,,, -4bit-awq-exllama-v2-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,facebook/xglm-7.5B,facebook/xglm-7.5B,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 
258, in load_model_with_no_weights - self.load_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3907, in from_pretrained - hf_quantizer.postprocess_model(model) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/base.py"", line 195, in postprocess_model - return self._process_model_after_weight_loading(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/quantizer_awq.py"", line 107, in _process_model_after_weight_loading - model = post_init_awq_exllama_modules(model, self.quantization_config.exllama_config) - File ""/usr/local/lib/python3.10/dist-packages/transformers/integrations/awq.py"", line 466, in post_init_awq_exllama_modules - model = exllamav2_post_init( - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllamav2.py"", line 198, in exllamav2_post_init - submodule.post_init(scratch_space=model.scratch_spaces[device]) - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllamav2.py"", line 81, in post_init - self.q_handle = exlv2_ext.make_q_matrix( -NameError: name 'exlv2_ext' is not defined - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,2,64,1,,, -4bit-awq-exllama-v2-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,huggyllama/llama-30b,huggyllama/llama-30b,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - self.load_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3907, in from_pretrained - hf_quantizer.postprocess_model(model) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/base.py"", line 195, in postprocess_model - return self._process_model_after_weight_loading(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/quantizer_awq.py"", line 107, in _process_model_after_weight_loading - model = post_init_awq_exllama_modules(model, self.quantization_config.exllama_config) - File ""/usr/local/lib/python3.10/dist-packages/transformers/integrations/awq.py"", line 466, in post_init_awq_exllama_modules - model = exllamav2_post_init( - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllamav2.py"", line 198, in exllamav2_post_init - submodule.post_init(scratch_space=model.scratch_spaces[device]) - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllamav2.py"", line 81, in post_init - self.q_handle = exlv2_ext.make_q_matrix( -NameError: name 'exlv2_ext' is not defined - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,2,64,1,,, -4bit-awq-exllama-v2-eager,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,openai-community/gpt2-large,openai-community/gpt2-large,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.1,,0.30.1,,,,1.19.2,,,,0.11.1,,,,,,,,,,,,,,,,,,,,,,,,,,,MB,1282.564096,2645.03296,0.0,1998.585856,1692.285952,s,10,0.2531576633453369,0.02531576633453369,0.002450285014570774,0.024608816146850586,0.025783482170104975,0.02917710142135619,0.031891996822357174,"[0.03257072067260742, 0.02457766342163086, 0.02456787109375, 0.024639968872070313, 0.023735296249389647, 0.02453727912902832, 0.02502934455871582, 0.024820192337036133, 0.024842975616455078, 0.02383635139465332]",tokens/s,10112.275355093076,kWh,2.792075205920154e-07,1.529921474871729e-07,8.369050561796469e-07,1.2691047242588353e-06,tokens/kWh,201717001.84120387,MB,1282.859008,2645.03296,0.0,1998.585856,1740.091904,s,10,14.224613037109373,1.4224613037109373,0.007999866488579442,1.4226959228515625,1.4288772583007814,1.4350930480957031,1.4400656799316405,"[1.441308837890625, 1.4159813232421874, 1.4227452392578126, 1.4274959716796876, 1.4234324951171875, 1.4254453125, 1.42061181640625, 1.4226466064453125, 1.410975341796875, 1.4139700927734375]",tokens/s,44.28942976209244,kWh,1.649391267739289e-05,9.038513698214438e-06,3.331084448422058e-05,5.884327085982791e-05,tokens/kWh,1070640.6880418654,,s,629,14.421040109634406,0.022926931811819394,0.002966066291432769,0.022564863204956053,0.022848736572265625,0.023248467254638677,0.047059558868408236,"[0.02345062446594238, 0.023438335418701172, 0.023334911346435547, 0.02593791961669922, 0.023631872177124022, 0.023432191848754884, 0.023536640167236327, 0.022730752944946288, 0.02284752082824707, 0.022558687210083007, 
0.022576128005981445, 0.022565887451171874, 0.022806528091430665, 0.02365644836425781, 0.023848960876464844, 0.023938047409057618, 0.023386144638061525, 0.023298015594482423, 0.023143423080444335, 0.023015424728393553, 0.02310758399963379, 0.022743040084838868, 0.022743040084838868, 0.02262937545776367, 0.022384639739990234, 0.022402048110961914, 0.022543359756469726, 0.02267136001586914, 0.022543359756469726, 0.022590463638305663, 0.022413312911987306, 0.02251468849182129, 0.022372352600097657, 0.022536191940307617, 0.022580223083496095, 0.022455327987670897, 0.02253718376159668, 0.022559743881225586, 0.022518783569335937, 0.022353919982910156, 0.023468063354492186, 0.02273072052001953, 0.0225167350769043, 0.022666240692138673, 0.022587392807006838, 0.022557695388793944, 0.022419456481933595, 0.022979583740234375, 0.0226375675201416, 0.022574079513549804, 0.02263039970397949, 0.022649856567382814, 0.023013376235961915, 0.022584320068359375, 0.022502399444580077, 0.022550527572631835, 0.022541311264038084, 0.022582271575927734, 0.02264371109008789, 0.022633472442626954, 0.022517759323120116, 0.022525951385498046, 0.0473620491027832, 0.022525951385498046, 0.02267238426208496, 0.02266009521484375, 0.022524927139282228, 0.022616064071655274, 0.022587392807006838, 0.022130687713623046, 0.02282598304748535, 0.022890495300292968, 0.02264575958251953, 0.022579200744628908, 0.022518783569335937, 0.022551551818847656, 0.02265088081359863, 0.02282700729370117, 0.022845439910888672, 0.02262937545776367, 0.022577152252197266, 0.022571008682250978, 0.02287615966796875, 0.02270515251159668, 0.022545408248901368, 0.022587392807006838, 0.02253004837036133, 0.02262118339538574, 0.022649856567382814, 0.022642688751220705, 0.02253926467895508, 0.022557695388793944, 0.02247065544128418, 0.021695487976074217, 0.0216494083404541, 0.021728256225585937, 0.02169343948364258, 0.021741567611694337, 0.02169753646850586, 0.021777408599853516, 0.0221265926361084, 0.02168832015991211, 0.021707775115966797, 0.021751808166503905, 0.022738943099975584, 0.022576128005981445, 0.02267750358581543, 0.02270412826538086, 0.022567935943603516, 0.022503423690795898, 0.022580223083496095, 0.022568960189819336, 0.022582271575927734, 0.02268876838684082, 0.02267955207824707, 0.022681600570678712, 0.022557695388793944, 0.02259660720825195, 0.022569984436035157, 0.02267033576965332, 0.022801408767700194, 0.02265292739868164, 0.02266111946105957, 0.022509567260742186, 0.022838272094726563, 0.047782913208007816, 0.022707199096679686, 0.022584320068359375, 0.022564863204956053, 0.022674432754516603, 0.022626304626464845, 0.022611967086791994, 0.0224849910736084, 0.02268262481689453, 0.022615039825439453, 0.02254643249511719, 0.022516767501831056, 0.022648799896240236, 0.022920192718505858, 0.02280243110656738, 0.022716415405273437, 0.02269696044921875, 0.022509567260742186, 0.02259660720825195, 0.02186240005493164, 0.02168524742126465, 0.02166988754272461, 0.02171494483947754, 0.021893119812011717, 0.021777408599853516, 0.022468608856201173, 0.022656000137329102, 0.022567935943603516, 0.022509567260742186, 0.02251468849182129, 0.022665216445922853, 0.022495231628417968, 0.022585344314575196, 0.022890495300292968, 0.022707199096679686, 0.022599679946899414, 0.0224901123046875, 0.022616064071655274, 0.022360063552856444, 0.021804031372070314, 0.023768064498901367, 0.023956480026245116, 0.02288128089904785, 0.022603776931762694, 0.0226713924407959, 0.023395296096801757, 0.022510591506958007, 0.02287718391418457, 0.02255462455749512, 
0.022599679946899414, 0.022701055526733398, 0.022611967086791994, 0.022558719635009765, 0.022564863204956053, 0.022536191940307617, 0.022574079513549804, 0.022587392807006838, 0.022669343948364257, 0.022663135528564454, 0.022517759323120116, 0.022557695388793944, 0.022557695388793944, 0.02261299133300781, 0.047557632446289064, 0.022609920501708985, 0.02284339141845703, 0.022714399337768556, 0.022558687210083007, 0.02247065544128418, 0.022534143447875975, 0.02250752067565918, 0.0227061767578125, 0.022619136810302733, 0.02262937545776367, 0.022804479598999023, 0.022742015838623047, 0.022770687103271483, 0.022768640518188478, 0.022951936721801756, 0.02267852783203125, 0.022591487884521484, 0.022607872009277344, 0.022577152252197266, 0.022529024124145508, 0.02264678382873535, 0.022737920761108397, 0.022552576065063477, 0.02267136001586914, 0.022562816619873048, 0.022501375198364256, 0.022537216186523438, 0.022494207382202147, 0.02264678382873535, 0.02247987174987793, 0.022520832061767578, 0.022560768127441407, 0.02410700798034668, 0.02294988822937012, 0.022955007553100586, 0.022804479598999023, 0.022603776931762694, 0.022517759323120116, 0.02248806381225586, 0.022510591506958007, 0.022556671142578123, 0.022529024124145508, 0.022588415145874022, 0.022787071228027343, 0.022559743881225586, 0.0224901123046875, 0.022579200744628908, 0.02262118339538574, 0.022518783569335937, 0.022634496688842775, 0.022610944747924806, 0.02249113655090332, 0.02265907287597656, 0.022619136810302733, 0.022478847503662108, 0.0225167350769043, 0.023174144744873046, 0.02269388771057129, 0.022603776931762694, 0.022579200744628908, 0.022594560623168947, 0.02265292739868164, 0.046281726837158206, 0.022588415145874022, 0.022619136810302733, 0.022642688751220705, 0.022658048629760744, 0.022820863723754883, 0.022597631454467772, 0.022796287536621093, 0.022675455093383787, 0.022595584869384764, 0.02252288055419922, 0.022564863204956053, 0.0224849910736084, 0.02255462455749512, 0.022748159408569335, 0.02264678382873535, 0.02248294448852539, 0.022684671401977538, 0.02248908805847168, 0.022733823776245117, 0.022589439392089843, 0.022492160797119142, 0.02248089599609375, 0.022501375198364256, 0.022565887451171874, 0.022586368560791017, 0.02262428855895996, 0.022853599548339844, 0.022603776931762694, 0.022559743881225586, 0.02252288055419922, 0.022573055267333983, 0.022579200744628908, 0.02252390480041504, 0.022502399444580077, 0.02264473533630371, 0.022536191940307617, 0.022897663116455077, 0.022585344314575196, 0.02255564880371094, 0.022405120849609376, 0.022467584609985353, 0.022635520935058592, 0.022593536376953126, 0.0225218563079834, 0.022452224731445314, 0.022436864852905275, 0.022429695129394533, 0.02269696044921875, 0.022748159408569335, 0.02262015914916992, 0.022977535247802734, 0.022610944747924806, 0.022665216445922853, 0.022550527572631835, 0.022545440673828125, 0.022626272201538088, 0.02249830436706543, 0.02253926467895508, 0.02264371109008789, 0.022418432235717774, 0.02254745674133301, 0.022605823516845702, 0.04739276885986328, 0.02246143913269043, 0.022681600570678712, 0.02274406433105469, 0.022584320068359375, 0.022503423690795898, 0.02266828727722168, 0.02253824043273926, 0.02254643249511719, 0.022562816619873048, 0.022585344314575196, 0.022560768127441407, 0.02247987174987793, 0.022607872009277344, 0.022597631454467772, 0.022610944747924806, 0.022567935943603516, 0.022563840866088865, 0.02281881523132324, 0.022563840866088865, 0.0224849910736084, 0.02272051239013672, 0.0226375675201416, 0.02251263999938965, 
0.022478847503662108, 0.022413312911987306, 0.02251468849182129, 0.0224901123046875, 0.02249113655090332, 0.02434867286682129, 0.023407615661621094, 0.022523935317993165, 0.022494176864624023, 0.022559743881225586, 0.022478847503662108, 0.022961151123046874, 0.022486015319824217, 0.022556671142578123, 0.02263859176635742, 0.022518783569335937, 0.022518783569335937, 0.02246451187133789, 0.02289356803894043, 0.022587392807006838, 0.022529024124145508, 0.02253926467895508, 0.022483968734741212, 0.022623231887817383, 0.022536191940307617, 0.022718463897705078, 0.02246451187133789, 0.022972415924072266, 0.02262835121154785, 0.0225167350769043, 0.022565887451171874, 0.022534143447875975, 0.022614015579223632, 0.02248192024230957, 0.02244812774658203, 0.022526975631713866, 0.022551551818847656, 0.022509567260742186, 0.022915071487426757, 0.04775833511352539, 0.02229145622253418, 0.022557695388793944, 0.02253107261657715, 0.022478847503662108, 0.0224399356842041, 0.02260479927062988, 0.022494207382202147, 0.02253004837036133, 0.022792192459106447, 0.02268057632446289, 0.02263039970397949, 0.022606847763061523, 0.022649856567382814, 0.022525951385498046, 0.022591487884521484, 0.022524927139282228, 0.022495231628417968, 0.022508544921875, 0.02243174362182617, 0.02255564880371094, 0.022437887191772463, 0.02250547218322754, 0.022789119720458984, 0.022580223083496095, 0.022500352859497072, 0.022552576065063477, 0.022320127487182616, 0.022432767868041992, 0.022403072357177735, 0.02246963119506836, 0.022459392547607423, 0.0224901123046875, 0.02248089599609375, 0.022550527572631835, 0.022467584609985353, 0.022548479080200197, 0.022475776672363282, 0.022477823257446287, 0.02252288055419922, 0.0224849910736084, 0.022486015319824217, 0.022814720153808594, 0.02265500831604004, 0.022472671508789063, 0.022571008682250978, 0.02265395164489746, 0.02249830436706543, 0.022722560882568358, 0.02262118339538574, 0.022388736724853517, 0.02170572853088379, 0.021612543106079102, 0.021695487976074217, 0.022331392288208008, 0.022533119201660155, 0.022584320068359375, 0.022502399444580077, 0.022580223083496095, 0.022626304626464845, 0.022617088317871094, 0.023888896942138672, 0.023900159835815428, 0.048435199737548826, 0.022572032928466795, 0.022586368560791017, 0.022606847763061523, 0.02252390480041504, 0.02247065544128418, 0.022392831802368163, 0.022492160797119142, 0.022527999877929687, 0.02246143913269043, 0.02269900894165039, 0.022972415924072266, 0.022939647674560547, 0.02273689651489258, 0.022567935943603516, 0.02269388771057129, 0.02290380859375, 0.022779903411865234, 0.022605823516845702, 0.022666240692138673, 0.02265497589111328, 0.022666240692138673, 0.02287308883666992, 0.022775808334350587, 0.022595584869384764, 0.02246963119506836, 0.0227061767578125, 0.022598655700683593, 0.02247987174987793, 0.0224532470703125, 0.02243174362182617, 0.022486015319824217, 0.022541343688964845, 0.022189023971557618, 0.021953535079956055, 0.02270515251159668, 0.022633472442626954, 0.02249625587463379, 0.022388736724853517, 0.022459392547607423, 0.022429695129394533, 0.022640640258789063, 0.02250444793701172, 0.022399999618530272, 0.022501375198364256, 0.022605823516845702, 0.0224399356842041, 0.022534143447875975, 0.02251468849182129, 0.02254745674133301, 0.022427648544311524, 0.0224716796875, 0.022389759063720704, 0.02249318313598633, 0.022563840866088865, 0.02271334457397461, 0.02255462455749512, 0.022589439392089843, 0.022642688751220705, 0.0231014404296875, 0.02254745674133301, 0.022579200744628908, 0.02269696044921875, 
0.04771123123168945, 0.022592512130737305, 0.02264371109008789, 0.022725631713867187, 0.02244915199279785, 0.02268671989440918, 0.02250547218322754, 0.022475776672363282, 0.022564863204956053, 0.02251468849182129, 0.022571008682250978, 0.02249728012084961, 0.022548479080200197, 0.022580223083496095, 0.022772735595703125, 0.022789119720458984, 0.022508544921875, 0.022747135162353514, 0.022509567260742186, 0.022601728439331056, 0.022254592895507814, 0.022381568908691408, 0.022518783569335937, 0.02252288055419922, 0.022477823257446287, 0.02253209686279297, 0.02265292739868164, 0.02251571273803711, 0.022641664505004884, 0.0224532470703125, 0.022544384002685547, 0.022437887191772463, 0.02270207977294922, 0.022600704193115235, 0.022566911697387695, 0.02243071937561035, 0.022535167694091796, 0.02248089599609375, 0.02253824043273926, 0.022527999877929687, 0.02228531265258789, 0.02247987174987793, 0.022460416793823244, 0.02248806381225586, 0.022437887191772463, 0.02251571273803711, 0.022404096603393556, 0.022582271575927734, 0.02262118339538574, 0.02246143913269043, 0.022376447677612304, 0.02247270393371582, 0.021943296432495117, 0.02165862464904785, 0.021585920333862304, 0.02163609504699707, 0.021603328704833984, 0.021702655792236326, 0.021951488494873047, 0.02187980842590332, 0.021659648895263672, 0.02163609504699707, 0.021739519119262696, 0.045930496215820314, 0.021621759414672852, 0.02165247917175293, 0.021627904891967774, 0.0216944637298584, 0.021584896087646483, 0.02169343948364258, 0.021748735427856446, 0.021755903244018555, 0.022071296691894532, 0.021841920852661133, 0.021811199188232423, 0.022022144317626953, 0.022518783569335937, 0.022567935943603516, 0.022632448196411133, 0.02263654327392578, 0.022412288665771486, 0.022459392547607423, 0.022587392807006838, 0.022642688751220705, 0.022559743881225586, 0.022419456481933595, 0.022595584869384764, 0.022737920761108397, 0.02244812774658203, 0.022764543533325195, 0.02286591911315918, 0.022866943359375, 0.022540288925170897, 0.022541311264038084, 0.022564863204956053, 0.02249932861328125, 0.022607872009277344, 0.02301644706726074, 0.02265292739868164, 0.02348646354675293, 0.02287718391418457, 0.022573055267333983, 0.022571008682250978, 0.023392255783081056, 0.02261299133300781, 0.022563840866088865, 0.02254745674133301, 0.022725631713867187, 0.022534143447875975, 0.022619136810302733, 0.022601728439331056, 0.022498336791992188, 0.02250441551208496, 0.02248192024230957, 0.0224901123046875, 0.022467584609985353, 0.022432767868041992, 0.022517759323120116, 0.022550527572631835, 0.022475776672363282, 0.0225218563079834, 0.02246963119506836, 0.02247987174987793, 0.022467584609985353, 0.022591487884521484, 0.022592512130737305]",tokens/s,43.61682619409526,,,2,64,1,,, -4bit-awq-exllama-v2-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,tiiuae/falcon-180B,tiiuae/falcon-180B,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in 
benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 304, in hf_raise_for_status - response.raise_for_status() - File ""/usr/local/lib/python3.10/dist-packages/requests/models.py"", line 1024, in raise_for_status - raise HTTPError(http_error_msg, response=self) -requests.exceptions.HTTPError: 403 Client Error: Forbidden for url: https://huggingface.co/tiiuae/falcon-180B/resolve/main/config.json - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1722, in _get_metadata_or_catch_error - metadata = get_hf_file_metadata(url=url, proxies=proxies, timeout=etag_timeout, headers=headers) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1645, in get_hf_file_metadata - r = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 372, in _request_wrapper - response = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 396, in _request_wrapper - hf_raise_for_status(response) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 367, in hf_raise_for_status - raise HfHubHTTPError(message, response=response) from e -huggingface_hub.utils._errors.HfHubHTTPError: (Request ID: Root=1-6694991a-41428bd0086501fc7eb2e428;368fb4cb-1f2b-4e77-a5bf-224e96afc204) - -403 Forbidden: Please enable access to public gated repositories in your fine-grained token settings to view this repository.. -Cannot access content at: https://huggingface.co/tiiuae/falcon-180B/resolve/main/config.json. -If you are trying to create or update content,make sure you have a token with the `write` role. - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1221, in hf_hub_download - return _hf_hub_download_to_cache_dir( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1325, in _hf_hub_download_to_cache_dir - _raise_on_head_call_error(head_call_error, force_download, local_files_only) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1826, in _raise_on_head_call_error - raise LocalEntryNotFoundError( -huggingface_hub.utils._errors.LocalEntryNotFoundError: An error happened while trying to locate the file on the Hub and we cannot find the requested files in the local cache. 
Please check your connection and try again or make sure your Internet connection is on. - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 445, in cached_file - raise EnvironmentError( -OSError: We couldn't connect to 'https://huggingface.co' to load this file, couldn't find it in the cached files and it looks like tiiuae/falcon-180B is not the path to a directory containing a file named config.json. -Checkout your internet connection or see how to run the library in offline mode at 'https://huggingface.co/docs/transformers/installation#offline-mode'. 
- -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,2,64,1,,, -4bit-awq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,Deci/DeciLM-7B,Deci/DeciLM-7B,cuda,0,42,,,True,,,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 613, in resolve_trust_remote_code - answer = input( -EOFError: EOF when reading a line - -During handling of the above exception, another exception occurred: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run - report = scenario.run(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run - self.run_model_loading_tracking(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking - backend.load() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load - self.load_transformers_model() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 149, in load_transformers_model - self.create_no_weights_model() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 269, in create_no_weights_model - meta_model = self.automodel_loader.from_config(self.pretrained_config) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 419, in from_config - trust_remote_code = resolve_trust_remote_code( - File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 626, in resolve_trust_remote_code - raise ValueError( -ValueError: The repository for Deci/DeciLM-7B contains custom code which must be executed to correctly load the model. You can inspect the repository content at https://hf.co/Deci/DeciLM-7B. -Please pass the argument `trust_remote_code=True` to allow custom code to be run. 
- -",deci,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,2,64,1,,, -4bit-awq-exllama-v2-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,facebook/opt-2.7b,facebook/opt-2.7b,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - self.load_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3907, in from_pretrained - hf_quantizer.postprocess_model(model) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/base.py"", line 195, in postprocess_model - return self._process_model_after_weight_loading(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/quantizer_awq.py"", line 107, in _process_model_after_weight_loading - model = post_init_awq_exllama_modules(model, self.quantization_config.exllama_config) - File ""/usr/local/lib/python3.10/dist-packages/transformers/integrations/awq.py"", line 466, in post_init_awq_exllama_modules - model = exllamav2_post_init( - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllamav2.py"", line 198, in exllamav2_post_init - submodule.post_init(scratch_space=model.scratch_spaces[device]) - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllamav2.py"", line 81, in post_init - self.q_handle = exlv2_ext.make_q_matrix( -NameError: name 'exlv2_ext' is not defined - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,2,64,1,,, 
-4bit-awq-exllama-v2-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,huggyllama/llama-7b,huggyllama/llama-7b,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - self.load_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3907, in from_pretrained - hf_quantizer.postprocess_model(model) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/base.py"", line 195, in postprocess_model - return self._process_model_after_weight_loading(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/quantizer_awq.py"", line 107, in _process_model_after_weight_loading - model = post_init_awq_exllama_modules(model, self.quantization_config.exllama_config) - File ""/usr/local/lib/python3.10/dist-packages/transformers/integrations/awq.py"", line 466, in post_init_awq_exllama_modules - model = exllamav2_post_init( - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllamav2.py"", line 198, in exllamav2_post_init - submodule.post_init(scratch_space=model.scratch_spaces[device]) - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllamav2.py"", line 81, in post_init - self.q_handle = exlv2_ext.make_q_matrix( -NameError: name 'exlv2_ext' is not defined - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,2,64,1,,, 
-4bit-awq-exllama-v2-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,facebook/opt-13b,facebook/opt-13b,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - self.load_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3907, in from_pretrained - hf_quantizer.postprocess_model(model) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/base.py"", line 195, in postprocess_model - return self._process_model_after_weight_loading(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/quantizer_awq.py"", line 107, in _process_model_after_weight_loading - model = post_init_awq_exllama_modules(model, self.quantization_config.exllama_config) - File ""/usr/local/lib/python3.10/dist-packages/transformers/integrations/awq.py"", line 466, in post_init_awq_exllama_modules - model = exllamav2_post_init( - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllamav2.py"", line 198, in exllamav2_post_init - submodule.post_init(scratch_space=model.scratch_spaces[device]) - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllamav2.py"", line 81, in post_init - self.q_handle = exlv2_ext.make_q_matrix( -NameError: name 'exlv2_ext' is not defined - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,2,64,1,,, 
-4bit-awq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,EleutherAI/pythia-70m,EleutherAI/pythia-70m,cuda,0,42,,,True,,,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,gpt_neox,MB,879.88224,793.247744,0.0,163.577856,154.631168,s,1,7.23656201171875,7.23656201171875,0.0,7.23656201171875,7.23656201171875,7.23656201171875,7.23656201171875,[7.23656201171875],,kWh,5.404841131257854e-06,2.928358316981396e-06,6.526394110029088e-06,1.4859593558268336e-05,,MB,1564.123136,851.968,0.0,201.326592,187.147776,s,27,0.19863625335693358,0.007356898272479022,0.00025619945052906065,0.007291359901428223,0.007526316928863525,0.007755315256118774,0.008179292078018187,"[0.007150015830993652, 0.0074823999404907224, 0.007492767810821533, 0.007284287929534912, 0.00784275197982788, 0.00743833589553833, 0.007291359901428223, 0.007408959865570068, 0.007448703765869141, 0.0075512962341308595, 0.0071058239936828615, 0.00708025598526001, 0.00725113582611084, 0.007144959926605224, 0.007111487865447998, 0.007162303924560547, 0.007398399829864502, 0.007203199863433838, 0.007148863792419434, 0.00829753589630127, 0.007182623863220215, 0.00727782392501831, 0.007453023910522461, 0.007493919849395752, 0.007509664058685302, 0.007120031833648682, 0.0073043198585510255]",tokens/s,34797.27332341335,kWh,8.651443215660119e-08,4.7405569258726604e-08,1.5575755236338534e-07,2.896775537787132e-07,tokens/kWh,883741238.009626,MB,1611.984896,851.968,0.0,201.326592,187.150336,s,27,10.028083068847653,0.37141048403139465,0.006531494079224864,0.37081500244140625,0.3799626342773437,0.3807436309814453,0.3812587579345703,"[0.3808787536621094, 0.3742617492675781, 0.37764825439453126, 0.38139227294921874, 0.37877117919921877, 0.3804283447265625, 0.37938565063476565, 0.3796521606445312, 0.3774781799316406, 0.36283648681640623, 0.36218255615234374, 0.3697691650390625, 0.36315203857421874, 0.36662271118164064, 0.36338876342773435, 0.36536968994140623, 0.37471875, 0.37426617431640624, 0.3682018127441406, 0.36572244262695314, 0.3629099731445313, 0.37091860961914064, 0.37081500244140625, 0.378114013671875, 0.36520025634765624, 0.3661914672851562, 0.3678066101074219]",tokens/s,169.6236447506278,kWh,4.329698776305399e-06,2.3724482085634043e-06,6.658671767488966e-06,1.3360818752357768e-05,tokens/kWh,4715279.891726879,,s,1701,10.017297276020052,0.005889063654332775,0.00015414575374562487,0.005879807949066162,0.006079487800598145,0.006116352081298828,0.006291456222534179,"[0.0058122239112854005, 0.005808127880096436, 0.006130688190460205, 0.006116352081298828, 0.006078464031219482, 0.006052864074707031, 0.006050816059112549, 0.006052864074707031, 0.00592793607711792, 0.005921792030334472, 0.0058726401329040525, 0.006228991985321045, 0.006127615928649902, 0.006138879776000977, 0.006147071838378906, 0.006154304027557373, 0.006102975845336914, 0.006243328094482422, 0.00622489595413208, 0.006247424125671387, 0.006118400096893311, 0.006107135772705078, 0.006012928009033203, 0.005970975875854492, 0.006010848045349121, 0.006109183788299561, 0.0060631041526794435, 
0.00608358383178711, 0.005932032108306885, 0.005932032108306885, 0.005912576198577881, 0.005941247940063477, 0.005994495868682862, 0.006048768043518066, 0.006011903762817383, 0.006087679862976075, 0.006021120071411133, 0.006064127922058105, 0.0062975997924804685, 0.006127615928649902, 0.006100992202758789, 0.006050816059112549, 0.006051839828491211, 0.006054912090301514, 0.006053887844085694, 0.006046783924102783, 0.006047679901123047, 0.006031360149383545, 0.005953536033630371, 0.005901311874389649, 0.005932032108306885, 0.0059064321517944334, 0.005922815799713135, 0.005922815799713135, 0.005947391986846923, 0.005921792030334472, 0.006060031890869141, 0.005941247940063477, 0.006119423866271972, 0.006014976024627685, 0.006091775894165039, 0.006032383918762207, 0.00608460807800293, 0.006048768043518066, 0.006072319984436035, 0.006024191856384278, 0.00608460807800293, 0.006912000179290771, 0.006263872146606445, 0.006043583869934082, 0.006105088233947754, 0.006077439785003662, 0.00602726411819458, 0.005966847896575928, 0.005904384136199951, 0.005948416233062744, 0.0059248957633972164, 0.005924831867218017, 0.0059269118309021, 0.005891071796417236, 0.005790719985961914, 0.005720064163208007, 0.005746687889099121, 0.005691391944885254, 0.005839871883392334, 0.005701632022857666, 0.005702655792236328, 0.005798912048339844, 0.005732351779937744, 0.005763072013854981, 0.005730303764343261, 0.00576204776763916, 0.005726208209991455, 0.005772287845611572, 0.005768191814422607, 0.005868576049804688, 0.005983200073242188, 0.00591974401473999, 0.005859327793121338, 0.005883903980255127, 0.0059391999244689945, 0.005909503936767578, 0.005925888061523437, 0.005895167827606201, 0.005993472099304199, 0.006021120071411133, 0.006102015972137451, 0.006031360149383545, 0.006093823909759521, 0.00602726411819458, 0.006127615928649902, 0.005922815799713135, 0.005942272186279297, 0.005869631767272949, 0.005921728134155273, 0.005890048027038574, 0.005945343971252442, 0.00582144021987915, 0.005795839786529541, 0.005787648200988769, 0.005980160236358643, 0.006061056137084961, 0.006108160018920898, 0.0059688959121704105, 0.005955584049224853, 0.005917695999145508, 0.006043647766113281, 0.0060702719688415525, 0.006050816059112549, 0.006047743797302246, 0.006080512046813965, 0.006021120071411133, 0.006079487800598145, 0.00602726411819458, 0.006053887844085694, 0.006047743797302246, 0.006097919940948486, 0.00602729606628418, 0.006104032039642334, 0.005941247940063477, 0.006055935859680176, 0.0060405759811401364, 0.006109183788299561, 0.00607539176940918, 0.006071296215057373, 0.006039552211761475, 0.006082560062408447, 0.006034431934356689, 0.005963776111602783, 0.005915647983551026, 0.0059781441688537594, 0.005922783851623535, 0.00591871976852417, 0.005937151908874512, 0.0059136319160461425, 0.005929952144622803, 0.005913599967956543, 0.005891071796417236, 0.005947391986846923, 0.005940224170684814, 0.006220831871032715, 0.006076384067535401, 0.005963776111602783, 0.005944320201873779, 0.006067200183868408, 0.005944320201873779, 0.005946400165557861, 0.005857247829437256, 0.005783552169799804, 0.005943295955657959, 0.006095871925354004, 0.006022143840789795, 0.005972991943359375, 0.0059351038932800295, 0.005954559803009033, 0.005956672191619873, 0.0059534721374511716, 0.005959680080413818, 0.005955584049224853, 0.005953536033630371, 0.006003712177276611, 0.005883903980255127, 0.005873663902282715, 0.005909503936767578, 0.00591974401473999, 0.005951519966125488, 0.005920735836029053, 0.005942272186279297, 
0.005958655834197998, 0.005951488018035888, 0.005920767784118652, 0.006057983875274659, 0.006032383918762207, 0.005950463771820068, 0.006114304065704346, 0.006397952079772949, 0.006032383918762207, 0.0060364799499511715, 0.0060631041526794435, 0.006069248199462891, 0.006411263942718506, 0.006127615928649902, 0.006178815841674804, 0.0060364799499511715, 0.006178815841674804, 0.006032383918762207, 0.006138912200927734, 0.006027232170104981, 0.006009856224060059, 0.005952511787414551, 0.006024191856384278, 0.005909503936767578, 0.006038527965545654, 0.005988351821899414, 0.006127615928649902, 0.005913599967956543, 0.0060631041526794435, 0.005883903980255127, 0.005996543884277344, 0.005894144058227539, 0.006013951778411865, 0.005799935817718506, 0.005972991943359375, 0.0059269118309021, 0.005993472099304199, 0.005916672229766846, 0.005970943927764893, 0.005934080123901367, 0.005994495868682862, 0.006205440044403076, 0.005983232021331787, 0.005945343971252442, 0.006203392028808594, 0.006121471881866455, 0.006150144100189209, 0.006292479991912842, 0.006131711959838867, 0.006049791812896729, 0.005985280036926269, 0.005928959846496582, 0.005967872142791748, 0.006155263900756836, 0.005961728096008301, 0.006001664161682129, 0.006094848155975342, 0.006089727878570556, 0.00606822395324707, 0.006368256092071533, 0.006093855857849121, 0.0061255359649658205, 0.005997568130493164, 0.0059699201583862304, 0.00605072021484375, 0.00616755199432373, 0.006014976024627685, 0.006141952037811279, 0.006412288188934326, 0.0063508481979370115, 0.006421567916870118, 0.006120384216308594, 0.0061265921592712404, 0.006031360149383545, 0.006014976024627685, 0.00608358383178711, 0.006048768043518066, 0.006077439785003662, 0.006064127922058105, 0.006139904022216797, 0.005988351821899414, 0.0060067839622497555, 0.005950463771820068, 0.005986303806304932, 0.005932032108306885, 0.00602726411819458, 0.005921792030334472, 0.005955584049224853, 0.005932032108306885, 0.006031360149383545, 0.005896192073822021, 0.005955584049224853, 0.005895232200622559, 0.005978047847747803, 0.005933055877685547, 0.005982207775115967, 0.006011903762817383, 0.005928959846496582, 0.005915647983551026, 0.0059996161460876465, 0.0058951997756958, 0.00603542423248291, 0.005933055877685547, 0.005928959846496582, 0.005914624214172363, 0.005971968173980713, 0.006088704109191895, 0.005974016189575195, 0.005922815799713135, 0.005942272186279297, 0.00591871976852417, 0.005938176155090332, 0.005961728096008301, 0.0060702719688415525, 0.005953536033630371, 0.005892096042633056, 0.005874688148498535, 0.0059361281394958495, 0.006051839828491211, 0.005948416233062744, 0.005940224170684814, 0.005913599967956543, 0.005951488018035888, 0.005912576198577881, 0.005964799880981446, 0.006103040218353272, 0.005940224170684814, 0.006073344230651856, 0.006072319984436035, 0.00601087999343872, 0.005908480167388916, 0.005981184005737304, 0.005910528182983398, 0.005976064205169678, 0.005961728096008301, 0.006033408164978027, 0.0059054079055786135, 0.006105088233947754, 0.005933055877685547, 0.00608460807800293, 0.005904384136199951, 0.0061562881469726565, 0.00603545618057251, 0.00603545618057251, 0.0059054079055786135, 0.0060364799499511715, 0.005943295955657959, 0.0061562881469726565, 0.006021120071411133, 0.006130688190460205, 0.005996543884277344, 0.006136832237243653, 0.006005760192871094, 0.006218751907348633, 0.005888000011444092, 0.006032383918762207, 0.005819392204284668, 0.0059770879745483394, 0.0059023361206054685, 0.006227968215942382, 0.006033408164978027, 
0.0061265921592712404, 0.006050816059112549, 0.006127615928649902, 0.005986303806304932, 0.006105088233947754, 0.006054912090301514, 0.006093823909759521, 0.006017024040222168, 0.006119423866271972, 0.006017024040222168, 0.00613478422164917, 0.006060031890869141, 0.006113279819488526, 0.006069248199462891, 0.006109183788299561, 0.006231040000915527, 0.006112256050109863, 0.006043647766113281, 0.006087679862976075, 0.005929984092712402, 0.005974016189575195, 0.005932032108306885, 0.005967872142791748, 0.005915647983551026, 0.006049791812896729, 0.006024191856384278, 0.006137856006622314, 0.005957632064819336, 0.005962751865386963, 0.006014976024627685, 0.006554624080657959, 0.006087679862976075, 0.00601804780960083, 0.006085631847381592, 0.005992447853088379, 0.006039552211761475, 0.005928959846496582, 0.0060928001403808595, 0.005984255790710449, 0.006117440223693847, 0.00600057601928711, 0.006140927791595459, 0.0060067839622497555, 0.0060364799499511715, 0.005911551952362061, 0.0060590081214904785, 0.006011903762817383, 0.006088704109191895, 0.0060067839622497555, 0.006111231803894043, 0.006030335903167725, 0.006135807991027832, 0.005972991943359375, 0.006112256050109863, 0.006064159870147705, 0.006007775783538818, 0.005954559803009033, 0.005988351821899414, 0.005954559803009033, 0.005997568130493164, 0.00591871976852417, 0.006033408164978027, 0.005891071796417236, 0.00602726411819458, 0.005896192073822021, 0.006013951778411865, 0.005898240089416504, 0.005985280036926269, 0.00587062406539917, 0.005990367889404297, 0.005859327793121338, 0.00601907205581665, 0.005883903980255127, 0.006012928009033203, 0.005888000011444092, 0.005992447853088379, 0.005909503936767578, 0.005920928001403808, 0.0058765759468078615, 0.006132768154144287, 0.006045663833618164, 0.006103040218353272, 0.006017024040222168, 0.006102015972137451, 0.006030367851257324, 0.006113247871398926, 0.006021120071411133, 0.0061082239151000976, 0.006022079944610596, 0.005994495868682862, 0.005948416233062744, 0.005946368217468262, 0.0060364799499511715, 0.006017024040222168, 0.006045695781707764, 0.00602726411819458, 0.0060815677642822264, 0.0060292801856994625, 0.006055935859680176, 0.006009856224060059, 0.006077439785003662, 0.0060282878875732426, 0.006038527965545654, 0.006031360149383545, 0.0060590081214904785, 0.0060067839622497555, 0.0059770879745483394, 0.006069248199462891, 0.006093823909759521, 0.005997568130493164, 0.005922880172729492, 0.005966784000396729, 0.005942272186279297, 0.005949440002441406, 0.0059361281394958495, 0.006062079906463623, 0.00602623987197876, 0.005986303806304932, 0.005846047878265381, 0.005978079795837402, 0.005914624214172363, 0.006014976024627685, 0.005988351821899414, 0.006121471881866455, 0.006023168087005615, 0.006127615928649902, 0.006042623996734619, 0.006128640174865723, 0.006085631847381592, 0.006087679862976075, 0.006106112003326416, 0.006079487800598145, 0.005982207775115967, 0.005948416233062744, 0.005964799880981446, 0.005941247940063477, 0.0059658241271972655, 0.006045695781707764, 0.00608358383178711, 0.005942272186279297, 0.005946368217468262, 0.005938176155090332, 0.005923840045928955, 0.005972991943359375, 0.006111231803894043, 0.006073344230651856, 0.006069248199462891, 0.006055935859680176, 0.006082560062408447, 0.00608358383178711, 0.006069248199462891, 0.006080512046813965, 0.0060590081214904785, 0.005953536033630371, 0.00592793607711792, 0.006017024040222168, 0.0060590081214904785, 0.005876736164093017, 0.0059361281394958495, 0.005916672229766846, 0.005946368217468262, 
0.006100992202758789, 0.006086656093597412, 0.00602623987197876, 0.006089727878570556, 0.00602623987197876, 0.006081535816192627, 0.006015999794006348, 0.006104063987731933, 0.005958655834197998, 0.005970943927764893, 0.005896192073822021, 0.005993472099304199, 0.005884960174560547, 0.006007775783538818, 0.005874688148498535, 0.00608358383178711, 0.006032383918762207, 0.006549503803253174, 0.006042623996734619, 0.0060661759376525876, 0.005893119812011719, 0.00596998405456543, 0.005892032146453857, 0.005961728096008301, 0.0059699201583862304, 0.0060999679565429685, 0.0060293121337890625, 0.006064127922058105, 0.006013951778411865, 0.006073344230651856, 0.006195199966430664, 0.006096896171569824, 0.00601804780960083, 0.006086656093597412, 0.005986303806304932, 0.00602623987197876, 0.005928959846496582, 0.005792768001556397, 0.005668863773345947, 0.00568012809753418, 0.005754879951477051, 0.0058009600639343266, 0.005761023998260498, 0.005689343929290771, 0.005734399795532226, 0.005705728054046631, 0.0057825279235839844, 0.005682176113128662, 0.005789696216583252, 0.005757952213287353, 0.005786623954772949, 0.005785600185394287, 0.006085631847381592, 0.006790143966674805, 0.006866943836212158, 0.006107135772705078, 0.006097919940948486, 0.006055935859680176, 0.006064127922058105, 0.006030335903167725, 0.005921792030334472, 0.005879807949066162, 0.005764095783233642, 0.005758975982666016, 0.005814271926879883, 0.005765120029449463, 0.005794816017150879, 0.005758975982666016, 0.005775360107421875, 0.005767168045043946, 0.00570470380783081, 0.005715968132019043, 0.00570470380783081, 0.005789696216583252, 0.00576204776763916, 0.00572211217880249, 0.0057190399169921875, 0.005698560237884521, 0.005725279808044433, 0.005743519783020019, 0.0057077760696411135, 0.005738495826721191, 0.005725183963775635, 0.005843967914581299, 0.005868544101715088, 0.005852159976959229, 0.005706751823425293, 0.005728256225585937, 0.005760000228881836, 0.00573747205734253, 0.005699615955352783, 0.005738463878631592, 0.005691391944885254, 0.005693439960479736, 0.005720064163208007, 0.005693439960479736, 0.005730303764343261, 0.005684224128723145, 0.005675007820129394, 0.005709887981414795, 0.005691328048706055, 0.005723135948181152, 0.005696512222290039, 0.0058009600639343266, 0.005684224128723145, 0.005705728054046631, 0.00570470380783081, 0.0056852478981018065, 0.005705728054046631, 0.005679103851318359, 0.00572108793258667, 0.005692416191101074, 0.005708799839019775, 0.0057283201217651365, 0.0057108159065246585, 0.005716959953308106, 0.005682176113128662, 0.005728256225585937, 0.005754879951477051, 0.005760000228881836, 0.005784575939178467, 0.0057825279235839844, 0.005702655792236328, 0.005608448028564453, 0.005734399795532226, 0.005702655792236328, 0.0057077760696411135, 0.005713920116424561, 0.005686272144317627, 0.0057149438858032225, 0.005705728054046631, 0.005726208209991455, 0.005751808166503906, 0.005788671970367432, 0.005777408123016357, 0.0057825279235839844, 0.005787648200988769, 0.00576204776763916, 0.005796864032745362, 0.005780479907989502, 0.005789696216583252, 0.005795839786529541, 0.00576204776763916, 0.005775360107421875, 0.005773312091827393, 0.005791776180267334, 0.005674975872039795, 0.005726208209991455, 0.005830656051635743, 0.005775360107421875, 0.005787648200988769, 0.005786623954772949, 0.00571289587020874, 0.0056852478981018065, 0.005716991901397705, 0.005690400123596191, 0.005690336227416992, 0.005689343929290771, 0.005690368175506591, 0.005710847854614258, 0.005692416191101074, 
0.005709824085235596, 0.005703680038452149, 0.005676032066345215, 0.005701632022857666, 0.005777408123016357, 0.005673984050750733, 0.005659647941589355, 0.005738495826721191, 0.005762112140655517, 0.005709760189056397, 0.005732351779937744, 0.005700607776641845, 0.005758975982666016, 0.005769216060638428, 0.00582144021987915, 0.005780479907989502, 0.005814367771148681, 0.005765024185180664, 0.005805056095123291, 0.005801983833312988, 0.005799935817718506, 0.005788671970367432, 0.005744639873504639, 0.005801983833312988, 0.006251520156860352, 0.006153215885162353, 0.006023200035095215, 0.00613372802734375, 0.006239232063293457, 0.006292479991912842, 0.006076416015625, 0.006081535816192627, 0.00601804780960083, 0.006056960105895996, 0.006021120071411133, 0.0059688959121704105, 0.006073344230651856, 0.006074368000030517, 0.006009856224060059, 0.006041600227355957, 0.005924863815307617, 0.005974016189575195, 0.005888000011444092, 0.00591974401473999, 0.005891071796417236, 0.005924863815307617, 0.005687295913696289, 0.005698560237884521, 0.005713920116424561, 0.005752831935882568, 0.005700607776641845, 0.005731328010559082, 0.005745664119720459, 0.005725183963775635, 0.0057487359046936035, 0.005720064163208007, 0.005815296173095703, 0.005832704067230224, 0.0058122239112854005, 0.005794816017150879, 0.005804031848907471, 0.005826560020446778, 0.005804031848907471, 0.005816319942474365, 0.005797887802124023, 0.005852159976959229, 0.005787648200988769, 0.005817344188690185, 0.005773312091827393, 0.005767168045043946, 0.005818367958068848, 0.005780479907989502, 0.005834752082824707, 0.005765120029449463, 0.005814271926879883, 0.005733376026153565, 0.005751808166503906, 0.0057118721008300784, 0.005713920116424561, 0.005732351779937744, 0.0057118721008300784, 0.005745664119720459, 0.005697535991668701, 0.005751808166503906, 0.005706751823425293, 0.005752831935882568, 0.005836800098419189, 0.005774335861206055, 0.0057877120971679685, 0.005653439998626709, 0.005733376026153565, 0.00568832015991211, 0.005700607776641845, 0.00571289587020874, 0.005687295913696289, 0.0057825279235839844, 0.005761023998260498, 0.005807104110717773, 0.005699584007263184, 0.005697535991668701, 0.005736447811126709, 0.00572108793258667, 0.005715968132019043, 0.005668863773345947, 0.005735424041748047, 0.005689343929290771, 0.005695487976074219, 0.005735424041748047, 0.005693439960479736, 0.005746687889099121, 0.005706751823425293, 0.005787648200988769, 0.005723135948181152, 0.00581324815750122, 0.005796864032745362, 0.005784575939178467, 0.00581324815750122, 0.005767168045043946, 0.005799935817718506, 0.005784575939178467, 0.00586240005493164, 0.005693439960479736, 0.005723135948181152, 0.0058429441452026365, 0.005795839786529541, 0.005854207992553711, 0.005791744232177734, 0.00582041597366333, 0.005774335861206055, 0.0058419198989868165, 0.005767168045043946, 0.005807104110717773, 0.005724160194396972, 0.0057118721008300784, 0.005790719985961914, 0.0057825279235839844, 0.005793791770935058, 0.005768191814422607, 0.005796864032745362, 0.005764095783233642, 0.005818367958068848, 0.0058757119178771975, 0.0057077760696411135, 0.005766143798828125, 0.0057077760696411135, 0.005743616104125977, 0.005713920116424561, 0.005736447811126709, 0.0058009600639343266, 0.005768191814422607, 0.005742591857910156, 0.005761023998260498, 0.005790719985961914, 0.005696512222290039, 0.005865471839904785, 0.005769216060638428, 0.005755904197692871, 0.005713920116424561, 0.005696512222290039, 0.005744639873504639, 0.00577023983001709, 
0.0058388481140136715, 0.005764095783233642, 0.005819392204284668, 0.005761023998260498, 0.00572211217880249, 0.005758975982666016, 0.005733376026153565, 0.005752831935882568, 0.005708799839019775, 0.005751808166503906, 0.0057794561386108395, 0.005783552169799804, 0.005774335861206055, 0.005728256225585937, 0.00566374397277832, 0.005730303764343261, 0.005751808166503906, 0.005693439960479736, 0.005733376026153565, 0.005699584007263184, 0.006346752166748047, 0.006034431934356689, 0.006053887844085694, 0.005992479801177978, 0.006052832126617432, 0.0059955201148986816, 0.006049791812896729, 0.00597811222076416, 0.006078464031219482, 0.005975039958953857, 0.00592793607711792, 0.005830656051635743, 0.0059269118309021, 0.005840896129608154, 0.005917695999145508, 0.005891071796417236, 0.005900288105010986, 0.005856256008148194, 0.005725183963775635, 0.005732351779937744, 0.00572211217880249, 0.005730303764343261, 0.005725183963775635, 0.00572108793258667, 0.005706751823425293, 0.005745664119720459, 0.005746687889099121, 0.0057292799949646, 0.005774335861206055, 0.005706751823425293, 0.005733376026153565, 0.00581324815750122, 0.0057794561386108395, 0.005789696216583252, 0.005768191814422607, 0.005783552169799804, 0.005752831935882568, 0.005704736232757569, 0.00577839994430542, 0.005755904197692871, 0.005763072013854981, 0.005694464206695556, 0.005803008079528809, 0.005696512222290039, 0.0057487359046936035, 0.005699584007263184, 0.00572214412689209, 0.005730271816253662, 0.005666816234588623, 0.005733376026153565, 0.005691391944885254, 0.005757952213287353, 0.005694464206695556, 0.0057190399169921875, 0.005743616104125977, 0.005740543842315674, 0.006284287929534912, 0.006064127922058105, 0.005836800098419189, 0.005803040027618408, 0.005752799987792968, 0.005742591857910156, 0.005750783920288086, 0.005755904197692871, 0.005706751823425293, 0.005755904197692871, 0.005706751823425293, 0.005728256225585937, 0.005753856182098388, 0.005750783920288086, 0.005730303764343261, 0.005752831935882568, 0.005810175895690918, 0.0057825279235839844, 0.0057794561386108395, 0.0057784318923950195, 0.0057652158737182616, 0.005723040103912353, 0.005835775852203369, 0.005676032066345215, 0.005798912048339844, 0.005766143798828125, 0.0057825279235839844, 0.005788671970367432, 0.005787648200988769, 0.005753856182098388, 0.005690368175506591, 0.005753856182098388, 0.005686272144317627, 0.005708799839019775, 0.005733376026153565, 0.005725183963775635, 0.005735424041748047, 0.0057415680885314945, 0.005807104110717773, 0.005781536102294922, 0.005781472206115723, 0.005791744232177734, 0.0057825279235839844, 0.005801983833312988, 0.005768191814422607, 0.0058091521263122555, 0.005772287845611572, 0.005766143798828125, 0.005790719985961914, 0.005768191814422607, 0.005797887802124023, 0.005758975982666016, 0.005829631805419922, 0.005739520072937012, 0.005791744232177734, 0.005773312091827393, 0.005758975982666016, 0.005705728054046631, 0.005695487976074219, 0.005751840114593506, 0.005690336227416992, 0.005751808166503906, 0.005696512222290039, 0.005742591857910156, 0.005747712135314941, 0.005643263816833496, 0.005745664119720459, 0.005706751823425293, 0.005732351779937744, 0.005705728054046631, 0.005708799839019775, 0.005699584007263184, 0.00572211217880249, 0.0057190399169921875, 0.005715968132019043, 0.00571289587020874, 0.005705728054046631, 0.005700607776641845, 0.005746687889099121, 0.005746687889099121, 0.005816319942474365, 0.005764095783233642, 0.005833727836608887, 0.005758975982666016, 0.005816319942474365, 
0.005690368175506591, 0.00581324815750122, 0.0060928001403808595, 0.006037504196166992, 0.005990399837493897, 0.006043647766113281, 0.0059054079055786135, 0.005967872142791748, 0.005884928226470947, 0.005929984092712402, 0.005884928226470947, 0.005895167827606201, 0.005932032108306885, 0.005882880210876465, 0.005929984092712402, 0.005984255790710449, 0.0061296639442443845, 0.0059688959121704105, 0.005938176155090332, 0.005882880210876465, 0.005921792030334472, 0.005763072013854981, 0.0056780800819396975, 0.005706751823425293, 0.005694464206695556, 0.005728256225585937, 0.005726208209991455, 0.00582041597366333, 0.005773344039916992, 0.005825503826141358, 0.0057784318923950195, 0.005822527885437012, 0.005772223949432373, 0.005818367958068848, 0.005726208209991455, 0.005765120029449463, 0.005815296173095703, 0.0058091521263122555, 0.005715968132019043, 0.005718016147613526, 0.006057983875274659, 0.006086656093597412, 0.006051839828491211, 0.00601910400390625, 0.006109151840209961, 0.006034431934356689, 0.006038527965545654, 0.006044672012329101, 0.00602726411819458, 0.006009856224060059, 0.006054912090301514, 0.006042623996734619, 0.006038527965545654, 0.00602623987197876, 0.006007808208465576, 0.005985280036926269, 0.006085631847381592, 0.006001696109771728, 0.006063072204589843, 0.005993472099304199, 0.006052864074707031, 0.006001664161682129, 0.006053887844085694, 0.00601907205581665, 0.005974016189575195, 0.006025216102600098, 0.0060405759811401364, 0.00601087999343872, 0.006079487800598145, 0.0060067839622497555, 0.0059688959121704105, 0.005992479801177978, 0.006115295886993408, 0.0059064321517944334, 0.006064127922058105, 0.006011903762817383, 0.006033408164978027, 0.005876736164093017, 0.005787648200988769, 0.005801983833312988, 0.005794816017150879, 0.0057190399169921875, 0.005699584007263184, 0.00572211217880249, 0.00568832015991211, 0.005743616104125977, 0.005696512222290039, 0.005810175895690918, 0.005761023998260498, 0.005805056095123291, 0.005769216060638428, 0.0058122239112854005, 0.005783552169799804, 0.005796864032745362, 0.005747712135314941, 0.005708799839019775, 0.005733376026153565, 0.005708799839019775, 0.005908512115478516, 0.005823455810546875, 0.0060026879310607914, 0.00611737585067749, 0.006238207817077636, 0.006079487800598145, 0.006089727878570556, 0.005945343971252442, 0.006004799842834472, 0.005918655872344971, 0.0059688959121704105, 0.006030335903167725, 0.00608358383178711, 0.006003712177276611, 0.006077439785003662, 0.0059985918998718265, 0.006097919940948486, 0.005986303806304932, 0.006103040218353272, 0.005981184005737304, 0.006048768043518066, 0.006003712177276611, 0.006053887844085694, 0.005993472099304199, 0.006041600227355957, 0.006022143840789795, 0.006069248199462891, 0.006004735946655273, 0.006057983875274659, 0.006047743797302246, 0.006065152168273926, 0.006000639915466309, 0.00606822395324707, 0.006020095825195313, 0.006054912090301514, 0.00602729606628418, 0.0060507841110229495, 0.005984255790710449, 0.0060405759811401364, 0.005993472099304199, 0.006004735946655273, 0.0058757119178771975, 0.005791744232177734, 0.00577942419052124, 0.005791744232177734, 0.005788671970367432, 0.005765120029449463, 0.005961728096008301, 0.0056555519104003905, 0.00582041597366333, 0.005766143798828125, 0.005831679821014404, 0.005706783771514893, 0.005768159866333008, 0.0056852478981018065, 0.00573747205734253, 0.00572108793258667, 0.0056852478981018065, 0.005743616104125977, 0.005689343929290771, 0.005840896129608154, 0.005766143798828125, 0.005758975982666016, 
0.005705728054046631, 0.005897215843200684, 0.005752831935882568, 0.005735424041748047, 0.00573747205734253, 0.0057415680885314945, 0.005734399795532226, 0.005720064163208007, 0.005703680038452149, 0.005917695999145508, 0.006057983875274659, 0.006060031890869141, 0.005888000011444092, 0.005970975875854492, 0.005881824016571045, 0.005946368217468262, 0.005959680080413818, 0.006042623996734619, 0.005971968173980713, 0.005922815799713135, 0.005899263858795166, 0.0059023361206054685, 0.005909503936767578, 0.005937151908874512, 0.00582860803604126, 0.0057415680885314945, 0.005899263858795166, 0.005801983833312988, 0.005703680038452149, 0.00566476821899414, 0.0057497601509094234, 0.005698560237884521, 0.0057487359046936035, 0.005689343929290771, 0.005747712135314941, 0.005686272144317627, 0.005716991901397705, 0.005790719985961914, 0.005733471870422363, 0.005976992130279541, 0.006291456222534179, 0.006707200050354004, 0.006336512088775635, 0.0062904319763183595, 0.0060999679565429685, 0.006023168087005615, 0.0060293121337890625, 0.00601907205581665, 0.0059996161460876465, 0.005889023780822754, 0.005909503936767578, 0.006008831977844238, 0.005889023780822754, 0.0057712640762329105, 0.005765120029449463, 0.005745664119720459, 0.005736447811126709, 0.005794816017150879, 0.005795839786529541, 0.0057415680885314945, 0.005720064163208007, 0.005753856182098388, 0.005706751823425293, 0.005783552169799804, 0.00573747205734253, 0.0057242240905761715, 0.005731264114379883, 0.005684224128723145, 0.005750783920288086, 0.005631999969482422, 0.005695487976074219, 0.005696512222290039, 0.005781504154205322, 0.005705728054046631, 0.005744703769683838, 0.005714879989624023, 0.005725183963775635, 0.005738495826721191, 0.005806079864501953, 0.005825535774230957, 0.005775360107421875, 0.005814271926879883, 0.005780479907989502, 0.005794816017150879, 0.005816319942474365, 0.005792768001556397, 0.005823488235473633, 0.005811200141906738, 0.005826560020446778, 0.005790719985961914, 0.005799935817718506, 0.005736447811126709, 0.005715968132019043, 0.005773344039916992, 0.005760992050170898, 0.005814271926879883, 0.005753856182098388, 0.005830719947814942, 0.005772223949432373, 0.005848063945770264, 0.005742591857910156, 0.005700607776641845, 0.00574675178527832, 0.005688255786895752, 0.0057712640762329105, 0.0056852478981018065, 0.005746687889099121, 0.005843967914581299, 0.005761023998260498, 0.005832704067230224, 0.005763072013854981, 0.005807104110717773, 0.005652480125427246, 0.005747712135314941, 0.005718016147613526, 0.0057190399169921875, 0.00572108793258667, 0.0057190399169921875, 0.005734399795532226, 0.005723135948181152, 0.0057190399169921875, 0.0057190399169921875, 0.005723135948181152, 0.0057487359046936035, 0.005720064163208007, 0.005756927967071533, 0.005731328010559082, 0.005844992160797119, 0.005805056095123291, 0.005806079864501953, 0.0058122239112854005, 0.005790719985961914, 0.00582860803604126, 0.005774335861206055, 0.005757952213287353, 0.005713920116424561, 0.005755904197692871, 0.00570470380783081, 0.005760000228881836, 0.005764095783233642, 0.005731328010559082, 0.005743616104125977, 0.005694464206695556, 0.005773312091827393, 0.005745664119720459, 0.005850111961364746, 0.005757952213287353, 0.005750783920288086, 0.005740543842315674, 0.005763072013854981, 0.005753856182098388, 0.005767168045043946, 0.005736447811126709, 0.005716991901397705, 0.005705728054046631, 0.005637119770050049, 0.005701632022857666, 0.005764095783233642, 0.005715968132019043, 0.005736447811126709, 0.005739520072937012, 
0.005726208209991455, 0.005752831935882568, 0.005761023998260498, 0.00582041597366333, 0.005777472019195556, 0.005838784217834472, 0.005769216060638428, 0.005826560020446778, 0.005815296173095703, 0.005797887802124023, 0.005811200141906738, 0.0057794561386108395, 0.005835775852203369, 0.005775360107421875, 0.005947391986846923, 0.0059955201148986816, 0.006164480209350586, 0.00601087999343872, 0.006072319984436035, 0.005984255790710449, 0.0060631041526794435, 0.00601087999343872, 0.006044672012329101, 0.006009856224060059, 0.005908480167388916, 0.005718016147613526, 0.005761023998260498, 0.005715968132019043, 0.005781504154205322, 0.0057077760696411135, 0.005720064163208007, 0.005746687889099121, 0.005785600185394287, 0.005836800098419189, 0.005780479907989502, 0.00582041597366333, 0.005677055835723877, 0.005840896129608154, 0.005784575939178467, 0.005849088191986084, 0.005786623954772949, 0.00582041597366333, 0.00582144021987915, 0.005792768001556397, 0.005763072013854981, 0.005734399795532226, 0.005758975982666016, 0.005747744083404541, 0.005752799987792968, 0.005752863883972168, 0.005696512222290039, 0.005750751972198486, 0.005710847854614258, 0.006014976024627685, 0.006071296215057373, 0.0060364799499511715, 0.006025216102600098, 0.006044672012329101, 0.006011903762817383, 0.0060282878875732426, 0.005903359889984131, 0.005945375919342041, 0.005910496234893799, 0.005957632064819336, 0.005916672229766846, 0.005972991943359375, 0.006031360149383545, 0.006057983875274659, 0.006008831977844238, 0.006102015972137451, 0.006030335903167725, 0.006043647766113281, 0.006022143840789795, 0.006049791812896729, 0.005910528182983398, 0.00586240005493164, 0.005718016147613526, 0.005675007820129394, 0.005696576118469238, 0.0057363839149475095, 0.005750783920288086, 0.005750783920288086, 0.00582041597366333, 0.005776383876800537, 0.005751808166503906, 0.005824512004852295, 0.005824512004852295, 0.00582041597366333, 0.005787648200988769, 0.005775360107421875, 0.005713920116424561, 0.005772287845611572, 0.005734399795532226, 0.005758975982666016, 0.00572211217880249, 0.005710847854614258, 0.005867519855499268, 0.005799935817718506, 0.005793791770935058, 0.005696512222290039, 0.005873663902282715, 0.00582041597366333, 0.0058091521263122555, 0.005856256008148194, 0.005818367958068848, 0.005798912048339844, 0.005815296173095703, 0.005857279777526855, 0.005859327793121338, 0.006108160018920898, 0.0060405759811401364, 0.0059770879745483394, 0.005889023780822754, 0.006001664161682129, 0.006000639915466309, 0.005925888061523437, 0.005891071796417236, 0.00613478422164917, 0.006074368000030517, 0.006050816059112549, 0.005920767784118652, 0.0059361281394958495, 0.005921792030334472, 0.0059391999244689945, 0.006000671863555908, 0.0060507841110229495, 0.006088704109191895, 0.005928959846496582, 0.00597811222076416, 0.006052864074707031, 0.00602726411819458, 0.005952511787414551, 0.005907455921173096, 0.005943295955657959, 0.006011903762817383, 0.006077439785003662, 0.0060293121337890625, 0.0060702719688415525, 0.0060282878875732426, 0.006116352081298828, 0.006023168087005615, 0.00607539176940918, 0.005901311874389649, 0.00597811222076416, 0.005888000011444092, 0.006136832237243653, 0.006013951778411865, 0.006107135772705078, 0.006000639915466309, 0.006140992164611816, 0.006027200222015381, 0.006113279819488526, 0.006024191856384278, 0.006000639915466309, 0.005900288105010986, 0.005972991943359375, 0.005945343971252442, 0.005938176155090332, 0.00592793607711792, 0.005957695960998535, 0.005875648021697998, 
0.005899263858795166, 0.005974016189575195, 0.0059361281394958495, 0.005934080123901367, 0.005956607818603516, 0.006034431934356689, 0.006053887844085694, 0.005958655834197998, 0.005947391986846923, 0.005921792030334472, 0.005929984092712402, 0.00592793607711792, 0.005923840045928955, 0.005909503936767578, 0.005944320201873779, 0.005920767784118652, 0.005947391986846923, 0.00592793607711792, 0.0059361600875854495, 0.0059350719451904295, 0.006118400096893311, 0.006048799991607666, 0.0060548801422119145, 0.006033440113067627, 0.006051807880401611, 0.00603545618057251, 0.006061056137084961, 0.006054912090301514, 0.006093823909759521, 0.006046720027923584, 0.006087679862976075, 0.006053887844085694, 0.005938176155090332, 0.005929984092712402, 0.005971968173980713, 0.005933055877685547, 0.006050816059112549, 0.005909503936767578, 0.0059556479454040525, 0.0059268479347229005, 0.005881855964660645, 0.00586956787109375, 0.005943295955657959, 0.005857279777526855, 0.005751808166503906, 0.005788671970367432, 0.0058091840744018556, 0.005729248046875, 0.005725183963775635, 0.00573747205734253, 0.005750783920288086, 0.005790719985961914, 0.005763072013854981, 0.0057487359046936035, 0.005739520072937012, 0.005724192142486572, 0.005904352188110352, 0.0058849921226501465, 0.005853184223175049, 0.005810111999511719, 0.005863423824310303, 0.0057292799949646, 0.005742591857910156, 0.005727231979370117, 0.005696512222290039, 0.005786623954772949, 0.005789696216583252, 0.005844992160797119, 0.005784575939178467, 0.005833727836608887, 0.005810175895690918, 0.00582144021987915, 0.005793791770935058, 0.005814271926879883, 0.005848063945770264, 0.005764095783233642, 0.005825535774230957, 0.0057784318923950195, 0.005804031848907471, 0.00571289587020874, 0.005766143798828125, 0.005715968132019043, 0.0058091521263122555, 0.005654528141021729, 0.005668863773345947, 0.005740543842315674, 0.005723135948181152, 0.005734399795532226, 0.005731328010559082, 0.0057118721008300784, 0.005757952213287353, 0.0058009600639343266, 0.005834752082824707, 0.005798912048339844, 0.005744639873504639, 0.0057487359046936035, 0.005733376026153565, 0.0057292799949646, 0.005713920116424561, 0.005798912048339844, 0.005794816017150879, 0.005805056095123291, 0.0057825279235839844, 0.005786623954772949, 0.005743616104125977, 0.005732351779937744, 0.005798912048339844, 0.005766143798828125, 0.00582860803604126, 0.005774335861206055, 0.005824512004852295, 0.005769216060638428, 0.005848063945770264, 0.005776383876800537, 0.005780479907989502, 0.005732384204864502, 0.005695456027984619, 0.005765120029449463, 0.005731328010559082, 0.005760000228881836, 0.005689343929290771, 0.005725183963775635, 0.00572211217880249, 0.00572211217880249, 0.005796864032745362, 0.005761023998260498, 0.005720064163208007, 0.005780479907989502, 0.005808127880096436, 0.005838912010192871, 0.005748672008514404, 0.005744639873504639, 0.005701632022857666, 0.005738495826721191, 0.00571289587020874, 0.005777440071105957, 0.005724127769470215, 0.005697535991668701, 0.00576204776763916, 0.005706751823425293, 0.005794816017150879, 0.005733376026153565, 0.005874688148498535, 0.005892096042633056, 0.0059351038932800295, 0.005859327793121338, 0.005933055877685547, 0.005874688148498535, 0.005950463771820068, 0.005884928226470947, 0.005911551952362061, 0.00591871976852417, 0.0059054079055786135, 0.005909503936767578, 0.005879807949066162, 0.005912576198577881, 0.005897215843200684, 0.005911551952362061, 0.005903359889984131, 0.00591974401473999, 0.005881855964660645, 
0.005945343971252442, 0.006011903762817383, 0.006067200183868408, 0.005988351821899414, 0.0060364799499511715, 0.005982207775115967, 0.005982207775115967, 0.0059351038932800295, 0.006064127922058105, 0.005952511787414551, 0.005941247940063477, 0.005876736164093017, 0.0059269118309021, 0.005891071796417236, 0.005921792030334472, 0.0059023361206054685, 0.005913599967956543, 0.005960703849792481, 0.005873727798461914, 0.005869503974914551, 0.005694464206695556, 0.005765120029449463, 0.005725183963775635, 0.005760000228881836, 0.0057077760696411135, 0.00587775993347168, 0.005690368175506591, 0.005708799839019775, 0.005730368137359619, 0.005705664157867432, 0.005742591857910156, 0.005803008079528809, 0.005806079864501953, 0.0057784318923950195, 0.005775360107421875, 0.005776383876800537, 0.005795839786529541, 0.005786623954772949, 0.005784575939178467, 0.005815296173095703, 0.005775392055511475, 0.005895135879516601, 0.005789696216583252, 0.005825535774230957, 0.005823488235473633, 0.005798912048339844, 0.006064127922058105, 0.005788671970367432, 0.005860352039337159, 0.005725183963775635, 0.0058091521263122555, 0.005763072013854981, 0.005797887802124023, 0.00571289587020874, 0.005755904197692871, 0.005693439960479736, 0.005689343929290771, 0.005740543842315674, 0.005765120029449463, 0.005822463989257813, 0.005751808166503906, 0.0058429441452026365, 0.005777408123016357, 0.005816319942474365]",tokens/s,169.8062813880892,,,2,64,1,,, -4bit-awq-exllama-v2-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,meta-llama/Llama-2-13b-hf,meta-llama/Llama-2-13b-hf,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - self.load_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File 
""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3907, in from_pretrained - hf_quantizer.postprocess_model(model) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/base.py"", line 195, in postprocess_model - return self._process_model_after_weight_loading(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/quantizer_awq.py"", line 107, in _process_model_after_weight_loading - model = post_init_awq_exllama_modules(model, self.quantization_config.exllama_config) - File ""/usr/local/lib/python3.10/dist-packages/transformers/integrations/awq.py"", line 466, in post_init_awq_exllama_modules - model = exllamav2_post_init( - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllamav2.py"", line 198, in exllamav2_post_init - submodule.post_init(scratch_space=model.scratch_spaces[device]) - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllamav2.py"", line 81, in post_init - self.q_handle = exlv2_ext.make_q_matrix( -NameError: name 'exlv2_ext' is not defined - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,2,64,1,,, -4bit-awq-exllama-v2-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,x,x,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 304, in hf_raise_for_status - response.raise_for_status() - File ""/usr/local/lib/python3.10/dist-packages/requests/models.py"", line 1024, in raise_for_status - raise HTTPError(http_error_msg, response=self) -requests.exceptions.HTTPError: 404 Client Error: Not Found for url: https://huggingface.co/x/resolve/main/config.json - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1221, in hf_hub_download - return _hf_hub_download_to_cache_dir( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1325, in _hf_hub_download_to_cache_dir - _raise_on_head_call_error(head_call_error, force_download, local_files_only) - File 
""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1823, in _raise_on_head_call_error - raise head_call_error - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1722, in _get_metadata_or_catch_error - metadata = get_hf_file_metadata(url=url, proxies=proxies, timeout=etag_timeout, headers=headers) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1645, in get_hf_file_metadata - r = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 372, in _request_wrapper - response = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 396, in _request_wrapper - hf_raise_for_status(response) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status - raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-669492b8-634e3ca77eb48b2270e617dd;be4e9d7c-7209-43ab-a2d1-ffd9eb9a74d5) - -Repository Not Found for url: https://huggingface.co/x/resolve/main/config.json. -Please make sure you specified the correct `repo_id` and `repo_type`. -If you are trying to access a private or gated repo, make sure you are authenticated. - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 425, in cached_file - raise EnvironmentError( -OSError: x is not a local folder and is not a valid model identifier listed on 'https://huggingface.co/models' -If this is a private repository, make sure to pass a token having permission to this repo either by logging in with `huggingface-cli login` or by passing `token=` - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,2,64,1,,, 
-4bit-awq-exllama-v2-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,/,/,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 373, in cached_file - raise EnvironmentError( -OSError: / does not appear to have a file named config.json. Checkout 'https://huggingface.co///tree/None' for available files. 
- -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,2,64,1,,, -4bit-awq-exllama-v2-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,togethercomputer/RedPajama-INCITE-Base-7B-v0.1,togethercomputer/RedPajama-INCITE-Base-7B-v0.1,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - self.load_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3907, in from_pretrained - hf_quantizer.postprocess_model(model) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/base.py"", line 195, in postprocess_model - return self._process_model_after_weight_loading(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/quantizer_awq.py"", line 107, in _process_model_after_weight_loading - model = post_init_awq_exllama_modules(model, self.quantization_config.exllama_config) - File ""/usr/local/lib/python3.10/dist-packages/transformers/integrations/awq.py"", line 466, in post_init_awq_exllama_modules - model = exllamav2_post_init( - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllamav2.py"", line 198, in exllamav2_post_init - submodule.post_init(scratch_space=model.scratch_spaces[device]) - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllamav2.py"", line 81, in post_init - self.q_handle = exlv2_ext.make_q_matrix( -NameError: name 'exlv2_ext' is not defined - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,2,64,1,,, 
-4bit-awq-exllama-v2-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,facebook/xglm-564M,facebook/xglm-564M,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - self.load_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3907, in from_pretrained - hf_quantizer.postprocess_model(model) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/base.py"", line 195, in postprocess_model - return self._process_model_after_weight_loading(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/quantizer_awq.py"", line 107, in _process_model_after_weight_loading - model = post_init_awq_exllama_modules(model, self.quantization_config.exllama_config) - File ""/usr/local/lib/python3.10/dist-packages/transformers/integrations/awq.py"", line 466, in post_init_awq_exllama_modules - model = exllamav2_post_init( - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllamav2.py"", line 198, in exllamav2_post_init - submodule.post_init(scratch_space=model.scratch_spaces[device]) - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllamav2.py"", line 81, in post_init - self.q_handle = exlv2_ext.make_q_matrix( -NameError: name 'exlv2_ext' is not defined - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,2,64,1,,, 
-4bit-awq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,EleutherAI/gpt-neo-125m,EleutherAI/gpt-neo-125m,cuda,0,42,,,True,,,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,gpt_neo,MB,933.04832,849.870848,0.0,220.20096,205.438976,s,1,7.48857470703125,7.48857470703125,0.0,7.48857470703125,7.48857470703125,7.48857470703125,7.48857470703125,[7.48857470703125],,kWh,7.696306453477873e-06,4.202129939382958e-06,9.950285738025144e-06,2.1848722130885977e-05,,MB,1584.427008,910.688256,0.0,260.046848,226.388992,s,18,0.19934585762023926,0.01107476986779107,0.00026037823493982526,0.010958672046661377,0.011350115013122558,0.011422949123382568,0.011606190166473388,"[0.011287903785705567, 0.011334464073181153, 0.010891936302185058, 0.010853152275085449, 0.011217439651489258, 0.010874048233032226, 0.010759296417236329, 0.011652000427246094, 0.01127609634399414, 0.011271648406982422, 0.011017248153686524, 0.010841919898986816, 0.010806752204895019, 0.010798175811767579, 0.010844927787780761, 0.011336223602294922, 0.01090009593963623, 0.01138252830505371]",tokens/s,23115.604482629376,kWh,1.2816335834086094e-07,7.022733369196374e-08,2.7702872246637074e-07,4.754194144991954e-07,tokens/kWh,538471909.6288258,MB,1632.374784,925.36832,0.0,274.726912,226.391552,s,18,10.241471801757813,0.5689706556532118,0.009216291823338737,0.565548583984375,0.5849463134765625,0.5857332000732421,0.5867170501708984,"[0.5869630126953125, 0.5684609375, 0.5657175903320313, 0.566205810546875, 0.5617012939453125, 0.5605464477539063, 0.5625922241210938, 0.5738027954101562, 0.5767313842773437, 0.5776922607421875, 0.5595741577148438, 0.5585230712890625, 0.5616942138671875, 0.561663818359375, 0.5653795776367188, 0.5640049438476562, 0.5847020874023438, 0.5855161743164062]",tokens/s,110.72627274190845,kWh,6.565546421443213e-06,3.597545539214073e-06,1.050297990153365e-05,2.066607186219094e-05,tokens/kWh,3048474.834506889,,s,1134,10.233718847274774,0.009024443427931905,0.0002538863489796963,0.008895487785339355,0.009283583641052246,0.009348147106170653,0.009950709552764897,"[0.009187328338623046, 0.00920473575592041, 0.009124863624572753, 0.009316351890563965, 0.008913920402526856, 0.008860671997070312, 0.009255935668945312, 0.009233407974243164, 0.009233407974243164, 0.00920576000213623, 0.009294848442077636, 0.00922214412689209, 0.00923033618927002, 0.009284607887268067, 0.00923750400543213, 0.009432064056396485, 0.00931942367553711, 0.009193471908569336, 0.009200639724731445, 0.009259008407592773, 0.009192447662353515, 0.009186304092407227, 0.009048064231872559, 0.0090316801071167, 0.009128959655761718, 0.009067520141601563, 0.009070591926574707, 0.009030655860900879, 0.009519136428833008, 0.009277407646179198, 0.009138175964355469, 0.009249792098999024, 0.009056256294250489, 0.008894463539123536, 0.008833056449890136, 0.00927843189239502, 0.009205823898315429, 0.009378751754760743, 0.009163776397705077, 0.009207807540893554, 0.00922111988067627, 0.009200639724731445, 0.009221152305603027, 0.009241567611694337, 0.009226240158081055, 0.00922214412689209, 
0.009234432220458985, 0.009879551887512206, 0.010059776306152344, 0.01010483169555664, 0.011688960075378419, 0.010662912368774414, 0.009385984420776367, 0.00932249641418457, 0.009291775703430176, 0.009219072341918945, 0.009140224456787109, 0.009172991752624511, 0.009803775787353516, 0.00933683204650879, 0.009331711769104004, 0.009186304092407227, 0.009210880279541016, 0.009164799690246582, 0.009256959915161133, 0.00897433567047119, 0.009456640243530273, 0.00919961643218994, 0.008882176399230958, 0.008870911598205567, 0.008877056121826172, 0.008937472343444825, 0.00889145565032959, 0.008872896194458009, 0.00890880012512207, 0.008884223937988281, 0.00889958381652832, 0.008852479934692382, 0.009094143867492676, 0.009247743606567382, 0.009176063537597656, 0.009174015998840332, 0.009177087783813476, 0.00913100814819336, 0.008869888305664063, 0.009013248443603515, 0.00909721565246582, 0.009233407974243164, 0.009226240158081055, 0.009176063537597656, 0.00923136043548584, 0.009187328338623046, 0.00910028839111328, 0.008866815567016602, 0.008950783729553222, 0.008896512031555176, 0.008837120056152344, 0.008854528427124024, 0.008895487785339355, 0.008867839813232421, 0.008820768356323241, 0.0088503999710083, 0.008859647750854491, 0.008984576225280762, 0.008880127906799316, 0.008947711944580078, 0.00891596794128418, 0.008927231788635253, 0.008902655601501466, 0.00891596794128418, 0.008860671997070312, 0.008841216087341308, 0.008969216346740723, 0.008926207542419434, 0.008921088218688965, 0.008884223937988281, 0.008918016433715821, 0.009067520141601563, 0.009244671821594238, 0.009174015998840332, 0.009256959915161133, 0.009241600036621094, 0.009197567939758301, 0.009225215911865235, 0.009182208061218262, 0.008895487785339355, 0.00870809555053711, 0.008844287872314453, 0.008918016433715821, 0.008885248184204102, 0.008844287872314453, 0.008862719535827637, 0.00888319969177246, 0.008839167594909669, 0.008860671997070312, 0.010686464309692383, 0.010177536010742188, 0.009220095634460449, 0.009323519706726074, 0.009256959915161133, 0.00918835163116455, 0.009275391578674316, 0.009208831787109375, 0.00922214412689209, 0.00921497631072998, 0.009200639724731445, 0.009112575531005859, 0.008820735931396484, 0.008889344215393067, 0.008838144302368164, 0.008831999778747558, 0.008853504180908203, 0.00893337631225586, 0.008860671997070312, 0.008845312118530273, 0.008821760177612305, 0.008868864059448242, 0.008866815567016602, 0.008837120056152344, 0.008863743782043456, 0.008843263626098634, 0.008846336364746094, 0.008863743782043456, 0.008819711685180665, 0.008805376052856445, 0.00880025577545166, 0.008870911598205567, 0.008830975532531739, 0.00885043239593506, 0.008855551719665527, 0.008831999778747558, 0.00881049633026123, 0.008844287872314453, 0.0088340482711792, 0.008791040420532227, 0.008879103660583497, 0.008879103660583497, 0.008910847663879394, 0.008792063713073731, 0.009072640419006347, 0.009317376136779786, 0.00903270435333252, 0.008844287872314453, 0.008869888305664063, 0.008945664405822755, 0.008845312118530273, 0.008831999778747558, 0.00880025577545166, 0.008902655601501466, 0.00871833610534668, 0.008885248184204102, 0.0088340482711792, 0.008771583557128907, 0.008804351806640624, 0.008862719535827637, 0.008865792274475098, 0.008821760177612305, 0.008852479934692382, 0.008942624092102051, 0.008902624130249023, 0.008881152153015137, 0.00880128002166748, 0.00889958381652832, 0.008824831962585449, 0.008890368461608887, 0.008876031875610351, 0.008858624458312989, 0.008847392082214356, 0.00884937572479248, 
0.008882176399230958, 0.008812543869018554, 0.008813568115234375, 0.008783871650695801, 0.008862719535827637, 0.008845312118530273, 0.00888319969177246, 0.008873984336853028, 0.008846336364746094, 0.008880127906799316, 0.00880025577545166, 0.008819711685180665, 0.008736767768859864, 0.008803327560424805, 0.008956928253173829, 0.009124863624572753, 0.00942796802520752, 0.009257984161376954, 0.00922316837310791, 0.009254912376403808, 0.009256959915161133, 0.009190400123596192, 0.009154560089111329, 0.008919039726257324, 0.008860671997070312, 0.00882380771636963, 0.00890777587890625, 0.009067520141601563, 0.009244671821594238, 0.009169919967651367, 0.009267200469970703, 0.009143296241760255, 0.009195520401000976, 0.009193471908569336, 0.009133055686950683, 0.009257984161376954, 0.009183263778686524, 0.009151455879211425, 0.009179136276245118, 0.009095168113708496, 0.00910643196105957, 0.009199647903442383, 0.00919651222229004, 0.009054207801818847, 0.009150464057922364, 0.009177087783813476, 0.009193471908569336, 0.009132032394409179, 0.009164799690246582, 0.009158656120300293, 0.009151488304138184, 0.008956928253173829, 0.008901663780212402, 0.008914912223815918, 0.008860671997070312, 0.008880160331726073, 0.008855520248413086, 0.008889344215393067, 0.008999936103820801, 0.008927231788635253, 0.00884227180480957, 0.008847328186035156, 0.008879103660583497, 0.008867839813232421, 0.00892416000366211, 0.008857600212097168, 0.008728608131408692, 0.008871904373168945, 0.008854528427124024, 0.00884224033355713, 0.008837120056152344, 0.008847359657287598, 0.008902655601501466, 0.008851455688476563, 0.008872960090637207, 0.008837151527404784, 0.008847328186035156, 0.008853504180908203, 0.008835071563720704, 0.008812543869018554, 0.008860671997070312, 0.008816639900207519, 0.008717311859130859, 0.009172991752624511, 0.009356287956237793, 0.009041919708251953, 0.008876031875610351, 0.008844287872314453, 0.008881152153015137, 0.008864768028259277, 0.008855551719665527, 0.008861696243286133, 0.008843263626098634, 0.008831999778747558, 0.008762368202209473, 0.00882688045501709, 0.008864864349365235, 0.008829855918884277, 0.008846336364746094, 0.008854528427124024, 0.008822784423828126, 0.008897536277770996, 0.008809503555297851, 0.008800224304199218, 0.00890777587890625, 0.008857600212097168, 0.008697855949401855, 0.008836095809936523, 0.008839167594909669, 0.008885248184204102, 0.00882380771636963, 0.008847359657287598, 0.008814592361450196, 0.008828960418701171, 0.008839136123657226, 0.008828927993774414, 0.008831999778747558, 0.008889344215393067, 0.0088340482711792, 0.00888319969177246, 0.00882585620880127, 0.00873574447631836, 0.008803327560424805, 0.00881868839263916, 0.0089303035736084, 0.008844287872314453, 0.008811519622802735, 0.008870911598205567, 0.008878080368041993, 0.008828927993774414, 0.008840191841125488, 0.008839167594909669, 0.008859647750854491, 0.008870976448059082, 0.008818623542785644, 0.008820735931396484, 0.00881868839263916, 0.008816639900207519, 0.008840191841125488, 0.008859647750854491, 0.008841216087341308, 0.00888319969177246, 0.008821760177612305, 0.008879103660583497, 0.008811519622802735, 0.008811519622802735, 0.009637887954711915, 0.009281536102294922, 0.00918835163116455, 0.009160703659057617, 0.009060352325439454, 0.009151488304138184, 0.00913920021057129, 0.009208831787109375, 0.008994815826416015, 0.008860671997070312, 0.008845312118530273, 0.008899680137634277, 0.00887388801574707, 0.008838144302368164, 0.008838144302368164, 0.008827903747558593, 
0.008838144302368164, 0.008838144302368164, 0.008805376052856445, 0.008852479934692382, 0.008827903747558593, 0.008887295722961425, 0.008821760177612305, 0.008671232223510742, 0.008843392372131347, 0.008835968017578124, 0.008882176399230958, 0.008893440246582032, 0.008830975532531739, 0.008843263626098634, 0.008871935844421386, 0.00877670383453369, 0.00890675163269043, 0.008828927993774414, 0.008866815567016602, 0.008822815895080566, 0.008885215759277344, 0.009284607887268067, 0.00894156837463379, 0.010004480361938477, 0.009326592445373535, 0.009277440071105958, 0.009251839637756347, 0.009209856033325196, 0.00899891185760498, 0.008862719535827637, 0.008894463539123536, 0.00882380771636963, 0.008913920402526856, 0.008821760177612305, 0.00911359977722168, 0.00942080020904541, 0.0090316801071167, 0.00881868839263916, 0.008931327819824218, 0.008867839813232421, 0.00889958381652832, 0.008869888305664063, 0.008796256065368652, 0.008902560234069825, 0.008879103660583497, 0.008865792274475098, 0.008838144302368164, 0.008813568115234375, 0.008931327819824218, 0.008848383903503418, 0.008803327560424805, 0.008828927993774414, 0.008864768028259277, 0.008856575965881347, 0.0088340482711792, 0.008836095809936523, 0.008830975532531739, 0.008889344215393067, 0.008848383903503418, 0.00884224033355713, 0.008860671997070312, 0.00881766414642334, 0.008871935844421386, 0.008837120056152344, 0.008841216087341308, 0.00889139175415039, 0.008895487785339355, 0.008854528427124024, 0.008847359657287598, 0.008821760177612305, 0.010704895973205567, 0.009893888473510743, 0.009454591751098633, 0.009233407974243164, 0.00923033618927002, 0.009220095634460449, 0.009142271995544434, 0.009129983901977539, 0.009177087783813476, 0.00922316837310791, 0.009178144454956055, 0.009202655792236327, 0.00918835163116455, 0.009101311683654785, 0.008886272430419923, 0.00881766414642334, 0.008863743782043456, 0.008802304267883301, 0.008815679550170898, 0.009058239936828614, 0.00914739227294922, 0.009374719619750976, 0.009289728164672852, 0.00919961643218994, 0.009202688217163087, 0.009152511596679687, 0.009117695808410644, 0.009119744300842286, 0.009136128425598144, 0.009184255599975585, 0.009162752151489258, 0.009177087783813476, 0.009175040245056153, 0.009154560089111329, 0.009169919967651367, 0.009918463706970216, 0.00949350357055664, 0.00921497631072998, 0.009155584335327148, 0.00920473575592041, 0.009228287696838379, 0.009144319534301757, 0.00884224033355713, 0.008839167594909669, 0.008872960090637207, 0.008862719535827637, 0.008827903747558593, 0.00882688045501709, 0.008888319969177246, 0.008854528427124024, 0.008867839813232421, 0.00885257625579834, 0.008837023735046386, 0.008992768287658692, 0.0089169921875, 0.00869375991821289, 0.008742912292480469, 0.008754176139831543, 0.008913920402526856, 0.008822784423828126, 0.008824831962585449, 0.008848383903503418, 0.008989695549011231, 0.009058303833007812, 0.009218048095703125, 0.009195520401000976, 0.009250816345214843, 0.009217023849487305, 0.009292799949645996, 0.009325568199157714, 0.009500672340393066, 0.009219072341918945, 0.009254912376403808, 0.009239551544189453, 0.009275391578674316, 0.009277440071105958, 0.009605119705200196, 0.009534527778625488, 0.009429951667785645, 0.009245696067810059, 0.009245696067810059, 0.009187328338623046, 0.00930611228942871, 0.009163776397705077, 0.009250816345214843, 0.009219136238098145, 0.009311167716979981, 0.009210880279541016, 0.009251839637756347, 0.009154560089111329, 0.0092293119430542, 0.009162752151489258, 0.009158656120300293, 
0.009193471908569336, 0.009174015998840332, 0.009189375877380371, 0.00916585636138916, 0.00917807960510254, 0.009218048095703125, 0.009172991752624511, 0.009249792098999024, 0.00918015956878662, 0.009117823600769043, 0.009223039627075196, 0.009151488304138184, 0.009145343780517578, 0.009175040245056153, 0.00903270435333252, 0.008767487525939942, 0.008806400299072266, 0.008824831962585449, 0.008835071563720704, 0.008877056121826172, 0.008881152153015137, 0.008830975532531739, 0.008821760177612305, 0.008836095809936523, 0.008866815567016602, 0.008845312118530273, 0.008806400299072266, 0.008805376052856445, 0.009184255599975585, 0.009202688217163087, 0.009182208061218262, 0.009165823936462402, 0.00919046401977539, 0.009064448356628419, 0.009195520401000976, 0.009160703659057617, 0.009714688301086426, 0.009338879585266113, 0.009208831787109375, 0.009227264404296874, 0.00918015956878662, 0.009182208061218262, 0.009126912117004395, 0.009162752151489258, 0.009267200469970703, 0.009178112030029297, 0.009307135581970214, 0.009156607627868652, 0.009233407974243164, 0.00919961643218994, 0.009340928077697755, 0.009185279846191406, 0.009051136016845703, 0.008830975532531739, 0.008856575965881347, 0.008844287872314453, 0.0088340482711792, 0.00899071979522705, 0.00882585620880127, 0.00881868839263916, 0.008819711685180665, 0.00918227195739746, 0.009141183853149415, 0.009081855773925781, 0.009167936325073242, 0.009164735794067383, 0.009252863883972168, 0.009169919967651367, 0.009190400123596192, 0.009125984191894532, 0.00908176040649414, 0.009136128425598144, 0.00919654369354248, 0.00922316837310791, 0.009187328338623046, 0.009206784248352052, 0.009179136276245118, 0.009278464317321777, 0.009281536102294922, 0.009163776397705077, 0.009136128425598144, 0.009148415565490722, 0.009201663970947266, 0.009408512115478516, 0.009212960243225097, 0.00942182445526123, 0.009229280471801759, 0.009183232307434081, 0.009350144386291503, 0.00919654369354248, 0.009235456466674804, 0.009186304092407227, 0.009177120208740234, 0.009032671928405761, 0.008926207542419434, 0.00942080020904541, 0.008680447578430176, 0.0088156156539917, 0.008851455688476563, 0.008857600212097168, 0.00889241600036621, 0.008844287872314453, 0.008840191841125488, 0.008812543869018554, 0.008822848320007324, 0.008900544166564942, 0.008870911598205567, 0.00882585620880127, 0.008880127906799316, 0.008853504180908203, 0.008840191841125488, 0.008829952239990235, 0.00884227180480957, 0.008813535690307617, 0.008840191841125488, 0.008830975532531739, 0.008838144302368164, 0.00882688045501709, 0.0088340482711792, 0.0089169921875, 0.00881766414642334, 0.008806400299072266, 0.00880947208404541, 0.008837120056152344, 0.008786944389343262, 0.00880025577545166, 0.008815648078918457, 0.00885756778717041, 0.009325568199157714, 0.009211903572082519, 0.009233407974243164, 0.009193504333496094, 0.009135071754455566, 0.009102335929870605, 0.008828927993774414, 0.00881868839263916, 0.008864768028259277, 0.00881868839263916, 0.008802304267883301, 0.008837120056152344, 0.008912896156311035, 0.008855551719665527, 0.008820735931396484, 0.008863743782043456, 0.008864768028259277, 0.008895487785339355, 0.008936448097229004, 0.008839167594909669, 0.0088340482711792, 0.008856575965881347, 0.008884223937988281, 0.008846336364746094, 0.00882089614868164, 0.008757087707519531, 0.008943615913391113, 0.008826944351196289, 0.008814528465270996, 0.008849408149719238, 0.008860671997070312, 0.008680447578430176, 0.008871968269348145, 0.008841183662414551, 0.008851455688476563, 
0.008819711685180665, 0.008841216087341308, 0.008852479934692382, 0.008829952239990235, 0.008837151527404784, 0.008845279693603516, 0.008855551719665527, 0.008864768028259277, 0.008845312118530273, 0.008845312118530273, 0.008841279983520509, 0.008878016471862793, 0.008877056121826172, 0.008857600212097168, 0.008868864059448242, 0.00891596794128418, 0.008858624458312989, 0.008758272171020508, 0.008851519584655762, 0.008846336364746094, 0.00891487979888916, 0.008856575965881347, 0.008858624458312989, 0.008855551719665527, 0.008813568115234375, 0.008866815567016602, 0.008829952239990235, 0.008824831962585449, 0.008820735931396484, 0.008831999778747558, 0.008845312118530273, 0.008803327560424805, 0.008863743782043456, 0.008877056121826172, 0.008886272430419923, 0.008838144302368164, 0.008897536277770996, 0.008841216087341308, 0.00882380771636963, 0.008843263626098634, 0.008806400299072266, 0.008836095809936523, 0.0088156156539917, 0.00888319969177246, 0.008879103660583497, 0.008778752326965332, 0.009187328338623046, 0.009340928077697755, 0.00892518424987793, 0.008847392082214356, 0.008852448463439942, 0.00882380771636963, 0.00893337631225586, 0.00882585620880127, 0.008791104316711425, 0.008837056159973145, 0.008816639900207519, 0.008861696243286133, 0.008837120056152344, 0.00869478416442871, 0.008862719535827637, 0.008829952239990235, 0.00882688045501709, 0.008841216087341308, 0.008802304267883301, 0.00881868839263916, 0.00882688045501709, 0.008819711685180665, 0.008863743782043456, 0.008839167594909669, 0.008851455688476563, 0.008841216087341308, 0.008861727714538574, 0.008920031547546387, 0.009189472198486329, 0.00928553581237793, 0.00918835163116455, 0.009164799690246582, 0.009070591926574707, 0.008879103660583497, 0.008896512031555176, 0.008853504180908203, 0.00888319969177246, 0.008839167594909669, 0.009146368026733399, 0.008882176399230958, 0.008857600212097168, 0.00882585620880127, 0.008853504180908203, 0.008852479934692382, 0.008880127906799316, 0.008910847663879394, 0.0088340482711792, 0.00951296043395996, 0.009302016258239745, 0.009190400123596192, 0.009195520401000976, 0.009148415565490722, 0.008861696243286133, 0.00882585620880127, 0.008778752326965332, 0.00882585620880127, 0.00881766414642334, 0.008824831962585449, 0.008812543869018554, 0.008918016433715821, 0.008848383903503418, 0.008820735931396484, 0.008830975532531739, 0.008854528427124024, 0.008824831962585449, 0.008833024024963379, 0.008802304267883301, 0.008824831962585449, 0.008835071563720704, 0.008864768028259277, 0.008831999778747558, 0.008895487785339355, 0.008852479934692382, 0.008836095809936523, 0.008893440246582032, 0.00882688045501709, 0.008713215827941894, 0.008851455688476563, 0.008836095809936523, 0.008846336364746094, 0.008852479934692382, 0.008766464233398438, 0.008821760177612305, 0.0088156156539917, 0.008870911598205567, 0.008863743782043456, 0.008858624458312989, 0.008858624458312989, 0.008865792274475098, 0.008866815567016602, 0.008896512031555176, 0.0088340482711792, 0.008839167594909669, 0.008830975532531739, 0.008855551719665527, 0.008838144302368164, 0.008862719535827637, 0.008861696243286133, 0.008862719535827637, 0.008848383903503418, 0.008879103660583497, 0.008843263626098634, 0.00880947208404541, 0.008862719535827637, 0.008841216087341308, 0.008855551719665527, 0.00880947208404541, 0.008847359657287598, 0.009332736015319825, 0.00920576000213623, 0.009120767593383788, 0.009291775703430176, 0.009235456466674804, 0.009288703918457031, 0.009837568283081055, 0.009406463623046875, 0.009060352325439454, 
0.008895487785339355, 0.008812576293945313, 0.008811488151550292, 0.008838144302368164, 0.008858624458312989, 0.008857600212097168, 0.008838144302368164, 0.008862719535827637, 0.008848383903503418, 0.008851455688476563, 0.008872960090637207, 0.00882688045501709, 0.008847359657287598, 0.008802304267883301, 0.008875007629394532, 0.008822784423828126, 0.008840191841125488, 0.008849408149719238, 0.00881049633026123, 0.008861696243286133, 0.00890060806274414, 0.008717311859130859, 0.008733695983886718, 0.008855551719665527, 0.008849408149719238, 0.008918016433715821, 0.00888319969177246, 0.008861696243286133, 0.008854528427124024, 0.008860671997070312, 0.008853504180908203, 0.008869888305664063, 0.008868864059448242, 0.008905728340148926, 0.008932352066040039, 0.008880127906799316, 0.008902655601501466, 0.008904704093933105, 0.008927231788635253, 0.008940544128417969, 0.008955904006958008, 0.008893440246582032, 0.008995840072631836, 0.008896512031555176, 0.008870911598205567, 0.00882380771636963, 0.008869888305664063, 0.008939519882202148, 0.008742912292480469, 0.008893440246582032, 0.008844287872314453, 0.008872960090637207, 0.00888319969177246, 0.008838144302368164, 0.008876031875610351, 0.008860671997070312, 0.008879103660583497, 0.008813568115234375, 0.008857600212097168, 0.008868864059448242, 0.008829952239990235, 0.008855551719665527, 0.008928256034851074, 0.008833024024963379, 0.008827903747558593, 0.008854528427124024, 0.008837120056152344, 0.008835071563720704, 0.008824831962585449, 0.008849408149719238, 0.008870911598205567, 0.008871935844421386, 0.008852479934692382, 0.008838144302368164, 0.008860671997070312, 0.009672703742980958, 0.009966591835021972, 0.009313280105590821, 0.010050559997558594, 0.009407487869262696, 0.009465855598449707, 0.009247743606567382, 0.009235456466674804, 0.009324543952941895, 0.009217023849487305, 0.009060352325439454, 0.009305088043212891, 0.00922316837310791, 0.008948736190795899, 0.008875007629394532, 0.008886272430419923, 0.008861696243286133, 0.008827903747558593, 0.008802304267883301, 0.008872960090637207, 0.008880127906799316, 0.008840191841125488, 0.008836095809936523, 0.008875007629394532, 0.008840191841125488, 0.00882688045501709, 0.008912896156311035, 0.008781824111938476, 0.008758272171020508, 0.008843263626098634, 0.008866815567016602, 0.008869888305664063, 0.008912896156311035, 0.0089169921875, 0.008884223937988281, 0.009168895721435547, 0.009326592445373535, 0.008988672256469727, 0.008878080368041993, 0.00888326358795166, 0.00882579231262207, 0.008861696243286133, 0.008973312377929688, 0.008873984336853028, 0.008855551719665527, 0.008858688354492187, 0.008871871948242188, 0.008857600212097168, 0.008836095809936523, 0.008838144302368164, 0.008824831962585449, 0.008879103660583497, 0.008872960090637207, 0.008844287872314453, 0.008855551719665527, 0.008928256034851074, 0.00918227195739746, 0.009216959953308106, 0.009155584335327148, 0.009207807540893554, 0.009175040245056153, 0.009162752151489258, 0.009182208061218262, 0.009164799690246582, 0.00918015956878662, 0.009153535842895508, 0.009069567680358886, 0.008851455688476563, 0.008845312118530273, 0.008855584144592286, 0.008824799537658691, 0.00891596794128418, 0.008835071563720704, 0.008713215827941894, 0.009696255683898926, 0.00940339183807373, 0.009347071647644043, 0.00918835163116455, 0.00930303955078125, 0.009249792098999024, 0.009267200469970703, 0.009250816345214843, 0.009123871803283692, 0.009240544319152832, 0.009265151977539063, 0.009181183815002441, 0.00919961643218994, 
0.009299967765808105, 0.009305088043212891, 0.009200639724731445, 0.009127936363220214, 0.009104448318481445, 0.009108415603637695, 0.009027584075927735, 0.009194496154785157, 0.009257984161376954, 0.009256959915161133, 0.009283583641052246, 0.009262080192565919, 0.009225215911865235, 0.009283616065979003, 0.00925487995147705, 0.00923852825164795, 0.009245696067810059, 0.00933683204650879, 0.009293824195861817, 0.009189375877380371, 0.009290752410888671, 0.00923852825164795, 0.009176128387451172, 0.009017279624938965, 0.009294848442077636, 0.009227264404296874, 0.009240575790405273, 0.009194496154785157, 0.00919654369354248, 0.009233407974243164, 0.009286656379699706, 0.009203712463378906, 0.009175040245056153, 0.00927948760986328, 0.009361408233642577, 0.009211903572082519, 0.009189375877380371, 0.009252863883972168, 0.00922214412689209, 0.009249792098999024, 0.00951193618774414, 0.010125311851501465, 0.01012224006652832, 0.009388031959533692, 0.0095283203125, 0.009245696067810059, 0.00927948760986328, 0.00932147216796875, 0.009283583641052246, 0.009078783988952637, 0.009343999862670899, 0.009251839637756347, 0.009260031700134277, 0.009217023849487305, 0.009307135581970214, 0.009308159828186035, 0.009243647575378417, 0.009262080192565919, 0.009596927642822266, 0.009779199600219727, 0.009370623588562011, 0.009273344039916993, 0.009254912376403808, 0.009284607887268067, 0.009254912376403808, 0.009264127731323242, 0.009246720314025878, 0.009355263710021973, 0.00932863998413086, 0.009283583641052246, 0.009369600296020507, 0.009235456466674804, 0.009285632133483887, 0.009241600036621094, 0.009244671821594238, 0.009259008407592773, 0.009318400382995605, 0.00921395206451416, 0.00927948760986328, 0.00922214412689209, 0.00930611228942871, 0.00929587173461914, 0.009283583641052246, 0.009247743606567382, 0.009275391578674316, 0.009283583641052246, 0.009246720314025878, 0.009157631874084473, 0.009256959915161133, 0.009208831787109375, 0.009342975616455078, 0.009251839637756347, 0.009563136100769042, 0.009648127555847168, 0.009371647834777832, 0.00919654369354248, 0.009185279846191406, 0.009273344039916993, 0.009264127731323242, 0.009289759635925293, 0.00927023983001709, 0.009227264404296874, 0.009273344039916993, 0.009218048095703125, 0.009189375877380371, 0.009225215911865235, 0.009255935668945312, 0.009242624282836913, 0.009170944213867188, 0.009294848442077636, 0.00929792022705078, 0.00923136043548584]",tokens/s,110.81015776605804,,,2,64,1,,, -4bit-awq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,EleutherAI/gpt-j-6b,EleutherAI/gpt-j-6b,cuda,0,42,,,True,,,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA 
A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,gptj,MB,3752.902656,5604.114432,0.0,4974.444544,4685.071872,s,1,10.3563564453125,10.3563564453125,0.0,10.3563564453125,10.3563564453125,10.3563564453125,10.3563564453125,[10.3563564453125],,kWh,4.254803022986065e-05,2.330400113797566e-05,7.606478307398135e-05,0.00014191681444181766,,MB,1966.477312,5643.96032,0.0,4993.318912,4233.626624,s,10,1.0546202163696288,0.10546202163696287,0.00020218637452673065,0.10539780426025391,0.10563232574462891,0.10579822616577149,0.10593094650268554,"[0.10557955169677734, 0.10525033569335937, 0.10596412658691406, 0.10533334350585938, 0.10539385223388673, 0.10536479949951172, 0.10524918365478515, 0.10540175628662109, 0.105595458984375, 0.10548780822753906]",tokens/s,2427.41411577754,kWh,1.246280637426979e-06,6.828725376766564e-07,4.795822550105057e-06,6.724975725208691e-06,tokens/kWh,38067051.90628116,MB,1975.791616,5643.96032,0.0,4993.318912,4336.17664,s,10,20.291557861328126,2.0291557861328124,0.015983654128438496,2.0227132568359374,2.0551712890625002,2.0571978271484377,2.0588190576171876,"[2.0340372314453123, 2.0224385986328124, 2.0107415771484374, 2.018682373046875, 2.01105029296875, 2.0229879150390624, 2.03636376953125, 2.054720947265625, 2.059224365234375, 2.021310791015625]",tokens/s,31.047394404382374,kWh,2.408776872833775e-05,1.3200791532836141e-05,5.212561187589434e-05,8.941417213706824e-05,tokens/kWh,704586.2920189386,,s,630,20.289135587692257,0.03220497712332105,0.0005756409963187565,0.03189452743530274,0.0329541618347168,0.03319383029937744,0.03378714591979981,"[0.031660032272338864, 0.031664127349853514, 0.03180544090270996, 0.03171225547790527, 0.03174297523498535, 0.03299737548828125, 0.03180851173400879, 0.031783935546875, 0.03157094383239746, 0.03172454452514648, 0.031855615615844726, 0.031719423294067385, 0.031698944091796875, 0.031665151596069335, 0.031704063415527346, 0.03183001518249512, 0.03163340759277344, 0.032589824676513675, 0.03279667282104492, 0.03243724822998047, 0.031850496292114255, 0.03294105529785156, 0.03267686462402344, 0.03175833511352539, 0.032115711212158206, 0.03191705513000488, 0.033157119750976564, 0.03177369689941406, 0.03237785720825195, 0.031884288787841795, 0.031971328735351565, 0.032727039337158204, 0.03260518264770508, 0.03265024185180664, 0.032717823028564456, 0.032123905181884765, 0.03183001518249512, 0.03174502372741699, 0.03168358421325684, 0.031662080764770506, 0.03179622459411621, 0.03174297523498535, 0.03182080078125, 0.031731712341308595, 0.03175632095336914, 0.03173270416259766, 0.03339878463745117, 0.03384832000732422, 0.033175552368164066, 0.03278131103515625, 0.03266355133056641, 0.032717823028564456, 0.032683006286621095, 0.03295129776000977, 0.03263385772705078, 0.03210342407226562, 0.03462041473388672, 0.03319705581665039, 0.03299126434326172, 0.03391689682006836, 0.03280287933349609, 0.03200505447387696, 0.0321710090637207, 0.03194777679443359, 0.03175014305114746, 0.031909887313842776, 0.031693824768066405, 0.031647743225097655, 0.03179520034790039, 0.03180339241027832, 0.03181158447265625, 0.03174297523498535, 0.03191193580627441, 0.03182899284362793, 0.03252326583862305, 0.03264921569824219, 0.032314369201660156, 0.03188531112670898, 0.03182592010498047, 0.03178700828552246, 0.03177881622314453, 0.03181875228881836, 0.031833087921142575, 0.03196006393432617, 0.032979969024658204, 0.03218227386474609, 0.031850496292114255, 0.03187404823303223, 0.0321710090637207, 0.03250790405273438, 0.03190681648254395, 0.03174092864990234, 
0.03170099258422852, 0.03170816040039062, 0.032048126220703126, 0.03207987213134766, 0.0318023681640625, 0.031710208892822264, 0.03181465530395508, 0.0323164176940918, 0.03279974365234375, 0.032688129425048826, 0.03282022476196289, 0.03278131103515625, 0.03259904098510742, 0.03172863960266113, 0.03163443183898926, 0.031898624420166014, 0.031870975494384765, 0.03322060775756836, 0.03326464080810547, 0.032282623291015625, 0.0317573127746582, 0.03175935935974121, 0.03172147178649903, 0.03218739318847656, 0.031693824768066405, 0.03177984046936035, 0.03186175918579102, 0.03196108818054199, 0.03287756729125976, 0.03363532638549805, 0.03275775909423828, 0.031866880416870115, 0.03214131164550781, 0.03199692726135254, 0.03184435272216797, 0.031659008026123044, 0.03161500740051269, 0.03171427154541016, 0.0316231689453125, 0.031681535720825195, 0.03177984046936035, 0.031970304489135744, 0.03165695953369141, 0.03171123123168945, 0.031676416397094724, 0.03163955116271973, 0.03159347152709961, 0.031664127349853514, 0.031768575668334964, 0.0317573127746582, 0.031559680938720705, 0.031693824768066405, 0.03187814331054688, 0.031681535720825195, 0.03172863960266113, 0.03168563270568848, 0.03256012725830078, 0.032366592407226565, 0.03229388809204101, 0.031899648666381834, 0.03242496109008789, 0.031942655563354495, 0.03230828857421875, 0.03267987060546875, 0.03203583908081055, 0.03174195289611816, 0.03163033676147461, 0.03179110336303711, 0.03163852882385254, 0.03165593528747559, 0.031676416397094724, 0.031714303970336914, 0.03171743965148926, 0.03179104042053223, 0.03172659111022949, 0.032729087829589845, 0.031869951248168944, 0.03206655883789063, 0.03151974487304687, 0.031696895599365234, 0.03176755142211914, 0.03179417610168457, 0.031524864196777344, 0.031736831665039066, 0.031696895599365234, 0.031748096466064454, 0.03170508766174317, 0.03177471923828125, 0.0324659194946289, 0.03281510543823242, 0.03327385711669922, 0.032679935455322266, 0.03177779197692871, 0.03249151992797852, 0.03279667282104492, 0.031692800521850584, 0.03172249603271484, 0.031718399047851564, 0.0316549129486084, 0.031678464889526366, 0.0316549129486084, 0.03273932647705078, 0.03354214477539062, 0.03284275054931641, 0.032271358489990236, 0.031954944610595705, 0.032694271087646484, 0.032699390411376955, 0.032704513549804685, 0.03256012725830078, 0.03257241439819336, 0.03207372665405273, 0.03180441665649414, 0.03175424003601074, 0.03200511932373047, 0.03180646324157715, 0.03172352027893066, 0.03178291130065918, 0.0318023681640625, 0.03226828765869141, 0.03222528076171875, 0.03248537445068359, 0.031749120712280275, 0.03167129516601563, 0.031664127349853514, 0.031648767471313476, 0.03147776031494141, 0.03173072052001953, 0.031741920471191405, 0.03167334365844727, 0.031715328216552735, 0.03162931251525879, 0.03163340759277344, 0.032686080932617184, 0.032679935455322266, 0.03273830413818359, 0.032737281799316405, 0.03262054443359375, 0.03183923149108887, 0.031663103103637694, 0.03166720008850098, 0.03154431915283203, 0.03177574348449707, 0.031678464889526366, 0.031698944091796875, 0.031731712341308595, 0.03162009620666504, 0.0316682243347168, 0.03189555168151856, 0.03268915176391601, 0.0326195182800293, 0.0328908805847168, 0.03277107238769531, 0.031955968856811526, 0.03165184020996094, 0.03169484710693359, 0.03167436790466309, 0.031541248321533204, 0.03167436790466309, 0.03169587135314941, 0.033995777130126956, 0.03253964614868164, 0.031627264022827145, 0.03174297523498535, 0.031751167297363284, 0.03176959991455078, 0.031783935546875, 
0.031838207244873046, 0.03170611190795898, 0.031676416397094724, 0.03163955116271973, 0.03167027282714844, 0.0317010555267334, 0.03172755241394043, 0.03151769638061523, 0.03139481544494629, 0.03158323287963867, 0.03167334365844727, 0.03186892890930176, 0.03191296005249023, 0.03240345764160156, 0.03194367980957031, 0.032379905700683595, 0.03277107238769531, 0.03279974365234375, 0.032674816131591795, 0.03181670379638672, 0.03173785591125488, 0.031677440643310545, 0.03172761535644531, 0.031849472045898435, 0.03187711906433106, 0.03176451110839844, 0.03146339225769043, 0.0318156795501709, 0.03176038360595703, 0.03176755142211914, 0.031736831665039066, 0.03179212760925293, 0.03186380767822266, 0.03175628852844238, 0.031579135894775394, 0.03181670379638672, 0.03172454452514648, 0.03184435272216797, 0.03213824081420898, 0.03278438568115234, 0.03254476928710937, 0.03182694435119629, 0.031678464889526366, 0.031838207244873046, 0.031902719497680664, 0.032306270599365236, 0.03229276657104492, 0.03189452743530274, 0.03193139266967773, 0.03168870353698731, 0.03172352027893066, 0.031719423294067385, 0.03176755142211914, 0.03176755142211914, 0.031733760833740236, 0.03182592010498047, 0.03181465530395508, 0.03278847885131836, 0.03267379379272461, 0.03284889602661133, 0.032827392578125, 0.032639999389648434, 0.03221196746826172, 0.03197644805908203, 0.03181363105773926, 0.03175424003601074, 0.03182592010498047, 0.03213824081420898, 0.03179622459411621, 0.031821823120117186, 0.03174399948120117, 0.03189452743530274, 0.03261644744873047, 0.032148479461669925, 0.032069633483886716, 0.0322979850769043, 0.03174399948120117, 0.03174092864990234, 0.031697919845581055, 0.031923200607299806, 0.0318474235534668, 0.03180441665649414, 0.03207884979248047, 0.033928192138671875, 0.03269734573364258, 0.03226214218139648, 0.03163852882385254, 0.03184025573730469, 0.03165081596374512, 0.031693824768066405, 0.03168460845947266, 0.03165593528747559, 0.03183923149108887, 0.0316753921508789, 0.03355136108398438, 0.03311001586914063, 0.032266239166259765, 0.031735807418823245, 0.031853567123413085, 0.03177471923828125, 0.031735807418823245, 0.031783935546875, 0.03170201683044434, 0.03180953598022461, 0.03303936004638672, 0.03279359817504883, 0.03180031967163086, 0.03261644744873047, 0.031855615615844726, 0.03171123123168945, 0.03171327972412109, 0.03190681648254395, 0.031765504837036135, 0.031893503189086916, 0.031736831665039066, 0.03198873519897461, 0.032694271087646484, 0.032835582733154296, 0.03192831993103027, 0.031835136413574217, 0.03167129516601563, 0.03174502372741699, 0.03155353546142578, 0.03177984046936035, 0.03187711906433106, 0.032008190155029294, 0.03184127998352051, 0.03197747230529785, 0.032671745300292966, 0.03172352027893066, 0.031591424942016604, 0.03210444641113281, 0.03329433441162109, 0.03190169525146484, 0.03182694435119629, 0.03167334365844727, 0.03231129455566406, 0.031991903305053714, 0.031646623611450195, 0.03179520034790039, 0.031834112167358396, 0.0316682243347168, 0.03184639930725098, 0.03180339241027832, 0.03234918212890625, 0.032833534240722655, 0.0330332145690918, 0.03302809524536133, 0.03275059127807617, 0.032709632873535156, 0.03181056022644043, 0.031838207244873046, 0.032702465057373044, 0.03159756851196289, 0.03177471923828125, 0.031715328216552735, 0.03185971260070801, 0.03282534408569336, 0.032909313201904294, 0.0315729923248291, 0.031747072219848634, 0.032210945129394535, 0.03181670379638672, 0.03459686279296875, 0.033301502227783206, 0.031751167297363284, 0.03262054443359375, 
0.03269529724121094, 0.03329740905761719, 0.03276800155639648, 0.03182796859741211, 0.03254579162597656, 0.03266764831542969, 0.033271808624267575, 0.03296255874633789, 0.033091583251953126, 0.03297484970092773, 0.03289907073974609, 0.03307724761962891, 0.033020927429199216, 0.03285913467407227, 0.03282227325439453, 0.032586753845214846, 0.03194675254821777, 0.03187609672546387, 0.03168767929077149, 0.03168972778320313, 0.0323768310546875, 0.03178188705444336, 0.031734783172607424, 0.03262464141845703, 0.031886335372924804, 0.03176652717590332, 0.03302912139892578, 0.03248332977294922, 0.032514049530029294, 0.03307929611206055, 0.03271987152099609, 0.03172454452514648, 0.03170508766174317, 0.03180748748779297, 0.03185971260070801, 0.031791135787963866, 0.03181974411010742, 0.03166720008850098, 0.03169587135314941, 0.031939584732055666, 0.033620990753173825, 0.032866302490234374, 0.03288371276855469, 0.032525310516357424, 0.03285811233520508, 0.032909313201904294, 0.032866302490234374, 0.032884735107421875, 0.03308748626708984, 0.03303628921508789, 0.03327897644042969, 0.03295334243774414, 0.032851966857910156, 0.03275980758666992, 0.03334041595458984, 0.03292671966552734, 0.03274444961547852, 0.032817150115966795, 0.03282227325439453, 0.0330250244140625, 0.03284172821044922, 0.033468414306640625, 0.03330355072021484, 0.032889854431152346, 0.03285708618164063, 0.033637374877929685, 0.033309696197509765, 0.03269529724121094, 0.03279564666748047, 0.032906238555908206, 0.03292671966552734, 0.03282329559326172, 0.03298303985595703, 0.0329697265625, 0.032753662109375, 0.03290828704833984, 0.03274342346191406, 0.03280384063720703, 0.03301279830932617, 0.03289081573486328, 0.032712703704833986, 0.03330559921264648, 0.0329615364074707, 0.032688129425048826, 0.03273318481445313, 0.03297075271606445, 0.032930816650390625, 0.03282329559326172, 0.03281817626953125, 0.03338547134399414, 0.03300044631958008, 0.03283660888671875, 0.03340595245361328, 0.03311820983886719, 0.03285606384277344, 0.03159347152709961, 0.03196416091918945, 0.03240038299560547, 0.03162931251525879, 0.03160371208190918, 0.032775169372558595, 0.03271475219726563, 0.03269734573364258, 0.03166720008850098, 0.03266355133056641, 0.031893503189086916, 0.03190169525146484, 0.03272499084472656, 0.03196211242675781, 0.031904767990112305, 0.03170201683044434, 0.031735807418823245, 0.033277950286865234, 0.03297587203979492, 0.0327823371887207, 0.032882686614990234, 0.03276902389526367, 0.03288371276855469, 0.03287875366210938, 0.03269615936279297, 0.03280281448364258, 0.03338751983642578, 0.03308544158935547, 0.032879615783691404, 0.03298918533325195, 0.033105918884277344, 0.0327116813659668, 0.032860160827636715, 0.03278131103515625, 0.03285094451904297, 0.032766014099121096, 0.032815040588378905, 0.032882686614990234, 0.03445043182373047, 0.03198361587524414, 0.03339878463745117, 0.031666175842285156, 0.03206655883789063, 0.03275059127807617, 0.03275263977050781, 0.032824321746826174, 0.03186892890930176, 0.03179929542541504, 0.03265331268310547, 0.032704513549804685, 0.03222323226928711, 0.03156582450866699, 0.0317388801574707, 0.03180441665649414, 0.033189888000488284, 0.032950271606445314, 0.03177471923828125, 0.03172966384887695, 0.03179520034790039, 0.03158732795715332, 0.03184127998352051, 0.031817728042602536, 0.03170201683044434, 0.031664127349853514, 0.03184332847595215, 0.031748096466064454, 0.031780864715576174, 0.031677440643310545, 0.03174297523498535, 0.03245260620117187, 0.03278847885131836, 0.032797695159912106, 
0.03169587135314941, 0.03172147178649903, 0.03168460845947266, 0.03171327972412109, 0.03190995216369629, 0.03172960090637207, 0.03176038360595703, 0.03180031967163086, 0.03177779197692871, 0.03168870353698731, 0.031893503189086916, 0.031664127349853514, 0.031817728042602536, 0.031853567123413085, 0.03181977653503418, 0.03240652847290039, 0.032586753845214846, 0.03270348739624023, 0.032686080932617184, 0.03274342346191406, 0.03219558334350586, 0.03259392166137695, 0.03273113632202149, 0.03268198394775391, 0.0317573127746582, 0.032571392059326174, 0.03251200103759765, 0.03265331268310547, 0.03275369644165039, 0.03287548828125, 0.03262771224975586, 0.0316753921508789, 0.03171123123168945, 0.0317071361541748, 0.031693887710571286, 0.03172038459777832, 0.031719423294067385]",tokens/s,31.05110108200809,,,2,64,1,,, -4bit-awq-exllama-v2-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,stabilityai/stablelm-2-12b,stabilityai/stablelm-2-12b,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - self.load_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3907, in from_pretrained - hf_quantizer.postprocess_model(model) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/base.py"", line 195, in postprocess_model - return self._process_model_after_weight_loading(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/quantizer_awq.py"", line 107, in _process_model_after_weight_loading - model = post_init_awq_exllama_modules(model, self.quantization_config.exllama_config) - File ""/usr/local/lib/python3.10/dist-packages/transformers/integrations/awq.py"", line 466, in 
post_init_awq_exllama_modules - model = exllamav2_post_init( - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllamav2.py"", line 198, in exllamav2_post_init - submodule.post_init(scratch_space=model.scratch_spaces[device]) - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllamav2.py"", line 81, in post_init - self.q_handle = exlv2_ext.make_q_matrix( -NameError: name 'exlv2_ext' is not defined - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,2,64,1,,, -4bit-awq-exllama-v2-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,google/gemma-2b,google/gemma-2b,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 304, in hf_raise_for_status - response.raise_for_status() - File ""/usr/local/lib/python3.10/dist-packages/requests/models.py"", line 1024, in raise_for_status - raise HTTPError(http_error_msg, response=self) -requests.exceptions.HTTPError: 403 Client Error: Forbidden for url: https://huggingface.co/google/gemma-2b/resolve/main/config.json - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1722, in _get_metadata_or_catch_error - metadata = get_hf_file_metadata(url=url, proxies=proxies, timeout=etag_timeout, headers=headers) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1645, in get_hf_file_metadata - r = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 372, in _request_wrapper - response = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 396, in _request_wrapper - hf_raise_for_status(response) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 367, in hf_raise_for_status - raise HfHubHTTPError(message, response=response) from e -huggingface_hub.utils._errors.HfHubHTTPError: (Request ID: Root=1-66948106-7eae8abf35688f2e537398ea;ddb9b4a1-0682-4438-b0ca-b68ec49fe82a) - -403 Forbidden: Please enable access to public gated repositories in your fine-grained token settings to view this repository.. 
-Cannot access content at: https://huggingface.co/google/gemma-2b/resolve/main/config.json. -If you are trying to create or update content,make sure you have a token with the `write` role. - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1221, in hf_hub_download - return _hf_hub_download_to_cache_dir( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1325, in _hf_hub_download_to_cache_dir - _raise_on_head_call_error(head_call_error, force_download, local_files_only) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1826, in _raise_on_head_call_error - raise LocalEntryNotFoundError( -huggingface_hub.utils._errors.LocalEntryNotFoundError: An error happened while trying to locate the file on the Hub and we cannot find the requested files in the local cache. Please check your connection and try again or make sure your Internet connection is on. - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 445, in cached_file - raise EnvironmentError( -OSError: We couldn't connect to 'https://huggingface.co' to load this file, couldn't find it in the cached files and it looks like google/gemma-2b is not the path to a directory containing a file named config.json. -Checkout your internet connection or see how to run the library in offline mode at 'https://huggingface.co/docs/transformers/installation#offline-mode'. 
- -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,2,64,1,,, -4bit-awq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,EleutherAI/polyglot-ko-12.8b,EleutherAI/polyglot-ko-12.8b,cuda,0,42,,,True,,,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,gpt_neox,MB,7362.404352,9907.470336,0.0,9277.800448,8679.633408,s,1,12.225109375,12.225109375,0.0,12.225109375,12.225109375,12.225109375,12.225109375,[12.225109375],,kWh,6.379633466875198e-05,3.4926428134278264e-05,0.0001312384383240217,0.00022996120112705196,,MB,1789.063168,9928.441856,0.0,9277.800448,8206.575616,s,10,2.0253566284179687,0.20253566284179686,8.06197243750747e-05,0.20254300689697266,0.20261893920898438,0.20264412536621096,0.20266427429199219,"[0.20250569152832032, 0.2025386199951172, 0.2026693115234375, 0.20261334228515626, 0.20261174011230468, 0.20254739379882813, 0.20243177795410155, 0.20240380859375, 0.2024653778076172, 0.20256956481933594]",tokens/s,1263.974928701642,kWh,2.3949648228054534e-06,1.3117624153462565e-06,9.704746652680063e-06,1.3411473890831774e-05,tokens/kWh,19088133.19727702,MB,1803.132928,9928.441856,0.0,9277.800448,8480.92416,s,10,21.735060791015623,2.173506079101563,0.03683146366697132,2.1620875244140625,2.2277313232421876,2.229778625488281,2.231416467285156,"[2.218291015625, 2.17312939453125, 2.231825927734375, 2.2272763671875, 2.139671630859375, 2.121445068359375, 2.148839111328125, 2.1504072265625, 2.15992138671875, 2.164253662109375]",tokens/s,28.985426176512735,kWh,2.4807794643167682e-05,1.3596134267623309e-05,7.139259600292048e-05,0.00010979652491371146,tokens/kWh,573788.6517766513,,s,630,21.732490276336677,0.034496016311645505,0.0009796531124699173,0.034696191787719724,0.03546708793640137,0.03573860912322998,0.03668577320098878,"[0.03558911895751953, 0.0357386245727539, 0.034994174957275394, 0.03496550369262695, 0.03482316970825195, 0.03483852767944336, 0.03508633422851563, 0.03520307159423828, 0.034816001892089846, 0.03496451187133789, 0.03539759826660156, 0.03514470291137695, 0.03504537582397461, 0.035119102478027346, 0.03500137710571289, 0.03573859024047851, 0.03550003051757813, 0.034857982635498046, 0.03511808013916016, 0.035133438110351564, 0.03502489471435547, 0.035154945373535154, 0.03498704147338867, 0.03493167877197265, 0.035089408874511716, 0.03528192138671875, 0.03506687927246094, 0.03514470291137695, 0.034971649169921876, 0.03501260757446289, 0.03540582275390625, 0.035135486602783206, 0.03502592086791992, 0.035007488250732424, 0.03502796936035156, 0.03502592086791992, 0.03501670455932617, 0.035046398162841795, 0.03527475357055664, 0.03510476684570313, 0.0350013427734375, 0.034985984802246094, 0.035102718353271486, 0.035138561248779294, 0.03552460861206055, 0.035625984191894534, 0.03517440032958984, 0.03543142318725586, 0.03501875305175781, 0.03520614242553711, 0.03506892776489258, 0.03501772689819336, 0.0351907844543457, 0.03525222396850586, 0.03527679824829102, 0.03513651275634765, 0.035148799896240236, 0.035156993865966796, 0.03521331024169922, 
0.035988479614257815, 0.03779481506347656, 0.03566796875, 0.03502284622192383, 0.0333199348449707, 0.03313868713378906, 0.03307110214233398, 0.03308441543579101, 0.03312643051147461, 0.0337213134765625, 0.03351859283447266, 0.03330559921264648, 0.03315507125854492, 0.0331253776550293, 0.033326080322265625, 0.03306598281860352, 0.03305267333984375, 0.03322367858886719, 0.033073150634765625, 0.03313151931762695, 0.03323494338989258, 0.03333222579956055, 0.03313356781005859, 0.03319504165649414, 0.034053089141845704, 0.03359743881225586, 0.0332042236328125, 0.03305984115600586, 0.03489894485473633, 0.03543961715698242, 0.034872318267822264, 0.03506995010375977, 0.03527372741699219, 0.03500646209716797, 0.035105792999267575, 0.03520409774780273, 0.0350904312133789, 0.03507712173461914, 0.03516108703613281, 0.03561062240600586, 0.03618611145019531, 0.03546419143676758, 0.03527884674072266, 0.03521535873413086, 0.035138561248779294, 0.03520204925537109, 0.03530342483520508, 0.03524198532104492, 0.035198974609375, 0.03526758575439453, 0.03531468963623047, 0.035343360900878903, 0.035110912322998046, 0.03536076736450195, 0.03515596771240234, 0.03529523086547852, 0.03526041412353516, 0.03520000076293945, 0.03523379135131836, 0.035079166412353514, 0.035335166931152344, 0.035519489288330076, 0.03539148712158203, 0.03546419143676758, 0.03496243286132812, 0.03523583984375, 0.03506790542602539, 0.03328307342529297, 0.03510067367553711, 0.03548364639282227, 0.035514366149902346, 0.03578060913085938, 0.036580352783203124, 0.03570380783081055, 0.035282943725585936, 0.03536076736450195, 0.035366912841796876, 0.03536076736450195, 0.035280895233154294, 0.03508531188964844, 0.03534131240844727, 0.03510988616943359, 0.03520204925537109, 0.03530854415893555, 0.03520204925537109, 0.035364864349365234, 0.03539865493774414, 0.03526144027709961, 0.03522457504272461, 0.03510784149169922, 0.03511500930786133, 0.03552870559692383, 0.03590655899047852, 0.03614720153808594, 0.03565670394897461, 0.035312641143798826, 0.035166206359863283, 0.03521638488769531, 0.03516108703613281, 0.03535769653320313, 0.03528192138671875, 0.03525734329223633, 0.03536281585693359, 0.03672883224487305, 0.035659774780273434, 0.0353177604675293, 0.0351723518371582, 0.03524607849121094, 0.03515596771240234, 0.03515801620483398, 0.03525632095336914, 0.03533004760742187, 0.03518668746948242, 0.03527679824829102, 0.035253246307373046, 0.035143680572509765, 0.035178497314453126, 0.03498700714111328, 0.035125247955322264, 0.03563008117675781, 0.03566694259643555, 0.035776512145996094, 0.03628441619873047, 0.03581542587280274, 0.03761356735229492, 0.035768318176269534, 0.0352624626159668, 0.03570380783081055, 0.035544063568115236, 0.035160064697265625, 0.03476684951782227, 0.03504435348510742, 0.03507199859619141, 0.03510681533813476, 0.035197952270507815, 0.03523481750488281, 0.03523481750488281, 0.03529216003417969, 0.03513753509521484, 0.035286014556884765, 0.035506175994873046, 0.03601408004760742, 0.03785625457763672, 0.03580416107177734, 0.03606118392944336, 0.03625983810424805, 0.03553177642822265, 0.03518771362304687, 0.03525836944580078, 0.03512934494018555, 0.03514470291137695, 0.03558092880249023, 0.03539558410644531, 0.035460094451904296, 0.03530137634277344, 0.03545292663574219, 0.035604480743408204, 0.03516108703613281, 0.03526860809326172, 0.035332096099853515, 0.03520614242553711, 0.03492659378051758, 0.03562496185302735, 0.03527577590942383, 0.03549798583984375, 0.03557785415649414, 0.0352542724609375, 0.03525120162963867, 
0.035138561248779294, 0.03517542266845703, 0.035361793518066405, 0.03522457504272461, 0.035140609741210936, 0.03538431930541992, 0.03599052810668945, 0.03570175933837891, 0.035270721435546874, 0.03510572814941406, 0.03509145736694336, 0.035345409393310545, 0.03502796936035156, 0.035133438110351564, 0.035342334747314456, 0.035149822235107424, 0.03560857772827149, 0.03539865493774414, 0.03522150421142578, 0.035178497314453126, 0.03489791870117188, 0.034966529846191405, 0.03472281646728516, 0.03494604873657227, 0.03518566513061523, 0.03520409774780273, 0.03521023941040039, 0.035645439147949216, 0.035404800415039066, 0.03497267150878906, 0.03513241577148438, 0.03449139022827148, 0.034574337005615234, 0.03444326400756836, 0.03467264175415039, 0.03588198471069336, 0.035163169860839845, 0.03448726272583008, 0.03448019027709961, 0.03467155075073242, 0.03447091293334961, 0.03377356719970703, 0.03315609741210938, 0.033051647186279294, 0.033181697845458984, 0.033154048919677735, 0.033091583251953126, 0.033073150634765625, 0.03304550552368164, 0.03296255874633789, 0.03308441543579101, 0.03314176177978516, 0.03292160034179688, 0.03311820983886719, 0.03317452621459961, 0.03285913467407227, 0.03302707290649414, 0.03309056091308594, 0.03306905746459961, 0.033160190582275394, 0.03354214477539062, 0.03342233657836914, 0.033142784118652346, 0.033068031311035154, 0.0331038703918457, 0.034522113800048826, 0.03456819152832031, 0.03437875366210937, 0.03456921768188476, 0.034601982116699216, 0.03445145416259766, 0.0345241584777832, 0.03457740783691406, 0.03448320007324219, 0.034651134490966795, 0.03449958419799805, 0.03462451171875, 0.03473920059204102, 0.033983486175537106, 0.03319705581665039, 0.03317964935302734, 0.03317145538330078, 0.033001472473144534, 0.03341107177734375, 0.03349708938598633, 0.03304140853881836, 0.03444838333129883, 0.03497881698608398, 0.033442817687988284, 0.03304345703125, 0.03299532699584961, 0.03309260940551758, 0.0348671989440918, 0.035340320587158205, 0.03430806350708008, 0.03317145538330078, 0.03312947082519531, 0.0331253776550293, 0.033119232177734374, 0.03315609741210938, 0.03305574417114258, 0.033219585418701174, 0.033148929595947264, 0.033097728729248044, 0.033116161346435545, 0.03316838455200195, 0.032933887481689454, 0.03462963104248047, 0.033104896545410156, 0.03315814590454102, 0.03326668930053711, 0.033105918884277344, 0.033006591796875, 0.03313971328735352, 0.03307929611206055, 0.033175552368164066, 0.0340049934387207, 0.03334860610961914, 0.033089534759521484, 0.03314080047607422, 0.03309971237182617, 0.03301375961303711, 0.033107967376708985, 0.0347770881652832, 0.0344719352722168, 0.03471155166625976, 0.034753536224365236, 0.0346060791015625, 0.03464089584350586, 0.0345456657409668, 0.03455385589599609, 0.034680831909179685, 0.034272254943847655, 0.03406131362915039, 0.03518054580688477, 0.03475558471679688, 0.03483443069458008, 0.03508838272094727, 0.03458355331420898, 0.03456409454345703, 0.03317452621459961, 0.033121280670166016, 0.032756736755371094, 0.03304550552368164, 0.03325439834594727, 0.033050624847412106, 0.03367833709716797, 0.03363430404663086, 0.03313459014892578, 0.033140735626220705, 0.03312025451660156, 0.03323392105102539, 0.032985088348388675, 0.03308963012695312, 0.033154975891113284, 0.03314995193481445, 0.03322265625, 0.03304652786254883, 0.03277619171142578, 0.03321343994140625, 0.03303833770751953, 0.033203201293945314, 0.03315507125854492, 0.03305472183227539, 0.03320832061767578, 0.03314790344238281, 0.03310182571411133, 0.03305574417114258, 
0.03310182571411133, 0.03319910430908203, 0.03585433578491211, 0.03638476943969727, 0.03523481750488281, 0.03480166244506836, 0.034756607055664065, 0.03449958419799805, 0.03460300827026367, 0.03460505676269531, 0.034799617767333986, 0.03487948989868164, 0.034664447784423826, 0.03469311904907227, 0.03453849411010742, 0.034541568756103515, 0.03480473709106445, 0.03458355331420898, 0.03467673492431641, 0.034506752014160154, 0.03465420913696289, 0.034993152618408206, 0.03508838272094727, 0.03459379196166992, 0.03461734390258789, 0.034699264526367186, 0.034702369689941406, 0.03463676834106445, 0.03451084899902344, 0.03457535934448242, 0.03476275253295898, 0.034923519134521484, 0.03497881698608398, 0.035643390655517575, 0.03508224105834961, 0.03480688095092774, 0.034523040771484374, 0.034490367889404294, 0.03324518585205078, 0.03344076919555664, 0.03332403182983398, 0.033188865661621096, 0.03329945755004883, 0.032950271606445314, 0.03317145538330078, 0.03312025451660156, 0.03366912078857422, 0.03314176177978516, 0.03309568023681641, 0.033172481536865236, 0.03306086349487305, 0.033097728729248044, 0.03293286514282227, 0.03306905746459961, 0.03307417678833008, 0.03313356781005859, 0.03312639999389649, 0.03336703872680664, 0.03372236633300781, 0.03319500732421875, 0.03326259231567383, 0.03311001586914063, 0.03301068878173828, 0.033224800109863284, 0.03311708831787109, 0.03313459014892578, 0.033189888000488284, 0.033225727081298825, 0.03305472183227539, 0.03312844848632813, 0.032998401641845705, 0.03312947082519531, 0.03306905746459961, 0.03399270248413086, 0.035140609741210936, 0.03609190368652344, 0.03620454406738281, 0.03497881698608398, 0.03529216003417969, 0.03497273635864258, 0.03471865463256836, 0.03483955383300781, 0.034601982116699216, 0.03468902587890625, 0.03332201766967773, 0.03473097610473633, 0.03464191818237305, 0.034887680053710936, 0.03523276901245117, 0.03497267150878906, 0.034618366241455076, 0.034854911804199216, 0.035023872375488284, 0.03533824157714844, 0.034551807403564457, 0.03498188781738281, 0.03486412811279297, 0.03474227142333984, 0.03536281585693359, 0.034938880920410156, 0.03467366409301758, 0.034756607055664065, 0.03461939239501953, 0.03546524810791016, 0.03461628723144531, 0.03445862579345703, 0.03376435089111328, 0.03495935821533203, 0.034743297576904295, 0.03409612655639648, 0.03474227142333984, 0.035740673065185545, 0.03546214294433594, 0.03491635131835937, 0.03457331085205078, 0.034528255462646484, 0.03460812759399414, 0.03458457565307617, 0.034830337524414064, 0.03467478561401367, 0.0338175048828125, 0.03304345703125, 0.03309056091308594, 0.03303833770751953, 0.03308031845092774, 0.033089534759521484, 0.03307827377319336, 0.0350750732421875, 0.03462758255004883, 0.034427902221679685, 0.03466342544555664, 0.034582527160644534, 0.03451289749145508, 0.034367488861083983, 0.034490367889404294, 0.03448835372924805, 0.03443299102783203, 0.03598137664794922, 0.03552556610107422, 0.034776065826416014, 0.03450777435302734, 0.03440435028076172, 0.03538739013671875, 0.0349224967956543, 0.03467673492431641, 0.034612224578857424, 0.03450982284545898, 0.034825214385986326, 0.03464191818237305, 0.034162689208984375, 0.03451289749145508, 0.03448524856567383, 0.0341923828125, 0.034369537353515625, 0.03443609619140625, 0.03444121551513672, 0.03467468643188477, 0.034552833557128904, 0.034514942169189454, 0.03472588729858399, 0.03442483139038086, 0.034543617248535156, 0.03441356658935547, 0.03342335891723633, 0.03311513519287109, 0.03317862319946289, 0.03311513519287109, 
0.03312639999389649, 0.03291340637207031, 0.033017856597900394, 0.03298303985595703, 0.032917503356933595, 0.03324313735961914, 0.03291033554077148, 0.03305574417114258, 0.03300249481201172, 0.033037311553955076, 0.033111072540283205, 0.03301372909545899, 0.03405209732055664, 0.034783233642578126, 0.03378073501586914, 0.03327385711669922, 0.03344076919555664, 0.03318783950805664, 0.03308544158935547, 0.03313971328735352, 0.033527809143066405, 0.034405376434326174, 0.034495487213134765, 0.03460710525512695, 0.03749785614013672, 0.03638988876342773, 0.03489382553100586, 0.03473100662231445, 0.03755110549926758, 0.03749273681640625, 0.035833854675292966, 0.034050048828125, 0.033140735626220705, 0.03419136047363281, 0.03362508773803711, 0.03451084899902344, 0.03462963104248047, 0.034328575134277346, 0.0344535026550293, 0.03418624114990235, 0.03381043243408203, 0.03524095916748047, 0.035527679443359376, 0.03447808074951172, 0.03458150482177735, 0.034609153747558595, 0.03429683303833008, 0.033898494720458985, 0.034490367889404294, 0.03439923095703125, 0.03290425491333008, 0.03442169570922852, 0.03448831939697266, 0.033974273681640625, 0.03390873718261719, 0.034285568237304685, 0.034648063659667966, 0.03445555114746094, 0.03467059326171875, 0.035410945892333984, 0.03525529479980469, 0.03467571258544922, 0.035192832946777344, 0.03348992156982422, 0.03424460983276367, 0.034764801025390625, 0.03462963104248047, 0.03460095977783203]",tokens/s,28.988854567024603,,,2,64,1,,, -4bit-awq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,Qwen/Qwen-72B,Qwen/Qwen-72B,cuda,0,42,,,True,,,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 613, in resolve_trust_remote_code - answer = input( -EOFError: EOF when reading a line - -During handling of the above exception, another exception occurred: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run - report = scenario.run(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run - self.run_model_loading_tracking(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking - backend.load() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load - 
self.load_transformers_model() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 149, in load_transformers_model - self.create_no_weights_model() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 269, in create_no_weights_model - meta_model = self.automodel_loader.from_config(self.pretrained_config) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 419, in from_config - trust_remote_code = resolve_trust_remote_code( - File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 626, in resolve_trust_remote_code - raise ValueError( -ValueError: The repository for Qwen/Qwen-72B contains custom code which must be executed to correctly load the model. You can inspect the repository content at https://hf.co/Qwen/Qwen-72B. -Please pass the argument `trust_remote_code=True` to allow custom code to be run. - -",qwen,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,2,64,1,,, -4bit-awq-exllama-v2-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,i,i,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 304, in hf_raise_for_status - response.raise_for_status() - File ""/usr/local/lib/python3.10/dist-packages/requests/models.py"", line 1024, in raise_for_status - raise HTTPError(http_error_msg, response=self) -requests.exceptions.HTTPError: 404 Client Error: Not Found for url: https://huggingface.co/i/resolve/main/config.json - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1221, in hf_hub_download - return _hf_hub_download_to_cache_dir( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1325, in _hf_hub_download_to_cache_dir - _raise_on_head_call_error(head_call_error, force_download, local_files_only) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1823, in _raise_on_head_call_error - raise head_call_error - File 
""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1722, in _get_metadata_or_catch_error - metadata = get_hf_file_metadata(url=url, proxies=proxies, timeout=etag_timeout, headers=headers) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1645, in get_hf_file_metadata - r = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 372, in _request_wrapper - response = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 396, in _request_wrapper - hf_raise_for_status(response) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status - raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-66949009-0f5883156c63c2794cf9eeb6;2468f1ff-3ca9-4800-855c-7fe4321cfa1a) - -Repository Not Found for url: https://huggingface.co/i/resolve/main/config.json. -Please make sure you specified the correct `repo_id` and `repo_type`. -If you are trying to access a private or gated repo, make sure you are authenticated. - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 425, in cached_file - raise EnvironmentError( -OSError: i is not a local folder and is not a valid model identifier listed on 'https://huggingface.co/models' -If this is a private repository, make sure to pass a token having permission to this repo either by logging in with `huggingface-cli login` or by passing `token=` - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,2,64,1,,, 
-4bit-awq-exllama-v2-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,stabilityai/stablelm-3b-4e1t,stabilityai/stablelm-3b-4e1t,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - self.load_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3907, in from_pretrained - hf_quantizer.postprocess_model(model) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/base.py"", line 195, in postprocess_model - return self._process_model_after_weight_loading(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/quantizer_awq.py"", line 107, in _process_model_after_weight_loading - model = post_init_awq_exllama_modules(model, self.quantization_config.exllama_config) - File ""/usr/local/lib/python3.10/dist-packages/transformers/integrations/awq.py"", line 466, in post_init_awq_exllama_modules - model = exllamav2_post_init( - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllamav2.py"", line 198, in exllamav2_post_init - submodule.post_init(scratch_space=model.scratch_spaces[device]) - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllamav2.py"", line 81, in post_init - self.q_handle = exlv2_ext.make_q_matrix( -NameError: name 'exlv2_ext' is not defined - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,2,64,1,,, 
-4bit-awq-exllama-v2-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,tiiuae/falcon-rw-1b,tiiuae/falcon-rw-1b,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 613, in resolve_trust_remote_code - answer = input( -EOFError: EOF when reading a line - -During handling of the above exception, another exception occurred: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 231, in load_model_with_no_weights - self.create_no_weights_model() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 213, in create_no_weights_model - meta_model = self.automodel_class.from_config(self.pretrained_config) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 419, in from_config - trust_remote_code = resolve_trust_remote_code( - File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 626, in resolve_trust_remote_code - raise ValueError( -ValueError: The repository for tiiuae/falcon-rw-1b contains custom code which must be executed to correctly load the model. You can inspect the repository content at https://hf.co/tiiuae/falcon-rw-1b. -Please pass the argument `trust_remote_code=True` to allow custom code to be run. 
- -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,2,64,1,,, -4bit-awq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,Qwen/Qwen-14B,Qwen/Qwen-14B,cuda,0,42,,,True,,,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 613, in resolve_trust_remote_code - answer = input( -EOFError: EOF when reading a line - -During handling of the above exception, another exception occurred: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run - report = scenario.run(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run - self.run_model_loading_tracking(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking - backend.load() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load - self.load_transformers_model() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 149, in load_transformers_model - self.create_no_weights_model() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 269, in create_no_weights_model - meta_model = self.automodel_loader.from_config(self.pretrained_config) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 419, in from_config - trust_remote_code = resolve_trust_remote_code( - File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 626, in resolve_trust_remote_code - raise ValueError( -ValueError: The repository for Qwen/Qwen-14B contains custom code which must be executed to correctly load the model. You can inspect the repository content at https://hf.co/Qwen/Qwen-14B. -Please pass the argument `trust_remote_code=True` to allow custom code to be run. 
- -",qwen,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,2,64,1,,, -4bit-awq-exllama-v2-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,stabilityai/stablelm-2-1_6b,stabilityai/stablelm-2-1_6b,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - self.load_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3907, in from_pretrained - hf_quantizer.postprocess_model(model) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/base.py"", line 195, in postprocess_model - return self._process_model_after_weight_loading(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/quantizer_awq.py"", line 107, in _process_model_after_weight_loading - model = post_init_awq_exllama_modules(model, self.quantization_config.exllama_config) - File ""/usr/local/lib/python3.10/dist-packages/transformers/integrations/awq.py"", line 466, in post_init_awq_exllama_modules - model = exllamav2_post_init( - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllamav2.py"", line 198, in exllamav2_post_init - submodule.post_init(scratch_space=model.scratch_spaces[device]) - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllamav2.py"", line 81, in post_init - self.q_handle = exlv2_ext.make_q_matrix( -NameError: name 'exlv2_ext' is not defined - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,2,64,1,,, 
-4bit-awq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,EleutherAI/pythia-6.7b,EleutherAI/pythia-6.7b,cuda,0,42,,,True,,,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,gpt_neox,MB,4193.792,6159.859712,0.0,5530.189824,5138.859008,s,1,10.4326904296875,10.4326904296875,0.0,10.4326904296875,10.4326904296875,10.4326904296875,10.4326904296875,[10.4326904296875],,kWh,4.2205925559015775e-05,2.3116362947213676e-05,7.956173031603564e-05,0.0001448840188222651,,MB,1656.5248,6185.025536,0.0,5534.384128,4845.009408,s,10,1.1223208923339842,0.11223208923339842,6.923289997726557e-05,0.11220393753051758,0.11231198196411132,0.11235208549499512,0.11238416831970215,"[0.11219334411621094, 0.11215245056152344, 0.11217810821533203, 0.11218844604492187, 0.11221453094482423, 0.1123921890258789, 0.11230307006835938, 0.11219036865234375, 0.11222614288330078, 0.11228224182128907]",tokens/s,2280.9875655760193,kWh,1.3274420884050778e-06,7.273732638101519e-07,5.45539138053974e-06,7.510206732754969e-06,tokens/kWh,34086944.4889557,MB,1700.663296,6185.025536,0.0,5534.384128,5014.504448,s,10,17.45094616699219,1.745094616699219,0.0041027067635340956,1.7461857299804686,1.7495196777343751,1.749908154296875,1.7502189355468751,"[1.744258056640625, 1.7452841796875, 1.7470872802734374, 1.737177978515625, 1.750296630859375, 1.7475389404296875, 1.7441563720703126, 1.749433349609375, 1.7384871826171875, 1.7472261962890625]",tokens/s,36.101194397792675,kWh,2.0788426311318114e-05,1.1392425214127637e-05,5.015009754826072e-05,8.233094907370646e-05,tokens/kWh,765204.3454958778,,s,630,17.448615951538084,0.027696215796092195,0.0004029301570840029,0.027622400283813478,0.02802800693511963,0.02843407325744629,0.029378877239227295,"[0.027717632293701173, 0.027611135482788086, 0.027641855239868163, 0.027599872589111327, 0.027672576904296874, 0.027707391738891602, 0.027633663177490234, 0.027636735916137696, 0.027671552658081053, 0.027651071548461914, 0.02771046447753906, 0.027810815811157227, 0.02773811149597168, 0.02774835205078125, 0.02833510398864746, 0.028653568267822265, 0.027684864044189454, 0.027669504165649415, 0.027623424530029295, 0.027456512451171877, 0.027695104598999022, 0.027622400283813478, 0.027641855239868163, 0.027623424530029295, 0.027660287857055665, 0.027460607528686523, 0.027632640838623046, 0.027617279052734374, 0.02756505584716797, 0.027645952224731447, 0.027616256713867186, 0.02772275161743164, 0.027580415725708008, 0.027658239364624023, 0.027624448776245116, 0.02794803237915039, 0.027671552658081053, 0.027672576904296874, 0.02769817543029785, 0.02774323272705078, 0.027624448776245116, 0.027622400283813478, 0.027576320648193358, 0.027624448776245116, 0.027682815551757813, 0.027695104598999022, 0.027704320907592773, 0.02774630355834961, 0.027662336349487306, 0.027648000717163085, 0.027923456192016603, 0.027487232208251954, 0.027633663177490234, 0.027423744201660157, 0.02753433609008789, 0.027752447128295898, 0.02775142478942871, 0.027724800109863282, 0.02773094367980957, 0.027628543853759766, 0.027654144287109376, 
0.027659263610839844, 0.027427839279174804, 0.027659263610839844, 0.02759884834289551, 0.027621376037597657, 0.0275230712890625, 0.027603967666625977, 0.027707391738891602, 0.02774323272705078, 0.027602943420410156, 0.02753433609008789, 0.0276889591217041, 0.027580415725708008, 0.02768998336791992, 0.027585535049438475, 0.027588607788085938, 0.030290943145751953, 0.02861568069458008, 0.027640832901000976, 0.02775654411315918, 0.027649023056030272, 0.02774323272705078, 0.027623424530029295, 0.027777023315429687, 0.027502592086791993, 0.027813888549804686, 0.027411455154418944, 0.027621376037597657, 0.02756608009338379, 0.027619327545166016, 0.02760601615905762, 0.02772275161743164, 0.0275599365234375, 0.027834367752075196, 0.02759884834289551, 0.027694080352783205, 0.027572223663330078, 0.02775142478942871, 0.027618303298950195, 0.027561983108520507, 0.027279359817504883, 0.027476991653442383, 0.0277258243560791, 0.02778726387023926, 0.02854911994934082, 0.02790809631347656, 0.027682815551757813, 0.027683839797973633, 0.02748313522338867, 0.02771353530883789, 0.027612159729003907, 0.02755788803100586, 0.027622400283813478, 0.027699199676513672, 0.027599872589111327, 0.027703296661376952, 0.02731929588317871, 0.027802623748779298, 0.027608064651489257, 0.027660287857055665, 0.027454463958740235, 0.027615232467651366, 0.027387903213500975, 0.02768998336791992, 0.02756403160095215, 0.027465728759765624, 0.02753126335144043, 0.027604991912841798, 0.02752921676635742, 0.027639808654785155, 0.027388927459716796, 0.02755379295349121, 0.027658239364624023, 0.02757734489440918, 0.027494400024414063, 0.02759884834289551, 0.028073984146118162, 0.027757568359375, 0.027673599243164062, 0.027672576904296874, 0.027618303298950195, 0.02772889518737793, 0.027568128585815428, 0.02738380813598633, 0.027594751358032226, 0.027637760162353517, 0.027571199417114257, 0.027708415985107423, 0.027673599243164062, 0.02756915283203125, 0.02756505584716797, 0.027482112884521483, 0.027423744201660157, 0.02750668716430664, 0.027546623229980468, 0.027517951965332032, 0.027661312103271486, 0.027554815292358398, 0.027551744461059572, 0.02752409553527832, 0.027476991653442383, 0.028069887161254883, 0.028318719863891603, 0.02751283264160156, 0.02787942314147949, 0.027794431686401368, 0.027684864044189454, 0.028539903640747072, 0.027765760421752928, 0.0277258243560791, 0.027913215637207032, 0.027871231079101562, 0.02914406394958496, 0.02815385627746582, 0.02758246421813965, 0.02735103988647461, 0.027681791305541992, 0.027643903732299805, 0.02759065628051758, 0.027641855239868163, 0.027653120040893556, 0.02733568000793457, 0.027604991912841798, 0.027797504425048827, 0.028794879913330077, 0.029412351608276367, 0.02773094367980957, 0.027587583541870117, 0.02774527931213379, 0.027583488464355467, 0.027619327545166016, 0.02757734489440918, 0.02750054359436035, 0.027442176818847655, 0.027626495361328125, 0.027658239364624023, 0.027829248428344725, 0.027983871459960938, 0.027639808654785155, 0.027695104598999022, 0.027546623229980468, 0.027602943420410156, 0.02716979217529297, 0.027673599243164062, 0.02796544075012207, 0.02772377586364746, 0.027624448776245116, 0.027648000717163085, 0.02735820770263672, 0.027287551879882813, 0.027505664825439452, 0.027667455673217774, 0.027407360076904298, 0.027588607788085938, 0.02751590347290039, 0.027578367233276366, 0.02753331184387207, 0.027595775604248047, 0.027513856887817382, 0.027505664825439452, 0.027591680526733397, 0.027481088638305663, 0.027551744461059572, 0.02756608009338379, 
0.027608064651489257, 0.027511808395385744, 0.027509759902954102, 0.027382783889770508, 0.027487232208251954, 0.02752921676635742, 0.027437055587768554, 0.027591680526733397, 0.027452415466308593, 0.028572671890258788, 0.02778828811645508, 0.02769715118408203, 0.027337728500366212, 0.027595775604248047, 0.027305984497070314, 0.027613183975219727, 0.027858943939208985, 0.02857574462890625, 0.02750464057922363, 0.02753023910522461, 0.0273438720703125, 0.027271167755126953, 0.02717081642150879, 0.02732748794555664, 0.027287551879882813, 0.02733158493041992, 0.027227136611938478, 0.028017663955688478, 0.03155353546142578, 0.028028928756713867, 0.027535360336303712, 0.027631616592407225, 0.027554815292358398, 0.027535360336303712, 0.027678720474243163, 0.027609088897705077, 0.02751590347290039, 0.02753126335144043, 0.02753433609008789, 0.027618303298950195, 0.027686912536621092, 0.02768998336791992, 0.027778047561645508, 0.02759065628051758, 0.027616256713867186, 0.027223039627075195, 0.02758143997192383, 0.027496448516845705, 0.027660287857055665, 0.02834636878967285, 0.02933452796936035, 0.027807743072509765, 0.02756915283203125, 0.027621376037597657, 0.027426816940307616, 0.027881471633911133, 0.027643903732299805, 0.02769817543029785, 0.027801599502563477, 0.02754560089111328, 0.027465728759765624, 0.0281395206451416, 0.02814566421508789, 0.0277708797454834, 0.027604991912841798, 0.027313152313232423, 0.027681791305541992, 0.02757427215576172, 0.027648000717163085, 0.027648000717163085, 0.02758143997192383, 0.027251712799072264, 0.028483583450317384, 0.027784191131591796, 0.027686912536621092, 0.02770636749267578, 0.027229183197021483, 0.027833343505859375, 0.02819071960449219, 0.027661312103271486, 0.027622400283813478, 0.02791219139099121, 0.027812864303588865, 0.027821056365966795, 0.027910144805908203, 0.027784191131591796, 0.027663360595703124, 0.027715583801269532, 0.02757427215576172, 0.027511808395385744, 0.02791935920715332, 0.02753945541381836, 0.027649023056030272, 0.027678720474243163, 0.027594751358032226, 0.027267072677612306, 0.027325439453125, 0.02733875274658203, 0.027652095794677735, 0.027665407180786132, 0.027386880874633788, 0.02753331184387207, 0.027473920822143554, 0.027404287338256835, 0.027348991394042968, 0.027601919174194335, 0.02759782409667969, 0.027636735916137696, 0.027444223403930663, 0.02798080062866211, 0.02757734489440918, 0.029062143325805666, 0.027775999069213866, 0.02758246421813965, 0.027451391220092772, 0.02757427215576172, 0.027407360076904298, 0.028264448165893553, 0.027822080612182616, 0.027561983108520507, 0.027596799850463868, 0.027608064651489257, 0.02732441520690918, 0.027664384841918944, 0.02750464057922363, 0.027448320388793947, 0.02733260726928711, 0.02754560089111328, 0.027473920822143554, 0.02772275161743164, 0.029154304504394532, 0.028531711578369142, 0.02755891227722168, 0.02754969596862793, 0.027482112884521483, 0.027673599243164062, 0.027439104080200196, 0.027656192779541015, 0.02760704040527344, 0.027617279052734374, 0.027616256713867186, 0.027660287857055665, 0.02879795265197754, 0.02999398422241211, 0.028481536865234375, 0.028477439880371092, 0.027864063262939453, 0.027615232467651366, 0.027451391220092772, 0.027663360595703124, 0.027696128845214843, 0.027676671981811524, 0.02768076705932617, 0.02774323272705078, 0.027664384841918944, 0.02774220848083496, 0.028609535217285157, 0.027862016677856444, 0.02755891227722168, 0.027445247650146484, 0.02757529640197754, 0.027626495361328125, 0.027356159210205077, 0.027406335830688477, 
0.02736332893371582, 0.027785215377807617, 0.027585535049438475, 0.02776166343688965, 0.027777023315429687, 0.027646976470947264, 0.027616256713867186, 0.02774323272705078, 0.027622400283813478, 0.027826175689697266, 0.028646400451660156, 0.02773811149597168, 0.027778047561645508, 0.027699199676513672, 0.027655168533325194, 0.027833343505859375, 0.027654144287109376, 0.02772275161743164, 0.027509759902954102, 0.027807743072509765, 0.02740838432312012, 0.028025856018066408, 0.027634687423706054, 0.027683839797973633, 0.027630592346191408, 0.027691007614135742, 0.027681791305541992, 0.028013568878173828, 0.027814912796020507, 0.02758246421813965, 0.027679744720458983, 0.02752204895019531, 0.027601919174194335, 0.0273305606842041, 0.027619327545166016, 0.02775859260559082, 0.027707391738891602, 0.027636735916137696, 0.027279359817504883, 0.027622400283813478, 0.027625471115112304, 0.027707391738891602, 0.027447296142578126, 0.02732441520690918, 0.027641855239868163, 0.027432960510253908, 0.028034048080444338, 0.027622400283813478, 0.027691007614135742, 0.02774220848083496, 0.027701248168945314, 0.027672576904296874, 0.027693056106567384, 0.028284927368164063, 0.027757568359375, 0.02772991943359375, 0.027639808654785155, 0.02771455955505371, 0.027518976211547853, 0.02775859260559082, 0.027448320388793947, 0.02731827163696289, 0.02768998336791992, 0.027380735397338866, 0.027303936004638672, 0.027237375259399413, 0.02754867172241211, 0.02774732780456543, 0.02771353530883789, 0.027621376037597657, 0.027659263610839844, 0.0276889591217041, 0.02776780891418457, 0.027587583541870117, 0.02775142478942871, 0.027831296920776367, 0.02773196792602539, 0.027654144287109376, 0.028099584579467773, 0.027618303298950195, 0.027651071548461914, 0.027657215118408202, 0.02772172737121582, 0.027685888290405275, 0.02777190399169922, 0.02773094367980957, 0.027996160507202147, 0.028039167404174805, 0.028027904510498046, 0.028572671890258788, 0.028248064041137694, 0.02835353660583496, 0.028087295532226563, 0.028242944717407226, 0.028444671630859376, 0.02797670364379883, 0.028165119171142578, 0.027850751876831056, 0.028391424179077147, 0.028421119689941408, 0.02812723159790039, 0.028161024093627928, 0.027630592346191408, 0.027592704772949218, 0.027217920303344727, 0.027415552139282227, 0.027196416854858397, 0.027686912536621092, 0.027497472763061522, 0.027240447998046875, 0.02718003273010254, 0.027490304946899413, 0.02755276870727539, 0.02753331184387207, 0.02812211227416992, 0.027494400024414063, 0.027517951965332032, 0.027609088897705077, 0.02753945541381836, 0.02776371192932129, 0.027267072677612306, 0.027623424530029295, 0.027757568359375, 0.027291648864746092, 0.02737766456604004, 0.027386880874633788, 0.027594751358032226, 0.027455488204956056, 0.02730188751220703, 0.027385856628417967, 0.02753740882873535, 0.027433984756469725, 0.02771455955505371, 0.029032447814941405, 0.02860032081604004, 0.02797875213623047, 0.02775142478942871, 0.027494400024414063, 0.02753740882873535, 0.02749849510192871, 0.02755583953857422, 0.027687936782836913, 0.027587583541870117, 0.027390975952148438, 0.02731827163696289, 0.0274913272857666, 0.027797504425048827, 0.027475967407226562, 0.02755276870727539, 0.02751283264160156, 0.02857472038269043, 0.028305408477783203, 0.028029951095581054, 0.027857919692993165, 0.02729267120361328, 0.027433984756469725, 0.027402240753173827, 0.028075008392333983, 0.027865087509155274, 0.027546623229980468, 0.027303936004638672, 0.02734284782409668, 0.027251712799072264, 0.027595775604248047, 
0.0275281925201416, 0.027329536437988283, 0.027275264739990233, 0.027405311584472656, 0.027225088119506836, 0.027431936264038087, 0.027258880615234377, 0.02714419174194336, 0.027798528671264647, 0.02770227241516113, 0.02754867172241211, 0.027447296142578126, 0.02749235153198242, 0.027481088638305663, 0.027640832901000976, 0.0275599365234375, 0.027476991653442383, 0.02733363151550293, 0.027546623229980468, 0.027479040145874024, 0.027234304428100587, 0.0275599365234375, 0.027584512710571288, 0.027240447998046875, 0.027468799591064453, 0.027841535568237305, 0.027807743072509765, 0.027490304946899413, 0.027784191131591796, 0.027765760421752928, 0.027449344635009764, 0.02754150390625, 0.027584512710571288, 0.02895359992980957, 0.029682687759399414, 0.02820812797546387, 0.027685888290405275, 0.027668479919433595, 0.027485183715820313, 0.027452415466308593, 0.027222015380859374, 0.027820032119750978, 0.030839807510375978, 0.02856038475036621, 0.027495424270629884, 0.027637760162353517, 0.027482112884521483, 0.027518976211547853, 0.027535360336303712, 0.02750873565673828, 0.027390975952148438, 0.027646976470947264, 0.02736844825744629, 0.027520000457763674, 0.027752447128295898, 0.02751283264160156, 0.027579391479492187, 0.02754867172241211, 0.027592704772949218, 0.027617279052734374, 0.02775142478942871, 0.028217344284057616, 0.029396991729736328, 0.028395519256591797, 0.027591680526733397, 0.02774323272705078, 0.02750668716430664, 0.027563007354736328, 0.027283456802368163, 0.02752921676635742, 0.027444223403930663, 0.02754969596862793, 0.027489280700683592, 0.027484159469604492, 0.027571199417114257, 0.02750771141052246, 0.02730291175842285]",tokens/s,36.106015614634806,,,2,64,1,,, -4bit-awq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,01-ai/Yi-34B,01-ai/Yi-34B,cuda,0,42,,,True,,,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,llama,MB,17675.526144,22507.159552,0.0,21877.489664,21024.863232,s,1,19.50287890625,19.50287890625,0.0,19.50287890625,19.50287890625,19.50287890625,19.50287890625,[19.50287890625],,kWh,0.00015070446977708262,8.258333925927872e-05,0.0003158260859940018,0.0005491138950303631,,MB,4686.061568,22591.045632,0.0,21940.404224,19728.708096,s,10,5.307831848144531,0.5307831848144531,9.359138123565499e-05,0.5307865295410157,0.53087939453125,0.5309120788574219,0.5309382263183594,"[0.5308721313476562, 0.530847412109375, 0.5308423461914062, 0.5307940063476563, 0.5306777954101562, 0.530664306640625, 0.5307696533203125, 0.530779052734375, 0.5309447631835937, 0.530640380859375]",tokens/s,482.3061606397543,kWh,6.275474799232374e-06,3.4386563256518067e-06,2.434737327905226e-05,3.406150440393644e-05,tokens/kWh,7515816.0063656615,MB,4690.325504,22616.211456,0.0,21965.570048,19728.710656,s,10,32.798836425781246,3.279883642578125,0.008840667319575755,3.276041259765625,3.29236142578125,3.2925156738281247,3.292639072265625,"[3.288758056640625, 3.2923271484375, 3.270821044921875, 3.2727900390625, 3.271623291015625, 3.292669921875, 3.27581005859375, 3.287604736328125, 3.2762724609375, 
3.27015966796875]",tokens/s,19.207998473531024,kWh,3.903192687694779e-05,2.1392178127967344e-05,0.000139753563557147,0.00020017766856206208,tokens/kWh,314720.42037729995,,s,630,32.79617642211916,0.05205742289225261,0.0008290190368617073,0.051853809356689456,0.05267241096496582,0.05311344738006592,0.05457766307830811,"[0.052528129577636716, 0.05177036666870117, 0.05139353561401367, 0.051525630950927735, 0.05161574554443359, 0.052997119903564455, 0.052836353302001954, 0.052934654235839845, 0.05406515121459961, 0.052411392211914064, 0.05203046417236328, 0.05214822387695312, 0.05187583923339844, 0.05173759841918945, 0.051833854675292966, 0.051593215942382815, 0.05119180679321289, 0.051566593170166014, 0.0518021125793457, 0.05219532775878906, 0.05193011093139648, 0.05192192077636719, 0.05197516632080078, 0.052152320861816405, 0.05274726486206055, 0.05371903991699219, 0.05292851257324219, 0.051926017761230465, 0.05216767883300781, 0.05233356857299805, 0.05175500869750976, 0.052001792907714846, 0.05179904174804688, 0.05183590316772461, 0.05220454406738281, 0.05197414398193359, 0.05187686538696289, 0.051945472717285154, 0.05212364959716797, 0.052168704986572265, 0.053288959503173826, 0.05211443328857422, 0.05199257659912109, 0.0528721923828125, 0.05247385787963867, 0.05363404846191406, 0.051918846130371094, 0.05259571075439453, 0.05311897659301758, 0.052393985748291017, 0.05208268737792969, 0.05185945510864258, 0.05201100921630859, 0.05206016159057617, 0.05247488021850586, 0.05224959945678711, 0.05192499160766602, 0.052133888244628904, 0.05203968048095703, 0.05201100921630859, 0.051676158905029294, 0.05177139282226562, 0.05219942474365234, 0.05258342361450195, 0.05184307098388672, 0.05176729583740235, 0.05250457763671875, 0.05169868850708008, 0.051863552093505856, 0.05185843276977539, 0.052057086944580076, 0.05228134536743164, 0.05205196762084961, 0.052116481781005856, 0.05164646530151367, 0.051860481262207034, 0.0520447998046875, 0.05233152008056641, 0.05269094467163086, 0.051689472198486325, 0.051850238800048826, 0.051781631469726565, 0.051639297485351565, 0.05177958297729492, 0.05163212966918945, 0.05252710342407227, 0.052144126892089845, 0.05181542587280273, 0.051810302734375, 0.05148979187011719, 0.051716094970703126, 0.052972545623779295, 0.052572158813476565, 0.051884033203125, 0.05889023971557617, 0.05851955032348633, 0.051896320343017575, 0.05238784027099609, 0.05240627288818359, 0.05204991912841797, 0.05169868850708008, 0.05178265762329102, 0.05181235122680664, 0.051722240447998044, 0.052789249420166016, 0.051947521209716796, 0.05178572845458984, 0.05177139282226562, 0.051678207397460936, 0.052217857360839844, 0.05393305587768555, 0.052808704376220705, 0.05251891326904297, 0.05173657608032227, 0.05174476623535156, 0.05185638427734375, 0.051694591522216796, 0.05244416046142578, 0.0535470085144043, 0.05175807952880859, 0.05189836883544922, 0.05148876953125, 0.05165158462524414, 0.05156966400146484, 0.0518021125793457, 0.05175398254394531, 0.05243801498413086, 0.053515262603759765, 0.05172531127929687, 0.051343360900878904, 0.05158399963378906, 0.05438566589355469, 0.05416550445556641, 0.05299507141113281, 0.05176422500610352, 0.05168230438232422, 0.05168435287475586, 0.05168742370605469, 0.05198438262939453, 0.051738624572753904, 0.051743743896484375, 0.051901439666748046, 0.05161779022216797, 0.051743743896484375, 0.0535551986694336, 0.0521267204284668, 0.05186867141723633, 0.052342784881591796, 0.051538944244384766, 0.051768318176269534, 0.051576831817626956, 0.05149593734741211, 
0.05156249618530274, 0.05166796875, 0.05186252975463867, 0.0516577262878418, 0.051555328369140625, 0.051768318176269534, 0.05163827133178711, 0.051689472198486325, 0.051604480743408204, 0.05141196823120117, 0.051931137084960936, 0.051593215942382815, 0.051576831817626956, 0.05175603103637695, 0.05184307098388672, 0.0524318733215332, 0.05173555374145508, 0.05165670394897461, 0.051361793518066405, 0.05204275131225586, 0.05180928039550781, 0.05173452758789063, 0.051745792388916016, 0.05156556701660156, 0.052212734222412106, 0.051942401885986325, 0.0516577262878418, 0.05164748764038086, 0.051694591522216796, 0.05187071990966797, 0.05159936141967773, 0.05163520050048828, 0.051622913360595706, 0.051571712493896485, 0.05228339385986328, 0.051999744415283204, 0.051643424987792966, 0.05262643051147461, 0.0517918701171875, 0.051535873413085936, 0.051947521209716796, 0.05182566452026367, 0.05125222396850586, 0.051932159423828124, 0.05304115295410156, 0.05215846252441406, 0.051708927154541014, 0.05184921646118164, 0.05187891387939453, 0.052241409301757816, 0.051980289459228515, 0.051784702301025394, 0.05192294311523438, 0.0517283821105957, 0.05168844985961914, 0.05159628677368164, 0.052001792907714846, 0.05147443389892578, 0.051798015594482424, 0.051465217590332034, 0.05175603103637695, 0.051552257537841796, 0.05159219360351563, 0.051681278228759765, 0.05236940765380859, 0.05169868850708008, 0.052142078399658204, 0.051692543029785154, 0.05178060913085938, 0.05164646530151367, 0.05225164794921875, 0.05163212966918945, 0.051555328369140625, 0.051659774780273435, 0.05146726226806641, 0.051692543029785154, 0.05168435287475586, 0.05184921646118164, 0.05176627349853516, 0.053321727752685545, 0.05338521575927734, 0.05170380783081055, 0.05161983871459961, 0.05209395217895508, 0.05273497772216797, 0.05253222274780273, 0.053220352172851565, 0.05189120101928711, 0.0517212142944336, 0.05170995330810547, 0.05163008117675781, 0.051846145629882816, 0.051942401885986325, 0.05215948867797852, 0.05207040023803711, 0.05323263931274414, 0.05195673751831055, 0.05174272155761719, 0.051672065734863284, 0.05164031982421875, 0.052345855712890625, 0.051888126373291016, 0.05242060852050781, 0.05168230438232422, 0.0516864013671875, 0.05167411041259766, 0.05185740661621094, 0.052160511016845705, 0.051776512145996094, 0.05171507263183594, 0.05162803268432617, 0.051697662353515625, 0.05173759841918945, 0.05215641784667969, 0.05258956909179688, 0.05202841567993164, 0.051708927154541014, 0.05162188720703125, 0.05158707046508789, 0.051961856842041014, 0.05184921646118164, 0.05172940826416016, 0.05187788772583008, 0.051827713012695314, 0.05161983871459961, 0.05166080093383789, 0.05163417434692383, 0.051697662353515625, 0.051714046478271485, 0.05159423828125, 0.05183488082885742, 0.051963905334472656, 0.05166387176513672, 0.052378623962402344, 0.05193830490112305, 0.05186867141723633, 0.05284659194946289, 0.05245132827758789, 0.051931137084960936, 0.05251891326904297, 0.05189529418945313, 0.05217792129516602, 0.05175091171264649, 0.0518656005859375, 0.05170073699951172, 0.05167103958129883, 0.05177446365356445, 0.051932159423828124, 0.05185228729248047, 0.05151027297973633, 0.051738624572753904, 0.05149184036254883, 0.05504716873168945, 0.052749313354492185, 0.05197414398193359, 0.05182156753540039, 0.051716094970703126, 0.05162803268432617, 0.05169868850708008, 0.05166387176513672, 0.0517498893737793, 0.05169561767578125, 0.05173759841918945, 0.052924415588378904, 0.05178060913085938, 0.05151641464233398, 0.05147955322265625, 
0.05161676788330078, 0.0519024658203125, 0.05196083068847656, 0.05205913543701172, 0.05171507263183594, 0.05164031982421875, 0.05177241516113281, 0.051778560638427736, 0.05207551956176758, 0.052022270202636715, 0.0516577262878418, 0.05187788772583008, 0.05170175933837891, 0.05172326278686523, 0.05271347045898438, 0.05238886260986328, 0.05232537460327148, 0.05190860748291016, 0.05166899108886719, 0.05330124664306641, 0.05287116622924805, 0.054089729309082034, 0.052496383666992184, 0.05707468795776367, 0.05245542526245117, 0.05331148910522461, 0.05255680084228516, 0.05188915252685547, 0.05165158462524414, 0.05252608108520508, 0.05172531127929687, 0.05157376098632813, 0.05202739334106445, 0.05191987228393555, 0.051661823272705076, 0.05242675018310547, 0.052201473236083984, 0.051950592041015625, 0.05403238296508789, 0.05459251022338867, 0.053106689453125, 0.05318656158447266, 0.05188198471069336, 0.05202329635620117, 0.05215436935424805, 0.05168025588989258, 0.05218201446533203, 0.05217279815673828, 0.05170380783081055, 0.05134438323974609, 0.05265510559082031, 0.05199564743041992, 0.05175603103637695, 0.052106239318847655, 0.051931137084960936, 0.05242675018310547, 0.05200896072387695, 0.05206630325317383, 0.051487743377685545, 0.05183283233642578, 0.051402751922607424, 0.05267148971557617, 0.05149593734741211, 0.05161779022216797, 0.05247795104980469, 0.05231513595581055, 0.0525926399230957, 0.05216665649414062, 0.051784702301025394, 0.05219123077392578, 0.0518021125793457, 0.05165363311767578, 0.051732479095458986, 0.053379070281982424, 0.05454131317138672, 0.051937278747558595, 0.052203521728515626, 0.051454975128173826, 0.05152665710449219, 0.05157068634033203, 0.051627006530761715, 0.05118668746948242, 0.051762176513671876, 0.0514068489074707, 0.05200281524658203, 0.05228339385986328, 0.05217279815673828, 0.05194649505615234, 0.05217279815673828, 0.051593215942382815, 0.05161676788330078, 0.05248409652709961, 0.05135974502563476, 0.05161164855957031, 0.05193318557739258, 0.051707904815673826, 0.05134950256347656, 0.052598785400390625, 0.05168537521362305, 0.051732479095458986, 0.052185089111328124, 0.05142732620239258, 0.051762176513671876, 0.051986431121826174, 0.052178943634033206, 0.052517887115478515, 0.052381694793701174, 0.05219839859008789, 0.05166080093383789, 0.05296332931518555, 0.051699710845947267, 0.0520079345703125, 0.052529151916503904, 0.05184307098388672, 0.05187071990966797, 0.05268070220947266, 0.05156556701660156, 0.052178943634033206, 0.05195161437988281, 0.05193625640869141, 0.05097983932495117, 0.05245951843261719, 0.052598785400390625, 0.05181849670410156, 0.05178777694702148, 0.051523582458496094, 0.0530145263671875, 0.05199462509155273, 0.05176627349853516, 0.052222976684570314, 0.05189120101928711, 0.0522342414855957, 0.052566017150878906, 0.05222809600830078, 0.052400127410888675, 0.052142078399658204, 0.05195468902587891, 0.05199052810668945, 0.05180723190307617, 0.051722240447998044, 0.05164543914794922, 0.051806209564208984, 0.05167103958129883, 0.05165363311767578, 0.051571712493896485, 0.05195264053344727, 0.05176729583740235, 0.052808704376220705, 0.0639887351989746, 0.05185331344604492, 0.0518205451965332, 0.05183590316772461, 0.0535551986694336, 0.052413440704345705, 0.05219123077392578, 0.05200691223144531, 0.051822593688964844, 0.05165363311767578, 0.05305036926269531, 0.053664768218994144, 0.05207859039306641, 0.052152320861816405, 0.05180928039550781, 0.05125529479980469, 0.05182156753540039, 0.05173657608032227, 0.05169868850708008, 
0.05167513656616211, 0.05266534423828125, 0.051846145629882816, 0.05167103958129883, 0.05195980834960937, 0.05186969757080078, 0.05218406295776367, 0.05230694580078125, 0.051568641662597656, 0.051576831817626956, 0.05180518341064453, 0.051454975128173826, 0.05157273483276367, 0.05191065597534179, 0.05159936141967773, 0.051533824920654295, 0.05164236831665039, 0.05154816055297851, 0.05283631896972656, 0.05171712112426758, 0.05148979187011719, 0.05159017562866211, 0.051757022857666014, 0.05164134216308594, 0.05177241516113281, 0.051969024658203126, 0.05172633743286133, 0.051740673065185545, 0.05187276840209961, 0.05188608169555664, 0.05168332672119141, 0.05181644821166992, 0.05177036666870117, 0.053564414978027344, 0.05275033569335937, 0.05217484664916992, 0.052547584533691405, 0.05241958236694336, 0.051748863220214845, 0.0516864013671875, 0.05180928039550781, 0.0519444465637207, 0.05187481689453125, 0.051937278747558595, 0.05156966400146484, 0.05183692932128906, 0.05181849670410156, 0.05176422500610352, 0.05228031921386719, 0.052364288330078126, 0.05206016159057617, 0.051806209564208984, 0.05281280136108398, 0.05220044708251953, 0.051714046478271485, 0.05178060913085938, 0.05278412628173828, 0.05204787063598633, 0.05184000015258789, 0.051950592041015625, 0.051699710845947267, 0.05189532852172852, 0.051854305267333985, 0.05185331344604492, 0.05185228729248047, 0.05188505554199219, 0.051899391174316405, 0.05166387176513672, 0.05186764907836914, 0.05243392181396484, 0.05187583923339844, 0.051762176513671876, 0.051931137084960936, 0.05184716796875, 0.05165670394897461, 0.05230694580078125, 0.05250764846801758, 0.05166694259643555, 0.05310464096069336, 0.05237964630126953, 0.05170073699951172, 0.05224345779418945, 0.05164543914794922, 0.0517130241394043, 0.051966976165771485, 0.0521451530456543, 0.05201203155517578, 0.051555328369140625, 0.05179904174804688, 0.05172531127929687, 0.051501056671142575, 0.05214617538452149, 0.052103168487548826, 0.05166387176513672, 0.05189120101928711, 0.05167103958129883, 0.051798015594482424, 0.05201408004760742, 0.05156147384643555, 0.051955711364746096, 0.05177958297729492, 0.05163520050048828, 0.05157376098632813, 0.0516577262878418, 0.0549857292175293, 0.05288345718383789, 0.05196492767333984, 0.05168844985961914, 0.051692543029785154, 0.05151027297973633, 0.0515860481262207, 0.05139254379272461, 0.051767265319824216, 0.05165875244140625, 0.051484672546386716, 0.05153484725952148, 0.051517440795898435, 0.051848190307617184, 0.05189017486572266, 0.05189427185058594, 0.05187071990966797, 0.05188198471069336, 0.05215129470825195, 0.051794944763183595, 0.052022270202636715, 0.05262233734130859, 0.05204582214355469, 0.051899391174316405, 0.05192192077636719, 0.051798015594482424, 0.05166796875, 0.05177446365356445, 0.05189529418945313, 0.05206937789916992, 0.05169868850708008, 0.05190655899047852, 0.053008384704589843, 0.051637248992919924, 0.05179084777832031, 0.05226803207397461, 0.05165260696411133, 0.052101119995117184, 0.0516864013671875, 0.0516864013671875]",tokens/s,19.20955637911196,,,2,64,1,,, 
-4bit-awq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,EleutherAI/gpt-neo-1.3B,EleutherAI/gpt-neo-1.3B,cuda,0,42,,,True,,,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,gpt_neo,MB,1559.138304,1804.075008,0.0,1174.40512,1147.036672,s,1,8.5658740234375,8.5658740234375,0.0,8.5658740234375,8.5658740234375,8.5658740234375,8.5658740234375,[8.5658740234375],,kWh,2.047215934513752e-05,1.1196648284488355e-05,3.0920580291998734e-05,6.258938792162461e-05,,MB,1735.790592,1881.669632,0.0,1231.028224,1064.7808,s,10,0.3141377944946289,0.03141377944946289,0.00022810320188474526,0.03132348823547364,0.03169502696990967,0.031816505718231206,0.03191368871688843,"[0.031221216201782226, 0.03156928062438965, 0.0313143367767334, 0.03127452850341797, 0.03122585678100586, 0.03193798446655274, 0.031668031692504886, 0.03139977645874024, 0.03133263969421387, 0.031194143295288086]",tokens/s,8149.29004043724,kWh,3.7067275145854335e-07,2.0311045398822288e-07,1.3019010763509944e-06,1.8756842817977606e-06,tokens/kWh,136483523.63151187,MB,1783.799808,1883.766784,0.0,1233.125376,1119.937024,s,10,10.70198718261719,1.0701987182617185,0.0113103699518238,1.0708211669921874,1.0837013793945312,1.0862286193847657,1.088250411376953,"[1.0831397705078125, 1.0780311279296875, 1.0579542236328126, 1.07225439453125, 1.0557728271484375, 1.088755859375, 1.05430517578125, 1.0778472900390625, 1.069387939453125, 1.06453857421875]",tokens/s,58.86757190508358,kWh,1.2498532248471746e-05,6.848677492350661e-06,2.3518424335449686e-05,4.286563407627208e-05,tokens/kWh,1469708.8088771123,,s,630,10.698004493713375,0.016980959513830763,0.0004231991779915042,0.01710591983795166,0.017390387535095215,0.01758489646911621,0.018223370342254644,"[0.01641062355041504, 0.016519168853759765, 0.016440319061279296, 0.016519168853759765, 0.016527360916137695, 0.016548864364624022, 0.016540672302246092, 0.016579584121704103, 0.016552959442138672, 0.016532480239868166, 0.01661440086364746, 0.01655193519592285, 0.016648191452026367, 0.017360895156860352, 0.017490943908691405, 0.017326080322265625, 0.017330175399780275, 0.017261568069458007, 0.017289215087890625, 0.01743769645690918, 0.017329151153564454, 0.017276927947998046, 0.017292287826538084, 0.017330175399780275, 0.01737113571166992, 0.017286144256591796, 0.017294336318969726, 0.017278976440429687, 0.017366016387939453, 0.017383424758911133, 0.017321983337402345, 0.017354751586914064, 0.01724723243713379, 0.017301504135131835, 0.01777663993835449, 0.01760358428955078, 0.017442815780639647, 0.017320959091186524, 0.017362943649291994, 0.017333248138427734, 0.017328128814697266, 0.017348608016967772, 0.017280000686645508, 0.017246208190917968, 0.017293312072753905, 0.017276927947998046, 0.01719603157043457, 0.01740390396118164, 0.017292287826538084, 0.017328128814697266, 0.017325056076049804, 0.017238016128540038, 0.017288192749023438, 0.01737215995788574, 0.01738035202026367, 0.01741926383972168, 0.017810432434082032, 0.017827840805053712, 0.017524736404418945, 0.017280000686645508, 0.017258495330810548, 
0.017118207931518553, 0.01717452812194824, 0.017080320358276366, 0.0172410888671875, 0.017299455642700197, 0.017254400253295898, 0.016940031051635742, 0.016946176528930663, 0.017542144775390626, 0.017335296630859375, 0.017091583251953125, 0.017338367462158204, 0.01718169593811035, 0.01704550361633301, 0.017076223373413087, 0.016942079544067384, 0.016837631225585938, 0.016470016479492186, 0.016291839599609375, 0.016270336151123048, 0.01643519973754883, 0.016531455993652345, 0.016446464538574217, 0.016706560134887697, 0.01719398307800293, 0.017311744689941407, 0.017250303268432618, 0.017185792922973633, 0.017059839248657227, 0.017242111206054688, 0.01698406410217285, 0.01703731155395508, 0.016920576095581053, 0.017298431396484376, 0.017136640548706054, 0.01719500732421875, 0.01721139144897461, 0.017153024673461914, 0.01723187255859375, 0.017262592315673828, 0.017257471084594727, 0.0172410888671875, 0.017095680236816405, 0.017185792922973633, 0.01717043113708496, 0.01716223907470703, 0.017177600860595704, 0.01719705581665039, 0.017153024673461914, 0.017491968154907226, 0.017262592315673828, 0.01718988800048828, 0.017129472732543945, 0.016986112594604492, 0.017757183074951173, 0.017525760650634766, 0.017192960739135742, 0.017129472732543945, 0.017273855209350587, 0.01719910430908203, 0.01721446418762207, 0.017144832611083984, 0.01716531181335449, 0.01716223907470703, 0.017217536926269532, 0.016780288696289062, 0.016703487396240235, 0.016522239685058594, 0.016496639251708984, 0.016493568420410155, 0.016250879287719726, 0.016264192581176756, 0.016471040725708007, 0.016281600952148437, 0.016397312164306642, 0.016474111557006836, 0.016533504486083983, 0.016490495681762696, 0.016563199996948243, 0.016472063064575194, 0.01660211181640625, 0.016470016479492186, 0.016514047622680664, 0.016563199996948243, 0.016499711990356446, 0.01636761665344238, 0.016488447189331054, 0.016457727432250976, 0.016536575317382812, 0.016542720794677734, 0.016500736236572267, 0.016538623809814454, 0.016525312423706053, 0.016465919494628906, 0.016473087310791015, 0.016529407501220703, 0.016519168853759765, 0.016495616912841796, 0.017366016387939453, 0.01765478324890137, 0.01844121551513672, 0.01784832000732422, 0.017274879455566407, 0.017377279281616212, 0.017374208450317383, 0.01725132751464844, 0.017260543823242186, 0.017309696197509765, 0.017154048919677735, 0.017373184204101562, 0.017177600860595704, 0.017192960739135742, 0.017184768676757813, 0.01701273536682129, 0.016977920532226562, 0.016442367553710938, 0.01656934356689453, 0.016540672302246092, 0.01764352035522461, 0.01661952018737793, 0.016644096374511717, 0.016549888610839843, 0.016532480239868166, 0.016550912857055664, 0.016496639251708984, 0.016948223114013672, 0.01722368049621582, 0.017301504135131835, 0.016270336151123048, 0.016668672561645507, 0.016470016479492186, 0.016528383255004882, 0.016521215438842773, 0.016442367553710938, 0.01657344055175781, 0.016532480239868166, 0.01655705642700195, 0.016660480499267577, 0.01680793571472168, 0.017157119750976564, 0.017302528381347656, 0.01721548843383789, 0.01716223907470703, 0.017120256423950195, 0.0172042236328125, 0.01719603157043457, 0.017236991882324217, 0.0172359676361084, 0.01717350387573242, 0.01721036720275879, 0.01723391914367676, 0.017325056076049804, 0.017282047271728516, 0.017266687393188478, 0.017253376007080077, 0.01719398307800293, 0.017169408798217774, 0.01740185546875, 0.017144832611083984, 0.017236991882324217, 0.01721036720275879, 0.017246208190917968, 0.0176680965423584, 0.01747148895263672, 
0.017349632263183593, 0.017261568069458007, 0.017333248138427734, 0.017308671951293944, 0.017252351760864256, 0.016780288696289062, 0.01655705642700195, 0.016582656860351562, 0.016833536148071288, 0.017543167114257813, 0.016720895767211915, 0.016627712249755858, 0.016520191192626953, 0.016582656860351562, 0.01658060836791992, 0.01660006332397461, 0.016532480239868166, 0.016534528732299804, 0.017352703094482422, 0.01723494338989258, 0.017253376007080077, 0.017171455383300782, 0.017169408798217774, 0.017133567810058595, 0.0172410888671875, 0.017236991882324217, 0.01720012855529785, 0.01642086410522461, 0.01658367919921875, 0.01703628730773926, 0.01757695960998535, 0.01738956832885742, 0.016719871520996094, 0.0165928955078125, 0.01660416030883789, 0.016566272735595702, 0.016461824417114256, 0.016511999130249023, 0.016507904052734376, 0.016506879806518555, 0.016502784729003905, 0.016456703186035156, 0.01661747169494629, 0.016464895248413085, 0.016483327865600587, 0.01646899223327637, 0.016487424850463867, 0.016462848663330077, 0.016549888610839843, 0.016441343307495117, 0.016533504486083983, 0.016466943740844727, 0.016395263671875, 0.016454656600952147, 0.016564224243164064, 0.016466943740844727, 0.016472063064575194, 0.016481279373168945, 0.01662566375732422, 0.016475135803222657, 0.01663488006591797, 0.016545791625976563, 0.016657407760620118, 0.01658982467651367, 0.016518144607543944, 0.016490495681762696, 0.01653555107116699, 0.01640652847290039, 0.016701440811157226, 0.01722368049621582, 0.01678335952758789, 0.016627712249755858, 0.01659596824645996, 0.016929792404174804, 0.0166430721282959, 0.016656383514404297, 0.017063936233520507, 0.01718681526184082, 0.017295360565185547, 0.017164287567138673, 0.017136640548706054, 0.017310720443725586, 0.017089536666870117, 0.017140735626220704, 0.017187839508056642, 0.01724313545227051, 0.017236991882324217, 0.017258495330810548, 0.017160192489624023, 0.017864704132080078, 0.01744179153442383, 0.01739776039123535, 0.017280000686645508, 0.01725542449951172, 0.017155071258544922, 0.017124351501464845, 0.01723187255859375, 0.01722777557373047, 0.017286144256591796, 0.016889856338500975, 0.016942079544067384, 0.017134592056274413, 0.017154048919677735, 0.01720729637145996, 0.017188863754272463, 0.01662054443359375, 0.01655193519592285, 0.016582656860351562, 0.016504831314086914, 0.01640140724182129, 0.017069055557250978, 0.01721651268005371, 0.017901567459106444, 0.01742438316345215, 0.01717043113708496, 0.017185792922973633, 0.017304576873779298, 0.01738751983642578, 0.01723084831237793, 0.017284095764160155, 0.017156095504760743, 0.0172728328704834, 0.017292287826538084, 0.018562047958374024, 0.018757631301879883, 0.017571840286254883, 0.017574911117553712, 0.017922048568725587, 0.017582080841064454, 0.01737932777404785, 0.017507328033447265, 0.017378303527832033, 0.0172728328704834, 0.017266687393188478, 0.017187839508056642, 0.0172728328704834, 0.01720627212524414, 0.017299455642700197, 0.017156095504760743, 0.017185792922973633, 0.017338367462158204, 0.01721139144897461, 0.017604608535766602, 0.01724825668334961, 0.01721855926513672, 0.01744076728820801, 0.017246208190917968, 0.017285120010375975, 0.017229824066162108, 0.017315839767456053, 0.01723391914367676, 0.01722060775756836, 0.01720319938659668, 0.017120256423950195, 0.01698918342590332, 0.016526336669921874, 0.01658982467651367, 0.016590848922729492, 0.016649215698242188, 0.016546815872192384, 0.016541696548461913, 0.016508928298950197, 0.016508928298950197, 0.016509952545166014, 
0.016485376358032225, 0.016517120361328123, 0.016486400604248046, 0.016497663497924805, 0.016564224243164064, 0.016483327865600587, 0.016503807067871093, 0.016457727432250976, 0.016501760482788085, 0.016515071868896485, 0.01656524848937988, 0.01666662406921387, 0.01676288032531738, 0.016522239685058594, 0.0170383358001709, 0.01657344055175781, 0.016653312683105468, 0.01639423942565918, 0.01662259292602539, 0.01656934356689453, 0.016720895767211915, 0.017452032089233398, 0.017896448135375977, 0.01765990447998047, 0.017268735885620116, 0.017081344604492187, 0.016898048400878905, 0.016519168853759765, 0.01679052734375, 0.016513023376464844, 0.01685606384277344, 0.017161216735839844, 0.01699635124206543, 0.01705062484741211, 0.016677888870239257, 0.016546815872192384, 0.016671743392944336, 0.016571392059326173, 0.01661030387878418, 0.017110015869140623, 0.016874496459960937, 0.016492544174194337, 0.016507904052734376, 0.016525312423706053, 0.01660620880126953, 0.016505855560302735, 0.01661747169494629, 0.0164136962890625, 0.016517120361328123, 0.017098751068115235, 0.017156095504760743, 0.017104896545410156, 0.01637171173095703, 0.016562175750732423, 0.01661440086364746, 0.01662873649597168, 0.01682841682434082, 0.016906240463256835, 0.01883750343322754, 0.018347007751464844, 0.017500160217285156, 0.016941055297851563, 0.017476608276367187, 0.0172042236328125, 0.017172479629516603, 0.017129472732543945, 0.017236991882324217, 0.01705779266357422, 0.016686080932617187, 0.016552959442138672, 0.016499711990356446, 0.01721855926513672, 0.01825484848022461, 0.017351680755615235, 0.017300479888916014, 0.017252351760864256, 0.017150976181030272, 0.017143808364868163, 0.017302528381347656, 0.01724723243713379, 0.017168384552001953, 0.01716633605957031, 0.0175994873046875, 0.016748544692993163, 0.01740287971496582, 0.01719705581665039, 0.017155071258544922, 0.017113088607788086, 0.01662873649597168, 0.01642393684387207, 0.01660211181640625, 0.016639999389648438, 0.016524288177490236, 0.017043455123901367, 0.01700761604309082, 0.017084415435791016, 0.01737932777404785, 0.017258495330810548, 0.017148927688598634, 0.017228799819946287, 0.017125375747680666, 0.01661030387878418, 0.017064960479736328, 0.01707827186584473, 0.017035263061523438, 0.01718988800048828, 0.01721139144897461, 0.01720319938659668, 0.01720217514038086, 0.017169408798217774, 0.017167360305786132, 0.017127424240112304, 0.017147903442382813, 0.017106943130493164, 0.016752639770507814, 0.016471040725708007, 0.01658982467651367, 0.016920576095581053, 0.01740083122253418, 0.017377279281616212, 0.017303552627563477, 0.017253376007080077, 0.01722675132751465, 0.01717350387573242, 0.01665433692932129, 0.016517120361328123, 0.016561151504516602, 0.01656729507446289, 0.017519615173339845, 0.017315839767456053, 0.017846271514892577, 0.016973823547363282, 0.01640447998046875, 0.01663283157348633, 0.016524288177490236, 0.01658982467651367, 0.016732160568237304, 0.017339391708374022, 0.017171455383300782, 0.016955392837524414, 0.017110015869140623, 0.017812480926513673, 0.017520639419555666, 0.017764352798461915, 0.017253376007080077, 0.017133567810058595, 0.017175552368164062, 0.01658572769165039, 0.01659903907775879, 0.016885759353637696, 0.017108991622924806, 0.017031167984008787, 0.01678643226623535, 0.017073152542114257, 0.016717824935913086, 0.016871423721313478, 0.017091583251953125, 0.017093631744384767, 0.017093631744384767, 0.017160192489624023, 0.01720627212524414, 0.016726015090942382, 0.01664614486694336, 0.016544767379760742, 
0.016747520446777343, 0.017547264099121093, 0.01927884864807129, 0.017120256423950195, 0.01663795280456543, 0.016504831314086914, 0.016574464797973632, 0.016495616912841796, 0.016553983688354493, 0.016555007934570314, 0.016508928298950197, 0.01656524848937988, 0.016352256774902343, 0.016527360916137695, 0.01637171173095703, 0.016882688522338866, 0.017275903701782228, 0.016676864624023437, 0.01657241630554199, 0.01677824020385742, 0.017167360305786132, 0.01758720016479492, 0.017159168243408202, 0.01659903907775879, 0.016490495681762696, 0.01639628791809082, 0.016479232788085937, 0.016314367294311523, 0.016493568420410155, 0.01660518455505371, 0.01646080017089844, 0.016552959442138672, 0.016525312423706053, 0.016874496459960937, 0.016493568420410155, 0.016846847534179688, 0.017242111206054688, 0.017138687133789063, 0.01721958351135254, 0.01721139144897461, 0.017589248657226563, 0.017179647445678712, 0.017153024673461914, 0.01721139144897461, 0.01723289680480957, 0.017715200424194336, 0.017720319747924804, 0.01814630317687988, 0.0174202880859375, 0.017295360565185547, 0.017287168502807617, 0.017276927947998046, 0.0178606071472168, 0.01740595245361328, 0.01717350387573242, 0.017092607498168946, 0.017143808364868163, 0.017136640548706054, 0.017079296112060546, 0.0166748161315918, 0.016550912857055664, 0.016672767639160157, 0.01657753562927246, 0.016501760482788085, 0.016359424591064452, 0.016526336669921874, 0.016533504486083983, 0.016486400604248046, 0.01640755271911621, 0.016366592407226564, 0.016480255126953124, 0.01664204788208008, 0.016528383255004882, 0.016526336669921874, 0.016627712249755858, 0.016546815872192384, 0.01661235237121582]",tokens/s,58.88948732169778,,,2,64,1,,, -4bit-awq-exllama-v2-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,tiiuae/falcon-7b,tiiuae/falcon-7b,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 613, in resolve_trust_remote_code - answer = input( -EOFError: EOF when reading a line - -During handling of the above exception, another exception occurred: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - 
self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 231, in load_model_with_no_weights - self.create_no_weights_model() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 213, in create_no_weights_model - meta_model = self.automodel_class.from_config(self.pretrained_config) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 419, in from_config - trust_remote_code = resolve_trust_remote_code( - File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 626, in resolve_trust_remote_code - raise ValueError( -ValueError: The repository for tiiuae/falcon-7b contains custom code which must be executed to correctly load the model. You can inspect the repository content at https://hf.co/tiiuae/falcon-7b. -Please pass the argument `trust_remote_code=True` to allow custom code to be run. - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,2,64,1,,, -4bit-awq-exllama-v2-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,huggyllama/llama-65b,huggyllama/llama-65b,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - self.load_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3904, in from_pretrained - dispatch_model(model, **device_map_kwargs) - File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 489, in dispatch_model - model.to(device) - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 2796, in to - return super().to(*args, **kwargs) - File 
""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1173, in to - return self._apply(convert) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 779, in _apply - module._apply(fn) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 779, in _apply - module._apply(fn) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 779, in _apply - module._apply(fn) - [Previous line repeated 2 more times] - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 853, in _apply - self._buffers[key] = fn(buf) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1159, in convert - return t.to( -torch.cuda.OutOfMemoryError: CUDA out of memory. Tried to allocate 86.00 MiB. GPU - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,2,64,1,,, -4bit-awq-exllama-v2-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,1,1,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 304, in hf_raise_for_status - response.raise_for_status() - File ""/usr/local/lib/python3.10/dist-packages/requests/models.py"", line 1024, in raise_for_status - raise HTTPError(http_error_msg, response=self) -requests.exceptions.HTTPError: 404 Client Error: Not Found for url: https://huggingface.co/1/resolve/main/config.json - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1221, in hf_hub_download - return _hf_hub_download_to_cache_dir( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1325, in _hf_hub_download_to_cache_dir - _raise_on_head_call_error(head_call_error, force_download, local_files_only) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1823, in _raise_on_head_call_error - raise head_call_error - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1722, in 
_get_metadata_or_catch_error - metadata = get_hf_file_metadata(url=url, proxies=proxies, timeout=etag_timeout, headers=headers) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1645, in get_hf_file_metadata - r = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 372, in _request_wrapper - response = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 396, in _request_wrapper - hf_raise_for_status(response) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status - raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-669495ac-5e42e51857eede2b456eb4b4;ad0329de-34e3-47a5-bb01-63405db8328a) - -Repository Not Found for url: https://huggingface.co/1/resolve/main/config.json. -Please make sure you specified the correct `repo_id` and `repo_type`. -If you are trying to access a private or gated repo, make sure you are authenticated. - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 425, in cached_file - raise EnvironmentError( -OSError: 1 is not a local folder and is not a valid model identifier listed on 'https://huggingface.co/models' -If this is a private repository, make sure to pass a token having permission to this repo either by logging in with `huggingface-cli login` or by passing `token=` - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,2,64,1,,, 
-4bit-awq-exllama-v2-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,internlm/internlm-20b,internlm/internlm-20b,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 613, in resolve_trust_remote_code - answer = input( -EOFError: EOF when reading a line - -During handling of the above exception, another exception occurred: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 231, in load_model_with_no_weights - self.create_no_weights_model() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 213, in create_no_weights_model - meta_model = self.automodel_class.from_config(self.pretrained_config) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 419, in from_config - trust_remote_code = resolve_trust_remote_code( - File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 626, in resolve_trust_remote_code - raise ValueError( -ValueError: The repository for internlm/internlm-20b contains custom code which must be executed to correctly load the model. You can inspect the repository content at https://hf.co/internlm/internlm-20b. -Please pass the argument `trust_remote_code=True` to allow custom code to be run. 
- -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,2,64,1,,, -4bit-awq-exllama-v2-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,internlm/internlm2-20b,internlm/internlm2-20b,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 613, in resolve_trust_remote_code - answer = input( -EOFError: EOF when reading a line - -During handling of the above exception, another exception occurred: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 231, in load_model_with_no_weights - self.create_no_weights_model() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 213, in create_no_weights_model - meta_model = self.automodel_class.from_config(self.pretrained_config) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 419, in from_config - trust_remote_code = resolve_trust_remote_code( - File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 626, in resolve_trust_remote_code - raise ValueError( -ValueError: The repository for internlm/internlm2-20b contains custom code which must be executed to correctly load the model. You can inspect the repository content at https://hf.co/internlm/internlm2-20b. -Please pass the argument `trust_remote_code=True` to allow custom code to be run. 
- -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,2,64,1,,, -4bit-awq-exllama-v2-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,facebook/opt-30b,facebook/opt-30b,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - self.load_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3907, in from_pretrained - hf_quantizer.postprocess_model(model) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/base.py"", line 195, in postprocess_model - return self._process_model_after_weight_loading(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/quantizer_awq.py"", line 107, in _process_model_after_weight_loading - model = post_init_awq_exllama_modules(model, self.quantization_config.exllama_config) - File ""/usr/local/lib/python3.10/dist-packages/transformers/integrations/awq.py"", line 466, in post_init_awq_exllama_modules - model = exllamav2_post_init( - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllamav2.py"", line 198, in exllamav2_post_init - submodule.post_init(scratch_space=model.scratch_spaces[device]) - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllamav2.py"", line 81, in post_init - self.q_handle = exlv2_ext.make_q_matrix( -NameError: name 'exlv2_ext' is not defined - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,2,64,1,,, 
-4bit-awq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,Qwen/Qwen1.5-MoE-A2.7B,Qwen/Qwen1.5-MoE-A2.7B,cuda,0,42,,,True,,,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run - report = scenario.run(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run - self.run_model_loading_tracking(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking - backend.load() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load - self.load_transformers_model() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model - self.load_transformers_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights - self.load_transformers_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained - self.pretrained_model = self.automodel_loader.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4037, in from_pretrained - hf_quantizer.postprocess_model(model) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/base.py"", line 195, in postprocess_model - return self._process_model_after_weight_loading(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/quantizer_awq.py"", line 107, in _process_model_after_weight_loading - model = post_init_awq_exllama_modules(model, self.quantization_config.exllama_config) - File ""/usr/local/lib/python3.10/dist-packages/transformers/integrations/awq.py"", line 466, in post_init_awq_exllama_modules - model = exllamav2_post_init( - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllamav2.py"", line 200, in exllamav2_post_init - submodule.post_init(scratch_space=model.scratch_spaces[device]) - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllamav2.py"", line 83, in post_init - self.q_handle = exlv2_ext.make_q_matrix( -RuntimeError: 
q_weight and gptq_scales have incompatible shapes - -",qwen2_moe,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,2,64,1,,, -4bit-awq-exllama-v2-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,mistralai/Mixtral-8x7B-v0.1,mistralai/Mixtral-8x7B-v0.1,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - self.load_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3904, in from_pretrained - dispatch_model(model, **device_map_kwargs) - File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 489, in dispatch_model - model.to(device) - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 2796, in to - return super().to(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1173, in to - return self._apply(convert) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 779, in _apply - module._apply(fn) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 779, in _apply - module._apply(fn) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 779, in _apply - module._apply(fn) - [Previous line repeated 2 more times] - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 853, in _apply - self._buffers[key] = fn(buf) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1159, in convert - return t.to( -torch.cuda.OutOfMemoryError: CUDA out of memory. Tried to allocate 20.00 MiB. 
GPU - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,2,64,1,,, -4bit-awq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,EleutherAI/pythia-410m,EleutherAI/pythia-410m,cuda,0,42,,,True,,,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,gpt_neox,MB,1072.943104,1200.095232,0.0,570.425344,536.326656,s,1,7.712841796875,7.712841796875,0.0,7.712841796875,7.712841796875,7.712841796875,7.712841796875,[7.712841796875],,kWh,1.093415106804514e-05,5.976639636586949e-06,1.705445808797279e-05,3.3965248792604876e-05,,MB,1620.799488,1275.592704,0.0,624.951296,594.377728,s,10,0.23528006362915038,0.02352800636291504,0.0005114677006899605,0.023287487983703614,0.024523212051391602,0.024552358436584473,0.02457567554473877,"[0.023266176223754882, 0.024581504821777345, 0.023240991592407226, 0.023325088500976563, 0.023235231399536132, 0.024516735076904297, 0.02329689598083496, 0.02328121566772461, 0.02324246406555176, 0.023293760299682616]",tokens/s,10880.649896606135,kWh,2.7841422077775256e-07,1.5255407976370005e-07,6.729188389881562e-07,1.1038871395296088e-06,tokens/kWh,231907765.5973847,MB,1668.599808,1290.272768,0.0,639.63136,607.71072,s,10,12.956216918945314,1.2956216918945314,0.0038208044220831077,1.2949007568359376,1.2989009887695313,1.3020254455566407,1.3045250109863282,"[1.2944727783203125, 1.2982066650390625, 1.295466064453125, 1.2971517333984375, 1.2949197998046875, 1.30514990234375, 1.2910177001953125, 1.2948817138671875, 1.291293212890625, 1.2936573486328125]",tokens/s,48.62530505172219,kWh,1.5318114178460165e-05,8.394066153681069e-06,2.4539767997812623e-05,4.8251948329953854e-05,tokens/kWh,1305646.7599856656,,s,630,12.951265218734752,0.020557563839261495,0.00028806448157604167,0.020468736648559572,0.02087290897369385,0.021083049583435057,0.021955028533935557,"[0.02032339286804199, 0.02042367935180664, 0.020484031677246092, 0.020611072540283205, 0.02072985649108887, 0.020397056579589845, 0.020510719299316405, 0.020462591171264647, 0.020501504898071288, 0.020463615417480468, 0.02045747184753418, 0.02041753578186035, 0.020406272888183592, 0.02060492706298828, 0.02046976089477539, 0.02058137512207031, 0.020468736648559572, 0.020526079177856444, 0.020396032333374024, 0.02041753578186035, 0.02048102378845215, 0.02072985649108887, 0.02083020782470703, 0.020296703338623046, 0.02048102378845215, 0.02042470359802246, 0.020412416458129884, 0.020427776336669923, 0.020242431640625, 0.0204083194732666, 0.020452352523803712, 0.020393983840942383, 0.020371456146240235, 0.020436992645263673, 0.020620288848876952, 0.020370431900024414, 0.020408351898193358, 0.020605920791625976, 0.02060697555541992, 0.02044927978515625, 0.02042265510559082, 0.02045337677001953, 0.021019647598266602, 0.02084147262573242, 0.020448287963867186, 0.020465631484985352, 0.02027724838256836, 0.020063232421875, 0.020411392211914063, 0.02042367935180664, 0.02043801689147949, 0.020419584274291993, 0.020838464736938477, 0.021087167739868164, 0.02106675148010254, 0.020976640701293944, 0.02047283172607422, 
0.020455423355102538, 0.020496383666992187, 0.020382720947265624, 0.02043391990661621, 0.020447231292724608, 0.02224742317199707, 0.020915136337280274, 0.020508672714233397, 0.02046463966369629, 0.02049945640563965, 0.020534271240234374, 0.020418560028076172, 0.020488191604614257, 0.02043391990661621, 0.02060697555541992, 0.020562944412231447, 0.02044108772277832, 0.020477951049804686, 0.020497407913208008, 0.0204769287109375, 0.020513792037963868, 0.020494335174560546, 0.020468736648559572, 0.02045747184753418, 0.020540416717529295, 0.020497407913208008, 0.020354047775268554, 0.020445184707641603, 0.020494335174560546, 0.020569087982177735, 0.020625408172607423, 0.02045132827758789, 0.020447231292724608, 0.020521984100341797, 0.02104319953918457, 0.020855808258056642, 0.020497407913208008, 0.020447231292724608, 0.020444160461425782, 0.02041548728942871, 0.020373504638671876, 0.02042572784423828, 0.021407743453979493, 0.022116352081298828, 0.021416959762573243, 0.02105036735534668, 0.021113855361938477, 0.020567039489746093, 0.020554752349853517, 0.020520959854125977, 0.020289535522460937, 0.020410367965698242, 0.020447231292724608, 0.02044108772277832, 0.021247039794921874, 0.02096735954284668, 0.020540416717529295, 0.02046668815612793, 0.02044211196899414, 0.020489215850830078, 0.02050764846801758, 0.020387840270996094, 0.02031001663208008, 0.02042982482910156, 0.020732959747314452, 0.020833248138427733, 0.020434944152832032, 0.02045132827758789, 0.020468736648559572, 0.02027519989013672, 0.020402175903320312, 0.020447231292724608, 0.02068889617919922, 0.02039193534851074, 0.02041651153564453, 0.020517887115478514, 0.020487167358398437, 0.020559871673583984, 0.020529151916503906, 0.02040934371948242, 0.02046976089477539, 0.020452352523803712, 0.020503551483154296, 0.020987903594970703, 0.020853759765625, 0.020414464950561522, 0.020398080825805662, 0.02045747184753418, 0.02040729522705078, 0.020386816024780274, 0.021176319122314453, 0.021146623611450196, 0.02046566390991211, 0.02042367935180664, 0.02042982482910156, 0.020470783233642577, 0.020531200408935548, 0.020666368484497072, 0.02306662368774414, 0.021373952865600586, 0.02103603172302246, 0.02087116813659668, 0.020436992645263673, 0.020380672454833985, 0.02046463966369629, 0.020462591171264647, 0.02043801689147949, 0.02044211196899414, 0.020320287704467775, 0.02033660888671875, 0.02021171188354492, 0.020395008087158203, 0.020385791778564453, 0.020744192123413087, 0.02044825553894043, 0.020585472106933594, 0.020382720947265624, 0.020460544586181642, 0.020412416458129884, 0.020410367965698242, 0.020570112228393556, 0.020447231292724608, 0.020316160202026368, 0.02040425682067871, 0.02050864028930664, 0.02042163276672363, 0.020529151916503906, 0.020414464950561522, 0.020338687896728515, 0.020371456146240235, 0.02044927978515625, 0.02041548728942871, 0.02088960075378418, 0.02080463981628418, 0.020568031311035156, 0.02046463966369629, 0.020471807479858398, 0.02043801689147949, 0.020463615417480468, 0.020550655364990233, 0.02047590446472168, 0.02041651153564453, 0.020355072021484375, 0.020427776336669923, 0.02042982482910156, 0.020298751831054687, 0.02042572784423828, 0.02045132827758789, 0.020418560028076172, 0.020923391342163086, 0.020596736907958983, 0.020463615417480468, 0.02102272033691406, 0.0210513916015625, 0.022033407211303712, 0.02124083137512207, 0.02055379295349121, 0.020402143478393555, 0.02052092742919922, 0.02106777572631836, 0.02066329574584961, 0.02053222465515137, 0.020477951049804686, 0.02046771240234375, 
0.020446271896362306, 0.02052396774291992, 0.020452352523803712, 0.02044825553894043, 0.020478975296020507, 0.020420608520507814, 0.02039193534851074, 0.02049126434326172, 0.02045849609375, 0.02047488021850586, 0.02048307228088379, 0.020404224395751954, 0.020341760635375978, 0.020538368225097657, 0.020580352783203124, 0.020358175277709962, 0.020369375228881836, 0.020497407913208008, 0.020677631378173827, 0.021191680908203125, 0.020600831985473633, 0.020403200149536133, 0.021395456314086913, 0.021045248031616212, 0.02046156883239746, 0.020371456146240235, 0.020413440704345705, 0.020389888763427736, 0.020535295486450195, 0.020381696701049806, 0.020287488937377928, 0.02030284881591797, 0.020706304550170897, 0.020952064514160155, 0.02045132827758789, 0.020444160461425782, 0.02044313621520996, 0.020649984359741212, 0.020976640701293944, 0.0204083194732666, 0.020444160461425782, 0.020368383407592772, 0.020538368225097657, 0.020503551483154296, 0.020410367965698242, 0.020428800582885744, 0.020503551483154296, 0.02045644760131836, 0.02048307228088379, 0.020410367965698242, 0.020382720947265624, 0.02066739273071289, 0.020963327407836914, 0.02045030403137207, 0.020511743545532226, 0.020599807739257812, 0.02049843215942383, 0.020616191864013672, 0.020517887115478514, 0.020564992904663085, 0.020428800582885744, 0.02044313621520996, 0.020462591171264647, 0.020493312835693358, 0.020485183715820313, 0.020462591171264647, 0.020490175247192384, 0.02051481628417969, 0.02145382308959961, 0.020771839141845702, 0.02051584053039551, 0.020435968399047853, 0.020616191864013672, 0.02045849609375, 0.02066534423828125, 0.020555776596069338, 0.020531200408935548, 0.020477951049804686, 0.020574207305908202, 0.020538368225097657, 0.020508672714233397, 0.020553728103637696, 0.020537343978881836, 0.020593664169311524, 0.02053222465515137, 0.020497407913208008, 0.02091007995605469, 0.020542463302612304, 0.020470783233642577, 0.020447231292724608, 0.02037555122375488, 0.020452352523803712, 0.020503551483154296, 0.02049126434326172, 0.021389312744140625, 0.021078016281127928, 0.020503551483154296, 0.020545536041259766, 0.02084351921081543, 0.02054560089111328, 0.020518848419189453, 0.020362239837646484, 0.020616191864013672, 0.020536319732666015, 0.02064076805114746, 0.02063871955871582, 0.02049843215942383, 0.020719615936279297, 0.020666368484497072, 0.020568063735961914, 0.020462591171264647, 0.020521984100341797, 0.02047283172607422, 0.020402175903320312, 0.020446207046508787, 0.020535295486450195, 0.020750335693359375, 0.022054912567138672, 0.022054912567138672, 0.020898815155029296, 0.020576255798339844, 0.020512767791748047, 0.020547584533691408, 0.02109644889831543, 0.02129305648803711, 0.020561920166015626, 0.02046976089477539, 0.020434944152832032, 0.02048307228088379, 0.020531200408935548, 0.020462591171264647, 0.020505599975585938, 0.02063871955871582, 0.021612543106079102, 0.02084864044189453, 0.0204902400970459, 0.020548608779907225, 0.020502527236938475, 0.02042163276672363, 0.02048409652709961, 0.020455423355102538, 0.02070425605773926, 0.020743167877197266, 0.02058137512207031, 0.02230067253112793, 0.02062950325012207, 0.020470783233642577, 0.020904960632324218, 0.02104934310913086, 0.020320255279541014, 0.020553728103637696, 0.020574207305908202, 0.020700159072875975, 0.020744192123413087, 0.020468736648559572, 0.020535295486450195, 0.02063155174255371, 0.02031001663208008, 0.02050764846801758, 0.020462591171264647, 0.020332544326782227, 0.02025574493408203, 0.020463615417480468, 0.02043801689147949, 
0.020702207565307617, 0.020998144149780275, 0.021019647598266602, 0.020516864776611327, 0.020521984100341797, 0.020377599716186523, 0.02081996726989746, 0.020602880477905275, 0.02045337677001953, 0.020307968139648438, 0.02041753578186035, 0.02037555122375488, 0.020347904205322266, 0.020373504638671876, 0.020380672454833985, 0.020355072021484375, 0.020489215850830078, 0.020987903594970703, 0.020489215850830078, 0.02042572784423828, 0.02043084716796875, 0.02031820869445801, 0.02042268753051758, 0.020398048400878905, 0.02043084716796875, 0.020519935607910156, 0.02044825553894043, 0.020393983840942383, 0.020495359420776366, 0.020427776336669923, 0.020488191604614257, 0.020572160720825194, 0.020380672454833985, 0.020242431640625, 0.020479999542236327, 0.020471807479858398, 0.02051584053039551, 0.02049228858947754, 0.0205230712890625, 0.020467647552490233, 0.02042367935180664, 0.02051481628417969, 0.020445184707641603, 0.02042470359802246, 0.020499488830566407, 0.020402143478393555, 0.020374528884887694, 0.020406272888183592, 0.020401216506958007, 0.020407232284545898, 0.02048409652709961, 0.02046976089477539, 0.021113855361938477, 0.020747264862060546, 0.020373504638671876, 0.02042163276672363, 0.02023423957824707, 0.020362239837646484, 0.020419584274291993, 0.02044313621520996, 0.020404224395751954, 0.020380672454833985, 0.020468736648559572, 0.020361215591430663, 0.02065715217590332, 0.021359615325927735, 0.02176313591003418, 0.020678592681884767, 0.02062233543395996, 0.020414464950561522, 0.02046156883239746, 0.02042265510559082, 0.02086297607421875, 0.020547584533691408, 0.020560895919799805, 0.020521984100341797, 0.02039910316467285, 0.020341760635375978, 0.02036227226257324, 0.020397024154663088, 0.020404224395751954, 0.020686847686767578, 0.021150720596313476, 0.020583423614501953, 0.020551679611206054, 0.020420608520507814, 0.020364320755004883, 0.020392927169799804, 0.02043289566040039, 0.020350976943969725, 0.020594688415527345, 0.020436992645263673, 0.02040012741088867, 0.020271104812622072, 0.02043903923034668, 0.020380672454833985, 0.020888576507568358, 0.020487167358398437, 0.02044313621520996, 0.02045644760131836, 0.020531200408935548, 0.021104639053344726, 0.02086911964416504, 0.020569087982177735, 0.02045952033996582, 0.020404224395751954, 0.020489215850830078, 0.020520959854125977, 0.020539392471313478, 0.020528127670288086, 0.020501504898071288, 0.02045952033996582, 0.020436992645263673, 0.020695039749145508, 0.020602912902832032, 0.020796384811401367, 0.020495359420776366, 0.020447231292724608, 0.020384767532348632, 0.02031718444824219, 0.020402175903320312, 0.02040729522705078, 0.020403200149536133, 0.020396032333374024, 0.02039091110229492, 0.020435968399047853, 0.02045952033996582, 0.02042367935180664, 0.020405248641967775, 0.021255168914794922, 0.020867071151733398, 0.02039910316467285, 0.020400192260742186, 0.020328384399414062, 0.02041548728942871, 0.020384767532348632, 0.02043084716796875, 0.02043187141418457, 0.020434944152832032, 0.02032537651062012, 0.020393983840942383, 0.020222976684570314, 0.020428800582885744, 0.020428800582885744, 0.020577280044555665, 0.020582399368286132, 0.020559871673583984, 0.020517887115478514, 0.02067251205444336, 0.020470783233642577, 0.02103193664550781, 0.020864000320434572, 0.020396032333374024, 0.020406272888183592, 0.020626432418823244, 0.020398080825805662, 0.020420671463012696, 0.02039084815979004, 0.020374528884887694, 0.020356096267700196, 0.02045132827758789, 0.02105548858642578, 0.020789247512817383, 0.020486143112182616, 
0.02051584053039551, 0.020413440704345705, 0.02032537651062012, 0.020462623596191408, 0.020499423980712892, 0.020412416458129884, 0.020393983840942383, 0.02040934371948242, 0.020370431900024414, 0.02039091110229492, 0.02040115165710449, 0.020351999282836913, 0.020504575729370117, 0.02043903923034668, 0.020970495223999023, 0.02041651153564453, 0.020529151916503906, 0.02041753578186035, 0.020402175903320312, 0.020345855712890625, 0.020369407653808593, 0.02043187141418457, 0.020412416458129884, 0.020384767532348632, 0.020420608520507814, 0.020307968139648438, 0.02042982482910156, 0.020436992645263673, 0.020468736648559572, 0.020398080825805662, 0.020477951049804686, 0.02044108772277832, 0.020447231292724608, 0.020445184707641603, 0.02047590446472168, 0.020703231811523438, 0.021133407592773438, 0.020606880187988282, 0.020462591171264647, 0.020428800582885744, 0.02049126434326172, 0.02043084716796875, 0.020463615417480468, 0.02041651153564453, 0.02041651153564453, 0.021167104721069335, 0.02060492706298828, 0.02041753578186035, 0.02045747184753418, 0.020405248641967775, 0.020546560287475587, 0.020396032333374024, 0.020455423355102538, 0.020599807739257812, 0.02041548728942871, 0.020501504898071288, 0.020545536041259766, 0.020535327911376952, 0.020548608779907225, 0.02055881690979004, 0.02043187141418457, 0.02168217658996582, 0.020544511795043945, 0.02041548728942871, 0.020601856231689454, 0.020454399108886717, 0.02044313621520996, 0.021061632156372072, 0.020641792297363282, 0.020541439056396483, 0.02047590446472168, 0.020478975296020507, 0.020512767791748047, 0.020460544586181642, 0.020387840270996094, 0.02047488021850586, 0.02060495948791504, 0.020585439682006837, 0.020471807479858398, 0.020602880477905275, 0.020504575729370117]",tokens/s,48.64389612596839,,,2,64,1,,, -4bit-awq-exllama-v2-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,databricks/dbrx-base,databricks/dbrx-base,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 304, in hf_raise_for_status - response.raise_for_status() - File ""/usr/local/lib/python3.10/dist-packages/requests/models.py"", line 1024, in raise_for_status - raise HTTPError(http_error_msg, response=self) -requests.exceptions.HTTPError: 403 Client Error: Forbidden for url: https://huggingface.co/databricks/dbrx-base/resolve/main/config.json - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File 
""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1722, in _get_metadata_or_catch_error - metadata = get_hf_file_metadata(url=url, proxies=proxies, timeout=etag_timeout, headers=headers) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1645, in get_hf_file_metadata - r = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 372, in _request_wrapper - response = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 396, in _request_wrapper - hf_raise_for_status(response) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 367, in hf_raise_for_status - raise HfHubHTTPError(message, response=response) from e -huggingface_hub.utils._errors.HfHubHTTPError: (Request ID: Root=1-66947b2f-6ae9874027ec04660f185aff;3e73d25b-ebc4-46c2-86d8-67ed0854a12f) - -403 Forbidden: Please enable access to public gated repositories in your fine-grained token settings to view this repository.. -Cannot access content at: https://huggingface.co/databricks/dbrx-base/resolve/main/config.json. -If you are trying to create or update content,make sure you have a token with the `write` role. - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1221, in hf_hub_download - return _hf_hub_download_to_cache_dir( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1325, in _hf_hub_download_to_cache_dir - _raise_on_head_call_error(head_call_error, force_download, local_files_only) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1826, in _raise_on_head_call_error - raise LocalEntryNotFoundError( -huggingface_hub.utils._errors.LocalEntryNotFoundError: An error happened while trying to locate the file on the Hub and we cannot find the requested files in the local cache. Please check your connection and try again or make sure your Internet connection is on. 
- -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 445, in cached_file - raise EnvironmentError( -OSError: We couldn't connect to 'https://huggingface.co' to load this file, couldn't find it in the cached files and it looks like databricks/dbrx-base is not the path to a directory containing a file named config.json. -Checkout your internet connection or see how to run the library in offline mode at 'https://huggingface.co/docs/transformers/installation#offline-mode'. 
- -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,2,64,1,,, -4bit-awq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,Qwen/Qwen1.5-7B,Qwen/Qwen1.5-7B,cuda,0,42,,,True,,,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,qwen2,MB,4947.406848,8236.040192,0.0,7606.370304,6988.678144,s,1,11.559060546875,11.559060546875,0.0,11.559060546875,11.559060546875,11.559060546875,11.559060546875,[11.559060546875],,kWh,5.6076752015991996e-05,3.071898305305203e-05,9.69464664459796e-05,0.0001837422015150236,,MB,2840.236032,8257.011712,0.0,7608.467456,6915.138048,s,10,1.1635939865112306,0.11635939865112306,4.122393119209347e-05,0.11635148620605469,0.11641306533813477,0.11642159004211426,0.11642840980529785,"[0.11630390167236328, 0.11641117095947266, 0.11643011474609374, 0.11634194946289063, 0.11630982208251953, 0.11636102294921875, 0.11639008331298828, 0.11632608032226563, 0.11633331298828126, 0.11638652801513671]",tokens/s,2200.0801221700813,kWh,1.376855450443796e-06,7.543175227384653e-07,6.009316822953504e-06,8.140489796135765e-06,tokens/kWh,31447739.191506814,MB,2844.561408,8261.206016,0.0,7610.564608,6915.140608,s,10,19.886754638671878,1.9886754638671877,0.017414377479408594,1.9853505249023438,2.0114911132812496,2.014130139160156,2.0162413598632813,"[1.97901708984375, 1.97367333984375, 2.0021956787109376, 2.0009007568359376, 1.9916839599609375, 2.0167691650390625, 1.9738931884765625, 1.9754010009765626, 1.9623157958984374, 2.0109046630859373]",tokens/s,31.679377125461134,kWh,2.325620497038952e-05,1.2745129250092118e-05,5.6503344298252185e-05,9.250467851873381e-05,tokens/kWh,681046.6347087666,,s,630,19.884772346496582,0.03156313070872473,0.0008181328252341243,0.031764480590820314,0.032442573928833006,0.03271372985839844,0.03317393325805664,"[0.030651391983032225, 0.030752767562866212, 0.03317452621459961, 0.032723968505859374, 0.03229388809204101, 0.03159654426574707, 0.032304126739501955, 0.03284172821044922, 0.032086017608642575, 0.030669824600219726, 0.030709760665893555, 0.030290943145751953, 0.030699520111083983, 0.030723072052001952, 0.03180953598022461, 0.032454654693603514, 0.03162623977661133, 0.03201433563232422, 0.030696447372436524, 0.030691328048706053, 0.030674943923950194, 0.030904319763183592, 0.030369792938232422, 0.031038463592529295, 0.03260723114013672, 0.032266239166259765, 0.0321710090637207, 0.032395263671875, 0.03222732925415039, 0.03211468887329102, 0.0307589111328125, 0.03082035255432129, 0.03080601692199707, 0.030651391983032225, 0.032287742614746096, 0.032215038299560544, 0.032159744262695314, 0.03180748748779297, 0.032249855041503905, 0.03266559982299805, 0.03141427230834961, 0.03079884719848633, 0.030664703369140626, 0.030702592849731446, 0.030665727615356447, 0.030450687408447266, 0.030340095520019532, 0.03057254409790039, 0.03080499267578125, 0.030857215881347655, 0.03074355125427246, 0.030745599746704103, 0.030680063247680665, 0.03082035255432129, 0.0307640323638916, 0.03136409568786621, 0.032494590759277346, 0.032107521057128906, 
0.030731264114379882, 0.0310118408203125, 0.03201740646362305, 0.031899648666381834, 0.03216793441772461, 0.031053823471069338, 0.030845951080322266, 0.030448671340942382, 0.03081007957458496, 0.0307589111328125, 0.030731264114379882, 0.03116851234436035, 0.031251455307006834, 0.03233280181884766, 0.03282329559326172, 0.0336629753112793, 0.03259187316894531, 0.0326297607421875, 0.03200307083129883, 0.03213619232177734, 0.03231129455566406, 0.03215769577026367, 0.03204403305053711, 0.0322529296875, 0.03172966384887695, 0.032261119842529294, 0.03237478256225586, 0.03209011077880859, 0.032347137451171876, 0.03238195037841797, 0.03219660949707031, 0.030726144790649414, 0.030671871185302735, 0.030685184478759765, 0.030737407684326173, 0.030674943923950194, 0.03075993537902832, 0.030684160232543944, 0.030281728744506835, 0.030851072311401367, 0.031287296295166016, 0.03209625625610352, 0.030683135986328124, 0.030744575500488282, 0.03128934478759766, 0.03097088050842285, 0.030713855743408205, 0.030701568603515625, 0.030661632537841797, 0.030661632537841797, 0.030281728744506835, 0.030672895431518556, 0.03075993537902832, 0.03056025505065918, 0.030649343490600587, 0.030324735641479493, 0.03059916877746582, 0.030675968170166015, 0.032368640899658206, 0.031958015441894534, 0.030855167388916017, 0.030591999053955078, 0.03213619232177734, 0.031801343917846676, 0.03137433624267578, 0.03040870475769043, 0.03074662399291992, 0.031426559448242186, 0.03232460784912109, 0.03235532760620117, 0.032331775665283204, 0.031373311996459964, 0.03226521682739258, 0.03271475219726563, 0.03265228652954102, 0.031037439346313478, 0.03083776092529297, 0.030842880249023437, 0.031301631927490234, 0.0321607666015625, 0.03101388740539551, 0.03224166488647461, 0.03218841552734375, 0.03247206497192383, 0.03149619293212891, 0.030667776107788085, 0.030934015274047853, 0.031095808029174804, 0.030897151947021483, 0.03058995246887207, 0.032484352111816404, 0.032008190155029294, 0.03227238464355469, 0.032230400085449216, 0.03153510475158691, 0.031855615615844726, 0.03204403305053711, 0.03136307144165039, 0.032110591888427735, 0.03217203140258789, 0.0316753921508789, 0.03223961639404297, 0.03183616065979004, 0.03115110397338867, 0.03152179145812988, 0.03077631950378418, 0.03229183959960937, 0.03250995254516602, 0.0323768310546875, 0.0321341438293457, 0.03163443183898926, 0.030517248153686522, 0.03041279983520508, 0.0320706558227539, 0.03074764823913574, 0.03238092803955078, 0.03177164840698242, 0.03171327972412109, 0.03223654556274414, 0.03209523010253906, 0.03219353485107422, 0.030845951080322266, 0.031887359619140625, 0.03280588912963867, 0.03233484649658203, 0.031848447799682614, 0.03083776092529297, 0.03237478256225586, 0.03222528076171875, 0.03237580871582031, 0.0322979850769043, 0.03206553649902344, 0.030507007598876954, 0.03041587257385254, 0.031663103103637694, 0.031281152725219724, 0.030801919937133788, 0.03057663917541504, 0.031719423294067385, 0.032045055389404296, 0.032105472564697264, 0.03185971260070801, 0.030774272918701173, 0.03220275115966797, 0.03216588973999023, 0.03263078308105469, 0.03242291259765625, 0.03235635375976562, 0.03224166488647461, 0.032368640899658206, 0.0322426872253418, 0.031734783172607424, 0.03185971260070801, 0.031493120193481446, 0.03219148635864258, 0.03204915237426758, 0.0322242546081543, 0.03223961639404297, 0.032198654174804685, 0.03214438247680664, 0.031870975494384765, 0.03151769638061523, 0.03154022407531738, 0.030809087753295897, 0.03079475212097168, 0.032143360137939454, 
0.032732158660888674, 0.03255091094970703, 0.03241984176635742, 0.03212492752075195, 0.03241984176635742, 0.03211161422729492, 0.03214131164550781, 0.03224166488647461, 0.03223961639404297, 0.03222220611572266, 0.03246694564819336, 0.032271358489990236, 0.03240857696533203, 0.032254974365234376, 0.03216793441772461, 0.03228979110717774, 0.03243724822998047, 0.030451711654663087, 0.030737407684326173, 0.030535680770874023, 0.032502784729003906, 0.03197235107421875, 0.030655487060546875, 0.03061555290222168, 0.031251455307006834, 0.030537727355957032, 0.03018239974975586, 0.030507007598876954, 0.03037491226196289, 0.030456832885742188, 0.03114188766479492, 0.0310118408203125, 0.030651391983032225, 0.03058585548400879, 0.030638080596923828, 0.03058176040649414, 0.030525440216064452, 0.030726144790649414, 0.031251455307006834, 0.031959039688110355, 0.030228479385375977, 0.03075481605529785, 0.030660608291625976, 0.030963712692260743, 0.031139839172363282, 0.032756736755371094, 0.03244134521484375, 0.03233280181884766, 0.03251609420776367, 0.0322242546081543, 0.032146430969238284, 0.03222732925415039, 0.034269184112548826, 0.032758785247802735, 0.03180544090270996, 0.032092159271240234, 0.03146240043640137, 0.031627264022827145, 0.03201126480102539, 0.03196006393432617, 0.031591424942016604, 0.030466047286987305, 0.031068159103393556, 0.033312767028808594, 0.030883840560913086, 0.031239168167114258, 0.03225600051879883, 0.032314369201660156, 0.03226419067382812, 0.031719423294067385, 0.03169587135314941, 0.03036262321472168, 0.03162214469909668, 0.03218636703491211, 0.030534656524658203, 0.031764480590820314, 0.032484352111816404, 0.032178176879882815, 0.03141939163208008, 0.03255295944213867, 0.0344268798828125, 0.03266252899169922, 0.032024574279785153, 0.03183718490600586, 0.03219353485107422, 0.03204095840454101, 0.03213312149047851, 0.03122380828857422, 0.03014143943786621, 0.03040358352661133, 0.030204927444458008, 0.031409151077270506, 0.03136614418029785, 0.030846975326538087, 0.03058585548400879, 0.030671871185302735, 0.030642175674438478, 0.030737407684326173, 0.030950399398803712, 0.031426559448242186, 0.03127603149414063, 0.03218431854248047, 0.032571392059326174, 0.03226521682739258, 0.03165081596374512, 0.03264614486694336, 0.03243417739868164, 0.032402431488037106, 0.032366592407226565, 0.03234918212890625, 0.03227238464355469, 0.032307201385498044, 0.03235635375976562, 0.032835582733154296, 0.03260927963256836, 0.03213926315307617, 0.03197235107421875, 0.032366592407226565, 0.03180646324157715, 0.03181977653503418, 0.03171123123168945, 0.032517120361328124, 0.03272499084472656, 0.03220889663696289, 0.033225727081298825, 0.03264921569824219, 0.03250483322143555, 0.032091136932373046, 0.03230515289306641, 0.032395263671875, 0.032530433654785154, 0.032263168334960936, 0.03222220611572266, 0.03234099197387695, 0.032623615264892575, 0.03223961639404297, 0.03188121604919433, 0.03251507186889648, 0.03280588912963867, 0.03232563018798828, 0.03179110336303711, 0.030851072311401367, 0.030760959625244142, 0.030741504669189453, 0.03079782485961914, 0.03102720069885254, 0.03224166488647461, 0.03216486358642578, 0.03206758499145508, 0.03240959930419922, 0.03345817565917969, 0.03241062545776367, 0.03238092803955078, 0.032118785858154295, 0.03178700828552246, 0.030725120544433594, 0.030948352813720704, 0.032302078247070314, 0.030624767303466797, 0.030752767562866212, 0.030711807250976563, 0.030766080856323243, 0.030669824600219726, 0.03220172882080078, 0.03181056022644043, 0.03209830474853516, 
0.032817150115966795, 0.032039936065673826, 0.03193139266967773, 0.03228979110717774, 0.03219558334350586, 0.032075775146484374, 0.031927295684814457, 0.03215155029296875, 0.032007167816162106, 0.03212492752075195, 0.030729215621948244, 0.03080499267578125, 0.030275583267211914, 0.03060940742492676, 0.030714879989624022, 0.03228364944458008, 0.030715904235839843, 0.030591999053955078, 0.030470144271850585, 0.03124940872192383, 0.03124838447570801, 0.03181977653503418, 0.032173057556152344, 0.031835136413574217, 0.03042099189758301, 0.031161344528198243, 0.03212595367431641, 0.03224063873291016, 0.030673919677734376, 0.030312448501586913, 0.0305930233001709, 0.030691328048706053, 0.03271372985839844, 0.030707712173461913, 0.030652416229248046, 0.031106048583984375, 0.030713855743408205, 0.030410751342773438, 0.030448640823364258, 0.030769151687622072, 0.0327720947265625, 0.032435199737548825, 0.030845951080322266, 0.03080703926086426, 0.030316543579101563, 0.030661632537841797, 0.030278656005859376, 0.03203583908081055, 0.032389118194580076, 0.0315156478881836, 0.03142348861694336, 0.03297484970092773, 0.03223756790161133, 0.03228672027587891, 0.03217715072631836, 0.03216998291015625, 0.031648767471313476, 0.03144499206542969, 0.03210342407226562, 0.03218329620361328, 0.03280588912963867, 0.03225804901123047, 0.032254974365234376, 0.03213926315307617, 0.030655487060546875, 0.030819328308105468, 0.030835712432861328, 0.0307640323638916, 0.031109119415283205, 0.03176652717590332, 0.030486528396606444, 0.03097292709350586, 0.03151667213439941, 0.032176128387451174, 0.03184639930725098, 0.032277503967285154, 0.03212287902832031, 0.032586753845214846, 0.03214745712280274, 0.03227443313598633, 0.03085312080383301, 0.03271372985839844, 0.03295948791503906, 0.030963712692260743, 0.031082496643066407, 0.03078656005859375, 0.03085312080383301, 0.031130624771118165, 0.031491071701049804, 0.032271358489990236, 0.03210342407226562, 0.03234918212890625, 0.03245363235473633, 0.03077631950378418, 0.030461952209472655, 0.03060326385498047, 0.030422016143798827, 0.03038412857055664, 0.030734336853027344, 0.030600191116333008, 0.03058892822265625, 0.03058380889892578, 0.030300159454345704, 0.03014143943786621, 0.03055308723449707, 0.03034726333618164, 0.03077939224243164, 0.030623743057250977, 0.03075481605529785, 0.030119935989379884, 0.03018547248840332, 0.03019059181213379, 0.030434303283691407, 0.0305664005279541, 0.031038463592529295, 0.030467071533203126, 0.03061759948730469, 0.030512128829956055, 0.030320640563964843, 0.0301527042388916, 0.03021824073791504, 0.03077324867248535, 0.030660608291625976, 0.030552064895629883, 0.030342144012451173, 0.030650367736816408, 0.03035955238342285, 0.030248960494995116, 0.030434303283691407, 0.03036467170715332, 0.030390272140502928, 0.03297075271606445, 0.033172481536865236, 0.031941631317138675, 0.03212595367431641, 0.030749696731567383, 0.030841856002807616, 0.03078860855102539, 0.03177779197692871, 0.0320552978515625, 0.030716928482055664, 0.030670848846435547, 0.030651391983032225, 0.0323061752319336, 0.03152076721191406, 0.03240140914916992, 0.03194367980957031, 0.03210342407226562, 0.0316231689453125, 0.03197747230529785, 0.031697919845581055, 0.03198464012145996, 0.03196416091918945, 0.03263488006591797, 0.03223859024047852, 0.03159040069580078, 0.030479360580444335, 0.030756864547729492, 0.03062272071838379, 0.030631935119628906, 0.03032678413391113, 0.03056230354309082, 0.030631935119628906, 0.03100467109680176, 0.03180544090270996, 0.030629888534545898, 
0.031263744354248044, 0.032263168334960936, 0.03098111915588379, 0.030845951080322266, 0.030431232452392577, 0.030510080337524413, 0.03018137550354004, 0.03035647964477539, 0.030698495864868162, 0.03221196746826172, 0.03235635375976562, 0.031057920455932617, 0.030698495864868162, 0.030311424255371092, 0.03213824081420898, 0.033007614135742186, 0.03237376022338867, 0.03226009750366211, 0.03221913528442383, 0.03219558334350586, 0.03296768188476563, 0.03309568023681641, 0.03082137680053711, 0.03218739318847656, 0.031971328735351565, 0.03214131164550781, 0.031764480590820314, 0.03164672088623047, 0.03142963218688965, 0.03232665634155273, 0.032379905700683595, 0.031936511993408204, 0.031493120193481446, 0.0320634880065918, 0.032271358489990236, 0.032247806549072264, 0.03218534469604492, 0.032276481628417966, 0.03218431854248047, 0.03220991897583008, 0.03279257583618164, 0.030947328567504883, 0.03213824081420898, 0.03226828765869141, 0.03161702346801758, 0.032282623291015625, 0.03316326522827148, 0.033037311553955076, 0.03232153701782227, 0.03239731216430664, 0.03240038299560547, 0.03221811294555664, 0.03213721466064453, 0.03212492752075195, 0.031768575668334964, 0.03215155029296875, 0.0323164176940918, 0.032233470916748046, 0.03199897575378418, 0.031632383346557616, 0.03146137619018555, 0.03232460784912109, 0.03212595367431641, 0.03206860733032227, 0.032210945129394535, 0.03214233779907227, 0.030889984130859374, 0.030654464721679688, 0.030718975067138672, 0.030686208724975586, 0.030721023559570314, 0.030552064895629883, 0.03129241561889649, 0.03104256057739258]",tokens/s,31.68253520946128,,,2,64,1,,, -4bit-awq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,Qwen/Qwen2-beta-14B,Qwen/Qwen2-beta-14B,cuda,0,42,,,True,,,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,,qwen2,MB,8220.688384,12519.473152,0.0,11882.463232,11315.947008,s,1,13.790009765625,13.790009765625,0.0,13.790009765625,13.790009765625,13.790009765625,13.790009765625,[13.790009765625],,kWh,8.184917125485894e-05,4.484102183674408e-05,0.00015802234863999953,0.00028471254173160256,,MB,3677.835264,12540.444672,0.0,11884.560384,11070.3104,s,10,2.083279083251953,0.2083279083251953,5.80619608251725e-05,0.20833046722412107,0.2084000503540039,0.20841973648071288,0.20843548538208007,"[0.20834141540527343, 0.20831951904296875, 0.20835427856445313, 0.20825946044921875, 0.20843942260742188, 0.20827503967285158, 0.20839567565917969, 0.20824166870117186, 0.20830613708496093, 0.20834646606445312]",tokens/s,1228.831998833251,kWh,2.463517731336869e-06,1.3498186341801444e-06,1.0769944958541564e-05,1.4583281324058578e-05,tokens/kWh,17554348.319240563,MB,3682.082816,12544.638976,0.0,11886.657536,11070.31296,s,10,24.113241943359373,2.411324194335937,0.009185701752479779,2.412723510742188,2.419812255859375,2.4260876464843752,2.4311079589843754,"[2.412994384765625, 2.41245263671875, 2.40384814453125, 2.4019169921875, 2.402259765625, 2.432363037109375, 2.41460986328125, 2.418417724609375, 2.401056884765625, 
2.413322509765625]",tokens/s,26.126723295019143,kWh,2.8701261653177788e-05,1.5729177169235494e-05,8.42294921520587e-05,0.00012865993097447198,tokens/kWh,489662.9395246615,,s,630,24.11115930938723,0.038271681443471754,0.0005463254634013884,0.03807846450805664,0.03895889930725098,0.03942271842956543,0.040517733154296874,"[0.03847372817993164, 0.038904830932617186, 0.038100990295410156, 0.03823001480102539, 0.03876147079467773, 0.038811649322509766, 0.038332416534423826, 0.03864371109008789, 0.038351871490478515, 0.03810406494140625, 0.03809996795654297, 0.03807027053833008, 0.03802214431762695, 0.038073345184326174, 0.03781119918823242, 0.03839590454101562, 0.038130687713623046, 0.038010879516601564, 0.03806617736816406, 0.03803443145751953, 0.038035457611083984, 0.03804569625854492, 0.03803238296508789, 0.03821363067626953, 0.038042625427246096, 0.037976062774658204, 0.03801702499389648, 0.03801804733276367, 0.03867340850830078, 0.03826278305053711, 0.03825151824951172, 0.038307838439941407, 0.03848294448852539, 0.038819839477539066, 0.03816960144042969, 0.03803647994995117, 0.038079486846923825, 0.038043647766113284, 0.03803238296508789, 0.037958656311035156, 0.037986305236816405, 0.03870515060424805, 0.03853107070922852, 0.0379791374206543, 0.03802624130249024, 0.03803033447265625, 0.03807027053833008, 0.038059009552001956, 0.03805593490600586, 0.03811328125, 0.03808665466308594, 0.03806003189086914, 0.03804876708984375, 0.03807436752319336, 0.03804876708984375, 0.04115558242797852, 0.04052070236206055, 0.03822694396972656, 0.03819417572021484, 0.03870924758911133, 0.038951934814453124, 0.03816960144042969, 0.038056961059570314, 0.03837542343139649, 0.03826483154296875, 0.038391807556152346, 0.03806412887573242, 0.038063102722167966, 0.038193153381347655, 0.03842867279052734, 0.038353919982910156, 0.03805286407470703, 0.03811328125, 0.03805388641357422, 0.03858432006835937, 0.03922431945800781, 0.03915059280395508, 0.03809382247924804, 0.037996543884277346, 0.03801702499389648, 0.03870515060424805, 0.03868672180175781, 0.03812659072875976, 0.038242305755615234, 0.03800678253173828, 0.038150142669677735, 0.03811532974243164, 0.038114303588867186, 0.03838566589355469, 0.03803238296508789, 0.038338558197021484, 0.03837235260009766, 0.03799347305297852, 0.038056961059570314, 0.03815935897827148, 0.03815423965454102, 0.03856076812744141, 0.03804467010498047, 0.037961727142333986, 0.03802214431762695, 0.03850137710571289, 0.03806105422973633, 0.03798732757568359, 0.038004737854003906, 0.03816755294799805, 0.03808768081665039, 0.037943294525146484, 0.03799244689941406, 0.03896627044677734, 0.03858124923706055, 0.038024192810058595, 0.03807846450805664, 0.03827814483642578, 0.038019073486328124, 0.03942502212524414, 0.03807027053833008, 0.03906662368774414, 0.03836928176879883, 0.03903692626953125, 0.0384716796875, 0.03829862213134766, 0.038199295043945314, 0.038645759582519534, 0.03805388641357422, 0.03810201644897461, 0.038174720764160154, 0.03853209686279297, 0.038609920501708986, 0.03815116882324219, 0.038345729827880856, 0.03839897537231445, 0.037945343017578126, 0.03811328125, 0.037972991943359374, 0.03808256149291992, 0.03787059020996094, 0.03784601593017578, 0.0380313606262207, 0.03807743835449219, 0.03803955078125, 0.03800678253173828, 0.038112255096435545, 0.038269950866699216, 0.0381030387878418, 0.038199295043945314, 0.03838566589355469, 0.03841843032836914, 0.03806208038330078, 0.0379576301574707, 0.03794124984741211, 0.037986305236816405, 0.039011329650878904, 0.03814809417724609, 
0.03796377563476563, 0.03806412887573242, 0.037972991943359374, 0.038744064331054685, 0.03840512084960938, 0.03796582412719727, 0.038019073486328124, 0.038004737854003906, 0.03793407821655274, 0.03799552154541016, 0.03801599884033203, 0.03801702499389648, 0.038027263641357424, 0.03798015975952149, 0.03790233612060547, 0.03800985717773438, 0.0379791374206543, 0.040174591064453126, 0.03823513412475586, 0.03802828979492188, 0.03807743835449219, 0.038204414367675785, 0.03805081558227539, 0.03798527908325195, 0.038217727661132815, 0.03808563232421875, 0.03795558547973633, 0.03800064086914062, 0.037926910400390625, 0.038013950347900394, 0.03813478469848633, 0.03868672180175781, 0.03833958435058594, 0.03804467010498047, 0.03790335845947266, 0.0379607048034668, 0.03814912033081055, 0.03793817520141601, 0.03796582412719727, 0.03793612670898437, 0.03827097702026367, 0.03808153533935547, 0.038176769256591796, 0.03811328125, 0.03809075164794922, 0.03805081558227539, 0.037928958892822266, 0.038043647766113284, 0.03813683319091797, 0.037958656311035156, 0.03790848159790039, 0.03800883102416992, 0.038384639739990234, 0.03874508666992187, 0.0395601921081543, 0.0383109130859375, 0.038084606170654296, 0.03807436752319336, 0.03803852844238281, 0.038019073486328124, 0.03802624130249024, 0.038012928009033206, 0.037994495391845705, 0.03803033447265625, 0.03796377563476563, 0.03799859237670898, 0.037937152862548826, 0.037953536987304685, 0.038694911956787106, 0.0380497932434082, 0.03793920135498047, 0.03795558547973633, 0.0380497932434082, 0.03807027053833008, 0.037991424560546876, 0.03802521514892578, 0.038024192810058595, 0.03805491256713867, 0.03804467010498047, 0.03889152145385742, 0.038353919982910156, 0.03841843032836914, 0.038059009552001956, 0.03801497650146484, 0.03805184173583984, 0.03807129669189453, 0.03802214431762695, 0.03804569625854492, 0.038042625427246096, 0.03817779159545898, 0.03802009582519531, 0.03798732757568359, 0.03802624130249024, 0.03816243362426758, 0.038112255096435545, 0.03809280014038086, 0.038040576934814455, 0.03820544052124023, 0.03812761688232422, 0.03820544052124023, 0.038338558197021484, 0.039572479248046875, 0.038509567260742186, 0.03812966537475586, 0.03800883102416992, 0.0380313606262207, 0.03802521514892578, 0.03802624130249024, 0.03803340911865234, 0.03802009582519531, 0.038004737854003906, 0.03804774475097656, 0.03803750228881836, 0.038063102722167966, 0.0380211181640625, 0.037988353729248046, 0.038002689361572264, 0.03813888168334961, 0.03803647994995117, 0.03807846450805664, 0.03799961471557617, 0.0380313606262207, 0.03803238296508789, 0.03781836700439453, 0.0377968635559082, 0.03802214431762695, 0.0393994255065918, 0.03931852722167969, 0.03926323318481445, 0.03934207916259766, 0.03792486572265625, 0.037585918426513674, 0.037561344146728515, 0.03803033447265625, 0.03797401428222656, 0.03803955078125, 0.03919769668579102, 0.03928575897216797, 0.038004737854003906, 0.03796480178833008, 0.03783065414428711, 0.03763916778564453, 0.037550079345703126, 0.037580799102783204, 0.03767603302001953, 0.03789311981201172, 0.038046718597412106, 0.037994495391845705, 0.03796377563476563, 0.037996543884277346, 0.0379422721862793, 0.03796480178833008, 0.03804774475097656, 0.037956607818603515, 0.03800985717773438, 0.038095870971679685, 0.03783270263671875, 0.03784499359130859, 0.03801190567016602, 0.03814092636108399, 0.03803955078125, 0.03807846450805664, 0.03817881774902344, 0.03814297485351562, 0.038004737854003906, 0.038043647766113284, 0.03809894561767578, 0.03811532974243164, 
0.038091777801513675, 0.038043647766113284, 0.03762483215332031, 0.03778355026245117, 0.03805491256713867, 0.03879731369018555, 0.03820544052124023, 0.03810406494140625, 0.03818086242675781, 0.03788595199584961, 0.03798527908325195, 0.03765043258666992, 0.037748737335205076, 0.0381102066040039, 0.0381921272277832, 0.038091777801513675, 0.03813785552978516, 0.03814297485351562, 0.0381399040222168, 0.04130918502807617, 0.039785472869873044, 0.039392257690429686, 0.03806924819946289, 0.03810201644897461, 0.03867852783203125, 0.03943731307983398, 0.039428096771240234, 0.03952640151977539, 0.03939328002929687, 0.039406593322753904, 0.039419902801513675, 0.03966361618041992, 0.039299072265625, 0.03971891021728516, 0.03815628814697265, 0.03870003128051758, 0.038870014190673825, 0.04105830383300781, 0.040586238861083986, 0.03836723327636719, 0.039564289093017575, 0.03938508987426758, 0.04023807907104492, 0.03945369720458984, 0.03808768081665039, 0.03824947357177735, 0.03826176071166992, 0.03803750228881836, 0.03810611343383789, 0.03799859237670898, 0.03817779159545898, 0.03815321731567383, 0.03803340911865234, 0.03797094345092773, 0.038100990295410156, 0.03798732757568359, 0.03841126251220703, 0.038225921630859375, 0.03804467010498047, 0.03806208038330078, 0.038024192810058595, 0.03804569625854492, 0.038007808685302735, 0.040272895812988284, 0.04031180953979492, 0.039782398223876955, 0.03810611343383789, 0.03809996795654297, 0.03800678253173828, 0.0379607048034668, 0.037953536987304685, 0.038027263641357424, 0.03793407821655274, 0.038024192810058595, 0.0379422721862793, 0.037994495391845705, 0.03794841766357422, 0.03895808029174805, 0.03931033706665039, 0.03798732757568359, 0.03806719970703125, 0.037935104370117184, 0.03800678253173828, 0.037610496520996094, 0.038005760192871094, 0.03800883102416992, 0.038012928009033206, 0.037994495391845705, 0.03799552154541016, 0.0402083854675293, 0.03907583999633789, 0.042369022369384765, 0.0393809928894043, 0.03816960144042969, 0.03811840057373047, 0.038152191162109376, 0.03809689712524414, 0.03807846450805664, 0.03805286407470703, 0.038128639221191404, 0.03805491256713867, 0.03803033447265625, 0.03801497650146484, 0.038046718597412106, 0.03819417572021484, 0.03810713577270508, 0.03811532974243164, 0.038043647766113284, 0.03808051300048828, 0.03806719970703125, 0.0381952018737793, 0.03803750228881836, 0.03801702499389648, 0.03809894561767578, 0.038168575286865236, 0.03808358383178711, 0.03800678253173828, 0.03814092636108399, 0.03823820877075195, 0.03815116882324219, 0.03825766372680664, 0.03802521514892578, 0.03800371170043945, 0.03811123275756836, 0.039782398223876955, 0.0396492805480957, 0.038242305755615234, 0.0381102066040039, 0.0379791374206543, 0.03959091186523438, 0.0393256950378418, 0.03941785430908203, 0.0377077751159668, 0.03814809417724609, 0.03812044906616211, 0.03811532974243164, 0.03813273620605469, 0.0381214714050293, 0.0381102066040039, 0.038392833709716793, 0.03813580703735352, 0.03819417572021484, 0.038144001007080076, 0.038133758544921875, 0.03810713577270508, 0.038056961059570314, 0.03806617736816406, 0.03802828979492188, 0.038063102722167966, 0.038040576934814455, 0.03810508728027344, 0.03817062377929688, 0.03811328125, 0.03816447830200195, 0.03790848159790039, 0.03808665466308594, 0.0381030387878418, 0.038024192810058595, 0.0381921272277832, 0.03830374526977539, 0.039556095123291016, 0.03838771057128906, 0.03939328002929687, 0.039413761138916016, 0.03858124923706055, 0.03949465560913086, 0.038112255096435545, 0.0380682258605957, 
0.0387583999633789, 0.0395335693359375, 0.03792998504638672, 0.03806617736816406, 0.03851264190673828, 0.039430145263671876, 0.03805286407470703, 0.037667839050292966, 0.03830988693237305, 0.03811328125, 0.03846963119506836, 0.03854950332641602, 0.03810611343383789, 0.03813580703735352, 0.038091777801513675, 0.03855462265014648, 0.03809382247924804, 0.03806208038330078, 0.03816243362426758, 0.03807231903076172, 0.03878911972045898, 0.038348800659179685, 0.03808563232421875, 0.038214656829833986, 0.038117374420166016, 0.03816755294799805, 0.038117374420166016, 0.03813683319091797, 0.03834470367431641, 0.03914854431152344, 0.038809600830078124, 0.03806924819946289, 0.03811532974243164, 0.038059009552001956, 0.03805491256713867, 0.038073345184326174, 0.03804569625854492, 0.03798425674438476, 0.03799859237670898, 0.037988353729248046, 0.0380948486328125, 0.03801497650146484, 0.03793920135498047, 0.038160385131835936, 0.03801804733276367, 0.037986305236816405, 0.03793305587768555, 0.03794739151000977, 0.037951488494873044, 0.03788390350341797, 0.037956607818603515, 0.03796889495849609, 0.037493759155273435, 0.03795251083374023, 0.03799552154541016, 0.03897753524780274, 0.03793305587768555, 0.03794944000244141, 0.03802828979492188, 0.03803955078125, 0.03787366485595703, 0.03796889495849609, 0.03798425674438476, 0.037935104370117184, 0.03791155242919922, 0.0379422721862793, 0.037966846466064456, 0.03794124984741211, 0.03786137771606445, 0.03797401428222656, 0.037937152862548826, 0.03911679840087891, 0.03886489486694336, 0.037935104370117184, 0.037884929656982425, 0.0379607048034668, 0.03793612670898437, 0.0380211181640625, 0.038188030242919925, 0.03806924819946289, 0.037926910400390625, 0.038042625427246096, 0.03801702499389648, 0.03800371170043945, 0.03795455932617187, 0.037996543884277346, 0.038010879516601564, 0.03820748901367187, 0.03789209747314453, 0.038076416015625, 0.03825254440307617, 0.039204864501953124, 0.038079486846923825, 0.03796377563476563, 0.038004737854003906, 0.0380211181640625, 0.03791872024536133, 0.03800371170043945, 0.038024192810058595, 0.03812454223632813, 0.03803033447265625, 0.03807436752319336, 0.03829862213134766, 0.038117374420166016, 0.038095870971679685, 0.03814809417724609, 0.03812454223632813, 0.038150142669677735, 0.0392437744140625, 0.04051046371459961, 0.03884543991088867, 0.0383631362915039, 0.03876454544067383, 0.03823001480102539, 0.038147071838378906, 0.0383744010925293, 0.03895808029174805, 0.038338558197021484, 0.03813273620605469, 0.03806412887573242, 0.03932057571411133, 0.0380497932434082, 0.03828224182128906, 0.038234111785888675, 0.038265857696533206, 0.03811328125, 0.03818598556518555, 0.03817779159545898, 0.03859558486938477, 0.038160385131835936, 0.038056961059570314, 0.03878911972045898, 0.038215679168701173, 0.03803647994995117, 0.03804159927368164, 0.037994495391845705, 0.03829043197631836, 0.038079486846923825, 0.03805593490600586, 0.03822694396972656, 0.04092006301879883]",tokens/s,26.128980026054652,,,2,64,1,,, -4bit-awq-exllama-v2-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,a,a,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 
7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 304, in hf_raise_for_status - response.raise_for_status() - File ""/usr/local/lib/python3.10/dist-packages/requests/models.py"", line 1024, in raise_for_status - raise HTTPError(http_error_msg, response=self) -requests.exceptions.HTTPError: 404 Client Error: Not Found for url: https://huggingface.co/a/resolve/main/config.json - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1221, in hf_hub_download - return _hf_hub_download_to_cache_dir( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1325, in _hf_hub_download_to_cache_dir - _raise_on_head_call_error(head_call_error, force_download, local_files_only) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1823, in _raise_on_head_call_error - raise head_call_error - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1722, in _get_metadata_or_catch_error - metadata = get_hf_file_metadata(url=url, proxies=proxies, timeout=etag_timeout, headers=headers) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1645, in get_hf_file_metadata - r = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 372, in _request_wrapper - response = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 396, in _request_wrapper - hf_raise_for_status(response) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status - raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-6694915d-39ab4dc675e32efe6561cfed;e47ddd1a-35c8-40f3-a7eb-3ff9fadfc81d) - -Repository Not Found for url: https://huggingface.co/a/resolve/main/config.json. -Please make sure you specified the correct `repo_id` and `repo_type`. -If you are trying to access a private or gated repo, make sure you are authenticated. 
- -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 425, in cached_file - raise EnvironmentError( -OSError: a is not a local folder and is not a valid model identifier listed on 'https://huggingface.co/models' -If this is a private repository, make sure to pass a token having permission to this repo either by logging in with `huggingface-cli login` or by passing `token=` - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,2,64,1,,, -4bit-awq-exllama-v2-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,-,-,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 106, in _inner_fn - validate_repo_id(arg_value) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 160, in validate_repo_id - raise HFValidationError( 
-huggingface_hub.errors.HFValidationError: Repo id must use alphanumeric chars or '-', '_', '.', '--' and '..' are forbidden, '-' and '.' cannot start or end the name, max length is 96: '-'. - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 466, in cached_file - raise EnvironmentError( -OSError: Incorrect path_or_model_id: '-'. Please provide either the path to a local folder or the repo_id of a model on the Hub. 
- -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,2,64,1,,, -4bit-awq-exllama-v2-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,facebook/opt-350m,facebook/opt-350m,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - self.load_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3907, in from_pretrained - hf_quantizer.postprocess_model(model) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/base.py"", line 195, in postprocess_model - return self._process_model_after_weight_loading(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/quantizer_awq.py"", line 107, in _process_model_after_weight_loading - model = post_init_awq_exllama_modules(model, self.quantization_config.exllama_config) - File ""/usr/local/lib/python3.10/dist-packages/transformers/integrations/awq.py"", line 466, in post_init_awq_exllama_modules - model = exllamav2_post_init( - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllamav2.py"", line 198, in exllamav2_post_init - submodule.post_init(scratch_space=model.scratch_spaces[device]) - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllamav2.py"", line 81, in post_init - self.q_handle = exlv2_ext.make_q_matrix( -NameError: name 'exlv2_ext' is not defined - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,2,64,1,,, 
-4bit-awq-exllama-v2-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,m,m,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 304, in hf_raise_for_status - response.raise_for_status() - File ""/usr/local/lib/python3.10/dist-packages/requests/models.py"", line 1024, in raise_for_status - raise HTTPError(http_error_msg, response=self) -requests.exceptions.HTTPError: 404 Client Error: Not Found for url: https://huggingface.co/m/resolve/main/config.json - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1221, in hf_hub_download - return _hf_hub_download_to_cache_dir( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1325, in _hf_hub_download_to_cache_dir - _raise_on_head_call_error(head_call_error, force_download, local_files_only) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1823, in _raise_on_head_call_error - raise head_call_error - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1722, in _get_metadata_or_catch_error - metadata = get_hf_file_metadata(url=url, proxies=proxies, timeout=etag_timeout, headers=headers) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1645, in get_hf_file_metadata - r = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 372, in _request_wrapper - response = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 396, in _request_wrapper - hf_raise_for_status(response) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status - raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. 
(Request ID: Root=1-66948c3e-42f750de74e57e6a47d2fa67;42b55cee-8651-4cc8-aa87-b8b5bc58be2c) - -Repository Not Found for url: https://huggingface.co/m/resolve/main/config.json. -Please make sure you specified the correct `repo_id` and `repo_type`. -If you are trying to access a private or gated repo, make sure you are authenticated. - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 425, in cached_file - raise EnvironmentError( -OSError: m is not a local folder and is not a valid model identifier listed on 'https://huggingface.co/models' -If this is a private repository, make sure to pass a token having permission to this repo either by logging in with `huggingface-cli login` or by passing `token=` - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,2,64,1,,, -4bit-awq-exllama-v2-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,stabilityai/stablelm-base-alpha-7b,stabilityai/stablelm-base-alpha-7b,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File 
""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - self.load_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3907, in from_pretrained - hf_quantizer.postprocess_model(model) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/base.py"", line 195, in postprocess_model - return self._process_model_after_weight_loading(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/quantizer_awq.py"", line 107, in _process_model_after_weight_loading - model = post_init_awq_exllama_modules(model, self.quantization_config.exllama_config) - File ""/usr/local/lib/python3.10/dist-packages/transformers/integrations/awq.py"", line 466, in post_init_awq_exllama_modules - model = exllamav2_post_init( - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllamav2.py"", line 198, in exllamav2_post_init - submodule.post_init(scratch_space=model.scratch_spaces[device]) - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllamav2.py"", line 81, in post_init - self.q_handle = exlv2_ext.make_q_matrix( -NameError: name 'exlv2_ext' is not defined - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,2,64,1,,, -4bit-awq-exllama-v2-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,M,M,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 304, in hf_raise_for_status - response.raise_for_status() - File ""/usr/local/lib/python3.10/dist-packages/requests/models.py"", line 1024, in raise_for_status - raise HTTPError(http_error_msg, response=self) -requests.exceptions.HTTPError: 404 
Client Error: Not Found for url: https://huggingface.co/M/resolve/main/config.json - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1221, in hf_hub_download - return _hf_hub_download_to_cache_dir( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1325, in _hf_hub_download_to_cache_dir - _raise_on_head_call_error(head_call_error, force_download, local_files_only) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1823, in _raise_on_head_call_error - raise head_call_error - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1722, in _get_metadata_or_catch_error - metadata = get_hf_file_metadata(url=url, proxies=proxies, timeout=etag_timeout, headers=headers) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1645, in get_hf_file_metadata - r = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 372, in _request_wrapper - response = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 396, in _request_wrapper - hf_raise_for_status(response) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status - raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-66948fb6-14aa653006bdca2765c97850;3e155ea7-f331-4aeb-9ad7-8e41408fd669) - -Repository Not Found for url: https://huggingface.co/M/resolve/main/config.json. -Please make sure you specified the correct `repo_id` and `repo_type`. -If you are trying to access a private or gated repo, make sure you are authenticated. 
- -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 425, in cached_file - raise EnvironmentError( -OSError: M is not a local folder and is not a valid model identifier listed on 'https://huggingface.co/models' -If this is a private repository, make sure to pass a token having permission to this repo either by logging in with `huggingface-cli login` or by passing `token=` - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,2,64,1,,, -4bit-awq-exllama-v2-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,8,8,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 304, in hf_raise_for_status - response.raise_for_status() - File ""/usr/local/lib/python3.10/dist-packages/requests/models.py"", line 1024, in raise_for_status - raise HTTPError(http_error_msg, response=self) -requests.exceptions.HTTPError: 404 Client Error: Not Found for url: https://huggingface.co/8/resolve/main/config.json - -The above exception was the 
direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1221, in hf_hub_download - return _hf_hub_download_to_cache_dir( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1325, in _hf_hub_download_to_cache_dir - _raise_on_head_call_error(head_call_error, force_download, local_files_only) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1823, in _raise_on_head_call_error - raise head_call_error - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1722, in _get_metadata_or_catch_error - metadata = get_hf_file_metadata(url=url, proxies=proxies, timeout=etag_timeout, headers=headers) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1645, in get_hf_file_metadata - r = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 372, in _request_wrapper - response = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 396, in _request_wrapper - hf_raise_for_status(response) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status - raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-66949264-2f8b59776239e9fd092109d4;3fc6b401-c54c-4b1e-a2d6-62cfccdac636) - -Repository Not Found for url: https://huggingface.co/8/resolve/main/config.json. -Please make sure you specified the correct `repo_id` and `repo_type`. -If you are trying to access a private or gated repo, make sure you are authenticated. 
- -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 425, in cached_file - raise EnvironmentError( -OSError: 8 is not a local folder and is not a valid model identifier listed on 'https://huggingface.co/models' -If this is a private repository, make sure to pass a token having permission to this repo either by logging in with `huggingface-cli login` or by passing `token=` - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,2,64,1,,, -4bit-awq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,EleutherAI/gpt-neox-20b,EleutherAI/gpt-neox-20b,cuda,0,42,,,True,,,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,gpt_neox,MB,11060.518912,15131.475968,0.0,14501.80608,13634.065408,s,1,14.3992392578125,14.3992392578125,0.0,14.3992392578125,14.3992392578125,14.3992392578125,14.3992392578125,[14.3992392578125],,kWh,9.009613501458172e-05,4.9364575985617705e-05,0.00019774571375197336,0.0003372064247521728,,MB,2052.411392,15152.447488,0.0,14501.80608,12898.96192,s,10,3.578675659179687,0.35786756591796876,0.00040006518051751013,0.3578063659667969,0.35796163940429687,0.3584950256347656,0.3589217346191406,"[0.35775778198242186, 0.35780398559570314, 0.3575802001953125, 0.3590284118652344, 0.35784310913085937, 0.3578087463378906, 0.357819580078125, 0.3575936279296875, 0.35784133911132815, 
0.357598876953125]",tokens/s,715.34842601154,kWh,4.227105513095263e-06,2.315409727145117e-06,1.6691213749785954e-05,2.3233728990026333e-05,tokens/kWh,11018463.721854312,MB,2060.263424,15152.447488,0.0,14501.80608,13241.259008,s,10,26.705897705078126,2.6705897705078128,0.05823564481812388,2.6979384765625003,2.728227978515625,2.728562841796875,2.728830732421875,"[2.720063720703125, 2.581814208984375, 2.706813232421875, 2.728153564453125, 2.711645751953125, 2.728897705078125, 2.689063720703125, 2.66100927734375, 2.599175048828125, 2.579261474609375]",tokens/s,23.59029480893299,kWh,3.050183436905725e-05,1.6717511046205892e-05,0.0001008144080324103,0.00014803375344767344,tokens/kWh,425578.61658401485,,s,630,26.69141192626952,0.04236732051788814,0.0012489830060166086,0.043066368103027344,0.04345446395874023,0.043653682518005374,0.04424454284667969,"[0.0427325439453125, 0.0431912956237793, 0.04316159820556641, 0.043081729888916016, 0.04328550338745117, 0.04335411071777344, 0.04344319915771484, 0.043270145416259766, 0.043232257843017576, 0.042912769317626956, 0.04325273513793945, 0.04321279907226563, 0.04326092910766602, 0.043172863006591795, 0.04319539260864258, 0.043014144897460936, 0.04329062271118164, 0.04359884643554687, 0.04321279907226563, 0.04328550338745117, 0.043319297790527345, 0.043390975952148435, 0.043096065521240234, 0.04349747085571289, 0.044034046173095705, 0.04338995361328125, 0.04315852737426758, 0.04268646240234375, 0.04306739044189453, 0.043308032989501956, 0.043154430389404294, 0.04288000106811524, 0.042820606231689456, 0.04313702392578125, 0.04309196853637695, 0.04295065689086914, 0.043154430389404294, 0.042861568450927735, 0.042755073547363284, 0.04287897491455078, 0.04315238571166992, 0.04314112091064453, 0.04308070373535156, 0.042828800201416016, 0.042820606231689456, 0.042877952575683595, 0.04310323333740235, 0.043117568969726565, 0.04318822479248047, 0.043194366455078126, 0.04290457534790039, 0.04325068664550781, 0.043463680267333986, 0.0431912956237793, 0.04311552047729492, 0.04304383850097656, 0.043172863006591795, 0.04329574584960937, 0.043412479400634765, 0.04311449432373047, 0.04317900848388672, 0.043240447998046876, 0.04324761581420898, 0.04093439865112305, 0.04056063842773437, 0.040622081756591794, 0.0409354248046875, 0.04107468795776367, 0.04069887924194336, 0.04072447967529297, 0.04076134490966797, 0.04050124740600586, 0.040576000213623044, 0.04085657501220703, 0.0407562255859375, 0.040758270263671875, 0.04059340667724609, 0.040428543090820314, 0.04055039978027344, 0.04071526336669922, 0.04065484619140625, 0.04054937744140625, 0.04068454360961914, 0.04038655853271484, 0.040597503662109374, 0.040354816436767575, 0.040545280456542966, 0.044450817108154295, 0.04347903823852539, 0.04294246292114258, 0.04320870590209961, 0.04052377700805664, 0.04051046371459961, 0.040403968811035154, 0.04055244827270508, 0.040525825500488284, 0.0405032958984375, 0.040643585205078124, 0.04076134490966797, 0.04051763153076172, 0.040515583038330076, 0.04047359848022461, 0.04038143920898438, 0.04061798477172852, 0.04060569763183594, 0.04051763153076172, 0.04043571090698242, 0.04046745681762695, 0.04036812973022461, 0.04067737579345703, 0.0406824951171875, 0.04055244827270508, 0.04064972686767578, 0.0405852165222168, 0.040525825500488284, 0.04087091064453125, 0.040852481842041016, 0.040910846710205076, 0.04077875137329102, 0.04056371307373047, 0.04046540832519531, 0.040637439727783206, 0.04172390365600586, 0.043891712188720705, 0.04322099304199219, 0.043394046783447264, 
0.04103680038452148, 0.0408350715637207, 0.04062822341918945, 0.040700927734375, 0.040438785552978515, 0.04070502471923828, 0.043286529541015625, 0.043256832122802735, 0.04315135955810547, 0.04314214324951172, 0.0429752311706543, 0.04320153427124023, 0.04333670425415039, 0.04324454498291016, 0.043246593475341794, 0.04314828872680664, 0.04306227111816406, 0.04307660675048828, 0.04306534576416016, 0.04339507293701172, 0.043123710632324216, 0.042998783111572264, 0.04308582305908203, 0.04335513687133789, 0.04316262435913086, 0.04314214324951172, 0.04308582305908203, 0.04307353591918945, 0.0433889274597168, 0.04340531158447265, 0.0429035530090332, 0.04312473678588867, 0.04301619338989258, 0.04300799942016602, 0.04316364669799805, 0.04335513687133789, 0.04301824188232422, 0.043117568969726565, 0.043015167236328124, 0.04318003082275391, 0.04323123168945313, 0.04326911926269531, 0.04365414428710938, 0.04443648147583008, 0.043686912536621096, 0.04311552047729492, 0.04309299087524414, 0.04312985610961914, 0.042929153442382816, 0.043254783630371094, 0.043128833770751954, 0.043399166107177735, 0.04372275161743164, 0.04377907180786133, 0.04354048156738281, 0.04308889770507812, 0.04291993713378906, 0.04347903823852539, 0.04305100631713867, 0.04317695999145508, 0.04136959838867187, 0.04342169570922851, 0.04299264144897461, 0.04107161712646484, 0.042570751190185545, 0.04348620986938476, 0.04343296051025391, 0.043594753265380856, 0.043328510284423825, 0.043224063873291016, 0.043338752746582034, 0.043150337219238284, 0.043218944549560545, 0.04356403350830078, 0.04345548629760742, 0.04342169570922851, 0.04317388916015625, 0.04304076766967774, 0.04315135955810547, 0.04304076766967774, 0.043445247650146485, 0.043319297790527345, 0.04314828872680664, 0.043069438934326174, 0.043273216247558595, 0.043055103302001956, 0.04316364669799805, 0.04327731323242188, 0.043216896057128903, 0.04320665740966797, 0.04352819061279297, 0.043153408050537106, 0.043237377166748046, 0.04382310485839844, 0.04356300735473633, 0.04338380813598633, 0.043377662658691404, 0.043184127807617184, 0.04341350555419922, 0.04338483047485352, 0.04306227111816406, 0.04321996688842773, 0.043243518829345705, 0.04382003021240234, 0.04357632064819336, 0.04340326309204102, 0.04348518371582031, 0.04321279907226563, 0.0435230712890625, 0.04334182357788086, 0.043012096405029294, 0.04326604843139648, 0.04335411071777344, 0.04379545593261719, 0.043491329193115234, 0.04332339096069336, 0.04329062271118164, 0.04316569519042969, 0.04323328018188476, 0.0435230712890625, 0.043312126159667966, 0.043235328674316405, 0.04341145706176758, 0.043254783630371094, 0.04325068664550781, 0.04327936172485351, 0.042412033081054686, 0.04340633773803711, 0.04379852676391602, 0.04332953643798828, 0.043409408569335936, 0.04330188751220703, 0.043328510284423825, 0.04360806274414063, 0.04341964721679688, 0.04286361694335938, 0.042967041015625, 0.04293632125854492, 0.0433889274597168, 0.04305612945556641, 0.04360704040527344, 0.04466175842285156, 0.043769855499267575, 0.04345446395874023, 0.043232257843017576, 0.04296499252319336, 0.04316569519042969, 0.04286975860595703, 0.043071487426757815, 0.043325439453125, 0.04312678527832031, 0.04376473617553711, 0.04328857421875, 0.0434411506652832, 0.04315852737426758, 0.04322611236572266, 0.042967041015625, 0.043622398376464845, 0.043232257843017576, 0.0433520622253418, 0.043156478881835936, 0.043322368621826174, 0.043235328674316405, 0.04332646560668945, 0.04311142349243164, 0.043308032989501956, 0.04344627380371094, 
0.043261951446533206, 0.04298649597167969, 0.043202560424804685, 0.04393267059326172, 0.04378316879272461, 0.04328755187988281, 0.043325439453125, 0.04060569763183594, 0.04071526336669922, 0.04058009719848633, 0.040801280975341796, 0.04070809555053711, 0.04070912170410156, 0.04059648132324219, 0.04353023910522461, 0.04326911926269531, 0.04096102523803711, 0.04425523376464844, 0.043635711669921876, 0.04353228759765625, 0.04362956619262695, 0.043259902954101564, 0.04365107345581055, 0.04365824127197265, 0.04340224075317383, 0.043292671203613284, 0.043292671203613284, 0.04411699295043945, 0.04421836853027344, 0.043910144805908206, 0.04343091201782227, 0.04326502227783203, 0.04336537551879883, 0.0433070068359375, 0.04324863815307617, 0.04336640167236328, 0.04365311813354492, 0.04329676818847656, 0.043291648864746096, 0.04319846343994141, 0.04323328018188476, 0.04336537551879883, 0.043210750579833986, 0.043358207702636715, 0.04316364669799805, 0.043015167236328124, 0.04316364669799805, 0.04313497543334961, 0.04312063980102539, 0.043225086212158204, 0.04300288009643555, 0.04113100814819336, 0.04303462219238281, 0.04302643203735351, 0.04305100631713867, 0.04326707077026367, 0.043014144897460936, 0.043000831604003906, 0.04303155136108398, 0.043170814514160154, 0.04312575912475586, 0.04326707077026367, 0.043194366455078126, 0.04317695999145508, 0.043112449645996094, 0.04339814376831055, 0.043138046264648434, 0.04323020935058594, 0.043150337219238284, 0.04326092910766602, 0.04307353591918945, 0.0432803840637207, 0.04313497543334961, 0.04332339096069336, 0.04341145706176758, 0.04341452789306641, 0.04310323333740235, 0.043338752746582034, 0.0432097282409668, 0.043238399505615234, 0.044096511840820314, 0.04338483047485352, 0.04416921615600586, 0.043911167144775394, 0.043423744201660154, 0.04213862228393555, 0.04345446395874023, 0.04322304153442383, 0.04347596740722656, 0.043284481048583984, 0.043200511932373044, 0.041275390625, 0.04316364669799805, 0.040771583557128906, 0.0408279037475586, 0.040602622985839845, 0.04080332946777344, 0.04099379348754883, 0.04326502227783203, 0.04188467025756836, 0.04324863815307617, 0.04306022262573242, 0.04336025619506836, 0.04302950286865234, 0.04319232177734375, 0.04321791839599609, 0.04330188751220703, 0.04231987380981445, 0.043305984497070314, 0.042987518310546875, 0.04318105697631836, 0.04067225646972656, 0.042014720916748044, 0.04322099304199219, 0.04366643142700195, 0.04299468612670899, 0.040871936798095705, 0.04174950408935547, 0.043358207702636715, 0.04307251358032226, 0.043450366973876955, 0.043199489593505856, 0.04332748794555664, 0.04322611236572266, 0.04349747085571289, 0.04315955352783203, 0.04335411071777344, 0.043138046264648434, 0.042894336700439455, 0.04330086517333984, 0.04361830520629883, 0.043224063873291016, 0.04329779052734375, 0.043128833770751954, 0.043199489593505856, 0.0434524154663086, 0.04398796844482422, 0.04313497543334961, 0.043146240234375, 0.04278681564331055, 0.043189247131347655, 0.04307558441162109, 0.04283903884887695, 0.04064767837524414, 0.04081868743896484, 0.04052070236206055, 0.04069171142578125, 0.04195532989501953, 0.04092927932739258, 0.040807422637939454, 0.040716289520263675, 0.04089548873901367, 0.04067635345458984, 0.04071321487426758, 0.04325580978393555, 0.0406927375793457, 0.04239052963256836, 0.043166721343994144, 0.042782718658447266, 0.043081729888916016, 0.040513534545898434, 0.040632320404052735, 0.04062003326416016, 0.040619007110595705, 0.042426368713378904, 0.04315135955810547, 0.04311654281616211, 
0.043055103302001956, 0.04261478424072265, 0.04335103988647461, 0.042858497619628906, 0.043003902435302735, 0.04287078475952148, 0.043071487426757815, 0.04099071884155273, 0.04081356811523437, 0.040389633178710936, 0.041128959655761715, 0.04286361694335938, 0.04298649597167969, 0.04288204956054688, 0.04301004791259765, 0.043407360076904294, 0.04439244842529297, 0.04309401702880859, 0.04306124877929687, 0.04299468612670899, 0.04149862289428711, 0.04314316940307617, 0.04318207931518555, 0.04102656173706055, 0.04304281616210937, 0.040758270263671875, 0.04102656173706055, 0.040659969329833984, 0.04064665603637695, 0.040602622985839845, 0.040665088653564455, 0.04089344024658203, 0.042071041107177735, 0.043053054809570314, 0.04322918319702149, 0.04296089553833008, 0.042515457153320314, 0.042916862487792966, 0.04305408096313477, 0.04273561477661133, 0.04309401702880859, 0.04390707015991211, 0.043251712799072264, 0.045284351348876956, 0.04091187286376953, 0.040635391235351564, 0.0406743049621582, 0.040815616607666014, 0.04050534439086914, 0.04123136138916016, 0.04064665603637695, 0.04069171142578125, 0.04068454360961914, 0.04071321487426758, 0.04045209503173828, 0.04069171142578125, 0.040720382690429685, 0.040671230316162106, 0.04071321487426758, 0.04077056121826172, 0.04061798477172852, 0.042705921173095705, 0.04300185775756836, 0.04307558441162109, 0.04286975860595703, 0.0433623046875, 0.042881023406982424, 0.04302336120605469, 0.0404326400756836, 0.04056883239746094, 0.04119244766235351, 0.04588339233398438, 0.043202560424804685, 0.04356915283203125, 0.04295372772216797, 0.04319641494750977, 0.04294451141357422, 0.0405667839050293, 0.04037529754638672, 0.04071219253540039, 0.040471553802490234, 0.040583168029785156, 0.040559616088867184, 0.04083814239501953, 0.0413040657043457, 0.04304793548583984, 0.040460289001464846, 0.040635391235351564, 0.040586238861083986, 0.040525825500488284, 0.040360958099365234, 0.04051148986816406, 0.04043673706054687, 0.040576000213623044, 0.0406036491394043, 0.040787967681884765, 0.040569854736328126, 0.04063129425048828, 0.040738815307617186, 0.040970241546630856, 0.040417278289794925, 0.04063334274291992, 0.040441856384277344, 0.04060774230957031, 0.04051763153076172, 0.04067635345458984, 0.04042342376708984, 0.04112179183959961, 0.04079513549804688, 0.04071526336669922, 0.04078079986572265, 0.04091904067993164, 0.04057395172119141, 0.040621055603027346, 0.04067942428588867, 0.0405401611328125, 0.04054323196411133, 0.04055551910400391, 0.04057702255249023, 0.04189798355102539, 0.04271615982055664, 0.040771583557128906, 0.04064460754394531, 0.040553470611572266, 0.04048076629638672, 0.04163481521606445, 0.042234878540039066, 0.04049407958984375, 0.04069683074951172, 0.04030054473876953, 0.04072857666015625, 0.040513534545898434, 0.040668159484863284, 0.040578048706054685, 0.040612865447998046, 0.042126335144042966, 0.04159590530395508, 0.040528896331787106, 0.04059033584594727, 0.04043366241455078, 0.040546302795410154, 0.0402503662109375, 0.04043366241455078, 0.04047052764892578, 0.04055449676513672, 0.04047052764892578, 0.040390655517578124, 0.0402872314453125, 0.04054937744140625, 0.04038451385498047, 0.04052070236206055, 0.04053913497924805, 0.04061695861816406, 0.040354816436767575, 0.041027584075927735, 0.04084428787231445, 0.04195123291015625, 0.040932350158691407, 0.040594432830810545, 0.04054425430297852, 0.04092825698852539, 0.04040703964233398, 0.04078079986572265, 0.040581119537353515, 0.04050124740600586, 0.04401049423217773, 0.04265574264526367, 
0.042105857849121096, 0.04214067077636719, 0.0420239372253418]",tokens/s,23.603097570869142,,,2,64,1,,, -4bit-awq-exllama-v2-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,s,s,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 304, in hf_raise_for_status - response.raise_for_status() - File ""/usr/local/lib/python3.10/dist-packages/requests/models.py"", line 1024, in raise_for_status - raise HTTPError(http_error_msg, response=self) -requests.exceptions.HTTPError: 404 Client Error: Not Found for url: https://huggingface.co/s/resolve/main/config.json - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1221, in hf_hub_download - return _hf_hub_download_to_cache_dir( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1325, in _hf_hub_download_to_cache_dir - _raise_on_head_call_error(head_call_error, force_download, local_files_only) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1823, in _raise_on_head_call_error - raise head_call_error - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1722, in _get_metadata_or_catch_error - metadata = get_hf_file_metadata(url=url, proxies=proxies, timeout=etag_timeout, headers=headers) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1645, in get_hf_file_metadata - r = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 372, in _request_wrapper - response = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 396, in _request_wrapper - hf_raise_for_status(response) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status - raise RepositoryNotFoundError(message, 
response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-66948ce0-3444eee579eb976d15e1ea5a;0e4b7586-4890-4e0f-8cf6-08242a5926a3) - -Repository Not Found for url: https://huggingface.co/s/resolve/main/config.json. -Please make sure you specified the correct `repo_id` and `repo_type`. -If you are trying to access a private or gated repo, make sure you are authenticated. - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 425, in cached_file - raise EnvironmentError( -OSError: s is not a local folder and is not a valid model identifier listed on 'https://huggingface.co/models' -If this is a private repository, make sure to pass a token having permission to this repo either by logging in with `huggingface-cli login` or by passing `token=` - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,2,64,1,,, -4bit-awq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,EleutherAI/pythia-12b,EleutherAI/pythia-12b,cuda,0,42,,,True,,,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,gpt_neox,MB,6677.696512,9622.257664,0.0,8992.587776,8404.320768,s,1,11.9874208984375,11.9874208984375,0.0,11.9874208984375,11.9874208984375,11.9874208984375,11.9874208984375,[11.9874208984375],,kWh,5.9293012450694335e-05,3.2481278265179544e-05,0.00011995870707803835,0.00021173299779391223,,MB,1703.927808,9643.229184,0.0,8992.587776,7880.40704,s,10,1.8198451843261718,0.18198451843261718,7.743756920928738e-05,0.1819715881347656,0.18208723907470703,0.18211191024780274,0.1821316471862793,"[0.18188534545898438, 
0.18193232727050782, 0.18203587341308594, 0.18200483703613282, 0.18208175659179687, 0.18195120239257812, 0.18190080261230468, 0.18199197387695312, 0.18192448425292967, 0.18213658142089845]",tokens/s,1406.713066610599,kWh,2.150057256249906e-06,1.17812121926931e-06,8.463739094219127e-06,1.1791917569738346e-05,tokens/kWh,21709785.409030844,MB,1748.004864,9643.229184,0.0,8992.587776,8123.606528,s,10,19.37654052734375,1.9376540527343749,0.007659862633679834,1.9387833251953124,1.9460040893554689,1.946804168701172,1.9474442321777343,"[1.9458262939453126, 1.947604248046875, 1.9309443359375, 1.927997314453125, 1.944681396484375, 1.9441253662109375, 1.9429635009765625, 1.9346031494140625, 1.928370849609375, 1.929424072265625]",tokens/s,32.513543844989144,kWh,2.2890486200485013e-05,1.2544575107951391e-05,6.490995849357831e-05,0.00010034501980201471,tokens/kWh,627833.8488975524,,s,630,19.374210046768212,0.030752714359949508,0.00042951519747222715,0.03060172748565674,0.031229951477050783,0.0314671612739563,0.032529530220031744,"[0.030894079208374024, 0.031172607421875, 0.03113471984863281, 0.030645248413085937, 0.030621696472167968, 0.03055411148071289, 0.030636032104492186, 0.03058483123779297, 0.030675968170166015, 0.03061862373352051, 0.030950399398803712, 0.03144601631164551, 0.030637056350708007, 0.03059916877746582, 0.03099238395690918, 0.030645248413085937, 0.03061452865600586, 0.031015935897827147, 0.030704639434814454, 0.03062579154968262, 0.030629888534545898, 0.03120025634765625, 0.033035263061523434, 0.0324771842956543, 0.03119820785522461, 0.031075328826904298, 0.030692352294921874, 0.031102975845336913, 0.030635007858276365, 0.030913536071777343, 0.03063091278076172, 0.03099545669555664, 0.030889984130859374, 0.030534656524658203, 0.030870527267456056, 0.031460351943969726, 0.030880767822265624, 0.030533632278442382, 0.030644224166870116, 0.030559232711791992, 0.030629888534545898, 0.030466047286987305, 0.03057459259033203, 0.030943231582641603, 0.03082956886291504, 0.030526464462280273, 0.03178291130065918, 0.031090688705444337, 0.03060121536254883, 0.030810111999511718, 0.030656511306762696, 0.031117311477661135, 0.030867456436157226, 0.03078451156616211, 0.031140863418579103, 0.030948352813720704, 0.03124838447570801, 0.030725120544433594, 0.031040512084960937, 0.030468095779418947, 0.030649343490600587, 0.030422016143798827, 0.03054591941833496, 0.0313436164855957, 0.030503936767578125, 0.03101081657409668, 0.03063910484313965, 0.03055718421936035, 0.03073945617675781, 0.03098419189453125, 0.030889984130859374, 0.03052851104736328, 0.030964736938476563, 0.030624767303466797, 0.030742528915405274, 0.030604288101196288, 0.03079680061340332, 0.03082956886291504, 0.03255091094970703, 0.03282636642456055, 0.031082496643066407, 0.030535680770874023, 0.030523391723632814, 0.0313118724822998, 0.03146854400634766, 0.03118387222290039, 0.03060531234741211, 0.030633983612060548, 0.030547967910766603, 0.030515199661254884, 0.030483455657958985, 0.03052851104736328, 0.030563327789306642, 0.030629888534545898, 0.030453760147094725, 0.030678016662597656, 0.03156787109375, 0.03172863960266113, 0.030526464462280273, 0.0310435848236084, 0.030474239349365235, 0.03052441596984863, 0.032069633483886716, 0.03273932647705078, 0.031425535202026365, 0.030737407684326173, 0.031076351165771485, 0.03058176040649414, 0.030502912521362304, 0.031057920455932617, 0.03115827178955078, 0.030793727874755858, 0.030801919937133788, 0.030555135726928712, 0.031119359970092773, 0.03136409568786621, 0.030818304061889647, 
0.030661632537841797, 0.030521343231201172, 0.031014911651611327, 0.030500864028930662, 0.030993408203125, 0.03063091278076172, 0.03052441596984863, 0.030514175415039063, 0.030453760147094725, 0.03084492874145508, 0.03074764823913574, 0.030695423126220703, 0.03059916877746582, 0.03058995246887207, 0.03058278465270996, 0.030690303802490236, 0.03058073616027832, 0.03061862373352051, 0.03043328094482422, 0.03056844711303711, 0.03059916877746582, 0.030525440216064452, 0.030530559539794923, 0.030591999053955078, 0.030535680770874023, 0.030664703369140626, 0.030542848587036132, 0.030538751602172853, 0.030516223907470705, 0.03055718421936035, 0.030534656524658203, 0.030555135726928712, 0.030512128829956055, 0.03054080009460449, 0.030526464462280273, 0.030491647720336915, 0.030477312088012694, 0.030542848587036132, 0.03037491226196289, 0.030498815536499024, 0.030843904495239258, 0.030846975326538087, 0.030398464202880858, 0.030414848327636718, 0.03056230354309082, 0.030515199661254884, 0.03058073616027832, 0.03139788818359375, 0.03076710319519043, 0.030692352294921874, 0.030715904235839843, 0.030706687927246092, 0.030458879470825196, 0.03100876808166504, 0.030710784912109376, 0.030774272918701173, 0.03172352027893066, 0.03189145660400391, 0.03080703926086426, 0.03056844711303711, 0.03042918395996094, 0.030406656265258788, 0.030810111999511718, 0.030494720458984374, 0.030638080596923828, 0.030723072052001952, 0.030234624862670898, 0.03040768051147461, 0.03060736083984375, 0.030661632537841797, 0.030665727615356447, 0.030641151428222657, 0.0307906551361084, 0.03057356834411621, 0.030559232711791992, 0.03052441596984863, 0.03058278465270996, 0.030451711654663087, 0.030523391723632814, 0.030551040649414062, 0.030493696212768553, 0.03042508888244629, 0.030494720458984374, 0.030868480682373047, 0.03057766342163086, 0.03075379180908203, 0.03058483123779297, 0.030733312606811523, 0.030274560928344726, 0.030484479904174806, 0.03054182434082031, 0.030466047286987305, 0.030690303802490236, 0.03054080009460449, 0.030534656524658203, 0.030452735900878908, 0.03056844711303711, 0.03054489517211914, 0.030551040649414062, 0.030689279556274415, 0.03054899215698242, 0.030488576889038086, 0.031178752899169923, 0.0309616641998291, 0.030902271270751954, 0.030453760147094725, 0.031441919326782225, 0.030918655395507814, 0.030539775848388673, 0.03055308723449707, 0.030551040649414062, 0.03057049560546875, 0.03054080009460449, 0.030825471878051756, 0.030547967910766603, 0.030525440216064452, 0.03080089569091797, 0.030555135726928712, 0.030571519851684572, 0.03059097671508789, 0.03058483123779297, 0.03057868766784668, 0.03100364875793457, 0.030492671966552733, 0.03055308723449707, 0.030517248153686522, 0.030637056350708007, 0.03057049560546875, 0.030552064895629883, 0.030519296646118164, 0.030526464462280273, 0.03054080009460449, 0.030457855224609375, 0.03019059181213379, 0.0301527042388916, 0.03074355125427246, 0.03059507179260254, 0.030542848587036132, 0.030101503372192383, 0.030082048416137694, 0.030203903198242187, 0.030365695953369142, 0.030091264724731445, 0.030497791290283204, 0.03037183952331543, 0.03031449508666992, 0.030466047286987305, 0.030587903976440428, 0.030533632278442382, 0.0304005126953125, 0.03057663917541504, 0.03401830291748047, 0.03182489585876465, 0.031108095169067384, 0.031373311996459964, 0.030999551773071288, 0.030879743576049806, 0.030893056869506837, 0.030931968688964844, 0.030447616577148437, 0.0307589111328125, 0.030728191375732423, 0.030308351516723633, 0.031041536331176758, 
0.031185920715332032, 0.030676992416381835, 0.030819328308105468, 0.0313702392578125, 0.031351808547973634, 0.0313702392578125, 0.03115110397338867, 0.031322111129760744, 0.031438848495483396, 0.03143475151062012, 0.03118796730041504, 0.031110143661499022, 0.031251455307006834, 0.031351808547973634, 0.031188991546630858, 0.03140403175354004, 0.03125555229187012, 0.03124940872192383, 0.030678016662597656, 0.030649343490600587, 0.03042815971374512, 0.030922752380371094, 0.03057356834411621, 0.030863359451293947, 0.030563327789306642, 0.030859264373779297, 0.030440448760986328, 0.03057049560546875, 0.030440448760986328, 0.030478336334228515, 0.030533632278442382, 0.030520320892333985, 0.030533632278442382, 0.0314654712677002, 0.03054899215698242, 0.031927295684814457, 0.03221811294555664, 0.03180953598022461, 0.03150028800964356, 0.030983167648315428, 0.03057766342163086, 0.030697471618652345, 0.030648319244384766, 0.03061759948730469, 0.030698495864868162, 0.030638080596923828, 0.030651391983032225, 0.030571519851684572, 0.03082854461669922, 0.030600191116333008, 0.030740480422973632, 0.03077017593383789, 0.030696447372436524, 0.030620672225952147, 0.03059404754638672, 0.030663679122924805, 0.030539775848388673, 0.031053823471069338, 0.03100876808166504, 0.030868480682373047, 0.031107072830200196, 0.03122380828857422, 0.0305930233001709, 0.03059814453125, 0.030871551513671876, 0.03078246307373047, 0.030939136505126953, 0.030531583786010744, 0.031046655654907225, 0.030690303802490236, 0.030858240127563476, 0.030631935119628906, 0.03099443244934082, 0.030697471618652345, 0.03075993537902832, 0.030898176193237304, 0.03057766342163086, 0.030698495864868162, 0.030678016662597656, 0.03055411148071289, 0.03058483123779297, 0.03096780776977539, 0.03061862373352051, 0.0314521598815918, 0.030964736938476563, 0.030946304321289062, 0.03118489646911621, 0.030684160232543944, 0.030860288619995117, 0.030616575241088868, 0.03095244789123535, 0.030669824600219726, 0.031095808029174804, 0.03078860855102539, 0.03138764762878418, 0.030884864807128907, 0.030422016143798827, 0.030505983352661133, 0.030636032104492186, 0.030369792938232422, 0.030546943664550782, 0.03056844711303711, 0.030514175415039063, 0.031147008895874025, 0.030623743057250977, 0.030559232711791992, 0.03057663917541504, 0.030491647720336915, 0.030516223907470705, 0.03022643280029297, 0.030596096038818358, 0.03053260803222656, 0.030494720458984374, 0.030652416229248046, 0.030522367477416993, 0.030480384826660156, 0.030508031845092775, 0.032254974365234376, 0.03205836868286133, 0.03164159965515137, 0.03074355125427246, 0.030774272918701173, 0.03177369689941406, 0.03081318473815918, 0.03053670310974121, 0.032440319061279296, 0.03158016014099121, 0.031302656173706055, 0.03098111915588379, 0.030859264373779297, 0.030869504928588868, 0.030641151428222657, 0.030950399398803712, 0.030898176193237304, 0.030516223907470705, 0.03059507179260254, 0.03050809669494629, 0.03069536018371582, 0.03058585548400879, 0.030629888534545898, 0.0314204158782959, 0.03057254409790039, 0.030690303802490236, 0.03140403175354004, 0.030535680770874023, 0.030523391723632814, 0.030493696212768553, 0.030873600006103515, 0.03099443244934082, 0.031014911651611327, 0.03055820846557617, 0.030604288101196288, 0.030507007598876954, 0.030843904495239258, 0.03077017593383789, 0.03119206428527832, 0.031094783782958983, 0.031086591720581053, 0.030688255310058594, 0.031148031234741212, 0.030729215621948244, 0.030657535552978517, 0.030563327789306642, 0.030678016662597656, 
0.030692352294921874, 0.030594112396240235, 0.030638015747070313, 0.03057356834411621, 0.030559232711791992, 0.03062579154968262, 0.030518272399902343, 0.03078656005859375, 0.030527488708496094, 0.03080294418334961, 0.030697471618652345, 0.03206041717529297, 0.03160063934326172, 0.030628864288330077, 0.030860288619995117, 0.030519296646118164, 0.030757888793945313, 0.031039487838745116, 0.03078144073486328, 0.031473663330078124, 0.030993408203125, 0.03055820846557617, 0.030608383178710938, 0.03140505599975586, 0.03196211242675781, 0.031096832275390625, 0.03101286315917969, 0.030703615188598633, 0.030537727355957032, 0.030468095779418947, 0.030500864028930662, 0.030563327789306642, 0.03080294418334961, 0.030648319244384766, 0.030515199661254884, 0.030475263595581056, 0.03052851104736328, 0.03038924789428711, 0.030509056091308592, 0.030455808639526367, 0.030432256698608398, 0.03055718421936035, 0.030529535293579102, 0.03041689682006836, 0.030672895431518556, 0.030509056091308592, 0.030513151168823242, 0.030418943405151368, 0.030459903717041017, 0.03054182434082031, 0.030551040649414062, 0.030503936767578125, 0.030859264373779297, 0.030934015274047853, 0.030502912521362304, 0.03054489517211914, 0.030426111221313477, 0.030325759887695314, 0.03056947135925293, 0.03055308723449707, 0.030473215103149414, 0.030518272399902343, 0.030437376022338865, 0.03051011276245117, 0.030427104949951173, 0.030443519592285157, 0.030341119766235353, 0.030467071533203126, 0.030502912521362304, 0.030118911743164063, 0.030259199142456054, 0.030101503372192383, 0.030046207427978516, 0.030543872833251953, 0.030101503372192383, 0.030112768173217775, 0.030394367218017578, 0.03339263916015625, 0.0318525447845459, 0.030980096817016602, 0.03122790336608887, 0.031055871963500976, 0.03055615997314453, 0.03059814453125, 0.030522367477416993, 0.030476287841796876, 0.030846975326538087, 0.03057561683654785, 0.030493696212768553, 0.03052851104736328, 0.030485504150390624, 0.0305664005279541, 0.030510080337524413, 0.0307589111328125, 0.030484479904174806, 0.03056947135925293, 0.030453760147094725, 0.030525440216064452, 0.030464000701904297, 0.030587903976440428, 0.030508031845092775, 0.030660608291625976, 0.030492671966552733, 0.03060223960876465, 0.030527488708496094, 0.03058278465270996, 0.03076300811767578, 0.030673919677734376, 0.030519296646118164, 0.030636032104492186, 0.03051011276245117, 0.030547935485839842, 0.030478336334228515, 0.03080294418334961, 0.03137228775024414, 0.030925823211669923, 0.030439424514770507, 0.030497791290283204, 0.030438400268554686, 0.03052851104736328, 0.03034316825866699, 0.03042918395996094, 0.030930944442749023, 0.030547967910766603, 0.030512128829956055, 0.030063615798950196, 0.030119935989379884, 0.030349311828613282, 0.030451711654663087, 0.030373888015747072, 0.030477312088012694, 0.030459903717041017, 0.030390272140502928, 0.030345216751098632, 0.030810111999511718, 0.030486528396606444, 0.030465024948120117, 0.03060121536254883, 0.030485504150390624, 0.030527488708496094, 0.030446592330932616, 0.03094528007507324, 0.031041536331176758, 0.030838783264160157, 0.030616575241088868, 0.030480384826660156, 0.030596096038818358, 0.030495744705200195, 0.031069183349609376, 0.03055308723449707, 0.03056537628173828, 0.030476287841796876, 0.030465024948120117, 0.03038924789428711, 0.030656511306762696, 0.030297088623046874, 0.030513151168823242, 0.030519296646118164, 0.030527488708496094, 0.030449663162231445, 0.03053670310974121, 0.030501888275146483, 0.03100569534301758, 0.03038617515563965, 
0.03055411148071289, 0.030534656524658203, 0.030500864028930662, 0.030442495346069336, 0.03078860855102539, 0.03040358352661133, 0.030518272399902343, 0.030494720458984374, 0.030483455657958985, 0.031308799743652346, 0.032611328125, 0.03155251121520996, 0.031426559448242186, 0.030628864288330077, 0.030468095779418947, 0.030499839782714845, 0.030516223907470705, 0.030637056350708007, 0.031121408462524414, 0.030448640823364258, 0.030484479904174806]",tokens/s,32.51745482676287,,,2,64,1,,, -4bit-awq-exllama-v2-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,2,2,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 304, in hf_raise_for_status - response.raise_for_status() - File ""/usr/local/lib/python3.10/dist-packages/requests/models.py"", line 1024, in raise_for_status - raise HTTPError(http_error_msg, response=self) -requests.exceptions.HTTPError: 404 Client Error: Not Found for url: https://huggingface.co/2/resolve/main/config.json - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1221, in hf_hub_download - return _hf_hub_download_to_cache_dir( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1325, in _hf_hub_download_to_cache_dir - _raise_on_head_call_error(head_call_error, force_download, local_files_only) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1823, in _raise_on_head_call_error - raise head_call_error - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1722, in _get_metadata_or_catch_error - metadata = get_hf_file_metadata(url=url, proxies=proxies, timeout=etag_timeout, headers=headers) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1645, in get_hf_file_metadata - r = _request_wrapper( - File 
""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 372, in _request_wrapper - response = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 396, in _request_wrapper - hf_raise_for_status(response) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status - raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-66949363-46ab8ed24e41d8cc04a7d32c;cae9269f-014e-4011-83ce-e993e098b454) - -Repository Not Found for url: https://huggingface.co/2/resolve/main/config.json. -Please make sure you specified the correct `repo_id` and `repo_type`. -If you are trying to access a private or gated repo, make sure you are authenticated. - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 425, in cached_file - raise EnvironmentError( -OSError: 2 is not a local folder and is not a valid model identifier listed on 'https://huggingface.co/models' -If this is a private repository, make sure to pass a token having permission to this repo either by logging in with `huggingface-cli login` or by passing `token=` - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,2,64,1,,, -4bit-awq-exllama-v2-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,tiiuae/falcon-40b,tiiuae/falcon-40b,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File 
""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 613, in resolve_trust_remote_code - answer = input( -EOFError: EOF when reading a line - -During handling of the above exception, another exception occurred: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 231, in load_model_with_no_weights - self.create_no_weights_model() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 213, in create_no_weights_model - meta_model = self.automodel_class.from_config(self.pretrained_config) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 419, in from_config - trust_remote_code = resolve_trust_remote_code( - File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 626, in resolve_trust_remote_code - raise ValueError( -ValueError: The repository for tiiuae/falcon-40b contains custom code which must be executed to correctly load the model. You can inspect the repository content at https://hf.co/tiiuae/falcon-40b. -Please pass the argument `trust_remote_code=True` to allow custom code to be run. 
- -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,2,64,1,,, -4bit-awq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,EleutherAI/pythia-160m,EleutherAI/pythia-160m,cuda,0,42,,,True,,,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,gpt_neox,MB,922.304512,931.659776,0.0,301.989888,282.769408,s,1,7.34854052734375,7.34854052734375,0.0,7.34854052734375,7.34854052734375,7.34854052734375,7.34854052734375,[7.34854052734375],,kWh,7.014400859741777e-06,3.8205920233399015e-06,9.85028565808621e-06,2.068527854116789e-05,,MB,1574.285312,992.477184,0.0,341.835776,318.94528,s,15,0.19347334194183352,0.012898222796122235,0.00026234189190355907,0.012727487564086913,0.01323751049041748,0.013262508869171142,0.013307810306549072,"[0.01323641586303711, 0.013319135665893555, 0.01277881622314453, 0.013238240242004394, 0.012705951690673829, 0.012727487564086913, 0.012676799774169922, 0.013155839920043945, 0.01318841552734375, 0.012677599906921386, 0.0127073917388916, 0.012684576034545899, 0.012690367698669434, 0.013130559921264648, 0.012555744171142578]",tokens/s,19847.695612527696,kWh,1.5397446944622752e-07,8.436454273511302e-08,3.1335854061716704e-07,5.516975527985076e-07,tokens/kWh,464022359.1738443,MB,1622.085632,1017.643008,0.0,367.0016,318.94784,s,15,10.443511596679688,0.6962341064453125,0.0070102870689394544,0.6940888671875,0.7053194458007812,0.7082876464843749,0.711540390625,"[0.7123535766601562, 0.7065451049804687, 0.7023159790039063, 0.700054931640625, 0.6940888671875, 0.6863651123046876, 0.6922277221679688, 0.694628173828125, 0.689594482421875, 0.6898701782226563, 0.6905087890625, 0.6914014282226563, 0.6938773193359375, 0.70348095703125, 0.696198974609375]",tokens/s,90.48680525240614,kWh,8.32045731552324e-06,4.559182904126586e-06,1.3016592599524102e-05,2.589623281917393e-05,tokens/kWh,2432786.2836232274,,s,945,10.437054460525516,0.011044502074630171,0.00027806965554413984,0.010893312454223633,0.011377663612365722,0.011483968353271484,0.011948359794616698,"[0.011361280441284179, 0.011312128067016602, 0.011396096229553223, 0.011193344116210938, 0.010835968017578124, 0.010874879837036134, 0.010880000114440918, 0.011387904167175293, 0.011406335830688476, 0.011379712104797364, 0.011364352226257325, 0.011314175605773925, 0.011324416160583496, 0.011189248085021973, 0.010874912261962891, 0.010886112213134765, 0.010891263961791992, 0.01093939208984375, 0.011009023666381837, 0.011315199851989746, 0.011531264305114745, 0.011522047996520996, 0.011428863525390624, 0.011454463958740235, 0.011298815727233886, 0.011414527893066406, 0.011366399765014648, 0.011387904167175293, 0.011346943855285644, 0.01133465576171875, 0.011296768188476563, 0.011490303993225098, 0.01164185619354248, 0.011781120300292968, 0.01154150390625, 0.011403264045715332, 0.011177984237670899, 0.011314175605773925, 0.011364352226257325, 0.011344896316528321, 0.011440128326416015, 0.011443231582641602, 0.011358176231384277, 0.01136844825744629, 0.011331583976745606, 0.011350015640258788, 0.011362303733825683, 
0.01132646369934082, 0.011156479835510253, 0.010862591743469239, 0.010722304344177246, 0.011191295623779298, 0.011469823837280273, 0.01140121555328369, 0.011399168014526367, 0.011205632209777832, 0.011096063613891602, 0.011157504081726074, 0.012112895965576171, 0.011599871635437011, 0.011380736351013183, 0.011290623664855956, 0.01131929588317871, 0.011485183715820312, 0.011108351707458495, 0.010920000076293946, 0.010841024398803712, 0.01083289623260498, 0.010776576042175292, 0.010862591743469239, 0.011492351531982421, 0.011355135917663574, 0.011387904167175293, 0.011225088119506836, 0.011275263786315918, 0.011276288032531738, 0.011276288032531738, 0.011258879661560058, 0.011288576126098633, 0.011304960250854493, 0.01133676815032959, 0.0113438081741333, 0.011266048431396485, 0.01125273609161377, 0.011284543991088867, 0.011288512229919433, 0.011935744285583496, 0.011408384323120118, 0.011286527633666991, 0.011250687599182128, 0.01135103988647461, 0.011266048431396485, 0.011309056282043458, 0.011340800285339356, 0.011379712104797364, 0.011348992347717286, 0.01127731227874756, 0.011294719696044921, 0.011321344375610352, 0.011210751533508301, 0.011302911758422851, 0.011308032035827637, 0.01127731227874756, 0.011296768188476563, 0.011278335571289062, 0.010912768363952637, 0.010843135833740235, 0.010933247566223145, 0.010855456352233887, 0.010877920150756835, 0.010905599594116211, 0.010884096145629883, 0.010855423927307128, 0.010820608139038086, 0.010858495712280274, 0.010859519958496093, 0.010888192176818847, 0.010819583892822266, 0.010884096145629883, 0.011570176124572755, 0.011410431861877441, 0.011390975952148438, 0.011358207702636718, 0.011768832206726074, 0.011750399589538574, 0.011472895622253418, 0.010862591743469239, 0.010941439628601075, 0.01091481590270996, 0.010892288208007812, 0.010919936180114746, 0.010897407531738281, 0.010897407531738281, 0.010883071899414062, 0.010895359992980956, 0.0109169282913208, 0.010900416374206544, 0.010862591743469239, 0.010876928329467773, 0.010842111587524414, 0.010836000442504884, 0.010865632057189942, 0.010870783805847169, 0.010799103736877442, 0.011166720390319825, 0.011500543594360351, 0.01153536033630371, 0.011323391914367676, 0.010907648086547851, 0.011456512451171874, 0.011246591567993165, 0.011415552139282227, 0.011422719955444336, 0.011315199851989746, 0.011333632469177245, 0.011307007789611816, 0.010844160079956054, 0.010889216423034668, 0.01083903980255127, 0.010855423927307128, 0.011374591827392578, 0.011324416160583496, 0.01131827163696289, 0.01131827163696289, 0.01082470417022705, 0.010907648086547851, 0.010779680252075196, 0.010885087966918946, 0.011271167755126953, 0.011309056282043458, 0.011321344375610352, 0.011370495796203613, 0.011332608222961426, 0.011311136245727538, 0.011237343788146973, 0.01132960033416748, 0.011339712142944335, 0.011361280441284179, 0.011332608222961426, 0.011283455848693847, 0.011344896316528321, 0.011344927787780762, 0.011424736022949218, 0.011422719955444336, 0.011356160163879395, 0.011321344375610352, 0.011337727546691894, 0.011331583976745606, 0.011231231689453124, 0.011346943855285644, 0.011314175605773925, 0.011304960250854493, 0.011149312019348144, 0.010859519958496093, 0.01082265567779541, 0.01091481590270996, 0.010893312454223633, 0.01083187198638916, 0.010894335746765137, 0.010859519958496093, 0.010873855590820313, 0.010842111587524414, 0.010895359992980956, 0.010901503562927246, 0.010859519958496093, 0.010797056198120117, 0.01131929588317871, 0.011478015899658203, 0.011387904167175293, 
0.01126195240020752, 0.011268095970153809, 0.01143500804901123, 0.011281408309936524, 0.010860544204711914, 0.011361280441284179, 0.01114521598815918, 0.01094758415222168, 0.010886143684387208, 0.010884096145629883, 0.01083084774017334, 0.0108472318649292, 0.010920960426330567, 0.010806271553039551, 0.010900480270385742, 0.010897407531738281, 0.01084006404876709, 0.010820608139038086, 0.010848256111145019, 0.010862591743469239, 0.010788864135742187, 0.010851327896118163, 0.010891263961791992, 0.010717311859130859, 0.010837887763977051, 0.012139519691467286, 0.011663359642028808, 0.011333632469177245, 0.011315199851989746, 0.011287551879882812, 0.011243519783020019, 0.011410431861877441, 0.011329567909240723, 0.011395039558410645, 0.011388928413391113, 0.011316224098205567, 0.011258879661560058, 0.011300864219665528, 0.011379712104797364, 0.011385855674743652, 0.01132748794555664, 0.011336704254150391, 0.011247615814208984, 0.010835968017578124, 0.010855584144592286, 0.010799967765808105, 0.010877951622009278, 0.010829824447631836, 0.010856448173522949, 0.010863615989685058, 0.010852352142333984, 0.010861568450927735, 0.010913791656494141, 0.010865663528442383, 0.010936320304870606, 0.010869759559631348, 0.010876928329467773, 0.01084006404876709, 0.010902527809143067, 0.010897407531738281, 0.010864640235900879, 0.01084006404876709, 0.01093939208984375, 0.010868736267089844, 0.010861568450927735, 0.010845184326171875, 0.010900480270385742, 0.010840224266052247, 0.010785632133483887, 0.010919936180114746, 0.010885120391845703, 0.010856448173522949, 0.01084006404876709, 0.010819583892822266, 0.010864640235900879, 0.010875904083251953, 0.010936320304870606, 0.010872832298278809, 0.010863648414611817, 0.010833888053894044, 0.010851327896118163, 0.010853376388549805, 0.010856448173522949, 0.010863615989685058, 0.010880000114440918, 0.010735615730285645, 0.010705920219421386, 0.011629568099975587, 0.011416576385498046, 0.011407360076904297, 0.011294719696044921, 0.011703295707702637, 0.011346943855285644, 0.011328512191772461, 0.011347968101501465, 0.011377663612365722, 0.011377663612365722, 0.011619327545166015, 0.011597824096679688, 0.011632672309875488, 0.012224479675292969, 0.010992639541625977, 0.010910719871520995, 0.010908672332763672, 0.010855423927307128, 0.010896384239196777, 0.010850303649902344, 0.01081651210784912, 0.01084928035736084, 0.01081446361541748, 0.010836992263793945, 0.01080832004547119, 0.010692607879638672, 0.010855423927307128, 0.010934271812438966, 0.011042816162109375, 0.01081446361541748, 0.01084006404876709, 0.010836992263793945, 0.010797056198120117, 0.01084928035736084, 0.010845184326171875, 0.010863615989685058, 0.010837023735046386, 0.010835935592651367, 0.010817536354064941, 0.01064038372039795, 0.010701824188232421, 0.01070899200439453, 0.010710016250610351, 0.010805279731750488, 0.0108820161819458, 0.01080832004547119, 0.010862591743469239, 0.010848256111145019, 0.010769408226013183, 0.010785792350769043, 0.010828800201416015, 0.01083289623260498, 0.010804224014282226, 0.01082367992401123, 0.010996735572814942, 0.010946559906005859, 0.010993663787841796, 0.011638784408569336, 0.01152716827392578, 0.011285504341125489, 0.011286527633666991, 0.011325440406799316, 0.01120358371734619, 0.01123635196685791, 0.011287551879882812, 0.011034624099731445, 0.011024383544921875, 0.010810367584228516, 0.010694656372070312, 0.010681344032287597, 0.010675200462341309, 0.010575872421264648, 0.010718208312988281, 0.01075609588623047, 0.010843135833740235, 0.010799103736877442, 
0.010769408226013183, 0.010856448173522949, 0.01084928035736084, 0.010825728416442871, 0.01083084774017334, 0.01082265567779541, 0.01093017578125, 0.0112609281539917, 0.011225088119506836, 0.011250687599182128, 0.011249664306640626, 0.011248640060424805, 0.011249664306640626, 0.011290687561035156, 0.0112608642578125, 0.011280384063720703, 0.011191328048706055, 0.010999775886535645, 0.010811391830444337, 0.010784768104553222, 0.010870783805847169, 0.01083187198638916, 0.010819583892822266, 0.011086848258972168, 0.011396096229553223, 0.011243519783020019, 0.011239423751831054, 0.011197471618652343, 0.011276255607604981, 0.01103052806854248, 0.01083084774017334, 0.010832927703857421, 0.010832863807678223, 0.0108472318649292, 0.010834943771362305, 0.0108472318649292, 0.010805248260498047, 0.010880000114440918, 0.010782719612121582, 0.010730496406555176, 0.010817536354064941, 0.011958271980285644, 0.011076607704162598, 0.01084006404876709, 0.010869759559631348, 0.010861568450927735, 0.010775551795959473, 0.01083084774017334, 0.010852352142333984, 0.010818559646606446, 0.01084006404876709, 0.010873855590820313, 0.010859519958496093, 0.010808383941650391, 0.010846143722534179, 0.010844160079956054, 0.010793984413146973, 0.010851327896118163, 0.010833919525146484, 0.010817536354064941, 0.010801152229309082, 0.010896384239196777, 0.010711039543151855, 0.010815487861633302, 0.010826751708984375, 0.010817536354064941, 0.011187199592590333, 0.01120358371734619, 0.011217920303344727, 0.01122815990447998, 0.011132991790771485, 0.010846143722534179, 0.010853376388549805, 0.01083084774017334, 0.010860544204711914, 0.011152383804321288, 0.010905599594116211, 0.010933247566223145, 0.011165696144104004, 0.011423744201660157, 0.011283455848693847, 0.011250687599182128, 0.011173888206481934, 0.010856512069702149, 0.010812352180480957, 0.010858495712280274, 0.010887167930603027, 0.010869759559631348, 0.010889216423034668, 0.01081446361541748, 0.010888192176818847, 0.010876928329467773, 0.010850303649902344, 0.010905599594116211, 0.01093939208984375, 0.010873855590820313, 0.010860575675964355, 0.010869728088378907, 0.010913791656494141, 0.010861568450927735, 0.010888192176818847, 0.010896384239196777, 0.010880000114440918, 0.010854399681091309, 0.010890239715576172, 0.010863615989685058, 0.010863615989685058, 0.010777600288391113, 0.010885120391845703, 0.010863615989685058, 0.010833919525146484, 0.010897407531738281, 0.01093222427368164, 0.010904576301574707, 0.010842111587524414, 0.01093939208984375, 0.010874879837036134, 0.01095372772216797, 0.011464703559875488, 0.011484160423278808, 0.011187199592590333, 0.011234304428100587, 0.011279423713684083, 0.011206591606140136, 0.01125376033782959, 0.011191295623779298, 0.011204607963562012, 0.011220992088317871, 0.011198464393615723, 0.01164185619354248, 0.011346943855285644, 0.011559935569763183, 0.010969087600708008, 0.010868736267089844, 0.010899456024169921, 0.010819583892822266, 0.01084928035736084, 0.010842111587524414, 0.010851327896118163, 0.010820608139038086, 0.010872832298278809, 0.010881024360656738, 0.01084108829498291, 0.010852352142333984, 0.01083084774017334, 0.010867712020874023, 0.011796480178833007, 0.012360704421997071, 0.011396096229553223, 0.011483200073242187, 0.011338687896728516, 0.011214847564697266, 0.01125376033782959, 0.011275263786315918, 0.011314175605773925, 0.010952704429626465, 0.0108472318649292, 0.010909695625305176, 0.010811391830444337, 0.010844160079956054, 0.010850303649902344, 0.010860544204711914, 0.01084108829498291, 
0.010856448173522949, 0.010848256111145019, 0.01084006404876709, 0.01084928035736084, 0.010876928329467773, 0.010797056198120117, 0.01083084774017334, 0.01083289623260498, 0.01084620761871338, 0.010806271553039551, 0.010851360321044922, 0.010769375801086426, 0.010820608139038086, 0.010856448173522949, 0.010869759559631348, 0.010812416076660156, 0.01083084774017334, 0.010848256111145019, 0.010829888343811035, 0.01081439971923828, 0.010860544204711914, 0.010858495712280274, 0.010803199768066407, 0.01084108829498291, 0.010853376388549805, 0.010834943771362305, 0.01084108829498291, 0.01083903980255127, 0.010828800201416015, 0.010812416076660156, 0.010856448173522949, 0.010844160079956054, 0.010833951950073242, 0.010714112281799316, 0.012651488304138184, 0.01229312038421631, 0.011363424301147462, 0.011198368072509766, 0.011250687599182128, 0.011303936004638672, 0.010729472160339355, 0.010686464309692383, 0.010769408226013183, 0.010870783805847169, 0.010807295799255372, 0.010854399681091309, 0.010864640235900879, 0.010864640235900879, 0.010801152229309082, 0.01084006404876709, 0.010850336074829102, 0.010824671745300293, 0.010819583892822266, 0.010834943771362305, 0.010796031951904296, 0.010777600288391113, 0.010918911933898925, 0.011003904342651367, 0.011064319610595704, 0.010946559906005859, 0.010871808052062988, 0.010913791656494141, 0.01084928035736084, 0.01084006404876709, 0.010875904083251953, 0.010820608139038086, 0.01083084774017334, 0.010856448173522949, 0.010811391830444337, 0.010769408226013183, 0.01082265567779541, 0.01082470417022705, 0.010767359733581543, 0.010811391830444337, 0.010851327896118163, 0.010836992263793945, 0.01083903980255127, 0.010818559646606446, 0.010788864135742187, 0.010729503631591798, 0.010847200393676757, 0.010842111587524414, 0.010835968017578124, 0.01083187198638916, 0.010855423927307128, 0.01083903980255127, 0.010807295799255372, 0.010853376388549805, 0.010817536354064941, 0.01079196834564209, 0.012020735740661622, 0.011326432228088379, 0.011060223579406739, 0.01083801555633545, 0.010786815643310547, 0.010753024101257324, 0.010818559646606446, 0.010802176475524903, 0.01083084774017334, 0.010772480010986327, 0.01071718406677246, 0.010812416076660156, 0.010777600288391113, 0.010800127983093261, 0.010829824447631836, 0.010804224014282226, 0.010817536354064941, 0.01083187198638916, 0.010817567825317382, 0.010818528175354005, 0.01083289623260498, 0.010845184326171875, 0.010782719612121582, 0.010811391830444337, 0.010826751708984375, 0.010807295799255372, 0.010829824447631836, 0.010844223976135254, 0.010836928367614747, 0.010773504257202148, 0.010862591743469239, 0.0108472318649292, 0.010800127983093261, 0.010825728416442871, 0.01085747241973877, 0.010812416076660156, 0.010809344291687012, 0.01082470417022705, 0.01124454402923584, 0.011717632293701171, 0.011239423751831054, 0.01096396827697754, 0.01082367992401123, 0.010916864395141602, 0.010638336181640624, 0.010820608139038086, 0.010694656372070312, 0.011100159645080567, 0.011204671859741212, 0.011273152351379394, 0.011279359817504882, 0.011247615814208984, 0.011238431930541993, 0.01118511962890625, 0.0113438720703125, 0.01185484790802002, 0.011305983543395997, 0.011573247909545899, 0.011254783630371093, 0.011250687599182128, 0.010991616249084473, 0.010864640235900879, 0.010907648086547851, 0.010815487861633302, 0.010850303649902344, 0.010908672332763672, 0.010921983718872071, 0.01082367992401123, 0.011061247825622558, 0.01140121555328369, 0.01123737621307373, 0.01122815990447998, 0.010862591743469239, 
0.01081446361541748, 0.010865663528442383, 0.010893312454223633, 0.010834943771362305, 0.010844160079956054, 0.010800127983093261, 0.010869759559631348, 0.01083801555633545, 0.01084928035736084, 0.010877951622009278, 0.01081651210784912, 0.010862591743469239, 0.011003904342651367, 0.010904576301574707, 0.010880000114440918, 0.010852352142333984, 0.010901503562927246, 0.010805248260498047, 0.010883071899414062, 0.010873855590820313, 0.010863615989685058, 0.010780672073364257, 0.010804224014282226, 0.010806271553039551, 0.010786815643310547, 0.010811391830444337, 0.010850303649902344, 0.010784768104553222, 0.011610112190246581, 0.011297856330871582, 0.01127519989013672, 0.011215871810913085, 0.011230208396911622, 0.011210751533508301, 0.011053055763244628, 0.011280384063720703, 0.010809344291687012, 0.010806271553039551, 0.010805248260498047, 0.010821632385253906, 0.01080832004547119, 0.010818559646606446, 0.010819583892822266, 0.010855423927307128, 0.011062272071838379, 0.011692031860351563, 0.011330559730529785, 0.011305983543395997, 0.011263999938964844, 0.011059200286865235, 0.010775551795959473, 0.01076633644104004, 0.01084928035736084, 0.01112166404724121, 0.010876928329467773, 0.01092915153503418, 0.010864640235900879, 0.01081651210784912, 0.010783743858337403, 0.010839103698730469, 0.010852288246154786, 0.010796031951904296, 0.01084620761871338, 0.010819583892822266, 0.010801152229309082, 0.01082470417022705, 0.010844160079956054, 0.010853376388549805, 0.01084006404876709, 0.010942463874816894, 0.010887167930603027, 0.010943488121032715, 0.011214847564697266, 0.011536383628845214, 0.011307007789611816, 0.011048959732055665, 0.010835968017578124, 0.01084928035736084, 0.010863648414611817, 0.0113919677734375, 0.011083776473999024, 0.01083084774017334, 0.010855423927307128, 0.010866687774658204, 0.010806271553039551, 0.010825728416442871, 0.010860544204711914, 0.010890239715576172, 0.010971136093139648, 0.01126195240020752, 0.010918911933898925, 0.010886143684387208, 0.01083289623260498, 0.010870783805847169, 0.010854399681091309, 0.010712063789367676, 0.010854399681091309, 0.010868736267089844, 0.011275263786315918, 0.010926079750061036, 0.011096063613891602, 0.011239423751831054, 0.011246591567993165, 0.011577343940734864, 0.011577343940734864, 0.011736063957214356, 0.011555839538574218, 0.011346943855285644, 0.011279359817504882, 0.010979328155517578, 0.010864640235900879, 0.01084006404876709, 0.010881024360656738, 0.010858495712280274, 0.01083801555633545, 0.010925056457519532, 0.010966015815734862, 0.010858495712280274, 0.011011072158813476, 0.011257856369018555, 0.011339776039123535, 0.011233280181884766, 0.012413951873779297, 0.011309056282043458, 0.011296768188476563, 0.011273216247558594, 0.011295743942260742, 0.011206656455993653, 0.011274239540100098, 0.011258879661560058, 0.011272192001342773, 0.011105279922485351, 0.010853376388549805, 0.010852352142333984, 0.0109486083984375, 0.011363327980041504, 0.011229184150695801, 0.011344896316528321, 0.011275263786315918, 0.011126784324645997, 0.011294719696044921, 0.011308032035827637, 0.011275263786315918, 0.01142579174041748, 0.011337727546691894, 0.011248640060424805, 0.01131827163696289, 0.011232255935668945, 0.011292672157287598, 0.01127731227874756, 0.011336704254150391, 0.01124454402923584, 0.011273216247558594, 0.011297792434692382, 0.011111424446105958, 0.01085747241973877, 0.010863615989685058, 0.010818559646606446, 0.010908672332763672, 0.012026880264282227, 0.01145036792755127, 0.01126195240020752, 0.011528191566467285, 
0.011267104148864747, 0.01130288028717041, 0.01120358371734619, 0.010908672332763672, 0.010706944465637207, 0.010727423667907715, 0.010730496406555176, 0.010686464309692383, 0.011216896057128906, 0.010798080444335938, 0.010870783805847169, 0.01083801555633545, 0.010855423927307128, 0.010862591743469239, 0.01085747241973877, 0.010868736267089844, 0.010897407531738281, 0.011258879661560058, 0.011255807876586914, 0.011143168449401856, 0.010875904083251953, 0.010872832298278809, 0.010871808052062988, 0.010654720306396484, 0.010670080184936523, 0.011134976387023926, 0.011258879661560058, 0.011234304428100587, 0.011205632209777832, 0.011307007789611816, 0.011320320129394532, 0.011259903907775879, 0.011140095710754394, 0.010968064308166504, 0.011382783889770508, 0.011287551879882812, 0.011300864219665528, 0.011285504341125489, 0.011200511932373047, 0.011255807876586914, 0.011308032035827637, 0.011365376472473144, 0.010858495712280274, 0.010964991569519043, 0.0107325439453125, 0.01073971176147461, 0.01073151969909668, 0.010701824188232421, 0.010742783546447754, 0.010842111587524414, 0.011191360473632813, 0.011284416198730468, 0.011330559730529785, 0.011222016334533692, 0.011267071723937988, 0.010854399681091309, 0.010862591743469239, 0.010854399681091309, 0.010828800201416015, 0.01093734359741211, 0.010874879837036134, 0.01112883186340332, 0.011006976127624512, 0.010897407531738281, 0.010893312454223633, 0.010786815643310547, 0.010789888381958008, 0.01085747241973877, 0.011182080268859864, 0.011322367668151855, 0.01122713565826416, 0.011263999938964844, 0.01131929588317871, 0.011130880355834961, 0.01113702392578125, 0.01083084774017334, 0.01084928035736084, 0.01101414394378662, 0.010718208312988281, 0.010713088035583495, 0.011272192001342773, 0.011272192001342773, 0.011229184150695801, 0.01083187198638916]",tokens/s,90.54278710282965,,,2,64,1,,, -4bit-awq-exllama-v2-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,meta-llama/Meta-Llama-3-70B,meta-llama/Meta-Llama-3-70B,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in 
load_model_with_no_weights - self.load_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3904, in from_pretrained - dispatch_model(model, **device_map_kwargs) - File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 489, in dispatch_model - model.to(device) - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 2796, in to - return super().to(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1173, in to - return self._apply(convert) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 779, in _apply - module._apply(fn) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 779, in _apply - module._apply(fn) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 779, in _apply - module._apply(fn) - [Previous line repeated 2 more times] - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 853, in _apply - self._buffers[key] = fn(buf) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1159, in convert - return t.to( -torch.cuda.OutOfMemoryError: CUDA out of memory. Tried to allocate 20.00 MiB. GPU - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,2,64,1,,, -4bit-awq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,Qwen/Qwen1.5-14B,Qwen/Qwen1.5-14B,cuda,0,42,,,True,,,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,qwen2,MB,8227.61472,12512.13312,0.0,11882.463232,11315.947008,s,1,13.9002080078125,13.9002080078125,0.0,13.9002080078125,13.9002080078125,13.9002080078125,13.9002080078125,[13.9002080078125],,kWh,8.115264086805181e-05,4.446262591748636e-05,0.00015588234692798242,0.00028149761371352056,,MB,3892.334592,12533.10464,0.0,11884.560384,11070.3104,s,10,2.0838056030273435,0.20838056030273436,9.948447081853657e-05,0.20834156799316406,0.20854382934570312,0.20857042999267578,0.2085917105102539,"[0.20859703063964843, 0.2085379180908203, 0.20830461120605467, 0.2083431701660156, 0.20830192565917968, 0.20832940673828124, 0.20837295532226563, 0.20839532470703126, 0.2083399658203125, 0.20828329467773438]",tokens/s,1228.5215071313962,kWh,2.4637538842589554e-06,1.3497550968389621e-06,1.0537641531957465e-05,1.4351150513055382e-05,tokens/kWh,17838291.067124847,MB,3900.985344,12537.298944,0.0,11886.657536,11070.31296,s,10,23.905529052734376,2.3905529052734376,0.025578852612522587,2.3859503173828127,2.420712768554688,2.4238158569335937,2.4262983276367187,"[2.377474365234375, 2.420023193359375, 2.4269189453125, 2.41117431640625, 
2.38911572265625, 2.4188828125, 2.361778076171875, 2.35469970703125, 2.362677001953125, 2.382784912109375]",tokens/s,26.353735933233363,kWh,2.846095686115743e-05,1.559805238218463e-05,8.058615358923771e-05,0.00012464516283257977,tokens/kWh,505434.77635485947,,s,630,23.903483871459954,0.03794203789120629,0.0007319817326494637,0.037907455444335936,0.03873689651489258,0.038908467864990234,0.04001422256469726,"[0.03877171325683594, 0.03875942230224609, 0.037282817840576174, 0.03719168090820312, 0.03722649765014648, 0.037364734649658206, 0.03727974319458008, 0.03685273742675781, 0.036828159332275394, 0.037351425170898435, 0.03776102447509765, 0.03716198348999023, 0.03750912094116211, 0.03863552093505859, 0.038703102111816406, 0.0385689582824707, 0.03855052947998047, 0.038730751037597655, 0.03841843032836914, 0.03860889434814453, 0.038640640258789063, 0.038596607208251955, 0.03826892852783203, 0.03855462265014648, 0.038449153900146485, 0.03874303817749023, 0.0383375358581543, 0.03849728012084961, 0.03853414535522461, 0.03793407821655274, 0.038817790985107424, 0.038561790466308594, 0.03866828918457031, 0.038174720764160154, 0.03806412887573242, 0.037272575378417966, 0.038653953552246094, 0.037269504547119144, 0.037335041046142575, 0.036877311706542966, 0.03726028823852539, 0.03738214492797851, 0.03727155303955078, 0.0372305908203125, 0.037160961151123044, 0.03731660842895508, 0.03727360153198242, 0.037558273315429686, 0.03809996795654297, 0.03743334579467773, 0.03685580825805664, 0.03721113586425781, 0.03721625518798828, 0.037198848724365234, 0.036724735260009765, 0.03720601654052735, 0.03678822326660156, 0.03711795043945312, 0.037166080474853515, 0.03727872085571289, 0.03701145553588867, 0.036977664947509765, 0.03670016098022461, 0.03791155242919922, 0.03846656036376953, 0.03873689651489258, 0.03973529434204102, 0.038609920501708986, 0.038255615234375, 0.03822694396972656, 0.03859558486938477, 0.0385351676940918, 0.038416385650634766, 0.03847679901123047, 0.0384634895324707, 0.038204414367675785, 0.038621185302734375, 0.038542335510253906, 0.03861913681030273, 0.03853619384765625, 0.03774566268920899, 0.03716505432128906, 0.038266880035400394, 0.03908607864379883, 0.03878911972045898, 0.03856793594360351, 0.03873689651489258, 0.03828838348388672, 0.038537216186523435, 0.03874508666992187, 0.038507518768310545, 0.03885670471191406, 0.03865087890625, 0.03757875061035156, 0.03861913681030273, 0.03848396682739258, 0.038604801177978515, 0.038583297729492184, 0.03850239944458008, 0.03850035095214844, 0.03816755294799805, 0.03851161575317383, 0.03866828918457031, 0.038356990814208985, 0.03822182464599609, 0.03839487838745117, 0.0381317138671875, 0.038013950347900394, 0.038882305145263675, 0.03896627044677734, 0.038661121368408206, 0.03836620712280273, 0.03831500625610352, 0.037792766571044925, 0.037991424560546876, 0.038591487884521485, 0.03858943939208984, 0.03875328063964844, 0.03868467330932617, 0.03863654327392578, 0.038130687713623046, 0.03782553482055664, 0.03790028762817383, 0.03776204681396484, 0.03797094345092773, 0.03776409530639648, 0.03765350341796875, 0.03830681610107422, 0.038373374938964845, 0.03783065414428711, 0.03952435302734375, 0.040016895294189454, 0.03864371109008789, 0.0384266242980957, 0.03816755294799805, 0.03849932861328125, 0.03849216079711914, 0.03802624130249024, 0.03863142395019531, 0.0385167350769043, 0.038484992980957033, 0.038730751037597655, 0.03853209686279297, 0.038612991333007815, 0.03880652618408203, 0.03799347305297852, 0.03795455932617187, 0.03828940963745117, 
0.037835777282714846, 0.038545406341552735, 0.03828940963745117, 0.03847372817993164, 0.03845017623901367, 0.038397953033447264, 0.03803443145751953, 0.03847884750366211, 0.03884543991088867, 0.03832627105712891, 0.03860070419311523, 0.03854131317138672, 0.03858943939208984, 0.03857305526733398, 0.03827609634399414, 0.03844812774658203, 0.04057907104492187, 0.03857100677490234, 0.03870207977294922, 0.03845119857788086, 0.03869900894165039, 0.038150142669677735, 0.0385873908996582, 0.03784601593017578, 0.0381102066040039, 0.03888127899169922, 0.03857920074462891, 0.03823923110961914, 0.03832217788696289, 0.03859558486938477, 0.038596607208251955, 0.038679550170898434, 0.03841740798950195, 0.038591487884521485, 0.039327743530273435, 0.03867750549316406, 0.038520832061767575, 0.0385269775390625, 0.03867238235473633, 0.038604801177978515, 0.038569984436035154, 0.038075393676757815, 0.03755929565429687, 0.03823616027832031, 0.03865804672241211, 0.039201793670654295, 0.03825971221923828, 0.03867238235473633, 0.03853004837036133, 0.038529022216796875, 0.03859558486938477, 0.03863961410522461, 0.0385873908996582, 0.038785022735595705, 0.03878092956542969, 0.038345729827880856, 0.038586368560791014, 0.038679550170898434, 0.03867340850830078, 0.038637569427490234, 0.03854131317138672, 0.038588417053222655, 0.038763519287109374, 0.038438911437988284, 0.03845529556274414, 0.038381568908691405, 0.038317054748535154, 0.03863552093505859, 0.0385873908996582, 0.03864371109008789, 0.038040576934814455, 0.03838771057128906, 0.038332416534423826, 0.038640640258789063, 0.038019073486328124, 0.03816243362426758, 0.041565185546875, 0.03921100616455078, 0.03866726303100586, 0.03872870254516601, 0.0387583999633789, 0.03875635147094727, 0.038421504974365236, 0.038724609375, 0.0385269775390625, 0.038424575805664066, 0.039518207550048826, 0.03959296035766602, 0.03725209426879883, 0.03709132766723633, 0.03691110229492187, 0.03727155303955078, 0.03734220886230469, 0.037238784790039066, 0.03729305648803711, 0.037125118255615236, 0.037256191253662106, 0.03721932983398438, 0.03727155303955078, 0.03705548858642578, 0.037207038879394534, 0.03715071868896484, 0.037166080474853515, 0.03726131057739258, 0.03743641662597656, 0.037031936645507815, 0.03731558227539063, 0.03728793716430664, 0.037310462951660156, 0.037154815673828126, 0.03742822265625, 0.03729919815063477, 0.037351425170898435, 0.03696332931518555, 0.037028865814208986, 0.036782081604003904, 0.03729817581176758, 0.03723468780517578, 0.03726335906982422, 0.03728384017944336, 0.038130687713623046, 0.037338111877441404, 0.03731353759765625, 0.03715686416625977, 0.0373493766784668, 0.037288959503173826, 0.03727667236328125, 0.03684454345703125, 0.037663745880126956, 0.0381214714050293, 0.03731763076782227, 0.03730022430419922, 0.037324798583984374, 0.0373125114440918, 0.037176319122314457, 0.03722649765014648, 0.03812351989746094, 0.03868467330932617, 0.0387061767578125, 0.03866726303100586, 0.03890892791748047, 0.038454273223876956, 0.038594558715820314, 0.03843379211425781, 0.038010879516601564, 0.03859763336181641, 0.038863872528076174, 0.03864780807495117, 0.038340606689453126, 0.037689342498779296, 0.03820851135253906, 0.03846553421020508, 0.0384983024597168, 0.03817267227172851, 0.038637569427490234, 0.0384983024597168, 0.038269950866699216, 0.03851468658447266, 0.03847679901123047, 0.03812761688232422, 0.038312961578369144, 0.038269950866699216, 0.03914854431152344, 0.040548351287841795, 0.039060478210449216, 0.03866726303100586, 0.038691841125488284, 
0.03846758270263672, 0.03850342559814453, 0.03816447830200195, 0.03854848098754883, 0.03851468658447266, 0.03861913681030273, 0.03868364715576172, 0.03889459228515625, 0.03866624069213867, 0.038711296081542966, 0.038520832061767575, 0.03856281661987305, 0.038714366912841795, 0.038629375457763675, 0.03875942230224609, 0.038599681854248044, 0.03894169616699219, 0.038725631713867184, 0.038640640258789063, 0.03906252670288086, 0.03806617736816406, 0.037407745361328126, 0.0373309440612793, 0.03743436813354492, 0.03776716613769531, 0.037343231201171875, 0.03759308624267578, 0.03686195373535156, 0.03806105422973633, 0.0373125114440918, 0.03737497711181641, 0.037005313873291014, 0.03790335845947266, 0.03833651351928711, 0.03890790557861328, 0.03894476699829102, 0.03877478408813476, 0.038421504974365236, 0.038042625427246096, 0.038368255615234374, 0.03827199935913086, 0.03863142395019531, 0.03877785491943359, 0.03831808090209961, 0.03885567855834961, 0.03873894500732422, 0.03846656036376953, 0.03863449478149414, 0.038695934295654294, 0.03836620712280273, 0.03856076812744141, 0.038811649322509766, 0.03889766311645508, 0.03775795364379883, 0.03820544052124023, 0.03807743835449219, 0.038416385650634766, 0.03851571273803711, 0.03835289764404297, 0.03848294448852539, 0.03978035354614258, 0.0400076789855957, 0.038860801696777345, 0.03914854431152344, 0.03836108779907227, 0.037473281860351565, 0.03726028823852539, 0.03721420669555664, 0.03725209426879883, 0.0373831672668457, 0.03709439849853516, 0.037266433715820314, 0.03724492645263672, 0.03722137451171875, 0.03730636978149414, 0.03724492645263672, 0.03711795043945312, 0.03725516891479492, 0.0373125114440918, 0.037269504547119144, 0.03726847839355469, 0.03726233673095703, 0.037392383575439454, 0.03727769470214844, 0.03726028823852539, 0.03700326538085937, 0.03683430480957031, 0.03853823852539062, 0.03697459030151367, 0.03747635269165039, 0.037324798583984374, 0.03738521575927734, 0.03740364837646484, 0.03806208038330078, 0.03696332931518555, 0.037389312744140625, 0.037372928619384765, 0.037359615325927735, 0.037408767700195314, 0.03691110229492187, 0.03843686294555664, 0.03742310333251953, 0.0373831672668457, 0.037119998931884765, 0.0375203857421875, 0.03730636978149414, 0.037292030334472655, 0.037389312744140625, 0.03758182525634766, 0.03740364837646484, 0.037321727752685545, 0.03729919815063477, 0.03732582473754883, 0.036928512573242187, 0.03854643249511719, 0.03847679901123047, 0.03812351989746094, 0.03766579055786133, 0.04055551910400391, 0.03779174423217774, 0.03741900634765625, 0.03730944061279297, 0.03702374267578125, 0.037212158203125, 0.037288959503173826, 0.03713433456420898, 0.037591041564941405, 0.037250049591064455, 0.037168128967285156, 0.037574657440185545, 0.03735551834106445, 0.03716403198242187, 0.0379791374206543, 0.037310462951660156, 0.03701760101318359, 0.03729612731933594, 0.03723161697387695, 0.03736064147949219, 0.03731763076782227, 0.037302272796630856, 0.03727052688598633, 0.03723161697387695, 0.037302272796630856, 0.03731558227539063, 0.03727872085571289, 0.03719987106323242, 0.03730636978149414, 0.0373043212890625, 0.037340160369873046, 0.03729715347290039, 0.037372928619384765, 0.03714559936523437, 0.039193599700927735, 0.03940147018432617, 0.03826176071166992, 0.03740979385375977, 0.03741183853149414, 0.03727462387084961, 0.037220352172851565, 0.037253120422363284, 0.03732070541381836, 0.037294078826904296, 0.03735859298706055, 0.03731353759765625, 0.03744153594970703, 0.03727667236328125, 0.03735244750976562, 
0.03733196640014649, 0.03725107192993164, 0.03713740921020508, 0.03709030532836914, 0.03725209426879883, 0.03729919815063477, 0.037367809295654295, 0.037250049591064455, 0.037408767700195314, 0.0373493766784668, 0.037013504028320314, 0.03720191955566406, 0.036972545623779295, 0.03730022430419922, 0.03738521575927734, 0.03737497711181641, 0.03731660842895508, 0.03738521575927734, 0.037179393768310545, 0.037179393768310545, 0.036908031463623044, 0.037482494354248046, 0.03759308624267578, 0.037766143798828124, 0.037100543975830076, 0.03743334579467773, 0.03719987106323242, 0.03793305587768555, 0.03792486572265625, 0.03750809478759766, 0.037354496002197264, 0.03780710220336914, 0.0374015998840332, 0.037362686157226564, 0.03727155303955078, 0.037528575897216795, 0.03810815811157227, 0.03733708953857422, 0.03731455993652344, 0.037147647857666014, 0.03785830307006836, 0.036874240875244144, 0.03687526321411133, 0.0373043212890625, 0.037326847076416016, 0.03822284698486328, 0.03788185501098633, 0.037341182708740234, 0.0384983024597168, 0.037236736297607424, 0.037348350524902346, 0.03723980712890625, 0.03742310333251953, 0.037253120422363284, 0.03733299255371094, 0.03709439849853516, 0.037610496520996094, 0.03736576080322265, 0.03708313751220703, 0.037131263732910154, 0.037495807647705076, 0.03761151885986328, 0.03681075286865235, 0.0381952018737793, 0.037427200317382815, 0.03858943939208984, 0.03717529678344727, 0.03671142578125, 0.03721420669555664, 0.038330368041992184, 0.037819393157958986, 0.03841129684448242, 0.04012643051147461, 0.037719039916992186, 0.03775283050537109, 0.03713536071777344, 0.037515262603759765, 0.03727360153198242, 0.036822017669677735, 0.036994049072265625, 0.03727872085571289, 0.03708620834350586, 0.03702579116821289, 0.03727974319458008, 0.03728179168701172, 0.03757363128662109, 0.03724595260620117, 0.03693670272827149, 0.0371486701965332, 0.037321727752685545, 0.03724595260620117, 0.037305343627929685, 0.03732889556884766, 0.03873382568359375, 0.03724492645263672, 0.03741900634765625, 0.03912908935546875, 0.038547454833984376, 0.037362686157226564, 0.03733708953857422, 0.03684966278076172, 0.03739340972900391, 0.03700940704345703, 0.03738623809814453, 0.037364734649658206, 0.03737190246582031, 0.037326847076416016, 0.03683225631713867, 0.03794636917114258, 0.039158782958984374, 0.03758489608764649, 0.039428096771240234, 0.03745280075073242, 0.037302272796630856, 0.037940223693847655, 0.036743167877197266, 0.04018380737304687, 0.03997081756591797, 0.038957054138183594, 0.038870014190673825, 0.03832627105712891, 0.03825971221923828, 0.038449153900146485, 0.037362686157226564, 0.03760947036743164, 0.037705726623535156, 0.03699507141113281, 0.038434814453125, 0.03775385665893555, 0.03745280075073242, 0.03764326477050781, 0.03742105484008789, 0.03749785614013672, 0.03732275390625, 0.0370247688293457, 0.0375838737487793, 0.037945343017578126, 0.03857100677490234, 0.038147071838378906, 0.03889664077758789, 0.038095870971679685, 0.03753779220581055, 0.03737395095825195, 0.03856076812744141, 0.037364734649658206, 0.0375654411315918, 0.03830169677734375, 0.038424575805664066]",tokens/s,26.3559907579916,,,2,64,1,,, 
-4bit-awq-exllama-v2-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,mistralai/Mistral-7B-v0.1,mistralai/Mistral-7B-v0.1,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - self.load_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3907, in from_pretrained - hf_quantizer.postprocess_model(model) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/base.py"", line 195, in postprocess_model - return self._process_model_after_weight_loading(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/quantizer_awq.py"", line 107, in _process_model_after_weight_loading - model = post_init_awq_exllama_modules(model, self.quantization_config.exllama_config) - File ""/usr/local/lib/python3.10/dist-packages/transformers/integrations/awq.py"", line 466, in post_init_awq_exllama_modules - model = exllamav2_post_init( - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllamav2.py"", line 198, in exllamav2_post_init - submodule.post_init(scratch_space=model.scratch_spaces[device]) - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllamav2.py"", line 81, in post_init - self.q_handle = exlv2_ext.make_q_matrix( -NameError: name 'exlv2_ext' is not defined - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,2,64,1,,, 
-4bit-awq-exllama-v2-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,google/gemma-7b,google/gemma-7b,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 304, in hf_raise_for_status - response.raise_for_status() - File ""/usr/local/lib/python3.10/dist-packages/requests/models.py"", line 1024, in raise_for_status - raise HTTPError(http_error_msg, response=self) -requests.exceptions.HTTPError: 403 Client Error: Forbidden for url: https://huggingface.co/google/gemma-7b/resolve/main/config.json - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1722, in _get_metadata_or_catch_error - metadata = get_hf_file_metadata(url=url, proxies=proxies, timeout=etag_timeout, headers=headers) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1645, in get_hf_file_metadata - r = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 372, in _request_wrapper - response = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 396, in _request_wrapper - hf_raise_for_status(response) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 367, in hf_raise_for_status - raise HfHubHTTPError(message, response=response) from e -huggingface_hub.utils._errors.HfHubHTTPError: (Request ID: Root=1-6694815d-5e017f6503242c5c374369e3;94556fa9-f33c-44da-82ef-c749cdb3c7b3) - -403 Forbidden: Please enable access to public gated repositories in your fine-grained token settings to view this repository.. -Cannot access content at: https://huggingface.co/google/gemma-7b/resolve/main/config.json. -If you are trying to create or update content,make sure you have a token with the `write` role. 
- -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1221, in hf_hub_download - return _hf_hub_download_to_cache_dir( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1325, in _hf_hub_download_to_cache_dir - _raise_on_head_call_error(head_call_error, force_download, local_files_only) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1826, in _raise_on_head_call_error - raise LocalEntryNotFoundError( -huggingface_hub.utils._errors.LocalEntryNotFoundError: An error happened while trying to locate the file on the Hub and we cannot find the requested files in the local cache. Please check your connection and try again or make sure your Internet connection is on. - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 445, in cached_file - raise EnvironmentError( -OSError: We couldn't connect to 'https://huggingface.co' to load this file, couldn't find it in the cached files and it looks like google/gemma-7b is not the path to a directory containing a file named config.json. -Checkout your internet connection or see how to run the library in offline mode at 'https://huggingface.co/docs/transformers/installation#offline-mode'. 
- -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,2,64,1,,, -4bit-awq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,Qwen/Qwen1.5-1.8B,Qwen/Qwen1.5-1.8B,cuda,0,42,,,True,,,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,qwen2,MB,2000.678912,3121.086464,0.0,2491.416576,2425.650176,s,1,9.2525068359375,9.2525068359375,0.0,9.2525068359375,9.2525068359375,9.2525068359375,9.2525068359375,[9.2525068359375],,kWh,2.89900960451127e-05,1.5872944581760848e-05,4.3123367831965354e-05,8.79864084588389e-05,,MB,1904.205824,3353.870336,0.0,2705.32608,2606.129664,s,10,0.3069953632354736,0.030699536323547362,3.6043350230667174e-05,0.03068718433380127,0.030747600746154787,0.030763368701934815,0.030775983066558838,"[0.030744096755981446, 0.030779136657714843, 0.030664384841918944, 0.030683807373046875, 0.030685216903686523, 0.03067616081237793, 0.030694976806640625, 0.030656864166259765, 0.030689151763916015, 0.030721567153930665]",tokens/s,8338.888161110146,kWh,3.623242020215535e-07,1.9853511048494508e-07,1.4041337068097784e-06,1.9649930193162766e-06,tokens/kWh,130280361.04121923,MB,1911.762944,3355.967488,0.0,2705.32608,2606.132224,s,10,14.20911572265625,1.420911572265625,0.012410046240487367,1.4169140014648438,1.4322424560546874,1.4418094604492186,1.4494630639648436,"[1.45137646484375, 1.4300694580078126, 1.4148829345703124, 1.4176756591796875, 1.41615234375, 1.410385009765625, 1.4190238037109375, 1.4076070556640625, 1.4118265380859376, 1.430116455078125]",tokens/s,44.3377344724889,kWh,1.6727904355755712e-05,9.166443161414739e-06,3.098941998579033e-05,5.68837675029608e-05,tokens/kWh,1107521.5789235628,,s,630,14.207369205474828,0.022551379691229924,0.0003730595955963139,0.02242201519012451,0.023136972045898437,0.023224319458007812,0.02369780780792237,"[0.023234560012817384, 0.023212032318115236, 0.02312499237060547, 0.023191551208496093, 0.02313113594055176, 0.023170047760009766, 0.023134208679199218, 0.022943744659423827, 0.02306355285644531, 0.023184383392333984, 0.023224319458007812, 0.023216127395629883, 0.022962175369262695, 0.023146495819091797, 0.023126016616821288, 0.022985727310180663, 0.02287001609802246, 0.02310758399963379, 0.023187456130981447, 0.02310758399963379, 0.02253209686279297, 0.022575103759765625, 0.023340032577514647, 0.02330624008178711, 0.022755327224731444, 0.02266828727722168, 0.022762496948242186, 0.02310348892211914, 0.02307276725769043, 0.02309222412109375, 0.023112703323364257, 0.023233535766601563, 0.023133184432983397, 0.023151615142822265, 0.023224319458007812, 0.02329395294189453, 0.023133184432983397, 0.02305433654785156, 0.02314854431152344, 0.023220224380493162, 0.02284339141845703, 0.023214080810546874, 0.023205888748168944, 0.022937599182128905, 0.022529024124145508, 0.022737920761108397, 0.023143423080444335, 0.02332057571411133, 0.023053312301635744, 0.023162879943847657, 0.02327859115600586, 0.023061504364013673, 0.02290995216369629, 0.02294169616699219, 0.02285875129699707, 0.02272768020629883, 0.022575103759765625, 
0.022968320846557616, 0.0226693115234375, 0.02307276725769043, 0.023217151641845703, 0.022591487884521484, 0.022947839736938477, 0.023187456130981447, 0.022944768905639647, 0.022991872787475585, 0.02294169616699219, 0.023169023513793945, 0.02266828727722168, 0.02292736053466797, 0.023172096252441408, 0.02309734344482422, 0.02312294387817383, 0.023200767517089844, 0.023208959579467774, 0.022837247848510742, 0.02270310401916504, 0.02334617614746094, 0.023250944137573244, 0.02313216018676758, 0.02326835250854492, 0.022749183654785156, 0.023136255264282226, 0.023022592544555662, 0.022518783569335937, 0.022289407730102538, 0.0226375675201416, 0.02295091247558594, 0.022403072357177735, 0.02248806381225586, 0.02253824043273926, 0.022441984176635742, 0.022200319290161134, 0.022207487106323243, 0.0225167350769043, 0.022433792114257813, 0.022411264419555665, 0.02246143913269043, 0.02249318313598633, 0.022404096603393556, 0.022872064590454103, 0.022656000137329102, 0.023302143096923827, 0.022597631454467772, 0.022792192459106447, 0.022574079513549804, 0.02245529556274414, 0.02241433525085449, 0.022625280380249024, 0.022336511611938475, 0.02244710350036621, 0.02234982490539551, 0.022138879776000975, 0.02228121566772461, 0.02230067253112793, 0.022383615493774413, 0.023545856475830077, 0.02266726493835449, 0.022552576065063477, 0.02241433525085449, 0.022509567260742186, 0.02265292739868164, 0.022410240173339844, 0.022222848892211915, 0.022444032669067384, 0.02247270393371582, 0.02245631980895996, 0.022337535858154296, 0.022397951126098634, 0.022385663986206054, 0.022387712478637696, 0.022609920501708985, 0.022450176239013672, 0.02246348762512207, 0.022416383743286132, 0.022390783309936522, 0.022418432235717774, 0.02233344078063965, 0.022384639739990234, 0.022199296951293947, 0.022280191421508787, 0.02250752067565918, 0.02231705665588379, 0.02245427131652832, 0.022372352600097657, 0.022358015060424806, 0.02244915199279785, 0.02228531265258789, 0.022030336380004883, 0.023367679595947266, 0.02253824043273926, 0.0223242244720459, 0.022311935424804686, 0.022215679168701173, 0.02231705665588379, 0.022412288665771486, 0.022401023864746093, 0.022749183654785156, 0.022354944229125977, 0.022379520416259766, 0.022996992111206056, 0.02249625587463379, 0.022392831802368163, 0.02244607925415039, 0.02271334457397461, 0.022404096603393556, 0.022395904541015626, 0.022391807556152343, 0.023006208419799806, 0.02233344078063965, 0.022458368301391602, 0.02227916717529297, 0.022167552947998048, 0.02234060859680176, 0.023468032836914062, 0.023219200134277345, 0.022436864852905275, 0.02252288055419922, 0.022384639739990234, 0.022413312911987306, 0.022364160537719727, 0.022477823257446287, 0.022164480209350586, 0.022373376846313478, 0.022647808074951172, 0.022411264419555665, 0.022255615234375, 0.02225766372680664, 0.02243071937561035, 0.022502399444580077, 0.02215116882324219, 0.02245631980895996, 0.022371328353881836, 0.0221214714050293, 0.022141952514648438, 0.022433792114257813, 0.02248294448852539, 0.02285772705078125, 0.02239593505859375, 0.022393823623657227, 0.02244915199279785, 0.022399999618530272, 0.022413312911987306, 0.022715391159057616, 0.023190528869628906, 0.022603776931762694, 0.022657024383544923, 0.022839296340942384, 0.022419456481933595, 0.02241535949707031, 0.023088127136230468, 0.023166976928710937, 0.022344703674316405, 0.022245376586914063, 0.022363136291503907, 0.02240716743469238, 0.022353919982910156, 0.023061504364013673, 0.024088672637939453, 0.0234617919921875, 0.022805503845214844, 
0.022553600311279298, 0.02270310401916504, 0.02259660720825195, 0.022487039566040038, 0.022370304107666016, 0.022312959671020507, 0.02249318313598633, 0.02244710350036621, 0.022389759063720704, 0.02227302360534668, 0.02246963119506836, 0.022353919982910156, 0.02243174362182617, 0.022392831802368163, 0.022322175979614257, 0.022437887191772463, 0.022362112045288086, 0.022389759063720704, 0.02220953559875488, 0.02210406494140625, 0.021987327575683592, 0.022311935424804686, 0.02231091117858887, 0.022429695129394533, 0.022553600311279298, 0.022361087799072265, 0.02229555130004883, 0.02266111946105957, 0.022421503067016603, 0.022021120071411132, 0.02225049591064453, 0.0230645751953125, 0.02230169677734375, 0.02244710350036621, 0.022393856048583984, 0.022396928787231447, 0.022328319549560546, 0.022689792633056642, 0.022543359756469726, 0.022764543533325195, 0.023266304016113282, 0.02226380729675293, 0.02232524871826172, 0.02234880065917969, 0.022297599792480468, 0.02223411178588867, 0.022794240951538085, 0.022354944229125977, 0.02209587287902832, 0.022133760452270508, 0.022371328353881836, 0.02244710350036621, 0.02233344078063965, 0.022403072357177735, 0.02248908805847168, 0.022360063552856444, 0.022153215408325197, 0.02222591972351074, 0.022382591247558595, 0.02210918426513672, 0.022372352600097657, 0.02234982490539551, 0.02246553611755371, 0.022365184783935548, 0.022421503067016603, 0.022383615493774413, 0.022155263900756835, 0.0223191032409668, 0.02244812774658203, 0.022412288665771486, 0.022798336029052735, 0.02246348762512207, 0.022252544403076172, 0.022173696517944336, 0.022492160797119142, 0.022072320938110353, 0.022137855529785155, 0.022322175979614257, 0.022371328353881836, 0.022334463119506837, 0.02211123275756836, 0.022176767349243166, 0.022707199096679686, 0.022297599792480468, 0.02227712059020996, 0.022405120849609376, 0.022369279861450195, 0.024252416610717774, 0.02416640090942383, 0.02326937675476074, 0.023160831451416015, 0.02264678382873535, 0.022409215927124023, 0.02229452705383301, 0.022198272705078126, 0.022123519897460937, 0.022193151473999022, 0.022253568649291993, 0.022975488662719725, 0.024406015396118166, 0.023035903930664063, 0.022536191940307617, 0.022215679168701173, 0.02227097511291504, 0.022041599273681642, 0.02208563232421875, 0.02205183982849121, 0.02192793655395508, 0.022082559585571288, 0.02229043197631836, 0.022296575546264647, 0.022380544662475587, 0.022172672271728516, 0.022326271057128907, 0.022410240173339844, 0.022329343795776366, 0.023224319458007812, 0.02290278434753418, 0.022207487106323243, 0.02229452705383301, 0.022420480728149415, 0.02208870315551758, 0.02207846450805664, 0.02205081558227539, 0.022548479080200197, 0.02251366424560547, 0.022391807556152343, 0.02269696044921875, 0.022581247329711913, 0.02206105613708496, 0.02209587287902832, 0.022210559844970702, 0.022425600051879883, 0.022403072357177735, 0.02229248046875, 0.022168575286865236, 0.02234163284301758, 0.02240716743469238, 0.022443008422851563, 0.022200351715087892, 0.022038496017456055, 0.02226790428161621, 0.02239897537231445, 0.02244095993041992, 0.02245631980895996, 0.02225049591064453, 0.022197248458862305, 0.022404096603393556, 0.023248895645141602, 0.02305638313293457, 0.022280191421508787, 0.022322240829467772, 0.022214591979980467, 0.021965824127197265, 0.02232524871826172, 0.022378496170043945, 0.02231500816345215, 0.022889471054077147, 0.022416383743286132, 0.022416383743286132, 0.022709247589111328, 0.02232524871826172, 0.022375423431396483, 0.022408191680908202, 
0.022502399444580077, 0.0224901123046875, 0.022590463638305663, 0.02229043197631836, 0.022204416275024414, 0.022413312911987306, 0.022360063552856444, 0.0224901123046875, 0.022477823257446287, 0.0224901123046875, 0.02330316734313965, 0.022626304626464845, 0.022424575805664062, 0.022416383743286132, 0.023787519454956055, 0.02449510383605957, 0.023015424728393553, 0.022701055526733398, 0.022354944229125977, 0.022364160537719727, 0.022330368041992187, 0.022172672271728516, 0.022367231369018553, 0.022435840606689454, 0.022397951126098634, 0.02229145622253418, 0.022391807556152343, 0.022887424468994142, 0.022366207122802736, 0.02204876708984375, 0.022246400833129884, 0.021947391510009767, 0.0222423038482666, 0.022354944229125977, 0.022380544662475587, 0.022383615493774413, 0.022365184783935548, 0.022529024124145508, 0.02245529556274414, 0.022381568908691408, 0.022561792373657227, 0.022767616271972657, 0.022286336898803712, 0.02242252731323242, 0.02238057518005371, 0.022398944854736327, 0.022397951126098634, 0.022434816360473633, 0.022754304885864256, 0.02291302490234375, 0.02308608055114746, 0.02267033576965332, 0.022458368301391602, 0.022197248458862305, 0.022361087799072265, 0.022445056915283205, 0.022452224731445314, 0.022323200225830078, 0.022374399185180666, 0.022339584350585938, 0.02231091117858887, 0.022344703674316405, 0.02230271911621094, 0.02248908805847168, 0.02207027244567871, 0.022289407730102538, 0.022000640869140626, 0.022009855270385743, 0.022016000747680665, 0.022074367523193358, 0.022623231887817383, 0.022459392547607423, 0.022460416793823244, 0.022342655181884767, 0.02230067253112793, 0.022427648544311524, 0.022372352600097657, 0.022592512130737305, 0.022169599533081053, 0.02226278305053711, 0.022344703674316405, 0.022405120849609376, 0.02229452705383301, 0.022419456481933595, 0.022614015579223632, 0.02231603240966797, 0.022353919982910156, 0.0224768009185791, 0.022405120849609376, 0.022352895736694335, 0.022378496170043945, 0.02229043197631836, 0.02244812774658203, 0.022420480728149415, 0.022425600051879883, 0.022184959411621095, 0.02206208038330078, 0.022199296951293947, 0.022406143188476564, 0.02226483154296875, 0.02212761688232422, 0.022191104888916017, 0.022153215408325197, 0.0224768009185791, 0.022408191680908202, 0.022466560363769532, 0.022389759063720704, 0.02229452705383301, 0.022320127487182616, 0.022432767868041992, 0.022435840606689454, 0.022492160797119142, 0.022170623779296874, 0.022405120849609376, 0.02244095993041992, 0.022434816360473633, 0.022436864852905275, 0.022443008422851563, 0.022443008422851563, 0.022549503326416014, 0.0224532470703125, 0.022374399185180666, 0.022466560363769532, 0.022383615493774413, 0.022631423950195313, 0.022411264419555665, 0.022408191680908202, 0.022758399963378906, 0.022369279861450195, 0.02189619255065918, 0.022288383483886717, 0.02229043197631836, 0.02207539176940918, 0.022313983917236328, 0.022404096603393556, 0.02243891143798828, 0.02241433525085449, 0.022587392807006838, 0.022413312911987306, 0.0223242244720459, 0.022240255355834963, 0.02231603240966797, 0.022378496170043945, 0.022330368041992187, 0.022359039306640623, 0.022374399185180666, 0.02226688003540039, 0.022323200225830078, 0.022406143188476564, 0.022024192810058595, 0.022329343795776366, 0.02231091117858887, 0.02289151954650879, 0.02271232032775879, 0.022966272354125978, 0.02263859176635742, 0.02231603240966797, 0.02230681610107422, 0.02220134353637695, 0.02234163284301758, 0.02240716743469238, 0.022373376846313478, 0.022235136032104492, 0.023159807205200195, 
0.02248908805847168, 0.022584320068359375, 0.022428672790527345, 0.022584320068359375, 0.02225049591064453, 0.022358015060424806, 0.022342655181884767, 0.022331392288208008, 0.022326271057128907, 0.022212608337402344, 0.022289407730102538, 0.02241535949707031, 0.022565887451171874, 0.02226688003540039, 0.022055936813354493, 0.022170623779296874, 0.022535167694091796, 0.022979583740234375, 0.022606847763061523, 0.022410240173339844, 0.023007232666015624, 0.022487039566040038, 0.022418432235717774, 0.022387712478637696, 0.022367231369018553, 0.022477823257446287, 0.02246246337890625, 0.022176767349243166, 0.022128639221191407, 0.022150144577026368, 0.022384639739990234, 0.02248192024230957, 0.022427648544311524, 0.02227609634399414, 0.022433792114257813, 0.022211584091186523, 0.022675455093383787, 0.022723583221435546, 0.02249728012084961, 0.02248908805847168, 0.02247065544128418, 0.022599679946899414, 0.022768640518188478, 0.02249728012084961, 0.02246246337890625, 0.022450176239013672, 0.02265907287597656, 0.022443008422851563, 0.022550527572631835, 0.02263039970397949, 0.0228351993560791, 0.022819839477539062, 0.022849536895751952, 0.022408191680908202, 0.02234982490539551, 0.022392831802368163, 0.0224901123046875, 0.022606847763061523, 0.02247987174987793, 0.022380544662475587, 0.02267750358581543, 0.023759872436523437, 0.023366655349731445, 0.02349056053161621, 0.02308710479736328, 0.02349772834777832, 0.023234560012817384, 0.0231014404296875, 0.02294988822937012, 0.023206911087036132, 0.023003135681152344, 0.023143423080444335, 0.023207935333251953, 0.02323148727416992, 0.023215103149414062, 0.022952959060668944, 0.02290380859375, 0.023028736114501954, 0.02287513542175293, 0.022351871490478514, 0.022821887969970703]",tokens/s,44.3431849266807,,,2,64,1,,, -4bit-awq-exllama-v2-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,meta-llama/Meta-Llama-3-8B,meta-llama/Meta-Llama-3-8B,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - self.load_model_from_pretrained() - File 
""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3907, in from_pretrained - hf_quantizer.postprocess_model(model) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/base.py"", line 195, in postprocess_model - return self._process_model_after_weight_loading(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/quantizer_awq.py"", line 107, in _process_model_after_weight_loading - model = post_init_awq_exllama_modules(model, self.quantization_config.exllama_config) - File ""/usr/local/lib/python3.10/dist-packages/transformers/integrations/awq.py"", line 466, in post_init_awq_exllama_modules - model = exllamav2_post_init( - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllamav2.py"", line 198, in exllamav2_post_init - submodule.post_init(scratch_space=model.scratch_spaces[device]) - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllamav2.py"", line 81, in post_init - self.q_handle = exlv2_ext.make_q_matrix( -NameError: name 'exlv2_ext' is not defined - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,2,64,1,,, -4bit-awq-exllama-v2-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,microsoft/phi-1_5,microsoft/phi-1_5,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - self.load_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return 
model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3907, in from_pretrained - hf_quantizer.postprocess_model(model) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/base.py"", line 195, in postprocess_model - return self._process_model_after_weight_loading(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/quantizer_awq.py"", line 107, in _process_model_after_weight_loading - model = post_init_awq_exllama_modules(model, self.quantization_config.exllama_config) - File ""/usr/local/lib/python3.10/dist-packages/transformers/integrations/awq.py"", line 466, in post_init_awq_exllama_modules - model = exllamav2_post_init( - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllamav2.py"", line 198, in exllamav2_post_init - submodule.post_init(scratch_space=model.scratch_spaces[device]) - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllamav2.py"", line 81, in post_init - self.q_handle = exlv2_ext.make_q_matrix( -NameError: name 'exlv2_ext' is not defined - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,2,64,1,,, -4bit-awq-exllama-v2-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,togethercomputer/RedPajama-INCITE-Base-3B-v1,togethercomputer/RedPajama-INCITE-Base-3B-v1,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - self.load_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3907, in from_pretrained - hf_quantizer.postprocess_model(model) - File 
""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/base.py"", line 195, in postprocess_model - return self._process_model_after_weight_loading(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/quantizer_awq.py"", line 107, in _process_model_after_weight_loading - model = post_init_awq_exllama_modules(model, self.quantization_config.exllama_config) - File ""/usr/local/lib/python3.10/dist-packages/transformers/integrations/awq.py"", line 466, in post_init_awq_exllama_modules - model = exllamav2_post_init( - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllamav2.py"", line 198, in exllamav2_post_init - submodule.post_init(scratch_space=model.scratch_spaces[device]) - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllamav2.py"", line 81, in post_init - self.q_handle = exlv2_ext.make_q_matrix( -NameError: name 'exlv2_ext' is not defined - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,2,64,1,,, -4bit-awq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,Qwen/Qwen2-beta-72B,Qwen/Qwen2-beta-72B,cuda,0,42,,,True,,,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run - report = scenario.run(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run - self.run_model_loading_tracking(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking - backend.load() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load - self.load_transformers_model() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model - self.load_transformers_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights - self.load_transformers_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained - self.pretrained_model = self.automodel_loader.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File 
""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4034, in from_pretrained - dispatch_model(model, **device_map_kwargs) - File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 494, in dispatch_model - model.to(device) - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 2905, in to - return super().to(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1174, in to - return self._apply(convert) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply - module._apply(fn) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply - module._apply(fn) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply - module._apply(fn) - [Previous line repeated 2 more times] - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 854, in _apply - self._buffers[key] = fn(buf) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1160, in convert - return t.to( -torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 96.00 MiB. GPU 0 has a total capacity of 22.18 GiB of which 68.50 MiB is free. Process 124397 has 22.11 GiB memory in use. Of the allocated memory 21.86 GiB is allocated by PyTorch, and 10.74 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) - -",qwen2,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,2,64,1,,, -4bit-awq-exllama-v2-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,meta-llama/Llama-2-70b-hf,meta-llama/Llama-2-70b-hf,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - 
self.load_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3904, in from_pretrained - dispatch_model(model, **device_map_kwargs) - File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 489, in dispatch_model - model.to(device) - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 2796, in to - return super().to(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1173, in to - return self._apply(convert) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 779, in _apply - module._apply(fn) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 779, in _apply - module._apply(fn) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 779, in _apply - module._apply(fn) - [Previous line repeated 2 more times] - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 853, in _apply - self._buffers[key] = fn(buf) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1159, in convert - return t.to( -torch.cuda.OutOfMemoryError: CUDA out of memory. Tried to allocate 112.00 MiB. GPU - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,2,64,1,,, -4bit-awq-exllama-v2-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,meta-llama/Llama-2-7b-hf,meta-llama/Llama-2-7b-hf,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - self.load_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 
172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3907, in from_pretrained - hf_quantizer.postprocess_model(model) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/base.py"", line 195, in postprocess_model - return self._process_model_after_weight_loading(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/quantizer_awq.py"", line 107, in _process_model_after_weight_loading - model = post_init_awq_exllama_modules(model, self.quantization_config.exllama_config) - File ""/usr/local/lib/python3.10/dist-packages/transformers/integrations/awq.py"", line 466, in post_init_awq_exllama_modules - model = exllamav2_post_init( - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllamav2.py"", line 198, in exllamav2_post_init - submodule.post_init(scratch_space=model.scratch_spaces[device]) - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllamav2.py"", line 81, in post_init - self.q_handle = exlv2_ext.make_q_matrix( -NameError: name 'exlv2_ext' is not defined - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,2,64,1,,, -4bit-awq-exllama-v2-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,r,r,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 304, in hf_raise_for_status - response.raise_for_status() - File ""/usr/local/lib/python3.10/dist-packages/requests/models.py"", line 1024, in raise_for_status - raise HTTPError(http_error_msg, response=self) -requests.exceptions.HTTPError: 404 Client Error: Not Found for url: https://huggingface.co/r/resolve/main/config.json - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1221, in hf_hub_download - 
return _hf_hub_download_to_cache_dir( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1325, in _hf_hub_download_to_cache_dir - _raise_on_head_call_error(head_call_error, force_download, local_files_only) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1823, in _raise_on_head_call_error - raise head_call_error - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1722, in _get_metadata_or_catch_error - metadata = get_hf_file_metadata(url=url, proxies=proxies, timeout=etag_timeout, headers=headers) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1645, in get_hf_file_metadata - r = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 372, in _request_wrapper - response = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 396, in _request_wrapper - hf_raise_for_status(response) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status - raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-6694910a-1fc585f87c96f8c55bf20dc7;210cbf51-6bfb-4b75-828d-c24cb5bad6ac) - -Repository Not Found for url: https://huggingface.co/r/resolve/main/config.json. -Please make sure you specified the correct `repo_id` and `repo_type`. -If you are trying to access a private or gated repo, make sure you are authenticated. 
- -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 425, in cached_file - raise EnvironmentError( -OSError: r is not a local folder and is not a valid model identifier listed on 'https://huggingface.co/models' -If this is a private repository, make sure to pass a token having permission to this repo either by logging in with `huggingface-cli login` or by passing `token=` - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,2,64,1,,, -4bit-awq-exllama-v2-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,google/recurrentgemma-7b,google/recurrentgemma-7b,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 304, in hf_raise_for_status - response.raise_for_status() - File ""/usr/local/lib/python3.10/dist-packages/requests/models.py"", line 1024, in raise_for_status - raise HTTPError(http_error_msg, response=self) -requests.exceptions.HTTPError: 404 Client Error: Not Found for url: 
https://huggingface.co/google/recurrentgemma-7b/resolve/main/config.json - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1221, in hf_hub_download - return _hf_hub_download_to_cache_dir( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1325, in _hf_hub_download_to_cache_dir - _raise_on_head_call_error(head_call_error, force_download, local_files_only) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1823, in _raise_on_head_call_error - raise head_call_error - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1722, in _get_metadata_or_catch_error - metadata = get_hf_file_metadata(url=url, proxies=proxies, timeout=etag_timeout, headers=headers) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1645, in get_hf_file_metadata - r = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 372, in _request_wrapper - response = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 396, in _request_wrapper - hf_raise_for_status(response) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status - raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-66948234-0b6b1da752c6eafa11f86275;8c3679d8-ef5d-4d1f-9643-6ce65ed5db5d) - -Repository Not Found for url: https://huggingface.co/google/recurrentgemma-7b/resolve/main/config.json. -Please make sure you specified the correct `repo_id` and `repo_type`. -If you are trying to access a private or gated repo, make sure you are authenticated. 
- -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 425, in cached_file - raise EnvironmentError( -OSError: google/recurrentgemma-7b is not a local folder and is not a valid model identifier listed on 'https://huggingface.co/models' -If this is a private repository, make sure to pass a token having permission to this repo either by logging in with `huggingface-cli login` or by passing `token=` - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,2,64,1,,, -4bit-awq-exllama-v2-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,huggyllama/llama-13b,huggyllama/llama-13b,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File 
""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - self.load_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3907, in from_pretrained - hf_quantizer.postprocess_model(model) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/base.py"", line 195, in postprocess_model - return self._process_model_after_weight_loading(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/quantizer_awq.py"", line 107, in _process_model_after_weight_loading - model = post_init_awq_exllama_modules(model, self.quantization_config.exllama_config) - File ""/usr/local/lib/python3.10/dist-packages/transformers/integrations/awq.py"", line 466, in post_init_awq_exllama_modules - model = exllamav2_post_init( - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllamav2.py"", line 198, in exllamav2_post_init - submodule.post_init(scratch_space=model.scratch_spaces[device]) - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllamav2.py"", line 81, in post_init - self.q_handle = exlv2_ext.make_q_matrix( -NameError: name 'exlv2_ext' is not defined - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,2,64,1,,, -4bit-awq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,Deci/DeciCoder-1b,Deci/DeciCoder-1b,cuda,0,42,,,True,,,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 613, in resolve_trust_remote_code - answer = input( -EOFError: EOF when reading a line - -During handling of the above exception, another exception occurred: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run - report = scenario.run(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run - self.run_model_loading_tracking(backend) - File 
""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking - backend.load() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load - self.load_transformers_model() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 149, in load_transformers_model - self.create_no_weights_model() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 269, in create_no_weights_model - meta_model = self.automodel_loader.from_config(self.pretrained_config) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 419, in from_config - trust_remote_code = resolve_trust_remote_code( - File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 626, in resolve_trust_remote_code - raise ValueError( -ValueError: The repository for Deci/DeciCoder-1b contains custom code which must be executed to correctly load the model. You can inspect the repository content at https://hf.co/Deci/DeciCoder-1b. -Please pass the argument `trust_remote_code=True` to allow custom code to be run. - -",llama,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,2,64,1,,, -4bit-awq-exllama-v2-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,google/recurrentgemma-2b,google/recurrentgemma-2b,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 304, in hf_raise_for_status - response.raise_for_status() - File ""/usr/local/lib/python3.10/dist-packages/requests/models.py"", line 1024, in raise_for_status - raise HTTPError(http_error_msg, response=self) -requests.exceptions.HTTPError: 403 Client Error: Forbidden for url: https://huggingface.co/google/recurrentgemma-2b/resolve/main/config.json - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1722, in _get_metadata_or_catch_error - metadata = get_hf_file_metadata(url=url, proxies=proxies, timeout=etag_timeout, headers=headers) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1645, in get_hf_file_metadata - r = _request_wrapper( - File 
""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 372, in _request_wrapper - response = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 396, in _request_wrapper - hf_raise_for_status(response) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 367, in hf_raise_for_status - raise HfHubHTTPError(message, response=response) from e -huggingface_hub.utils._errors.HfHubHTTPError: (Request ID: Root=1-669481c6-7de5cf01677919b4242aab6c;3ad2d841-515a-4781-8edf-860ae04b15bd) - -403 Forbidden: Please enable access to public gated repositories in your fine-grained token settings to view this repository.. -Cannot access content at: https://huggingface.co/google/recurrentgemma-2b/resolve/main/config.json. -If you are trying to create or update content,make sure you have a token with the `write` role. - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1221, in hf_hub_download - return _hf_hub_download_to_cache_dir( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1325, in _hf_hub_download_to_cache_dir - _raise_on_head_call_error(head_call_error, force_download, local_files_only) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1826, in _raise_on_head_call_error - raise LocalEntryNotFoundError( -huggingface_hub.utils._errors.LocalEntryNotFoundError: An error happened while trying to locate the file on the Hub and we cannot find the requested files in the local cache. Please check your connection and try again or make sure your Internet connection is on. 
- -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 445, in cached_file - raise EnvironmentError( -OSError: We couldn't connect to 'https://huggingface.co' to load this file, couldn't find it in the cached files and it looks like google/recurrentgemma-2b is not the path to a directory containing a file named config.json. -Checkout your internet connection or see how to run the library in offline mode at 'https://huggingface.co/docs/transformers/installation#offline-mode'. 
- -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,2,64,1,,, -4bit-awq-exllama-v2-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,v,v,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 304, in hf_raise_for_status - response.raise_for_status() - File ""/usr/local/lib/python3.10/dist-packages/requests/models.py"", line 1024, in raise_for_status - raise HTTPError(http_error_msg, response=self) -requests.exceptions.HTTPError: 404 Client Error: Not Found for url: https://huggingface.co/v/resolve/main/config.json - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1221, in hf_hub_download - return _hf_hub_download_to_cache_dir( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1325, in _hf_hub_download_to_cache_dir - _raise_on_head_call_error(head_call_error, force_download, local_files_only) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1823, in _raise_on_head_call_error - raise head_call_error - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1722, in _get_metadata_or_catch_error - metadata = get_hf_file_metadata(url=url, proxies=proxies, timeout=etag_timeout, headers=headers) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1645, in get_hf_file_metadata - r = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 372, in _request_wrapper - response = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 396, in _request_wrapper - hf_raise_for_status(response) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status - raise RepositoryNotFoundError(message, 
response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-6694947b-53dbb31c0a7512f03614f0c4;f3e7a548-682a-4605-979e-e6805ddf6104) - -Repository Not Found for url: https://huggingface.co/v/resolve/main/config.json. -Please make sure you specified the correct `repo_id` and `repo_type`. -If you are trying to access a private or gated repo, make sure you are authenticated. - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 425, in cached_file - raise EnvironmentError( -OSError: v is not a local folder and is not a valid model identifier listed on 'https://huggingface.co/models' -If this is a private repository, make sure to pass a token having permission to this repo either by logging in with `huggingface-cli login` or by passing `token=` - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,2,64,1,,, -4bit-awq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,EleutherAI/pythia-2.7b,EleutherAI/pythia-2.7b,cuda,0,42,,,True,,,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,gpt_neox,MB,2218.950656,3198.681088,0.0,2569.0112,2295.745536,s,1,8.589455078125,8.589455078125,0.0,8.589455078125,8.589455078125,8.589455078125,8.589455078125,[8.589455078125],,kWh,2.023945256667553e-05,1.1076912783599765e-05,3.524613930799836e-05,6.656250465827366e-05,,MB,2388.492288,3221.74976,0.0,2571.108352,2282.97216,s,10,0.5386476860046386,0.053864768600463866,0.00020593326494057617,0.05379956817626953,0.05406180572509766,0.05423811912536621,0.054379169845581055,"[0.053819263458251956, 0.054414432525634764, 
0.053720481872558595, 0.053749313354492186, 0.05373225784301758, 0.053683391571044924, 0.05380636978149414, 0.05379276657104492, 0.05402262496948242, 0.05390678405761719]",tokens/s,4752.642713437655,kWh,6.366563121004594e-07,3.4885355087274544e-07,2.4707367436131857e-06,3.4562466065863907e-06,tokens/kWh,74068788.81621295,MB,2411.384832,3221.74976,0.0,2571.108352,2390.926848,s,10,17.192418334960937,1.7192418334960937,0.013621342172203411,1.7163196411132813,1.7345747192382812,1.7431281799316407,1.7499709484863282,"[1.7068880615234374, 1.7068663330078124, 1.7149542236328126, 1.7120369873046875, 1.71768505859375, 1.71863427734375, 1.705112060546875, 1.7258857421875, 1.751681640625, 1.7326739501953126]",tokens/s,36.64405947584985,kWh,2.04936445487328e-05,1.1230797690612358e-05,3.919235214238272e-05,7.091679438172786e-05,tokens/kWh,888365.0276249984,,s,630,17.189794830322253,0.027285388619559155,0.0005682163666158149,0.0270699520111084,0.027847679138183593,0.028123597240448,0.03002859504699708,"[0.02668339157104492, 0.026900480270385742, 0.026863616943359377, 0.026860544204711914, 0.027397119522094726, 0.027089920043945313, 0.026970111846923828, 0.02713907241821289, 0.027088895797729492, 0.027449344635009764, 0.02778828811645508, 0.027183103561401366, 0.02696601676940918, 0.027687936782836913, 0.02700492858886719, 0.027216896057128907, 0.027159551620483398, 0.02689023971557617, 0.02673971176147461, 0.0269434871673584, 0.026808319091796876, 0.027088895797729492, 0.026820640563964843, 0.027142112731933593, 0.0271779842376709, 0.03060633659362793, 0.027847679138183593, 0.02697318458557129, 0.027206655502319335, 0.027243520736694334, 0.027081727981567383, 0.027084800720214845, 0.026817535400390623, 0.027009023666381835, 0.026868736267089844, 0.026876928329467774, 0.026802175521850585, 0.026764287948608398, 0.026836992263793946, 0.026927104949951174, 0.026816511154174806, 0.027406335830688477, 0.02704080009460449, 0.02701103973388672, 0.02720358467102051, 0.026909696578979493, 0.026610687255859376, 0.027072511672973632, 0.026969087600708007, 0.027043840408325196, 0.026995712280273438, 0.026908672332763672, 0.026952703475952147, 0.027173887252807616, 0.026847232818603517, 0.026961919784545898, 0.026808319091796876, 0.026909696578979493, 0.026968063354492186, 0.02688102340698242, 0.02691481590270996, 0.026991615295410155, 0.02721177673339844, 0.02752102470397949, 0.027225088119506836, 0.027494400024414063, 0.02729471969604492, 0.026927104949951174, 0.02716876792907715, 0.026927104949951174, 0.027025407791137695, 0.026862592697143556, 0.026820608139038086, 0.02688921546936035, 0.026868736267089844, 0.026816511154174806, 0.02716057586669922, 0.02697216033935547, 0.027092992782592775, 0.02697216033935547, 0.02695680046081543, 0.02695680046081543, 0.026854400634765626, 0.02693017578125, 0.026992639541625976, 0.02694963264465332, 0.026876928329467774, 0.02695475196838379, 0.02688921546936035, 0.026910720825195314, 0.026885120391845704, 0.02675712013244629, 0.026893312454223633, 0.02711347198486328, 0.02739200019836426, 0.026829824447631836, 0.026925056457519532, 0.026864639282226564, 0.026855424880981447, 0.02835968017578125, 0.03038617515563965, 0.027395072937011718, 0.027035648345947266, 0.026863616943359377, 0.02746675109863281, 0.027669504165649415, 0.027684864044189454, 0.026986495971679687, 0.02697216033935547, 0.026793983459472655, 0.026960895538330077, 0.026859519958496093, 0.026796031951904296, 0.027065343856811523, 0.027015167236328123, 0.02693734359741211, 0.026843135833740234, 
0.02686566352844238, 0.02712678337097168, 0.02719539260864258, 0.02711859130859375, 0.026927104949951174, 0.02687283134460449, 0.026839040756225587, 0.026912832260131837, 0.026851264953613282, 0.0267827205657959, 0.02691481590270996, 0.0270960636138916, 0.027256832122802735, 0.026844160079956055, 0.026867712020874023, 0.026818559646606444, 0.026870784759521486, 0.026811391830444335, 0.027568128585815428, 0.030744575500488282, 0.02791628837585449, 0.026864639282226564, 0.02695475196838379, 0.02698137664794922, 0.027609088897705077, 0.02691993522644043, 0.026876928329467774, 0.026693632125854492, 0.026844160079956055, 0.026785791397094725, 0.026885120391845704, 0.027853824615478515, 0.027185152053833008, 0.027108352661132814, 0.02699776077270508, 0.02693939208984375, 0.02713804817199707, 0.027463680267333986, 0.027124736785888674, 0.02687283134460449, 0.02699776077270508, 0.02690457534790039, 0.027188224792480467, 0.02900275230407715, 0.02795622444152832, 0.027133951187133788, 0.0269117431640625, 0.02693939208984375, 0.02711039924621582, 0.027076608657836915, 0.026859519958496093, 0.02691993522644043, 0.026918912887573244, 0.026875904083251953, 0.02689023971557617, 0.026868736267089844, 0.027700223922729493, 0.030822399139404297, 0.02795315170288086, 0.027173887252807616, 0.02716057586669922, 0.026990591049194337, 0.026933248519897462, 0.026883071899414062, 0.026934272766113283, 0.02697420883178711, 0.027080703735351562, 0.02689740753173828, 0.02696396827697754, 0.027035648345947266, 0.027046911239624022, 0.026983423233032225, 0.027471872329711915, 0.026983423233032225, 0.02689638328552246, 0.027001855850219726, 0.026917888641357423, 0.026894336700439454, 0.026870784759521486, 0.027010047912597656, 0.027043840408325196, 0.0269117431640625, 0.026927104949951174, 0.02698956871032715, 0.027076608657836915, 0.0268984317779541, 0.02694041633605957, 0.02712985610961914, 0.026877952575683595, 0.02691993522644043, 0.026990591049194337, 0.026933248519897462, 0.027517951965332032, 0.03080601692199707, 0.02834841537475586, 0.027197439193725585, 0.027000831604003905, 0.02691379165649414, 0.027067424774169922, 0.026857440948486327, 0.027172864913940428, 0.026942464828491212, 0.027061248779296877, 0.027214847564697265, 0.026953727722167968, 0.02714726448059082, 0.027017215728759765, 0.02700492858886719, 0.027089920043945313, 0.027337728500366212, 0.02690355110168457, 0.028325887680053712, 0.027630592346191408, 0.027184127807617187, 0.02710937690734863, 0.02695782470703125, 0.027091968536376954, 0.02688921546936035, 0.026892288208007813, 0.027116544723510744, 0.026868736267089844, 0.02698956871032715, 0.02697420883178711, 0.02693939208984375, 0.02689129638671875, 0.026940383911132813, 0.02696601676940918, 0.026901504516601563, 0.02687385559082031, 0.02715443229675293, 0.027486207962036133, 0.02832691192626953, 0.027611135482788086, 0.027045888900756834, 0.027340799331665038, 0.026894336700439454, 0.026858495712280273, 0.027099136352539063, 0.03063910484313965, 0.02833612823486328, 0.02809753608703613, 0.027295743942260742, 0.027115520477294923, 0.02672230339050293, 0.027594751358032226, 0.027084800720214845, 0.026908672332763672, 0.026868736267089844, 0.02753023910522461, 0.02713804817199707, 0.02716979217529297, 0.026918912887573244, 0.026834943771362304, 0.026901504516601563, 0.028477439880371092, 0.027456512451171877, 0.027010047912597656, 0.02698240089416504, 0.026942464828491212, 0.026908672332763672, 0.02695884895324707, 0.026918912887573244, 0.026884096145629883, 0.027201536178588868, 
0.027007999420166014, 0.027021312713623048, 0.027378688812255858, 0.02754969596862793, 0.027510784149169923, 0.02709503936767578, 0.02695782470703125, 0.026785791397094725, 0.026875904083251953, 0.027347967147827147, 0.028132352828979492, 0.02779648017883301, 0.027042816162109375, 0.02691584014892578, 0.026953727722167968, 0.027043840408325196, 0.02698137664794922, 0.02731520080566406, 0.027792383193969726, 0.027067392349243165, 0.027049983978271484, 0.0271011848449707, 0.027619327545166016, 0.02691379165649414, 0.027002880096435547, 0.026849279403686522, 0.02774323272705078, 0.027703296661376952, 0.028161024093627928, 0.027003904342651368, 0.026936319351196288, 0.0269117431640625, 0.026908672332763672, 0.02712063980102539, 0.027007999420166014, 0.026901504516601563, 0.02700595283508301, 0.027440128326416017, 0.02716262435913086, 0.027827199935913087, 0.028075008392333983, 0.02717900848388672, 0.02688102340698242, 0.02708684730529785, 0.026990591049194337, 0.02728550338745117, 0.02711347198486328, 0.02731520080566406, 0.027060224533081056, 0.0271646728515625, 0.027065343856811523, 0.02690662384033203, 0.02755583953857422, 0.027381759643554687, 0.027257856369018556, 0.027462656021118165, 0.027003904342651368, 0.027233280181884766, 0.029825023651123047, 0.028433408737182617, 0.027026432037353516, 0.027062271118164064, 0.02721075248718262, 0.026959871292114256, 0.026910720825195314, 0.027052032470703126, 0.026995712280273438, 0.026953727722167968, 0.027068416595458986, 0.026821632385253907, 0.026950656890869142, 0.027045888900756834, 0.02694655990600586, 0.027288576126098633, 0.027157503128051756, 0.030111743927001954, 0.028211200714111328, 0.027878400802612304, 0.027299840927124022, 0.027001855850219726, 0.027029504776000978, 0.027584512710571288, 0.02713398361206055, 0.02702742385864258, 0.026910720825195314, 0.027018239974975586, 0.02698137664794922, 0.026868736267089844, 0.026888191223144533, 0.027852800369262694, 0.02707148742675781, 0.0271011848449707, 0.02712678337097168, 0.027034624099731445, 0.026950656890869142, 0.02732748794555664, 0.026928127288818358, 0.027296768188476563, 0.02776166343688965, 0.02736128044128418, 0.026976255416870116, 0.026879999160766603, 0.027374591827392578, 0.02711961555480957, 0.027411455154418944, 0.027216896057128907, 0.02710220718383789, 0.02682368087768555, 0.026926080703735353, 0.02688921546936035, 0.02697113609313965, 0.027002880096435547, 0.02715238380432129, 0.027468799591064453, 0.027321344375610353, 0.026883071899414062, 0.027769855499267578, 0.027488256454467775, 0.027435007095336913, 0.027281408309936524, 0.02706329536437988, 0.026888191223144533, 0.026936319351196288, 0.026860544204711914, 0.02710220718383789, 0.026978303909301758, 0.02698854446411133, 0.026944511413574217, 0.027076608657836915, 0.027032575607299804, 0.02693222427368164, 0.026978303909301758, 0.026976255416870116, 0.026985471725463867, 0.02689740753173828, 0.02694041633605957, 0.026976255416870116, 0.0269117431640625, 0.026863616943359377, 0.026851327896118164, 0.026863616943359377, 0.026756095886230468, 0.02690764808654785, 0.026959871292114256, 0.027040767669677734, 0.026813440322875977, 0.026855424880981447, 0.026778623580932616, 0.027399168014526368, 0.027021312713623048, 0.02694041633605957, 0.027297792434692384, 0.027782144546508788, 0.0269803524017334, 0.02711039924621582, 0.02687283134460449, 0.026832895278930666, 0.026798080444335938, 0.026892288208007813, 0.026864639282226564, 0.026960895538330077, 0.026828800201416016, 0.026785791397094725, 0.02692198371887207, 
0.026764287948608398, 0.027022335052490236, 0.026828800201416016, 0.026852352142333984, 0.026836992263793946, 0.026846208572387696, 0.026810367584228514, 0.026798080444335938, 0.026827775955200195, 0.026809343338012694, 0.026894336700439454, 0.026909696578979493, 0.02674073600769043, 0.026884096145629883, 0.026875904083251953, 0.026950656890869142, 0.026828800201416016, 0.027472896575927733, 0.027074560165405274, 0.027183135986328124, 0.027112415313720703, 0.02771353530883789, 0.026850303649902343, 0.02750054359436035, 0.028112895965576173, 0.028039167404174805, 0.027819007873535157, 0.027798528671264647, 0.027660287857055665, 0.02773401641845703, 0.027452415466308593, 0.02755686378479004, 0.02793779182434082, 0.027785215377807617, 0.027670528411865233, 0.027870208740234374, 0.027673599243164062, 0.02769817543029785, 0.027614208221435548, 0.027711488723754882, 0.027451391220092772, 0.027871231079101562, 0.027959327697753906, 0.02780975914001465, 0.027587583541870117, 0.027643903732299805, 0.027656192779541015, 0.027845632553100585, 0.02774835205078125, 0.027639808654785155, 0.0276889591217041, 0.027785215377807617, 0.027814912796020507, 0.027814912796020507, 0.027633663177490234, 0.02768998336791992, 0.027694080352783205, 0.027677696228027345, 0.027586559295654296, 0.028269567489624024, 0.028006399154663086, 0.027836416244506838, 0.027881471633911133, 0.027658239364624023, 0.027855871200561523, 0.027570175170898437, 0.02814361572265625, 0.027860992431640624, 0.027683839797973633, 0.027983871459960938, 0.02939289665222168, 0.028494848251342773, 0.028181503295898438, 0.02771865653991699, 0.02772172737121582, 0.027681791305541992, 0.027707391738891602, 0.0276889591217041, 0.027675647735595704, 0.027635711669921875, 0.02775347137451172, 0.027664384841918944, 0.027847679138183593, 0.027571199417114257, 0.027711488723754882, 0.02752204895019531, 0.02774527931213379, 0.0277391357421875, 0.027749376296997072, 0.02792857551574707, 0.02797875213623047, 0.027677696228027345, 0.02750054359436035, 0.02791116714477539, 0.028400640487670898, 0.02833305549621582, 0.02837299156188965, 0.028019712448120116, 0.027836416244506838, 0.027696128845214843, 0.027752447128295898, 0.027616256713867186, 0.027724800109863282, 0.027782144546508788, 0.02779545593261719, 0.027657215118408202, 0.027670528411865233, 0.02758246421813965, 0.027613183975219727, 0.027618303298950195, 0.02775449562072754, 0.027621376037597657, 0.02916044807434082, 0.02897715187072754, 0.02772275161743164, 0.027848703384399414, 0.026927104949951174, 0.02689740753173828, 0.026934272766113283, 0.027080703735351562, 0.027189247131347655, 0.026917888641357423, 0.02689740753173828, 0.026928127288818358, 0.02710425567626953, 0.02695680046081543, 0.026792959213256837, 0.027058176040649414, 0.027337728500366212, 0.02749849510192871, 0.027454463958740235, 0.027744255065917968, 0.027497472763061522, 0.02698240089416504, 0.026934272766113283, 0.026879999160766603, 0.027257856369018556, 0.027707391738891602, 0.027631616592407225, 0.027568128585815428, 0.027671552658081053, 0.02773708724975586, 0.027570175170898437, 0.027599872589111327, 0.027485183715820313, 0.02794393539428711, 0.027816959381103516, 0.027906047821044923, 0.0276889591217041, 0.02753023910522461, 0.02776268768310547, 0.027833343505859375, 0.027707391738891602, 0.027913215637207032, 0.02769817543029785, 0.027610111236572265, 0.027784191131591796, 0.02768998336791992, 0.027615232467651366, 0.02775347137451172, 0.027635711669921875, 0.027798528671264647, 0.027701248168945314, 
0.027741184234619142, 0.02796031951904297, 0.029657087326049804, 0.028695552825927735, 0.028064767837524415, 0.027886592864990234, 0.027749376296997072, 0.02696499252319336, 0.027190271377563476, 0.027455488204956056, 0.027268096923828124, 0.027045888900756834, 0.02693222427368164, 0.02694041633605957, 0.027031551361083983, 0.027032575607299804, 0.026950656890869142, 0.026995712280273438, 0.026861568450927735, 0.02819993591308594, 0.027240447998046875, 0.02687283134460449]",tokens/s,36.6496520882669,,,2,64,1,,, -4bit-awq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,Qwen/Qwen-7B,Qwen/Qwen-7B,cuda,0,42,,,True,,,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 613, in resolve_trust_remote_code - answer = input( -EOFError: EOF when reading a line - -During handling of the above exception, another exception occurred: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run - report = scenario.run(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run - self.run_model_loading_tracking(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking - backend.load() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load - self.load_transformers_model() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 149, in load_transformers_model - self.create_no_weights_model() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 269, in create_no_weights_model - meta_model = self.automodel_loader.from_config(self.pretrained_config) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 419, in from_config - trust_remote_code = resolve_trust_remote_code( - File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 626, in resolve_trust_remote_code - raise ValueError( -ValueError: The repository for Qwen/Qwen-7B contains custom code which must be executed to correctly load the model. You can inspect the repository content at https://hf.co/Qwen/Qwen-7B. -Please pass the argument `trust_remote_code=True` to allow custom code to be run. 
- -",qwen,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,2,64,1,,, -4bit-awq-exllama-v2-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,microsoft/rho-math-1b-v0.1,microsoft/rho-math-1b-v0.1,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - self.load_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3907, in from_pretrained - hf_quantizer.postprocess_model(model) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/base.py"", line 195, in postprocess_model - return self._process_model_after_weight_loading(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/quantizer_awq.py"", line 107, in _process_model_after_weight_loading - model = post_init_awq_exllama_modules(model, self.quantization_config.exllama_config) - File ""/usr/local/lib/python3.10/dist-packages/transformers/integrations/awq.py"", line 466, in post_init_awq_exllama_modules - model = exllamav2_post_init( - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllamav2.py"", line 198, in exllamav2_post_init - submodule.post_init(scratch_space=model.scratch_spaces[device]) - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllamav2.py"", line 81, in post_init - self.q_handle = exlv2_ext.make_q_matrix( -NameError: name 'exlv2_ext' is not defined - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,2,64,1,,, 
-4bit-awq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,01-ai/Yi-6B,01-ai/Yi-6B,cuda,0,42,,,True,,,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,llama,MB,3575.84896,5499.256832,0.0,4869.586944,4520.068608,s,1,10.815341796875,10.815341796875,0.0,10.815341796875,10.815341796875,10.815341796875,10.815341796875,[10.815341796875],,kWh,4.373537506597159e-05,2.3954797824220522e-05,7.536422695799891e-05,0.00014305439984819103,,MB,1760.219136,5539.10272,0.0,4888.461312,4194.018304,s,10,0.987206901550293,0.0987206901550293,7.949935685759503e-05,0.09869211196899413,0.0987966423034668,0.09886044883728028,0.09891149406433106,"[0.09892425537109376, 0.09862812805175782, 0.09873638153076172, 0.09868806457519531, 0.09873600006103515, 0.09867836761474609, 0.09869615936279297, 0.09866754913330078, 0.09866953277587891, 0.09878246307373047]",tokens/s,2593.1747397428235,kWh,1.1675582880787124e-06,6.397657586386497e-07,4.621654024117641e-06,6.428978070835003e-06,tokens/kWh,39819703.40843774,MB,1790.152704,5547.491328,0.0,4896.84992,4194.020864,s,10,17.760134155273438,1.7760134155273437,0.017933769890637493,1.7683128662109375,1.8033778564453125,1.8110844360351563,1.8172496997070313,"[1.7754610595703124, 1.7654295654296874, 1.7659063720703125, 1.7645032958984375, 1.7605350341796875, 1.7680712890625, 1.801665283203125, 1.771216796875, 1.768554443359375, 1.818791015625]",tokens/s,35.47270501968235,kWh,2.092853824157411e-05,1.1469215962683766e-05,4.7043665085882286e-05,7.944141929014014e-05,tokens/kWh,793037.1909634202,,s,630,17.7581813697815,0.028187589475843645,0.00065324191294238,0.027862528800964355,0.029030604171752933,0.02920709104537964,0.030088059310913094,"[0.028641279220581056, 0.0295546875, 0.028941312789916993, 0.028875776290893555, 0.028844032287597656, 0.02858393669128418, 0.02769817543029785, 0.027991039276123047, 0.028654592514038086, 0.028735488891601563, 0.027662336349487306, 0.02777497673034668, 0.027793407440185547, 0.027687936782836913, 0.02775040054321289, 0.027873279571533204, 0.02771353530883789, 0.02770227241516113, 0.027809791564941407, 0.027749376296997072, 0.02775654411315918, 0.02798591995239258, 0.027744255065917968, 0.027857919692993165, 0.02775551986694336, 0.02771046447753906, 0.027786239624023438, 0.027741184234619142, 0.02815999984741211, 0.027629568099975587, 0.027847679138183593, 0.028441600799560547, 0.02812518310546875, 0.027830272674560546, 0.027886592864990234, 0.028000255584716797, 0.027824127197265625, 0.02777497673034668, 0.02772889518737793, 0.02790297508239746, 0.027822080612182616, 0.02771455955505371, 0.02795110321044922, 0.030258176803588867, 0.029215744018554687, 0.028823551177978517, 0.027814912796020507, 0.028859392166137695, 0.02872831916809082, 0.028803071975708007, 0.028719104766845704, 0.028868608474731446, 0.02890547180175781, 0.02875801658630371, 0.027884544372558592, 0.028653568267822265, 0.028460031509399415, 0.027892736434936522, 0.02778726387023926, 0.02776678466796875, 0.027872255325317383, 0.02854400062561035, 0.027638784408569338, 0.02798080062866211, 
0.028895231246948243, 0.028051456451416015, 0.027784191131591796, 0.028472320556640625, 0.028935167312622072, 0.028260351181030274, 0.027906047821044923, 0.02769715118408203, 0.02771353530883789, 0.027681791305541992, 0.027778047561645508, 0.027717632293701173, 0.02770636749267578, 0.027683839797973633, 0.027648000717163085, 0.02774015998840332, 0.029048831939697265, 0.02962124824523926, 0.029030399322509767, 0.02880102348327637, 0.027785215377807617, 0.027786239624023438, 0.02859519958496094, 0.028878847122192384, 0.02859519958496094, 0.028504064559936523, 0.027664384841918944, 0.02755891227722168, 0.02872831916809082, 0.02879283142089844, 0.028817407608032225, 0.028306432723999023, 0.02773094367980957, 0.027883520126342775, 0.027760639190673828, 0.027604991912841798, 0.027785215377807617, 0.02838630485534668, 0.02815897560119629, 0.02777190399169922, 0.028321792602539062, 0.028473344802856446, 0.02770636749267578, 0.027720703125, 0.027668479919433595, 0.02771455955505371, 0.027794431686401368, 0.027633663177490234, 0.027629568099975587, 0.027642879486083984, 0.027676671981811524, 0.027634687423706054, 0.027645952224731447, 0.027625471115112304, 0.02776780891418457, 0.02755583953857422, 0.027645952224731447, 0.02755072021484375, 0.027592704772949218, 0.027669504165649415, 0.027666431427001953, 0.027645952224731447, 0.02792959976196289, 0.028042240142822264, 0.027715583801269532, 0.02772275161743164, 0.028044288635253906, 0.02770636749267578, 0.027640832901000976, 0.027685888290405275, 0.028053504943847656, 0.028679168701171875, 0.02891366386413574, 0.028704767227172853, 0.028617727279663087, 0.027663360595703124, 0.027641855239868163, 0.027694080352783205, 0.027694080352783205, 0.027683839797973633, 0.027683839797973633, 0.027711488723754882, 0.02935398483276367, 0.02959052848815918, 0.028272640228271483, 0.02796953582763672, 0.02776780891418457, 0.027634687423706054, 0.02772684860229492, 0.02776473617553711, 0.02771968078613281, 0.027805696487426756, 0.02772889518737793, 0.02781494331359863, 0.027879392623901367, 0.027862016677856444, 0.028421119689941408, 0.02819174385070801, 0.02774732780456543, 0.028095487594604493, 0.027691007614135742, 0.027812864303588865, 0.027716608047485353, 0.027782144546508788, 0.027837440490722655, 0.0277391357421875, 0.027852800369262694, 0.03137126350402832, 0.029722623825073242, 0.028901376724243165, 0.02751283264160156, 0.027811840057373048, 0.028281856536865234, 0.027502592086791993, 0.027651071548461914, 0.027623424530029295, 0.027724800109863282, 0.02776780891418457, 0.027650047302246093, 0.027682815551757813, 0.027646976470947264, 0.027588607788085938, 0.028180479049682617, 0.028009471893310548, 0.028078079223632812, 0.028506111145019532, 0.028809215545654295, 0.02855731201171875, 0.028421119689941408, 0.027580415725708008, 0.027675647735595704, 0.028286975860595705, 0.027889663696289063, 0.02778112030029297, 0.02775449562072754, 0.02768998336791992, 0.027813888549804686, 0.02772787284851074, 0.02772684860229492, 0.028836864471435547, 0.029458431243896483, 0.029132799148559572, 0.028283903121948242, 0.02773708724975586, 0.027910144805908203, 0.027464704513549806, 0.027849727630615235, 0.02833919906616211, 0.027853824615478515, 0.02773504066467285, 0.027708415985107423, 0.027320320129394532, 0.02732339286804199, 0.027453439712524414, 0.027588607788085938, 0.02733260726928711, 0.02775961685180664, 0.027775999069213866, 0.027703296661376952, 0.027792383193969726, 0.02819071960449219, 0.02878361511230469, 0.028433408737182617, 0.027862016677856444, 
0.02852556800842285, 0.02852556800842285, 0.02774732780456543, 0.028220415115356445, 0.02772172737121582, 0.027645952224731447, 0.027661312103271486, 0.02770227241516113, 0.027805696487426756, 0.02776371192932129, 0.027650047302246093, 0.028038143157958984, 0.02775551986694336, 0.02832076835632324, 0.02814259147644043, 0.028210176467895507, 0.027882495880126954, 0.027623424530029295, 0.027741184234619142, 0.02772787284851074, 0.02773811149597168, 0.028031999588012696, 0.0292096004486084, 0.029063167572021483, 0.02755686378479004, 0.027810815811157227, 0.027790336608886718, 0.027760639190673828, 0.027769855499267578, 0.027864063262939453, 0.027865087509155274, 0.027846656799316406, 0.02774015998840332, 0.027782144546508788, 0.027711488723754882, 0.02815078353881836, 0.027793407440185547, 0.02770534324645996, 0.027741184234619142, 0.02776268768310547, 0.027628543853759766, 0.02775449562072754, 0.02778828811645508, 0.02817535972595215, 0.028120063781738282, 0.027823104858398437, 0.02787942314147949, 0.028605440139770507, 0.02814873504638672, 0.02771251106262207, 0.027716608047485353, 0.02771353530883789, 0.028254207611083985, 0.028464128494262695, 0.027709440231323244, 0.027812864303588865, 0.027749376296997072, 0.027777023315429687, 0.027801599502563477, 0.027811840057373048, 0.02775859260559082, 0.027805696487426756, 0.02778828811645508, 0.027864063262939453, 0.02778726387023926, 0.02777292823791504, 0.027871231079101562, 0.027798528671264647, 0.02816409683227539, 0.028943359375, 0.028269567489624024, 0.028274688720703125, 0.02778009605407715, 0.027830272674560546, 0.02774630355834961, 0.027757568359375, 0.027792383193969726, 0.027674623489379883, 0.02778112030029297, 0.029139968872070314, 0.029722623825073242, 0.028496896743774414, 0.027693056106567384, 0.027860992431640624, 0.028308479309082032, 0.027778047561645508, 0.02778316879272461, 0.027828224182128908, 0.027709440231323244, 0.027729951858520507, 0.027728864669799805, 0.02774732780456543, 0.02773401641845703, 0.02775040054321289, 0.027815935134887695, 0.027649023056030272, 0.027604991912841798, 0.027628543853759766, 0.0277391357421875, 0.027622400283813478, 0.028225536346435546, 0.02873651123046875, 0.027975679397583008, 0.028100608825683594, 0.027654144287109376, 0.027692031860351563, 0.02778828811645508, 0.02769817543029785, 0.02771455955505371, 0.02772684860229492, 0.02794086456298828, 0.02892185592651367, 0.03014656066894531, 0.029233152389526368, 0.028399616241455077, 0.028652544021606444, 0.027835391998291017, 0.028485631942749022, 0.02817945671081543, 0.027966463088989257, 0.027819007873535157, 0.02795827293395996, 0.028040191650390626, 0.02771251106262207, 0.027615232467651366, 0.027658239364624023, 0.02774630355834961, 0.02776473617553711, 0.028435455322265626, 0.02897715187072754, 0.028926975250244142, 0.0279685115814209, 0.02794905662536621, 0.02795212745666504, 0.02791628837585449, 0.027877376556396483, 0.02873958396911621, 0.028850175857543944, 0.029139968872070314, 0.02874982452392578, 0.027860992431640624, 0.027752447128295898, 0.027845632553100585, 0.02776371192932129, 0.027974655151367187, 0.027966463088989257, 0.02814771270751953, 0.02796953582763672, 0.02771046447753906, 0.027716608047485353, 0.028428287506103517, 0.0283504638671875, 0.032699390411376955, 0.02920243263244629, 0.028414976119995116, 0.028669952392578125, 0.02856755256652832, 0.028276735305786133, 0.02874166488647461, 0.028710880279541016, 0.027724800109863282, 0.02857676887512207, 0.028322816848754883, 0.027663360595703124, 0.027708415985107423, 
0.02876518440246582, 0.028828672409057617, 0.02895462417602539, 0.02892083168029785, 0.02877644729614258, 0.027674623489379883, 0.0276889591217041, 0.027777023315429687, 0.027828224182128908, 0.0275599365234375, 0.027614208221435548, 0.027613183975219727, 0.027715583801269532, 0.02775347137451172, 0.027839487075805663, 0.027424768447875978, 0.027676671981811524, 0.027663360595703124, 0.029080575942993164, 0.029147136688232423, 0.029033472061157226, 0.02896998405456543, 0.028672000885009766, 0.028668928146362304, 0.02892902374267578, 0.028828672409057617, 0.030228479385375977, 0.0293621768951416, 0.029032447814941405, 0.02882252883911133, 0.02889625549316406, 0.028933120727539063, 0.028926975250244142, 0.02878054428100586, 0.028907520294189453, 0.028940288543701172, 0.028837888717651368, 0.02897715187072754, 0.029664255142211913, 0.029327360153198243, 0.028860416412353516, 0.028477439880371092, 0.02870783996582031, 0.028673023223876954, 0.028610559463500978, 0.028604448318481444, 0.02878665542602539, 0.027649023056030272, 0.02792959976196289, 0.02877440071105957, 0.028825599670410155, 0.028712959289550782, 0.028863487243652345, 0.02889727973937988, 0.028717056274414062, 0.028891136169433593, 0.028819456100463867, 0.02879283142089844, 0.02855423927307129, 0.027808799743652343, 0.02764080047607422, 0.027678720474243163, 0.027644927978515626, 0.02780364799499512, 0.027657215118408202, 0.027664384841918944, 0.027659263610839844, 0.02877644729614258, 0.027627519607543945, 0.02933145523071289, 0.029206527709960937, 0.029085695266723634, 0.027572223663330078, 0.0274913272857666, 0.027782144546508788, 0.027835391998291017, 0.02771865653991699, 0.027806720733642577, 0.027501567840576172, 0.02787942314147949, 0.028003328323364256, 0.027885568618774413, 0.027885568618774413, 0.027765760421752928, 0.02775449562072754, 0.028867584228515625, 0.028812288284301758, 0.02883072090148926, 0.028725248336791992, 0.02774323272705078, 0.02780364799499512, 0.02774220848083496, 0.02775142478942871, 0.02769817543029785, 0.02772787284851074, 0.027467775344848632, 0.02734284782409668, 0.027399168014526368, 0.027616256713867186, 0.027609088897705077, 0.027709440231323244, 0.02757427215576172, 0.02780364799499512, 0.02877235221862793, 0.028613632202148437, 0.028473344802856446, 0.028888063430786134, 0.028644351959228515, 0.02771046447753906, 0.0277258243560791, 0.02772684860229492, 0.028010496139526365, 0.028729343414306642, 0.028855295181274415, 0.028703744888305665, 0.028705791473388673, 0.028667903900146483, 0.028735488891601563, 0.02879795265197754, 0.028692480087280273, 0.02869862365722656, 0.028678144454956055, 0.02874777603149414, 0.028181503295898438, 0.02755788803100586, 0.027603967666625977, 0.027469823837280274, 0.027594751358032226, 0.027588607788085938, 0.02774527931213379, 0.02775040054321289, 0.027880447387695313, 0.027789312362670897, 0.02772787284851074, 0.028065792083740236, 0.028664831161499024, 0.028676095962524413, 0.029066240310668946, 0.028448768615722656, 0.027546623229980468, 0.027686912536621092, 0.02774015998840332, 0.027768831253051757, 0.027658239364624023, 0.027679744720458983, 0.027871231079101562, 0.02791731262207031, 0.027823104858398437, 0.028013568878173828, 0.028043264389038085, 0.027863040924072265, 0.027816959381103516, 0.028055551528930665, 0.028810239791870116, 0.027812864303588865, 0.02774630355834961, 0.02773811149597168, 0.027681791305541992, 0.027808767318725586, 0.02772172737121582, 0.02778726387023926, 0.027673599243164062, 0.027673599243164062, 0.02837708854675293, 
0.027625471115112304, 0.02775142478942871, 0.027782144546508788, 0.027455488204956056, 0.027675647735595704, 0.02771251106262207, 0.02792959976196289, 0.02874163246154785, 0.028651519775390624, 0.028879871368408205, 0.02795724868774414, 0.027768831253051757, 0.027667455673217774, 0.027986944198608397, 0.028613632202148437, 0.027809791564941407, 0.028949504852294923, 0.028991487503051756, 0.028811264038085937, 0.02869964790344238, 0.02869964790344238, 0.028834815979003905, 0.028660736083984374, 0.028726272583007813, 0.028786687850952147, 0.028854272842407228, 0.02874163246154785, 0.03187404823303223, 0.032361473083496094, 0.029944831848144532, 0.02974412727355957, 0.029130752563476563, 0.0289751033782959, 0.029189119338989256, 0.02932124710083008, 0.02903856086730957, 0.029099008560180665, 0.02904473686218262, 0.027620351791381836, 0.028590080261230468, 0.029095935821533202, 0.029070335388183592, 0.029293567657470702, 0.02933247947692871, 0.02918707275390625, 0.02919628715515137, 0.02892902374267578, 0.02896895980834961, 0.029045759201049806, 0.029070335388183592, 0.02933247947692871, 0.0292096004486084, 0.02911027145385742, 0.02958438491821289, 0.029138944625854493, 0.02916864013671875, 0.029231103897094726, 0.029207551956176758, 0.029046783447265623, 0.029060096740722657, 0.029251583099365236, 0.02913689613342285, 0.029182975769042968, 0.02898124885559082, 0.02775142478942871, 0.028917760848999025, 0.027908128738403322, 0.02759881591796875, 0.027584512710571288, 0.027623424530029295, 0.027654144287109376, 0.027632640838623046, 0.027600896835327147]",tokens/s,35.476605789827666,,,2,64,1,,, -4bit-awq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,EleutherAI/gpt-neo-2.7B,EleutherAI/gpt-neo-2.7B,cuda,0,42,,,True,,,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,gpt_neo,MB,2224.074752,2783.444992,0.0,2153.775104,2041.744384,s,1,9.404611328125,9.404611328125,0.0,9.404611328125,9.404611328125,9.404611328125,9.404611328125,[9.404611328125],,kWh,3.0341497197218187e-05,1.661359217613593e-05,4.9463650682007865e-05,9.641874005536197e-05,,MB,2324.885504,2802.31936,0.0,2153.775104,1917.691904,s,10,0.5479749755859376,0.054797497558593755,0.000427171795324102,0.05491507339477539,0.05515751876831054,0.05524442405700684,0.05531394828796387,"[0.053919902801513674, 0.05513820648193359, 0.05408403015136719, 0.055331329345703124, 0.05501529693603516, 0.05478857421875, 0.05498515319824219, 0.05503952026367188, 0.05482796859741211, 0.054844993591308594]",tokens/s,4671.7461819541095,kWh,6.387640017530222e-07,3.500114523056594e-07,2.3454838583676304e-06,3.334259312426312e-06,tokens/kWh,76778671.36665834,MB,2335.219712,2804.416512,0.0,2153.775104,2001.552384,s,10,14.356662963867185,1.4356662963867186,0.026013979103963,1.4386302490234375,1.46157080078125,1.4630281372070313,1.4641940063476564,"[1.4644854736328126, 1.4612469482421875, 1.4412901611328126, 1.4561494140625, 1.458032470703125, 1.4359703369140624, 1.4345888671875, 1.4225919189453125, 1.4010528564453124, 
1.3812545166015624]",tokens/s,43.88206379056069,kWh,1.7028550658454494e-05,9.331522387653628e-06,3.3195239018632865e-05,5.955531206474097e-05,tokens/kWh,1057840.1458381144,,s,630,14.352910377502443,0.02278239742460705,0.0006567162139657233,0.023002623558044433,0.023323648452758788,0.023558127880096435,0.024263516178131102,"[0.02168217658996582, 0.02206822395324707, 0.022583295822143554, 0.02248089599609375, 0.02205388832092285, 0.021815296173095702, 0.023949312210083007, 0.023199743270874023, 0.023004159927368165, 0.02411008071899414, 0.023533567428588868, 0.023195648193359376, 0.02307174491882324, 0.02312499237060547, 0.02312704086303711, 0.02332569694519043, 0.0230830078125, 0.02330009651184082, 0.023243776321411135, 0.02346086311340332, 0.023630847930908205, 0.023183359146118163, 0.023186431884765626, 0.023209983825683594, 0.02413465690612793, 0.0233175048828125, 0.02311577606201172, 0.023183359146118163, 0.023345151901245118, 0.023206911087036132, 0.023334911346435547, 0.02329190444946289, 0.023150592803955077, 0.02266111946105957, 0.022861824035644532, 0.023205888748168944, 0.023138303756713868, 0.02304204750061035, 0.023150592803955077, 0.023203840255737306, 0.023134208679199218, 0.02327961540222168, 0.02333695983886719, 0.024410112380981445, 0.02426470375061035, 0.02349158477783203, 0.023194623947143556, 0.02330419158935547, 0.024260608673095704, 0.02327961540222168, 0.023130111694335938, 0.023036928176879884, 0.023159807205200195, 0.02310655975341797, 0.023192575454711914, 0.023109632492065428, 0.023431167602539063, 0.023868415832519533, 0.024589311599731444, 0.02351103973388672, 0.023553024291992186, 0.023181312561035155, 0.023361536026000978, 0.02292633628845215, 0.02311577606201172, 0.023422975540161133, 0.023141376495361327, 0.02310348892211914, 0.023096319198608398, 0.023144447326660156, 0.023166976928710937, 0.024412160873413087, 0.023364608764648437, 0.023331840515136718, 0.023177215576171875, 0.02309017562866211, 0.02411520004272461, 0.023000064849853515, 0.023149568557739256, 0.02308198356628418, 0.02253107261657715, 0.022980607986450196, 0.023013376235961915, 0.023043071746826172, 0.02364825630187988, 0.023994367599487306, 0.02390323257446289, 0.023157760620117186, 0.02310655975341797, 0.022962175369262695, 0.023444480895996093, 0.022978559494018554, 0.02305023956298828, 0.02305536079406738, 0.02309939193725586, 0.02306252861022949, 0.02289356803894043, 0.022951936721801756, 0.022966272354125978, 0.022999040603637694, 0.023035903930664063, 0.02308608055114746, 0.023430143356323242, 0.02310246467590332, 0.023003135681152344, 0.023120895385742187, 0.023120895385742187, 0.023347200393676756, 0.023112703323364257, 0.023180288314819338, 0.02325606346130371, 0.02311577606201172, 0.022980607986450196, 0.023079935073852538, 0.023031808853149413, 0.023009279251098632, 0.0231014404296875, 0.023175167083740233, 0.02308915138244629, 0.023323648452758788, 0.023201791763305665, 0.023172096252441408, 0.023171072006225587, 0.023053312301635744, 0.02314035224914551, 0.0231014404296875, 0.021978111267089845, 0.022519807815551757, 0.021988351821899413, 0.021839872360229492, 0.021779455184936524, 0.02184806442260742, 0.022107135772705077, 0.02185625648498535, 0.02164735984802246, 0.021744640350341796, 0.02169753646850586, 0.021917695999145507, 0.021586944580078125, 0.02167193603515625, 0.022303743362426756, 0.02283622360229492, 0.022806560516357422, 0.02317923164367676, 0.023356416702270507, 0.023179264068603517, 0.023022592544555662, 0.023018495559692383, 0.023186431884765626, 
0.023152639389038086, 0.023144447326660156, 0.022988800048828126, 0.02309734344482422, 0.02308403205871582, 0.023023616790771483, 0.02308403205871582, 0.02305023956298828, 0.023175167083740233, 0.023109632492065428, 0.022985727310180663, 0.02290176010131836, 0.023040000915527343, 0.023226367950439454, 0.02306252861022949, 0.023027711868286133, 0.023184383392333984, 0.02313113594055176, 0.02304819107055664, 0.022573055267333983, 0.02310860824584961, 0.023302143096923827, 0.023053312301635744, 0.023212032318115236, 0.023752704620361328, 0.023155712127685548, 0.0231014404296875, 0.02307379150390625, 0.023034879684448242, 0.022691839218139647, 0.023200767517089844, 0.023151615142822265, 0.023343103408813477, 0.023205888748168944, 0.02328883171081543, 0.024207359313964845, 0.024574975967407226, 0.023871488571166992, 0.02324787139892578, 0.02321308708190918, 0.023606271743774415, 0.023167999267578124, 0.022944768905639647, 0.022987775802612305, 0.02304921531677246, 0.02307891273498535, 0.023012351989746094, 0.023259136199951173, 0.02305023956298828, 0.0233123836517334, 0.023188480377197264, 0.022939647674560547, 0.023000064849853515, 0.023142400741577147, 0.022944768905639647, 0.022964223861694336, 0.02290995216369629, 0.023030784606933592, 0.023053312301635744, 0.023171072006225587, 0.02307174491882324, 0.02327142333984375, 0.02326425552368164, 0.023104511260986327, 0.02307379150390625, 0.02349158477783203, 0.023756799697875978, 0.023154687881469727, 0.023163904190063478, 0.022993919372558593, 0.022992895126342772, 0.02304819107055664, 0.02307174491882324, 0.02308710479736328, 0.022992895126342772, 0.023045120239257814, 0.02326118469238281, 0.023211008071899415, 0.023104511260986327, 0.023230464935302734, 0.02312704086303711, 0.02305023956298828, 0.022989824295043947, 0.023104511260986327, 0.023212032318115236, 0.022976512908935546, 0.02309427261352539, 0.023133184432983397, 0.02308608055114746, 0.02304921531677246, 0.023069696426391603, 0.023142400741577147, 0.023162879943847657, 0.023031808853149413, 0.02309734344482422, 0.023176191329956054, 0.022987775802612305, 0.02305843162536621, 0.023036928176879884, 0.02307174491882324, 0.022938623428344726, 0.023011327743530274, 0.023019519805908203, 0.021932031631469725, 0.02166067123413086, 0.02166067123413086, 0.02268569564819336, 0.02328166389465332, 0.023998464584350586, 0.023447551727294923, 0.023221248626708983, 0.023205888748168944, 0.023164928436279295, 0.022977535247802734, 0.023036928176879884, 0.023023616790771483, 0.02308403205871582, 0.022965248107910157, 0.02348646354675293, 0.023163904190063478, 0.0231014404296875, 0.023040000915527343, 0.023031808853149413, 0.023052288055419923, 0.02307276725769043, 0.02312704086303711, 0.022985727310180663, 0.023034879684448242, 0.022993919372558593, 0.023137279510498047, 0.023040000915527343, 0.02304614448547363, 0.022979583740234375, 0.0230830078125, 0.02313216018676758, 0.022958080291748048, 0.0230645751953125, 0.0230328311920166, 0.023214080810546874, 0.023045120239257814, 0.022993919372558593, 0.02290995216369629, 0.02311577606201172, 0.022956031799316406, 0.022965248107910157, 0.023026687622070312, 0.023109632492065428, 0.022988800048828126, 0.023040000915527343, 0.023020544052124024, 0.023254016876220703, 0.024687616348266602, 0.02776371192932129, 0.023946271896362305, 0.023275487899780272, 0.023562303543090822, 0.023193536758422853, 0.02287615966796875, 0.023008256912231444, 0.02294272041320801, 0.023026687622070312, 0.0230328311920166, 0.02305433654785156, 0.022960128784179686, 
0.022914047241210937, 0.022928384780883788, 0.02167807960510254, 0.021601280212402343, 0.021570560455322265, 0.02166886329650879, 0.02169343948364258, 0.02165043258666992, 0.021606399536132814, 0.02171801567077637, 0.02165862464904785, 0.02171801567077637, 0.02165555191040039, 0.02169036865234375, 0.02328985595703125, 0.023620607376098633, 0.023141376495361327, 0.023133184432983397, 0.023006208419799806, 0.02307379150390625, 0.0228853759765625, 0.022983680725097655, 0.022975488662719725, 0.023182336807250976, 0.023194623947143556, 0.023182336807250976, 0.02289356803894043, 0.02306764793395996, 0.022923263549804687, 0.022988800048828126, 0.023002111434936523, 0.02342092704772949, 0.023666688919067383, 0.023262208938598632, 0.023013376235961915, 0.022991872787475585, 0.0230830078125, 0.023028736114501954, 0.022943744659423827, 0.02302566337585449, 0.023052288055419923, 0.023120895385742187, 0.023036928176879884, 0.023026687622070312, 0.02292736053466797, 0.02304102325439453, 0.02311680030822754, 0.0231147518157959, 0.022938623428344726, 0.023158784866333007, 0.023365631103515624, 0.02306764793395996, 0.02301644706726074, 0.023000064849853515, 0.02292736053466797, 0.022932479858398438, 0.023013376235961915, 0.022998016357421876, 0.0226375675201416, 0.023060480117797853, 0.022971391677856445, 0.022792192459106447, 0.02251263999938965, 0.02293452835083008, 0.022999040603637694, 0.021779455184936524, 0.021396480560302734, 0.021347328186035155, 0.02167807960510254, 0.021739519119262696, 0.021651456832885742, 0.02163711929321289, 0.021794815063476563, 0.02168934440612793, 0.02207539176940918, 0.022218751907348632, 0.021614591598510743, 0.02148044776916504, 0.02332467269897461, 0.0242227840423584, 0.02362771224975586, 0.02309939193725586, 0.023069696426391603, 0.023069696426391603, 0.0230328311920166, 0.023009279251098632, 0.022775808334350587, 0.022563840866088865, 0.02315673637390137, 0.02295910453796387, 0.02304204750061035, 0.023133184432983397, 0.023014400482177736, 0.022939647674560547, 0.023031808853149413, 0.022583295822143554, 0.023161855697631836, 0.022994943618774414, 0.022995967864990235, 0.022957056045532227, 0.02285158348083496, 0.022939647674560547, 0.02290790367126465, 0.0228853759765625, 0.023021631240844727, 0.022896575927734374, 0.022935552597045897, 0.022956031799316406, 0.022923263549804687, 0.022965248107910157, 0.02332159996032715, 0.02305843162536621, 0.02304204750061035, 0.02313523292541504, 0.023019519805908203, 0.022967296600341795, 0.023047168731689452, 0.022931455612182617, 0.023045120239257814, 0.023391231536865235, 0.02367897605895996, 0.02304102325439453, 0.022914047241210937, 0.022792192459106447, 0.022957056045532227, 0.022928384780883788, 0.02285670471191406, 0.022993919372558593, 0.023567359924316408, 0.023367679595947266, 0.023018495559692383, 0.023021568298339845, 0.023476224899291992, 0.023126016616821288, 0.022983680725097655, 0.02309836769104004, 0.022956031799316406, 0.022965248107910157, 0.023031808853149413, 0.02307891273498535, 0.023061504364013673, 0.02309734344482422, 0.022817792892456053, 0.023027711868286133, 0.022993919372558593, 0.022963199615478515, 0.02326425552368164, 0.023407615661621094, 0.02225049591064453, 0.023282688140869142, 0.02269900894165039, 0.02254745674133301, 0.02250444793701172, 0.02240716743469238, 0.02207744026184082, 0.021731327056884766, 0.021760000228881835, 0.02169856071472168, 0.021715967178344727, 0.02180607986450195, 0.021704704284667968, 0.021703680038452147, 0.021700607299804688, 0.021739519119262696, 
0.021533695220947266, 0.02168320083618164, 0.02171494483947754, 0.02268262481689453, 0.022510591506958007, 0.022615039825439453, 0.022591487884521484, 0.022583295822143554, 0.02249318313598633, 0.022590463638305663, 0.02247475242614746, 0.022607872009277344, 0.02272153663635254, 0.02261299133300781, 0.022495231628417968, 0.023364608764648437, 0.023323648452758788, 0.022558719635009765, 0.022404096603393556, 0.02248192024230957, 0.0224901123046875, 0.022641664505004884, 0.022361087799072265, 0.022353919982910156, 0.02241433525085449, 0.02231500816345215, 0.022046720504760742, 0.022841344833374022, 0.023654399871826173, 0.023061504364013673, 0.02268671989440918, 0.02251263999938965, 0.02263654327392578, 0.02247270393371582, 0.022687744140625, 0.022573055267333983, 0.022492160797119142, 0.022701055526733398, 0.022502399444580077, 0.022450176239013672, 0.02290176010131836, 0.022622207641601562, 0.022566911697387695, 0.022536191940307617, 0.02286796760559082, 0.022600704193115235, 0.022777856826782225, 0.02267852783203125, 0.02265907287597656, 0.022553600311279298, 0.022824960708618162, 0.022518783569335937, 0.022560768127441407, 0.022577152252197266, 0.022375423431396483, 0.02253824043273926, 0.022573055267333983, 0.02246451187133789, 0.022993919372558593, 0.02285158348083496, 0.022139904022216796, 0.022106111526489256, 0.02168832015991211, 0.021746688842773438, 0.0217262077331543, 0.021481472015380858, 0.02171494483947754, 0.02163302421569824, 0.021712896347045898, 0.021704704284667968, 0.021638143539428712, 0.02171801567077637, 0.0218470401763916, 0.021770240783691407, 0.021815296173095702, 0.0216760311126709, 0.02167807960510254, 0.02150297546386719, 0.021817344665527344, 0.02188595199584961, 0.021768192291259765, 0.021704704284667968, 0.022010879516601564, 0.021741567611694337, 0.021751808166503905, 0.021716991424560548, 0.021646335601806642, 0.02188902473449707, 0.021606399536132814, 0.021635072708129883, 0.021780479431152345, 0.022238208770751954, 0.022130687713623046, 0.021805055618286134, 0.02181427192687988, 0.02182246398925781, 0.021734399795532225, 0.021808128356933593, 0.0222423038482666, 0.02243174362182617, 0.021840896606445313, 0.021780479431152345, 0.021950464248657226, 0.021756927490234376, 0.022152191162109376, 0.02207334327697754, 0.022426624298095704, 0.022486015319824217, 0.021753856658935547, 0.02166988754272461, 0.02166374397277832, 0.02167807960510254, 0.021926912307739257, 0.022478847503662108, 0.021985279083251954, 0.022443008422851563, 0.02208051109313965, 0.02167296028137207, 0.02168012809753418, 0.022114303588867186, 0.02190028762817383, 0.021735424041748046, 0.02168934440612793, 0.0216944637298584, 0.022208511352539064, 0.021746688842773438, 0.02164735984802246, 0.022013952255249023, 0.02168217658996582, 0.02186649513244629, 0.021753856658935547, 0.022519807815551757, 0.023045120239257814, 0.022768640518188478, 0.022018047332763673, 0.02169241523742676, 0.02168934440612793, 0.021747711181640626, 0.021743616104125976, 0.02168524742126465, 0.021702655792236326, 0.02188083267211914, 0.02166886329650879, 0.021579776763916016, 0.021609472274780273, 0.021824512481689453, 0.021625856399536132, 0.021761024475097656, 0.02205081558227539, 0.02169241523742676, 0.021651456832885742, 0.022013952255249023, 0.02165043258666992]",tokens/s,43.89353681100785,,,2,64,1,,, 
-4bit-awq-exllama-v2-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,facebook/opt-66b,facebook/opt-66b,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - self.load_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3904, in from_pretrained - dispatch_model(model, **device_map_kwargs) - File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 489, in dispatch_model - model.to(device) - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 2796, in to - return super().to(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1173, in to - return self._apply(convert) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 779, in _apply - module._apply(fn) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 779, in _apply - module._apply(fn) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 779, in _apply - module._apply(fn) - [Previous line repeated 2 more times] - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 853, in _apply - self._buffers[key] = fn(buf) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1159, in convert - return t.to( -torch.cuda.OutOfMemoryError: CUDA out of memory. Tried to allocate 162.00 MiB. 
GPU - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,2,64,1,,, -4bit-awq-exllama-v2-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,0,0,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 304, in hf_raise_for_status - response.raise_for_status() - File ""/usr/local/lib/python3.10/dist-packages/requests/models.py"", line 1024, in raise_for_status - raise HTTPError(http_error_msg, response=self) -requests.exceptions.HTTPError: 404 Client Error: Not Found for url: https://huggingface.co/0/resolve/main/config.json - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1221, in hf_hub_download - return _hf_hub_download_to_cache_dir( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1325, in _hf_hub_download_to_cache_dir - _raise_on_head_call_error(head_call_error, force_download, local_files_only) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1823, in _raise_on_head_call_error - raise head_call_error - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1722, in _get_metadata_or_catch_error - metadata = get_hf_file_metadata(url=url, proxies=proxies, timeout=etag_timeout, headers=headers) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1645, in get_hf_file_metadata - r = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 372, in _request_wrapper - response = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 396, in _request_wrapper - hf_raise_for_status(response) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status - raise 
RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-669494d9-3ed6ff7a2f4e451a0832c5e7;9f50bcbd-7e2c-4c3d-a291-f640c0ad5763) - -Repository Not Found for url: https://huggingface.co/0/resolve/main/config.json. -Please make sure you specified the correct `repo_id` and `repo_type`. -If you are trying to access a private or gated repo, make sure you are authenticated. - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 425, in cached_file - raise EnvironmentError( -OSError: 0 is not a local folder and is not a valid model identifier listed on 'https://huggingface.co/models' -If this is a private repository, make sure to pass a token having permission to this repo either by logging in with `huggingface-cli login` or by passing `token=` - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,2,64,1,,, -4bit-awq-exllama-v2-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,facebook/xglm-4.5B,facebook/xglm-4.5B,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) 
-ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - self.load_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3907, in from_pretrained - hf_quantizer.postprocess_model(model) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/base.py"", line 195, in postprocess_model - return self._process_model_after_weight_loading(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/quantizer_awq.py"", line 107, in _process_model_after_weight_loading - model = post_init_awq_exllama_modules(model, self.quantization_config.exllama_config) - File ""/usr/local/lib/python3.10/dist-packages/transformers/integrations/awq.py"", line 466, in post_init_awq_exllama_modules - model = exllamav2_post_init( - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllamav2.py"", line 198, in exllamav2_post_init - submodule.post_init(scratch_space=model.scratch_spaces[device]) - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllamav2.py"", line 81, in post_init - self.q_handle = exlv2_ext.make_q_matrix( -NameError: name 'exlv2_ext' is not defined - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,2,64,1,,, -4bit-awq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,TencentARC/Mistral_Pro_8B_v0.1,TencentARC/Mistral_Pro_8B_v0.1,cuda,0,42,,,True,,,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,,mistral,MB,5176.860672,6590.824448,0.0,5953.814528,5766.738432,s,1,12.3128212890625,12.3128212890625,0.0,12.3128212890625,12.3128212890625,12.3128212890625,12.3128212890625,[12.3128212890625],,kWh,6.446706596179929e-05,3.529753733521686e-05,0.00011760731630799937,0.0002173719196050155,,MB,1831.890944,6651.641856,0.0,5995.757568,5281.196032,s,10,1.474117446899414,0.1474117446899414,6.394352938161639e-05,0.147416015625,0.14748158264160155,0.14749044036865233,0.14749752655029297,"[0.14727349853515626, 0.14733984375, 0.1474992980957031, 0.1474091796875, 0.14746182250976564, 0.1474228515625, 0.1473842315673828, 0.14740538024902344, 0.1474417266845703, 
0.1474796142578125]",tokens/s,1736.6323188051115,kWh,1.7427869145832474e-06,9.54783133229086e-07,6.6132896043532465e-06,9.310859652165578e-06,tokens/kWh,27494775.945898607,MB,1861.177344,6672.613376,0.0,6014.631936,5281.198592,s,10,26.45233642578125,2.645233642578125,0.027801800219134257,2.6552049560546873,2.67648251953125,2.6797645751953123,2.6823902197265626,"[2.665329345703125, 2.6053251953125, 2.621055908203125, 2.66583935546875, 2.616633544921875, 2.651419921875, 2.683046630859375, 2.658989990234375, 2.675753173828125, 2.608943359375]",tokens/s,23.816421727722428,kWh,3.058420516479184e-05,1.6761217870909876e-05,6.932991003904633e-05,0.00011667533307474806,tokens/kWh,539959.8898906853,,s,630,26.449479728698755,0.041983301156664654,0.001068450333005586,0.04220006561279297,0.04286993598937988,0.04372270221710205,0.045537885093688966,"[0.040992767333984374, 0.04112998580932617, 0.04286873626708984, 0.04236697769165039, 0.04293427276611328, 0.04294144058227539, 0.042910720825195314, 0.042919967651367186, 0.04261065673828125, 0.042412033081054686, 0.04254924774169922, 0.04232396697998047, 0.0422031364440918, 0.041240577697753904, 0.0416993293762207, 0.04091187286376953, 0.040804351806640625, 0.04150067138671875, 0.042243072509765625, 0.042262527465820314, 0.042261505126953126, 0.042428417205810545, 0.04192768096923828, 0.04232191848754883, 0.04245811080932617, 0.042297344207763675, 0.0424192008972168, 0.04288409423828125, 0.042409984588623044, 0.04287180709838867, 0.043052032470703126, 0.042649600982666014, 0.042780670166015625, 0.041589759826660154, 0.04211814498901367, 0.04231782531738281, 0.04218777465820313, 0.04210073471069336, 0.04223283386230469, 0.042382335662841795, 0.042229759216308595, 0.04226867294311523, 0.04220927810668945, 0.04234854507446289, 0.0421847038269043, 0.04223897552490234, 0.04267827224731445, 0.042705921173095705, 0.04332134246826172, 0.043763713836669924, 0.04236185455322266, 0.04223283386230469, 0.042205184936523435, 0.04279808044433594, 0.042866687774658206, 0.04261068725585938, 0.04213452911376953, 0.04210483169555664, 0.04223590469360351, 0.04214067077636719, 0.04213248062133789, 0.0418078727722168, 0.041985023498535154, 0.04084633636474609, 0.04059340667724609, 0.04063334274291992, 0.04069580841064453, 0.04068556976318359, 0.04077772903442383, 0.04072140884399414, 0.04093132781982422, 0.04070297622680664, 0.04068044662475586, 0.04062822341918945, 0.04252262496948242, 0.04492800140380859, 0.042625022888183595, 0.042240001678466796, 0.04235468673706055, 0.042136577606201174, 0.04208332824707031, 0.04192665481567383, 0.040899585723876954, 0.04077056121826172, 0.04076236724853516, 0.04081151962280274, 0.0407982063293457, 0.040637439727783206, 0.040583168029785156, 0.04068556976318359, 0.04070195388793945, 0.040891391754150394, 0.040799232482910154, 0.04064972686767578, 0.04070809555053711, 0.04076134490966797, 0.04069887924194336, 0.04061183929443359, 0.040924160003662106, 0.04199935913085937, 0.04539084625244141, 0.042426368713378904, 0.040927230834960936, 0.040667137145996096, 0.0406743049621582, 0.04052479934692383, 0.040545280456542966, 0.04254515075683594, 0.04348518371582031, 0.04135628890991211, 0.04093849563598633, 0.04063129425048828, 0.04063846588134765, 0.04062412643432617, 0.04059545516967773, 0.04056883239746094, 0.04084428787231445, 0.040597503662109374, 0.04228607940673828, 0.04211199951171875, 0.042234878540039066, 0.04204032135009766, 0.04096614456176758, 0.04074496078491211, 0.04192153549194336, 0.04519116973876953, 0.04054422378540039, 
0.040521728515625, 0.04056268692016601, 0.04059545516967773, 0.0406927375793457, 0.04064767837524414, 0.0407347183227539, 0.04084531021118164, 0.04273152160644531, 0.042788864135742184, 0.042439678192138675, 0.042202110290527346, 0.04218163299560547, 0.04206489562988281, 0.04213759994506836, 0.042297344207763675, 0.04214886474609375, 0.04206694412231445, 0.04221132659912109, 0.04238131332397461, 0.04547788619995117, 0.04163686370849609, 0.04051763153076172, 0.04074393463134766, 0.040581119537353515, 0.04067020797729492, 0.04064255905151367, 0.04067942428588867, 0.04067327880859375, 0.04055756759643555, 0.04065689468383789, 0.04077977752685547, 0.04065484619140625, 0.040599552154541016, 0.04053504180908203, 0.040714241027832034, 0.0405852165222168, 0.04055244827270508, 0.040600574493408204, 0.04106444931030274, 0.04060671997070313, 0.040613887786865234, 0.04049100875854492, 0.040599552154541016, 0.04179148864746094, 0.045484031677246094, 0.04294655990600586, 0.042436607360839845, 0.04216934585571289, 0.04205055999755859, 0.04224512100219727, 0.04226662445068359, 0.04233216094970703, 0.04230246353149414, 0.04221644973754883, 0.042382335662841795, 0.04233011245727539, 0.04220006561279297, 0.042439678192138675, 0.042452991485595705, 0.04224716949462891, 0.042275840759277344, 0.042229759216308595, 0.040804351806640625, 0.04071731185913086, 0.04067737579345703, 0.045669376373291014, 0.042759166717529294, 0.042418174743652344, 0.04239769744873047, 0.0423741455078125, 0.04210483169555664, 0.04227481460571289, 0.042213375091552735, 0.04238131332397461, 0.042205184936523435, 0.042403839111328126, 0.04220415878295898, 0.04246732711791992, 0.04340224075317383, 0.04504883193969727, 0.042656768798828126, 0.04233932876586914, 0.04225740814208984, 0.04245401763916016, 0.04249599838256836, 0.042401790618896484, 0.04234137725830078, 0.04249190521240234, 0.042521598815917966, 0.044211200714111325, 0.044478462219238284, 0.04250419235229492, 0.042446849822998046, 0.04238848114013672, 0.04232089614868164, 0.04231270217895508, 0.0421212158203125, 0.04235878372192383, 0.042102783203125, 0.042105857849121096, 0.042259456634521485, 0.04222771072387695, 0.041816062927246093, 0.04218368148803711, 0.042006526947021484, 0.04214479827880859, 0.04210172653198242, 0.04216012954711914, 0.04138905715942383, 0.04085760116577149, 0.04055756759643555, 0.04067020797729492, 0.04058214569091797, 0.040288257598876956, 0.04551679992675781, 0.04276428985595703, 0.04222873687744141, 0.04244275283813476, 0.042231807708740236, 0.04227481460571289, 0.04206694412231445, 0.042292224884033204, 0.042087425231933595, 0.0421662712097168, 0.04235366439819336, 0.04073984146118164, 0.04052787017822266, 0.04017356872558594, 0.04051865768432617, 0.04076339340209961, 0.040630271911621094, 0.0406640625, 0.04067942428588867, 0.04055859375, 0.04062412643432617, 0.04332953643798828, 0.04427264022827149, 0.04238131332397461, 0.04203417587280273, 0.042363903045654294, 0.04225740814208984, 0.042126335144042966, 0.04198604965209961, 0.042145790100097655, 0.04211711883544922, 0.0409620475769043, 0.04067020797729492, 0.040635391235351564, 0.040787967681884765, 0.04056371307373047, 0.040581119537353515, 0.040662017822265625, 0.04072550582885742, 0.04073164749145508, 0.04055859375, 0.04059648132324219, 0.04068454360961914, 0.04063846588134765, 0.040592414855957035, 0.04049711990356445, 0.04343091201782227, 0.04430131149291992, 0.042281982421875, 0.04212736129760742, 0.042275840759277344, 0.04214374542236328, 0.04155187225341797, 0.040551422119140625, 
0.04071321487426758, 0.04140544128417969, 0.04250726318359375, 0.04225228881835937, 0.04193689727783203, 0.0421734390258789, 0.0423454704284668, 0.042218494415283206, 0.04231167984008789, 0.04159385681152344, 0.042172416687011716, 0.04194918441772461, 0.04067839813232422, 0.040665088653564455, 0.04071116638183594, 0.04063436889648438, 0.04030361557006836, 0.045259777069091796, 0.0429117431640625, 0.042178558349609374, 0.040665088653564455, 0.040531967163085936, 0.04237107086181641, 0.042218494415283206, 0.042259456634521485, 0.04229939270019531, 0.042243072509765625, 0.04241408157348633, 0.042261505126953126, 0.04179763031005859, 0.04091904067993164, 0.040793087005615236, 0.04064460754394531, 0.04068864059448242, 0.040592384338378903, 0.04087910461425781, 0.04067737579345703, 0.04175667190551758, 0.04590694427490234, 0.04452761459350586, 0.042618881225585936, 0.04065689468383789, 0.04047872161865235, 0.040875007629394534, 0.040622081756591794, 0.040629249572753906, 0.04270489501953125, 0.042403839111328126, 0.0423454704284668, 0.041981952667236325, 0.04227174377441406, 0.04235776138305664, 0.042229759216308595, 0.04221542358398438, 0.04213759994506836, 0.04211097717285156, 0.04216524887084961, 0.04219289779663086, 0.04219801712036133, 0.042502143859863284, 0.04214374542236328, 0.04210892868041992, 0.042261505126953126, 0.04544924926757812, 0.04275811386108398, 0.04226867294311523, 0.04220927810668945, 0.04353843307495117, 0.0426956787109375, 0.042452991485595705, 0.04232294464111328, 0.0420208625793457, 0.042224639892578124, 0.042226688385009765, 0.04213145446777344, 0.04238643264770508, 0.04240793609619141, 0.04237311935424805, 0.04220006561279297, 0.04221542358398438, 0.04215193557739258, 0.042241024017333983, 0.04222771072387695, 0.04108390426635742, 0.04526387023925781, 0.042929153442382816, 0.042248191833496096, 0.0421580810546875, 0.04421734237670898, 0.04367257690429688, 0.04297011184692383, 0.04220006561279297, 0.04218368148803711, 0.042016769409179686, 0.04206182479858398, 0.042277889251708986, 0.042537982940673826, 0.04227686309814453, 0.043875328063964845, 0.043038719177246096, 0.042229759216308595, 0.04213452911376953, 0.042229793548583985, 0.042275806427001954, 0.04236697769165039, 0.04221644973754883, 0.04256665420532227, 0.04237516784667969, 0.0454912338256836, 0.042869728088378904, 0.042417152404785156, 0.04208127975463867, 0.042633216857910154, 0.04266495895385742, 0.04282572937011719, 0.042385406494140625, 0.04172697448730469, 0.04219801712036133, 0.04241305541992187, 0.04216012954711914, 0.04262809753417969, 0.04249292755126953, 0.04279808044433594, 0.04238950347900391, 0.04260147094726562, 0.04204851150512695, 0.04222259140014648, 0.04188671875, 0.04191231918334961, 0.04155289459228516, 0.04085760116577149, 0.040699905395507815, 0.04353126525878906, 0.04570214462280273, 0.04301619338989258, 0.04244889450073242, 0.04273664093017578, 0.04237209701538086, 0.04274790573120117, 0.044063743591308595, 0.04288819122314453, 0.04226047897338867, 0.042417152404785156, 0.04229324722290039, 0.04243251037597656, 0.04250624084472656, 0.04134297561645508, 0.0421396484375, 0.04216934585571289, 0.04230246353149414, 0.042277889251708986, 0.042105857849121096, 0.04233216094970703, 0.043342880249023434, 0.044874721527099606, 0.04244582366943359, 0.04223590469360351, 0.04214476776123047, 0.041987071990966796, 0.04206387329101562, 0.04211199951171875, 0.04221644973754883, 0.04192768096923828, 0.04099071884155273, 0.040600574493408204, 0.04066918563842774, 0.040662017822265625, 
0.040612865447998046, 0.04058726501464844, 0.04073779296875, 0.04131840133666992, 0.042159103393554685, 0.04218265533447266, 0.04231782531738281, 0.04291993713378906, 0.04308992004394531, 0.04251340866088867, 0.04221440124511719, 0.04544921493530273, 0.042793983459472655, 0.042815486907958986, 0.042347518920898435, 0.042076160430908206, 0.042246143341064454, 0.04216831970214844, 0.042294273376464846, 0.04206182479858398, 0.042065921783447265, 0.042363903045654294, 0.042401790618896484, 0.04209766387939453, 0.04213043212890625, 0.04202905654907227, 0.042224639892578124, 0.04185497665405274, 0.042022911071777344, 0.042001407623291014, 0.04212838363647461, 0.041954303741455076, 0.04206387329101562, 0.041992191314697266, 0.042259456634521485, 0.04208025741577148, 0.04580556869506836, 0.04245916748046875, 0.04238435363769531, 0.04205158233642578, 0.04227174377441406, 0.042254337310791014, 0.042971134185791016, 0.04232499313354492, 0.04235673522949219, 0.04278169631958008, 0.042382335662841795, 0.04240281677246094, 0.0423372802734375, 0.04262400054931641, 0.0421662712097168, 0.04219596862792969, 0.04231679916381836, 0.04227481460571289, 0.04221542358398438, 0.040834049224853515, 0.041842689514160154, 0.04597452926635742, 0.042858497619628906, 0.04226969528198242, 0.04244172668457031, 0.042196990966796875, 0.04228710556030273, 0.04232294464111328, 0.04252876663208008, 0.04242227172851563, 0.042355712890625, 0.042275840759277344, 0.042327041625976565, 0.04211711883544922, 0.040736766815185545, 0.04211916732788086, 0.042224639892578124, 0.04230963134765625, 0.042431488037109374, 0.041796607971191405, 0.0421212158203125, 0.04221747207641602, 0.04241408157348633, 0.04181094360351562, 0.04269158554077149, 0.045744129180908207, 0.04289945602416992, 0.04226559829711914, 0.04212736129760742, 0.04222259140014648, 0.042218494415283206, 0.04221747207641602, 0.04250217437744141, 0.04253692626953125, 0.04228607940673828, 0.04225740814208984, 0.042308609008789064, 0.04216012954711914, 0.04220620727539062, 0.042210304260253906, 0.044938240051269535, 0.044184574127197264, 0.04293836975097656, 0.042280960083007815, 0.042177536010742187, 0.04229632186889649, 0.04230451202392578, 0.0422553596496582, 0.042575870513916016, 0.041057281494140625, 0.040564735412597655, 0.04063641738891602, 0.040622081756591794, 0.04064665603637695, 0.04072959899902344, 0.04055756759643555, 0.041057281494140625, 0.04059852981567383, 0.040667137145996096, 0.04067532730102539, 0.04056371307373047, 0.04054732894897461, 0.040627201080322264, 0.04070707321166992, 0.04080332946777344, 0.04051968002319336, 0.04120064163208008, 0.04348620986938476, 0.042587135314941404, 0.04220723342895508, 0.04244889450073242, 0.045483009338378906, 0.04291788864135742, 0.042224639892578124, 0.042417152404785156, 0.04224512100219727, 0.04228403091430664, 0.04183552169799805, 0.04061695861816406, 0.040602622985839845, 0.0408616943359375, 0.04064255905151367, 0.04074086380004883, 0.04070195388793945, 0.04083609771728516, 0.04066304016113281, 0.04069683074951172, 0.04070707321166992, 0.04092620849609375, 0.040629249572753906, 0.04081151962280274, 0.04059545516967773, 0.04081049728393555, 0.040768512725830076, 0.041431041717529295, 0.04236800003051758, 0.0455464973449707, 0.04298854446411133, 0.042396671295166014, 0.04230656051635742, 0.042382335662841795, 0.042208255767822264, 0.042864639282226565, 0.04216934585571289, 0.04239769744873047, 0.041201663970947267, 0.04068966293334961, 0.04059852981567383, 0.040796161651611325, 0.04056371307373047, 0.04068454360961914, 
0.04059545516967773]",tokens/s,23.81899403928256,,,2,64,1,,, -4bit-awq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,Salesforce/codegen-6B-nl,Salesforce/codegen-6B-nl,cuda,0,42,,,True,,,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,,codegen,MB,4288.483328,6257.37728,0.0,5620.36736,5240.96,s,1,10.430087890625,10.430087890625,0.0,10.430087890625,10.430087890625,10.430087890625,10.430087890625,[10.430087890625],,kWh,4.2233730581945235e-05,2.312850354583653e-05,8.103589816202406e-05,0.00014639813228980582,,MB,1710.215168,6297.223168,0.0,5639.241728,4814.334464,s,10,1.2227707138061523,0.12227707138061523,0.0001830435634886841,0.12220438385009766,0.12247502365112305,0.12260759086608887,0.12271364463806153,"[0.12212723541259765, 0.12244556427001953, 0.12221702575683593, 0.12237308502197265, 0.12219657897949218, 0.12274015808105469, 0.12215779113769532, 0.12215033721923828, 0.12221218872070312, 0.12215074920654297]",tokens/s,2093.6059157251298,kWh,1.4451820487212255e-06,7.918483441008194e-07,5.81191183109763e-06,8.048942223919675e-06,tokens/kWh,31805421.492432218,MB,1745.846272,6297.223168,0.0,5639.241728,4946.948096,s,10,23.303291748046878,2.330329174804688,0.007355525280273777,2.3308511962890623,2.3386685302734374,2.339441198730469,2.3400593334960935,"[2.3402138671875, 2.338496826171875, 2.331844970703125, 2.313841552734375, 2.3334072265625, 2.3289921875, 2.329857421875, 2.3283359375, 2.33564111328125, 2.32266064453125]",tokens/s,27.034807220005828,kWh,2.766670566676487e-05,1.5161651867649086e-05,6.226455929670327e-05,0.00010509291683111724,tokens/kWh,599469.516116296,,s,630,23.301004241943318,0.036985721018957717,0.0007245700363062636,0.03661004829406738,0.03790674018859863,0.03806573104858398,0.03889160289764405,"[0.038130687713623046, 0.03793100738525391, 0.03746713638305664, 0.037926910400390625, 0.03794432067871094, 0.037645313262939455, 0.03762995147705078, 0.03779993438720703, 0.03978342437744141, 0.037531646728515625, 0.03638784027099609, 0.03657932662963867, 0.03646156692504883, 0.03648409652709961, 0.036395008087158204, 0.03658342361450195, 0.036620288848876956, 0.03653529739379883, 0.036928512573242187, 0.03652403259277344, 0.03648102569580078, 0.036506622314453126, 0.0365404167175293, 0.03662131118774414, 0.03651071929931641, 0.03646976089477539, 0.03688140869140625, 0.04068659210205078, 0.03822182464599609, 0.03799039840698242, 0.03801599884033203, 0.03785625457763672, 0.03661209487915039, 0.03658137512207031, 0.03645542526245117, 0.03760844802856445, 0.03750604629516602, 0.036511745452880856, 0.03656192016601562, 0.036523006439208985, 0.0364400634765625, 0.03645439910888672, 0.03644416046142578, 0.03713433456420898, 0.037561344146728515, 0.03781222534179687, 0.03647180938720703, 0.0366295051574707, 0.036569087982177735, 0.03644518280029297, 0.03651891326904297, 0.03671859359741211, 0.0360263671875, 0.036299774169921875, 0.0366376953125, 0.037634048461914066, 0.03725209426879883, 0.03642777633666992, 0.03684044647216797, 0.037200897216796876, 0.039049217224121094, 0.03799961471557617, 
0.038970367431640625, 0.03655475234985352, 0.036310016632080076, 0.036450302124023434, 0.03644518280029297, 0.03642777633666992, 0.03673190307617188, 0.036466686248779294, 0.03660287857055664, 0.03657932662963867, 0.03647078323364258, 0.03656192016601562, 0.03663052749633789, 0.03644723129272461, 0.03643904113769531, 0.03640934371948242, 0.03660902404785156, 0.036506622314453126, 0.03679948806762695, 0.03652403259277344, 0.03690291213989258, 0.037705726623535156, 0.03771084976196289, 0.036596736907958984, 0.037700607299804685, 0.03799347305297852, 0.03893350219726562, 0.03811840057373047, 0.037043201446533204, 0.036657150268554685, 0.037994495391845705, 0.036913150787353514, 0.036598785400390625, 0.03654246520996094, 0.036536319732666016, 0.0378081283569336, 0.03791257476806641, 0.037741600036621095, 0.03658953475952149, 0.03650457763671875, 0.0367534065246582, 0.037759998321533206, 0.03776409530639648, 0.03795455932617187, 0.03768320083618164, 0.03770675277709961, 0.037787647247314454, 0.03776921463012695, 0.03782656097412109, 0.036744190216064454, 0.03738214492797851, 0.037743614196777346, 0.037748737335205076, 0.03775692749023438, 0.03771187210083008, 0.037743614196777346, 0.03812761688232422, 0.03662236785888672, 0.0365250244140625, 0.03638272094726563, 0.036825088500976565, 0.03759820938110352, 0.0365291519165039, 0.03735244750976562, 0.036567039489746093, 0.03649126434326172, 0.03644416046142578, 0.036501502990722655, 0.03652710342407227, 0.037114879608154294, 0.036582401275634766, 0.0379791374206543, 0.0377784309387207, 0.03756851196289063, 0.03744563293457031, 0.03790643310546875, 0.03729919815063477, 0.03650867080688477, 0.036877311706542966, 0.037928958892822266, 0.03774156951904297, 0.037272575378417966, 0.03642572784423828, 0.03654143905639649, 0.036466686248779294, 0.036634624481201174, 0.03641548919677735, 0.03664179229736328, 0.036517887115478515, 0.037787647247314454, 0.03763507080078125, 0.036759552001953126, 0.03652608108520508, 0.0365404167175293, 0.03749478530883789, 0.03799552154541016, 0.037389312744140625, 0.037564414978027344, 0.03768012619018555, 0.03770265579223633, 0.03678105545043946, 0.036580352783203124, 0.036560897827148435, 0.0366376953125, 0.03639807891845703, 0.03654655838012695, 0.03642777633666992, 0.03765248107910156, 0.03758182525634766, 0.03658137512207031, 0.03644927978515625, 0.03665510559082031, 0.03647795104980469, 0.03640627288818359, 0.03657011032104492, 0.03636633682250977, 0.037874687194824216, 0.03770163345336914, 0.037784576416015625, 0.03714559936523437, 0.03727360153198242, 0.03744768142700195, 0.03762995147705078, 0.03717222213745117, 0.036738048553466796, 0.036397056579589845, 0.036506622314453126, 0.03669097518920898, 0.036385761260986325, 0.03645951843261719, 0.036514816284179685, 0.03641241455078125, 0.036375553131103515, 0.03649638366699219, 0.036523006439208985, 0.0366110725402832, 0.036413440704345705, 0.03643494415283203, 0.03650969696044922, 0.03718656158447266, 0.03637350463867187, 0.036441089630126954, 0.03660083389282227, 0.03640115356445312, 0.03641241455078125, 0.037743614196777346, 0.03723263931274414, 0.037763072967529294, 0.03783782577514649, 0.03758796691894531, 0.037375999450683595, 0.03657113647460938, 0.03647078323364258, 0.036514816284179685, 0.0364400634765625, 0.036397056579589845, 0.03781836700439453, 0.03778355026245117, 0.037771263122558595, 0.03665407943725586, 0.03642675018310547, 0.036441089630126954, 0.036468734741210936, 0.03649740982055664, 0.0363612174987793, 0.03772415924072266, 0.03757567977905273, 
0.036431873321533206, 0.036429824829101565, 0.03638681411743164, 0.03640729522705078, 0.03642572784423828, 0.03635200119018555, 0.03649740982055664, 0.03662233734130859, 0.03669401550292969, 0.03708927917480469, 0.036574207305908206, 0.03647180938720703, 0.036606975555419925, 0.036967422485351564, 0.03656294250488281, 0.036618240356445314, 0.03688345718383789, 0.03654246520996094, 0.03642572784423828, 0.03643392181396484, 0.03645439910888672, 0.03653017425537109, 0.036512767791748044, 0.03778355026245117, 0.03773747253417969, 0.037988353729248046, 0.03768729782104492, 0.03765657424926758, 0.03773132705688476, 0.03761971282958984, 0.03651379013061523, 0.036675582885742186, 0.0380313606262207, 0.03766681671142578, 0.0364400634765625, 0.03642777633666992, 0.038300670623779294, 0.03737395095825195, 0.03890585708618164, 0.0368773422241211, 0.037080032348632816, 0.036531200408935545, 0.036495361328125, 0.03643084716796875, 0.03644518280029297, 0.036514816284179685, 0.03650969696044922, 0.036413440704345705, 0.03657318496704102, 0.037147647857666014, 0.03666022491455078, 0.03655168151855469, 0.036421630859375, 0.03642367935180664, 0.03686502456665039, 0.03801094436645508, 0.038456256866455076, 0.03808358383178711, 0.03785932922363281, 0.03803955078125, 0.037718017578125, 0.037579776763916016, 0.03657625579833984, 0.03828531265258789, 0.03693670272827149, 0.03652710342407227, 0.036431873321533206, 0.037028926849365235, 0.037757888793945316, 0.03765862274169922, 0.036585472106933595, 0.03645747375488281, 0.03654348754882813, 0.03662335968017578, 0.0364031982421875, 0.03657113647460938, 0.03653734588623047, 0.03657011032104492, 0.03652505493164063, 0.03648921585083008, 0.036506622314453126, 0.03638476943969727, 0.03623116683959961, 0.03631411361694336, 0.03651583862304687, 0.03649331283569336, 0.03703807830810547, 0.03757056045532227, 0.03680460739135742, 0.036636672973632815, 0.03652608108520508, 0.03639603042602539, 0.03717222213745117, 0.03787673568725586, 0.037100543975830076, 0.03648716735839844, 0.03657113647460938, 0.03666636657714844, 0.03654553604125976, 0.036913150787353514, 0.03806617736816406, 0.037294078826904296, 0.03721625518798828, 0.03649126434326172, 0.03646771240234375, 0.03749273681640625, 0.036770816802978515, 0.036795391082763675, 0.036795391082763675, 0.03660800170898437, 0.03816447830200195, 0.03770982360839844, 0.037647361755371096, 0.03806719970703125, 0.03832524871826172, 0.03814092636108399, 0.03771187210083008, 0.037901313781738284, 0.03784089660644531, 0.03783782577514649, 0.03687936019897461, 0.03650764846801758, 0.036468734741210936, 0.036706302642822264, 0.036603904724121096, 0.03670732879638672, 0.037326847076416016, 0.03717222213745117, 0.03662540817260742, 0.036560897827148435, 0.036534271240234374, 0.03651379013061523, 0.036441089630126954, 0.036511745452880856, 0.03650048065185547, 0.03649228668212891, 0.03663564682006836, 0.037341182708740234, 0.03701964950561523, 0.0364769287109375, 0.03690291213989258, 0.03681484985351562, 0.03649228668212891, 0.036498432159423826, 0.03657727813720703, 0.03648102569580078, 0.03645747375488281, 0.03639910507202149, 0.03647590255737305, 0.03657625579833984, 0.03663872146606445, 0.03655987167358398, 0.03656499099731445, 0.03644927978515625, 0.03697971343994141, 0.03652608108520508, 0.03885670471191406, 0.03691622543334961, 0.03665100860595703, 0.038441982269287106, 0.03784908676147461, 0.038002689361572264, 0.03745792007446289, 0.03747635269165039, 0.038076416015625, 0.03827206420898437, 0.037813182830810546, 0.0377262077331543, 
0.036752384185791014, 0.036549633026123046, 0.03650252914428711, 0.03659161758422851, 0.036719615936279294, 0.0365404167175293, 0.0365588493347168, 0.036566017150878906, 0.037294078826904296, 0.03799961471557617, 0.03790233612060547, 0.036908096313476565, 0.037142463684082035, 0.03651379013061523, 0.03686912155151367, 0.0376360969543457, 0.036528129577636716, 0.03642879867553711, 0.03645542526245117, 0.0365865592956543, 0.036477886199951175, 0.03648921585083008, 0.03712204742431641, 0.037800960540771485, 0.038063102722167966, 0.03774566268920899, 0.037223423004150394, 0.03656192016601562, 0.036686847686767575, 0.03665203094482422, 0.03642675018310547, 0.036555774688720705, 0.036523006439208985, 0.03648614501953125, 0.03644518280029297, 0.03638784027099609, 0.037294078826904296, 0.036890625, 0.03646361541748047, 0.03661312103271484, 0.03650764846801758, 0.036483070373535154, 0.03643392181396484, 0.036421630859375, 0.03647590255737305, 0.03640729522705078, 0.037394432067871096, 0.03778355026245117, 0.0380313606262207, 0.03656294250488281, 0.03755212783813477, 0.03667865753173828, 0.03697971343994141, 0.037743614196777346, 0.037868545532226565, 0.03870719909667969, 0.03754291152954101, 0.036703231811523435, 0.03658137512207031, 0.03679129409790039, 0.03657529449462891, 0.037453758239746095, 0.0364400634765625, 0.036566017150878906, 0.036155391693115234, 0.036547584533691405, 0.03649126434326172, 0.03657932662963867, 0.036536319732666016, 0.036603904724121096, 0.03655680084228516, 0.036828159332275394, 0.03660902404785156, 0.036557823181152346, 0.03674214553833008, 0.036926464080810545, 0.03666233444213867, 0.044980159759521486, 0.03832524871826172, 0.03649331283569336, 0.03700121688842774, 0.03678822326660156, 0.03769343948364258, 0.03706367874145508, 0.036647937774658204, 0.03638476943969727, 0.03642879867553711, 0.03636838531494141, 0.03668582534790039, 0.03648102569580078, 0.036555774688720705, 0.03649433517456055, 0.036536319732666016, 0.03641139221191406, 0.036615169525146485, 0.03641446304321289, 0.036557823181152346, 0.03638681411743164, 0.03649331283569336, 0.036572158813476564, 0.036587520599365236, 0.03653324890136719, 0.03791257476806641, 0.03645849609375, 0.03651071929931641, 0.03648921585083008, 0.036601856231689454, 0.03648716735839844, 0.03645337677001953, 0.03654860687255859, 0.03651583862304687, 0.03639603042602539, 0.03661721420288086, 0.03647385787963867, 0.03662335968017578, 0.03661312103271484, 0.036466686248779294, 0.036544513702392575, 0.0364400634765625, 0.036503551483154296, 0.03652403259277344, 0.03645132827758789, 0.0365926399230957, 0.03644416046142578, 0.036528129577636716, 0.03656192016601562, 0.037743614196777346, 0.036604927062988284, 0.03667660903930664, 0.036853759765625, 0.03654246520996094, 0.036495361328125, 0.03641446304321289, 0.03648716735839844, 0.03650457763671875, 0.036603904724121096, 0.03662438583374023, 0.03654348754882813, 0.03653734588623047, 0.03656095886230469, 0.03653113555908203, 0.03648102569580078, 0.036587520599365236, 0.03653734588623047, 0.03822284698486328, 0.03878604888916016, 0.03841331100463867, 0.038109184265136715, 0.037773311614990236, 0.03754086303710937, 0.037541889190673826, 0.03781324768066406, 0.03791360092163086, 0.03827507019042969, 0.03789926528930664, 0.038065185546875, 0.037660640716552736, 0.03640524673461914, 0.03641446304321289, 0.03738009643554688, 0.037599231719970705, 0.0379504623413086, 0.037884929656982425, 0.037749759674072264, 0.03776204681396484, 0.037943294525146484, 0.037765121459960936, 
0.03775590515136719, 0.037872638702392575, 0.037792766571044925, 0.036598785400390625, 0.03648921585083008, 0.03705036926269531, 0.03794636917114258, 0.03873689651489258, 0.038027263641357424, 0.03800678253173828, 0.03790950393676758, 0.037713920593261716, 0.037733375549316404, 0.036485214233398434, 0.03654339218139648, 0.03607654571533203, 0.03602534484863281, 0.0358737907409668, 0.03653222274780273, 0.03649228668212891, 0.03642572784423828, 0.03644825744628906, 0.036703231811523435, 0.03648409652709961, 0.03643801498413086, 0.036318206787109376, 0.03641753768920898, 0.03645753479003906, 0.03636627197265625, 0.03644416046142578, 0.03631513595581055, 0.03649228668212891, 0.03654246520996094, 0.03655987167358398, 0.03649331283569336, 0.0363397102355957, 0.036961280822753906, 0.03790233612060547, 0.03654655838012695, 0.036375553131103515, 0.03634380722045898, 0.036465663909912106, 0.0366376953125, 0.036580352783203124, 0.03655372619628906, 0.036347904205322266, 0.03727769470214844, 0.03769651031494141, 0.03679948806762695, 0.036759552001953126, 0.03790950393676758, 0.03732582473754883, 0.036588542938232424, 0.03654143905639649, 0.037566463470458986, 0.037631999969482424, 0.03778662490844727, 0.036598785400390625, 0.036657150268554685, 0.036634624481201174, 0.03644313430786133, 0.03641446304321289, 0.036566017150878906, 0.03757875061035156, 0.03779993438720703, 0.03765657424926758]",tokens/s,27.0374612810017,,,2,64,1,,, -4bit-awq-exllama-v2-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,facebook/opt-125m,facebook/opt-125m,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - self.load_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File 
""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3907, in from_pretrained - hf_quantizer.postprocess_model(model) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/base.py"", line 195, in postprocess_model - return self._process_model_after_weight_loading(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/quantizer_awq.py"", line 107, in _process_model_after_weight_loading - model = post_init_awq_exllama_modules(model, self.quantization_config.exllama_config) - File ""/usr/local/lib/python3.10/dist-packages/transformers/integrations/awq.py"", line 466, in post_init_awq_exllama_modules - model = exllamav2_post_init( - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllamav2.py"", line 198, in exllamav2_post_init - submodule.post_init(scratch_space=model.scratch_spaces[device]) - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllamav2.py"", line 81, in post_init - self.q_handle = exlv2_ext.make_q_matrix( -NameError: name 'exlv2_ext' is not defined - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,2,64,1,,, -4bit-awq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,Qwen/Qwen1.5-110B,Qwen/Qwen1.5-110B,cuda,0,42,,,True,,,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run - report = scenario.run(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run - self.run_model_loading_tracking(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking - backend.load() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load - self.load_transformers_model() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model - self.load_transformers_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights - self.load_transformers_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained - self.pretrained_model = self.automodel_loader.from_pretrained( - File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4034, in from_pretrained - dispatch_model(model, **device_map_kwargs) - File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 494, in dispatch_model - model.to(device) - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 2905, in to - return super().to(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1174, in to - return self._apply(convert) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply - module._apply(fn) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply - module._apply(fn) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply - module._apply(fn) - [Previous line repeated 2 more times] - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 854, in _apply - self._buffers[key] = fn(buf) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1160, in convert - return t.to( -torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 32.00 MiB. GPU 0 has a total capacity of 22.18 GiB of which 8.50 MiB is free. Process 86460 has 22.17 GiB memory in use. Of the allocated memory 21.91 GiB is allocated by PyTorch, and 17.87 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) - -",qwen2,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,2,64,1,,, -4bit-awq-exllama-v2-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,B,B,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 304, in hf_raise_for_status - response.raise_for_status() - File ""/usr/local/lib/python3.10/dist-packages/requests/models.py"", line 1024, in raise_for_status - raise HTTPError(http_error_msg, response=self) -requests.exceptions.HTTPError: 404 Client Error: Not Found for url: 
https://huggingface.co/B/resolve/main/config.json - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1221, in hf_hub_download - return _hf_hub_download_to_cache_dir( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1325, in _hf_hub_download_to_cache_dir - _raise_on_head_call_error(head_call_error, force_download, local_files_only) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1823, in _raise_on_head_call_error - raise head_call_error - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1722, in _get_metadata_or_catch_error - metadata = get_hf_file_metadata(url=url, proxies=proxies, timeout=etag_timeout, headers=headers) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1645, in get_hf_file_metadata - r = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 372, in _request_wrapper - response = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 396, in _request_wrapper - hf_raise_for_status(response) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status - raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-669493c4-3b2e74dc6e607ee34f0464bc;6a9f2d1a-1120-492e-a51c-9bf5c063028a) - -Repository Not Found for url: https://huggingface.co/B/resolve/main/config.json. -Please make sure you specified the correct `repo_id` and `repo_type`. -If you are trying to access a private or gated repo, make sure you are authenticated. 
- -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 425, in cached_file - raise EnvironmentError( -OSError: B is not a local folder and is not a valid model identifier listed on 'https://huggingface.co/models' -If this is a private repository, make sure to pass a token having permission to this repo either by logging in with `huggingface-cli login` or by passing `token=` - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,2,64,1,,, -4bit-awq-exllama-v2-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,t,t,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 304, in hf_raise_for_status - response.raise_for_status() - File ""/usr/local/lib/python3.10/dist-packages/requests/models.py"", line 1024, in raise_for_status - raise HTTPError(http_error_msg, response=self) -requests.exceptions.HTTPError: 404 Client Error: Not Found for url: https://huggingface.co/t/resolve/main/config.json - -The above exception was the 
direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1221, in hf_hub_download - return _hf_hub_download_to_cache_dir( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1325, in _hf_hub_download_to_cache_dir - _raise_on_head_call_error(head_call_error, force_download, local_files_only) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1823, in _raise_on_head_call_error - raise head_call_error - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1722, in _get_metadata_or_catch_error - metadata = get_hf_file_metadata(url=url, proxies=proxies, timeout=etag_timeout, headers=headers) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1645, in get_hf_file_metadata - r = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 372, in _request_wrapper - response = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 396, in _request_wrapper - hf_raise_for_status(response) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status - raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-669490b9-22be1ff2055a4add6d74494d;8b5ceaee-3d79-4d05-8ed7-97d8bf434133) - -Repository Not Found for url: https://huggingface.co/t/resolve/main/config.json. -Please make sure you specified the correct `repo_id` and `repo_type`. -If you are trying to access a private or gated repo, make sure you are authenticated. 
- -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 425, in cached_file - raise EnvironmentError( -OSError: t is not a local folder and is not a valid model identifier listed on 'https://huggingface.co/models' -If this is a private repository, make sure to pass a token having permission to this repo either by logging in with `huggingface-cli login` or by passing `token=` - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,2,64,1,,, -4bit-awq-exllama-v2-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,stabilityai/stablelm-base-alpha-3b,stabilityai/stablelm-base-alpha-3b,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File 
""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - self.load_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3907, in from_pretrained - hf_quantizer.postprocess_model(model) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/base.py"", line 195, in postprocess_model - return self._process_model_after_weight_loading(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/quantizer_awq.py"", line 107, in _process_model_after_weight_loading - model = post_init_awq_exllama_modules(model, self.quantization_config.exllama_config) - File ""/usr/local/lib/python3.10/dist-packages/transformers/integrations/awq.py"", line 466, in post_init_awq_exllama_modules - model = exllamav2_post_init( - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllamav2.py"", line 198, in exllamav2_post_init - submodule.post_init(scratch_space=model.scratch_spaces[device]) - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllamav2.py"", line 81, in post_init - self.q_handle = exlv2_ext.make_q_matrix( -NameError: name 'exlv2_ext' is not defined - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,2,64,1,,, -4bit-awq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,Salesforce/codegen-16B-nl,Salesforce/codegen-16B-nl,cuda,0,42,,,True,,,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,,codegen,MB,8812.408832,12729.188352,0.0,12092.178432,11230.352384,s,1,12.63940234375,12.63940234375,0.0,12.63940234375,12.63940234375,12.63940234375,12.63940234375,[12.63940234375],,kWh,6.878583669930486e-05,3.7681261199117416e-05,0.00014969595309000105,0.00025616305098842334,,MB,1914.359808,12766.937088,0.0,12108.955648,10041.084416,s,10,2.628558135986328,0.2628558135986328,0.0002611343577937706,0.26280087280273434,0.2630904449462891,0.2632987319946289,0.26346536163330075,"[0.2630441589355469, 0.26270751953125, 0.26269061279296874, 0.26295098876953127, 0.2626379089355469, 0.2628329162597656, 0.26350701904296875, 0.26252825927734375, 0.2627688293457031, 0.26288992309570314]",tokens/s,973.9179685441492,kWh,3.1062728318554305e-06,1.702004571112117e-06,1.2274404406410933e-05,1.708268180937848e-05,tokens/kWh,14985937.3871528,MB,1928.695808,12766.937088,0.0,12108.955648,10287.128576,s,10,24.299138427734377,2.4299138427734377,0.026846921042359688,2.4357945556640628,2.46067158203125,2.461858984375,2.4628089062500003,"[2.441497802734375, 2.43009130859375, 2.46040771484375, 2.3835390625, 2.392809814453125, 2.415308349609375, 2.4106875, 2.44555322265625, 2.456197265625, 
2.46304638671875]",tokens/s,25.926845179042854,kWh,2.8669062709603595e-05,1.5710501403634395e-05,8.529439587078979e-05,0.00012967395998402775,tokens/kWh,485833.85598588845,,s,630,24.296575954437255,0.03856599357847183,0.0007075134568515916,0.03888332748413086,0.0392502269744873,0.03947171974182129,0.04011685771942139,"[0.03829248046875, 0.037907455444335936, 0.0378152961730957, 0.03853414535522461, 0.03938304138183594, 0.038166526794433595, 0.03900620651245117, 0.039024639129638675, 0.03882905578613281, 0.03847884750366211, 0.038962177276611325, 0.03875635147094727, 0.03892531204223633, 0.03896115112304688, 0.039002113342285157, 0.03948646545410156, 0.03908812713623047, 0.038730751037597655, 0.038991870880126955, 0.03899903869628906, 0.038934528350830076, 0.03899801635742187, 0.03888435363769531, 0.03906867218017578, 0.0390041618347168, 0.039034881591796876, 0.0381399040222168, 0.03889254379272461, 0.039005184173583986, 0.03866316986083984, 0.03922739028930664, 0.03908403015136719, 0.038957054138183594, 0.03909734344482422, 0.03890790557861328, 0.03954278564453125, 0.03906969451904297, 0.03896012878417969, 0.039008255004882815, 0.03908607864379883, 0.03861196899414063, 0.039330814361572264, 0.03908095932006836, 0.03906867218017578, 0.037917697906494144, 0.038951934814453124, 0.03899801635742187, 0.03908812713623047, 0.03822390365600586, 0.03835593414306641, 0.038504447937011715, 0.03896012878417969, 0.03901030349731445, 0.038566913604736325, 0.03730022430419922, 0.03770265579223633, 0.03945167922973633, 0.039054302215576174, 0.03805388641357422, 0.037722110748291016, 0.03770163345336914, 0.03784806442260742, 0.03885772705078125, 0.0388853759765625, 0.03864780807495117, 0.03894681549072266, 0.03892230224609375, 0.039064510345458985, 0.037703678131103514, 0.037773311614990236, 0.03775692749023438, 0.038724609375, 0.03903692626953125, 0.038882305145263675, 0.03925299072265625, 0.03904409790039062, 0.037678081512451174, 0.03769651031494141, 0.037743614196777346, 0.0384634895324707, 0.03800678253173828, 0.03764633560180664, 0.03760025787353516, 0.038724609375, 0.03885260772705078, 0.03913113784790039, 0.038102046966552734, 0.03918844985961914, 0.03908812713623047, 0.039060478210449216, 0.038994945526123044, 0.038975486755371096, 0.03889664077758789, 0.038744064331054685, 0.039103488922119144, 0.03896831893920898, 0.03903692626953125, 0.03888332748413086, 0.03907379150390625, 0.03909939193725586, 0.038419456481933595, 0.039258113861083986, 0.03942195129394531, 0.03894579315185547, 0.03890796661376953, 0.03797087860107422, 0.038042625427246096, 0.03887206268310547, 0.03885567855834961, 0.03757363128662109, 0.03940556716918946, 0.0389939193725586, 0.037684257507324216, 0.0376319694519043, 0.038458366394042966, 0.03920383834838867, 0.03753267288208008, 0.03764633560180664, 0.03776716613769531, 0.03881062316894531, 0.03892838287353516, 0.038919166564941404, 0.03889766311645508, 0.03893964767456055, 0.03771289443969727, 0.03763097763061524, 0.03783475112915039, 0.03861708831787109, 0.03917107009887695, 0.03907993698120117, 0.03909222412109375, 0.03767398452758789, 0.038898689270019535, 0.0389857292175293, 0.03889766311645508, 0.037591041564941405, 0.03882393646240234, 0.04117913436889648, 0.04045209503173828, 0.039504894256591795, 0.039346176147460936, 0.039314430236816404, 0.03891712188720703, 0.03898470306396484, 0.03895296096801758, 0.03809689712524414, 0.039216159820556644, 0.03912803268432617, 0.03918643188476562, 0.039144447326660156, 0.03932057571411133, 0.038932479858398435, 
0.039034881591796876, 0.038965248107910154, 0.03901030349731445, 0.039019519805908204, 0.038661121368408206, 0.03892531204223633, 0.038965248107910154, 0.038972415924072266, 0.03891814422607422, 0.03902975845336914, 0.03889977645874024, 0.03862726211547852, 0.038765567779541016, 0.03904000091552735, 0.03892633438110352, 0.03886899185180664, 0.03964108657836914, 0.03971075057983398, 0.03904201507568359, 0.039293952941894535, 0.038937599182128906, 0.03896115112304688, 0.038319103240966795, 0.03896012878417969, 0.038967296600341796, 0.03953462219238281, 0.03909423828125, 0.03901747131347656, 0.0391383056640625, 0.039180286407470705, 0.03884339141845703, 0.03954278564453125, 0.040406017303466796, 0.03944243240356445, 0.03903897476196289, 0.038991870880126955, 0.03904819107055664, 0.03786342239379883, 0.03907993698120117, 0.03814092636108399, 0.03762790298461914, 0.03760537719726562, 0.037602302551269534, 0.03806208038330078, 0.03772723388671875, 0.03758899307250976, 0.03754291152954101, 0.037599231719970705, 0.037610496520996094, 0.03747430419921875, 0.03731558227539063, 0.03764223861694336, 0.03787366485595703, 0.03757567977905273, 0.037495807647705076, 0.037615615844726565, 0.03754291152954101, 0.03773235321044922, 0.037617664337158206, 0.037569534301757815, 0.03768524932861328, 0.03758694458007812, 0.03757056045532227, 0.037556224822998044, 0.038942718505859376, 0.03902566528320312, 0.037733375549316404, 0.037623809814453124, 0.037703678131103514, 0.03880243301391602, 0.03933184051513672, 0.03745177459716797, 0.03750092697143555, 0.03764223861694336, 0.03766579055786133, 0.03757056045532227, 0.037765121459960936, 0.03758489608764649, 0.037678081512451174, 0.03771596908569336, 0.03765760040283203, 0.03765657424926758, 0.0378152961730957, 0.038865921020507815, 0.03910246276855469, 0.03822079849243164, 0.03756851196289063, 0.03753276824951172, 0.03764214324951172, 0.03750604629516602, 0.03767193603515625, 0.038989822387695314, 0.03787059020996094, 0.037541889190673826, 0.03773440170288086, 0.037572608947753904, 0.038027263641357424, 0.037591041564941405, 0.03760435104370117, 0.03749683380126953, 0.037766143798828124, 0.03955507278442383, 0.03916185760498047, 0.03898883056640625, 0.03769443130493164, 0.037850112915039064, 0.03769036865234375, 0.03771596908569336, 0.03764223861694336, 0.03716198348999023, 0.037498878479003905, 0.03765555191040039, 0.03762278366088867, 0.037569534301757815, 0.03764326477050781, 0.037682239532470706, 0.037892032623291015, 0.03757363128662109, 0.0375654411315918, 0.03751116943359375, 0.03755929565429687, 0.03762790298461914, 0.037528575897216795, 0.0377262077331543, 0.037359615325927735, 0.037375038146972656, 0.03761145782470703, 0.03785932922363281, 0.0376360969543457, 0.0375623664855957, 0.037556224822998044, 0.037743614196777346, 0.037585918426513674, 0.03875532913208008, 0.03888127899169922, 0.037634048461914066, 0.037585918426513674, 0.03894374465942383, 0.03888742446899414, 0.03900620651245117, 0.037577728271484374, 0.03773235321044922, 0.03794739151000977, 0.03759308624267578, 0.03765657424926758, 0.03748966217041016, 0.037631999969482424, 0.037759998321533206, 0.037544960021972655, 0.037579776763916016, 0.0375203857421875, 0.0377968635559082, 0.037582847595214845, 0.037318656921386716, 0.0384716796875, 0.038817790985107424, 0.03877891159057617, 0.03846345520019531, 0.03877273559570313, 0.0389119987487793, 0.03881881713867188, 0.038801406860351564, 0.04010086441040039, 0.03772415924072266, 0.03752755355834961, 0.03736678314208984, 0.03781017684936523, 
0.03752243041992188, 0.037577728271484374, 0.03907379150390625, 0.037647361755371096, 0.037200897216796876, 0.037599231719970705, 0.03841843032836914, 0.03889561462402344, 0.038773761749267575, 0.038597663879394534, 0.03878294372558594, 0.03899289703369141, 0.03856588745117188, 0.03878297424316406, 0.03909836959838867, 0.0392171516418457, 0.03892531204223633, 0.039054336547851565, 0.039018497467041016, 0.039204864501953124, 0.03948646545410156, 0.03911782455444336, 0.03875328063964844, 0.039002113342285157, 0.038965248107910154, 0.03903590393066406, 0.037800960540771485, 0.03791360092163086, 0.037989376068115234, 0.03775283050537109, 0.03774771118164062, 0.03770982360839844, 0.03911065673828125, 0.038675457000732424, 0.03884646224975586, 0.03908095932006836, 0.038171646118164065, 0.0378152961730957, 0.03767193603515625, 0.03789823913574219, 0.03895603179931641, 0.039570430755615234, 0.0387061767578125, 0.03769651031494141, 0.037746688842773435, 0.03765862274169922, 0.03770163345336914, 0.038228992462158204, 0.03775283050537109, 0.03782860946655273, 0.03768524932861328, 0.03902771377563476, 0.0377968635559082, 0.037928958892822266, 0.03795251083374023, 0.037705726623535156, 0.03765350341796875, 0.03769548797607422, 0.039820289611816405, 0.03803033447265625, 0.03795251083374023, 0.03796275329589844, 0.03776921463012695, 0.03768115234375, 0.0376360969543457, 0.03766579055786133, 0.038125568389892575, 0.03771289443969727, 0.03762995147705078, 0.03853414535522461, 0.038970367431640625, 0.03909632110595703, 0.03888435363769531, 0.03903692626953125, 0.038302719116210936, 0.03736576080322265, 0.03769241714477539, 0.03788496017456055, 0.0377108154296875, 0.03859763336181641, 0.03913017654418945, 0.03770159912109375, 0.03779068756103516, 0.03763507080078125, 0.03995443344116211, 0.03943833541870117, 0.039414783477783204, 0.03908095932006836, 0.03910758590698242, 0.03889561462402344, 0.038937599182128906, 0.039018497467041016, 0.039314430236816404, 0.03892428970336914, 0.03896319961547851, 0.037452831268310546, 0.037645278930664064, 0.03761971282958984, 0.03786751937866211, 0.03778867340087891, 0.03764940643310547, 0.03768729782104492, 0.03774671936035156, 0.03758179092407227, 0.038942718505859376, 0.039067649841308595, 0.040360958099365234, 0.03782963180541992, 0.03754393768310547, 0.03745792007446289, 0.03779379272460937, 0.03758489608764649, 0.03770163345336914, 0.03821977615356445, 0.03894169616699219, 0.03892838287353516, 0.038934528350830076, 0.037667839050292966, 0.038161407470703124, 0.03759513473510742, 0.0375623664855957, 0.03756851196289063, 0.03784089660644531, 0.037749759674072264, 0.037340160369873046, 0.0376627197265625, 0.037889022827148434, 0.038793216705322264, 0.03887615966796875, 0.038983680725097655, 0.03888435363769531, 0.03796889495849609, 0.03931340789794922, 0.0392355842590332, 0.03896115112304688, 0.03894784164428711, 0.038768638610839845, 0.03945369720458984, 0.03880755233764648, 0.03812351989746094, 0.039060478210449216, 0.03775590515136719, 0.0393809928894043, 0.03901337432861328, 0.03896627044677734, 0.0390010871887207, 0.03886899185180664, 0.03890380859375, 0.037645313262939455, 0.03782553482055664, 0.03743027114868164, 0.03950284957885742, 0.039175167083740234, 0.03909529495239258, 0.03888127899169922, 0.03899084854125977, 0.03887923049926758, 0.03869388961791992, 0.0397946891784668, 0.039213054656982424, 0.0387061767578125, 0.039204864501953124, 0.039201793670654295, 0.039554046630859374, 0.03884134292602539, 0.03878604888916016, 0.03873894500732422, 0.03926220703125, 
0.038994945526123044, 0.0391536636352539, 0.03891712188720703, 0.03894275283813477, 0.03896931076049805, 0.038830078125, 0.03863347244262695, 0.03848294448852539, 0.0388403205871582, 0.03927961730957031, 0.03923251342773437, 0.03934515380859375, 0.038981632232666014, 0.03915776062011719, 0.03830476760864258, 0.040123390197753905, 0.03914854431152344, 0.03922022247314453, 0.039188480377197264, 0.03924991989135742, 0.039144447326660156, 0.03910451126098633, 0.03888742446899414, 0.038953983306884765, 0.03896831893920898, 0.03896115112304688, 0.038849536895751956, 0.038870014190673825, 0.03896934509277344, 0.03897139358520508, 0.03909120178222656, 0.039093246459960936, 0.039139328002929685, 0.039032833099365234, 0.039311359405517575, 0.03953868865966797, 0.03899084854125977, 0.03867238235473633, 0.03900620651245117, 0.0393256950378418, 0.03914137649536133, 0.038975486755371096, 0.039065601348876954, 0.038932479858398435, 0.03892940902709961, 0.039060478210449216, 0.03897958374023437, 0.038591487884521485, 0.039000064849853515, 0.03902975845336914, 0.039034881591796876, 0.039003135681152344, 0.03905843353271484, 0.038781951904296875, 0.03897651290893555, 0.03896319961547851, 0.03909222412109375, 0.03902668762207031, 0.03897446441650391, 0.03910860824584961, 0.03914342498779297, 0.03895603179931641, 0.039021568298339845, 0.03876454544067383, 0.039054336547851565, 0.03927859115600586, 0.03930214309692383, 0.03887615966796875, 0.039093246459960936, 0.039155712127685545, 0.039136257171630856, 0.0389857292175293, 0.03899903869628906, 0.0389222412109375, 0.03889152145385742, 0.03888332748413086, 0.03890687942504883, 0.0378787841796875, 0.038245376586914064, 0.03817779159545898, 0.039723007202148435, 0.0391536636352539, 0.039241729736328126, 0.03872256088256836, 0.039570430755615234, 0.03891404724121094, 0.038989822387695314, 0.03927552032470703, 0.03903590393066406, 0.03924787139892578, 0.03890892791748047, 0.03911679840087891, 0.038981632232666014, 0.039049217224121094, 0.038978561401367184, 0.038640640258789063, 0.03884236907958984, 0.038798336029052735, 0.038934528350830076, 0.03903897476196289, 0.03931852722167969, 0.03911475372314453, 0.03904204940795898, 0.039152641296386716, 0.03982643127441406, 0.039204864501953124, 0.038970367431640625, 0.039012416839599606, 0.039045055389404296, 0.03978854370117187, 0.03959500885009765, 0.03971686553955078, 0.03914137649536133, 0.03920588684082031, 0.0389857292175293, 0.03915468978881836, 0.03913011169433594, 0.03911884689331055, 0.039626750946044925, 0.03934515380859375, 0.03901030349731445, 0.04050124740600586, 0.04139724731445313, 0.039413761138916016, 0.03918745422363281, 0.039139328002929685, 0.03929087829589844, 0.038596607208251955, 0.03759206390380859, 0.03764121627807617, 0.03850342559814453, 0.03789311981201172, 0.03836006546020508, 0.03786342239379883, 0.03861196899414063, 0.0395489273071289, 0.03906252670288086, 0.039056385040283206, 0.03902566528320312, 0.03896934509277344, 0.03923660659790039, 0.03903388977050781, 0.039203807830810546]",tokens/s,25.929579591026442,,,2,64,1,,, 
-4bit-awq-exllama-v2-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,facebook/opt-6.7b,facebook/opt-6.7b,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - self.load_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3907, in from_pretrained - hf_quantizer.postprocess_model(model) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/base.py"", line 195, in postprocess_model - return self._process_model_after_weight_loading(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/quantizer_awq.py"", line 107, in _process_model_after_weight_loading - model = post_init_awq_exllama_modules(model, self.quantization_config.exllama_config) - File ""/usr/local/lib/python3.10/dist-packages/transformers/integrations/awq.py"", line 466, in post_init_awq_exllama_modules - model = exllamav2_post_init( - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllamav2.py"", line 198, in exllamav2_post_init - submodule.post_init(scratch_space=model.scratch_spaces[device]) - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllamav2.py"", line 81, in post_init - self.q_handle = exlv2_ext.make_q_matrix( -NameError: name 'exlv2_ext' is not defined - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,2,64,1,,, 
-4bit-awq-exllama-v1-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,.,.,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 373, in cached_file - raise EnvironmentError( -OSError: . does not appear to have a file named config.json. Checkout 'https://huggingface.co/./tree/None' for available files. 
- -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,1,64,1,,, -4bit-awq-exllama-v1-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,l,l,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 304, in hf_raise_for_status - response.raise_for_status() - File ""/usr/local/lib/python3.10/dist-packages/requests/models.py"", line 1024, in raise_for_status - raise HTTPError(http_error_msg, response=self) -requests.exceptions.HTTPError: 404 Client Error: Not Found for url: https://huggingface.co/l/resolve/main/config.json - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1221, in hf_hub_download - return _hf_hub_download_to_cache_dir( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1325, in _hf_hub_download_to_cache_dir - _raise_on_head_call_error(head_call_error, force_download, local_files_only) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1823, in _raise_on_head_call_error - raise head_call_error - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1722, in _get_metadata_or_catch_error - metadata = get_hf_file_metadata(url=url, proxies=proxies, timeout=etag_timeout, headers=headers) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1645, in get_hf_file_metadata - r = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 372, in _request_wrapper - response = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 396, in _request_wrapper - hf_raise_for_status(response) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status - raise RepositoryNotFoundError(message, 
response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-669491c6-16e5cc07103660a311ab2310;d3f549f5-adbc-4caa-962c-5405faa4a87a) - -Repository Not Found for url: https://huggingface.co/l/resolve/main/config.json. -Please make sure you specified the correct `repo_id` and `repo_type`. -If you are trying to access a private or gated repo, make sure you are authenticated. - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 425, in cached_file - raise EnvironmentError( -OSError: l is not a local folder and is not a valid model identifier listed on 'https://huggingface.co/models' -If this is a private repository, make sure to pass a token having permission to this repo either by logging in with `huggingface-cli login` or by passing `token=` - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,1,64,1,,, -4bit-awq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,Qwen/Qwen1.5-32B,Qwen/Qwen1.5-32B,cuda,0,42,,,True,,,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,qwen2,MB,17837.03552,24050.663424,0.0,23420.993536,21732.465152,s,1,17.06374609375,17.06374609375,0.0,17.06374609375,17.06374609375,17.06374609375,17.06374609375,[17.06374609375],,kWh,0.00012099183414793011,6.627365058201227e-05,0.00026868521494793907,0.00045595069967788144,,MB,4436.455424,24138.743808,0.0,23492.296704,20639.09888,s,10,56.62566259765625,5.662566259765625,0.00022114139218815476,5.6626127929687495,5.662740917968749,5.662840673828125,5.662920478515625,"[5.6625556640625, 5.6624521484375, 5.662642578125, 
5.66214697265625, 5.66224951171875, 5.6625830078125, 5.66266064453125, 5.66271875, 5.662712890625, 5.6629404296875]",tokens/s,45.209184008841234,kWh,6.687156002812623e-05,3.6648271328772315e-05,0.0004032292392498027,0.0005067490706067013,tokens/kWh,505180.9955832895,MB,4440.7808,24142.938112,0.0,23498.58816,20639.10144,s,10,34.503544921875,3.4503544921875005,0.03548614985026348,3.431356689453125,3.501470068359375,3.5139674560546874,3.5239653662109376,"[3.4260361328125, 3.429067138671875, 3.425377197265625, 3.442102294921875, 3.427401123046875, 3.433646240234375, 3.478225341796875, 3.52646484375, 3.41653173828125, 3.49869287109375]",tokens/s,18.25899342883416,kWh,4.044569405375114e-05,2.216938181239754e-05,0.00017463375081799842,0.00023724882668414715,tokens/kWh,265543.98974488006,,s,630,34.500974613189705,0.05476345176696777,0.001012921453592182,0.0544138240814209,0.05611540641784668,0.056487885284423826,0.057522246284484865,"[0.05637529754638672, 0.054561790466308595, 0.05464985656738281, 0.05428531265258789, 0.054019073486328124, 0.05462835311889649, 0.05348454284667969, 0.054346752166748044, 0.05434163284301758, 0.054387710571289063, 0.055119873046875, 0.055431167602539064, 0.054742015838623044, 0.055278591156005856, 0.05399552154541016, 0.05422284698486328, 0.05382144165039063, 0.053698558807373044, 0.05377536010742188, 0.05464780807495117, 0.0562872314453125, 0.054712318420410154, 0.05423923110961914, 0.05499084854125977, 0.0541317138671875, 0.05414912033081055, 0.054642688751220705, 0.053901313781738285, 0.05444095993041992, 0.054042625427246097, 0.05392998504638672, 0.057527294158935545, 0.054417407989501954, 0.0540948486328125, 0.053513214111328124, 0.05536972808837891, 0.05632614517211914, 0.05374259185791016, 0.05442355346679688, 0.05429862213134766, 0.05382758331298828, 0.05428940963745117, 0.054642688751220705, 0.05403647994995117, 0.053779457092285154, 0.053738494873046876, 0.05380198287963867, 0.05388595199584961, 0.05408051300048828, 0.05364940643310547, 0.05381836700439453, 0.053945343017578126, 0.05383782577514649, 0.05404774475097656, 0.053597183227539064, 0.05388185501098633, 0.05348761749267578, 0.05522022247314453, 0.05442457580566406, 0.05364326477050781, 0.05500211334228516, 0.05400985717773438, 0.054141952514648435, 0.05571788787841797, 0.05419007873535156, 0.05431808090209961, 0.05448396682739258, 0.05478707122802735, 0.05462732696533203, 0.05391462326049805, 0.05441535949707031, 0.05389823913574219, 0.05664767837524414, 0.05511884689331055, 0.05398220825195312, 0.05398835372924805, 0.05372927856445313, 0.05332275390625, 0.05369139099121094, 0.0535551986694336, 0.05373235321044922, 0.05372108840942383, 0.0536627197265625, 0.053771263122558595, 0.054798336029052735, 0.05493350219726562, 0.05521100616455078, 0.05650124740600586, 0.05400166320800781, 0.05374771118164062, 0.05432115173339844, 0.05409894561767578, 0.053938175201416014, 0.05424025726318359, 0.05378355026245117, 0.05392588806152344, 0.05398732757568359, 0.0539504623413086, 0.05391772842407227, 0.0536923828125, 0.0558551025390625, 0.054267902374267575, 0.05469900894165039, 0.055512065887451174, 0.05392895889282227, 0.05473894500732422, 0.05475430297851563, 0.05371289443969727, 0.05388083267211914, 0.05516595077514649, 0.0541952018737793, 0.05628211212158203, 0.05584384155273438, 0.05406617736816406, 0.053975040435791016, 0.055218177795410155, 0.05642956924438477, 0.054191104888916014, 0.054151199340820313, 0.05415216064453125, 0.05398527908325195, 0.05403750228881836, 0.05425049591064453, 
0.054335487365722655, 0.05448191833496094, 0.05438259124755859, 0.05548543930053711, 0.05424127960205078, 0.05420646286010742, 0.05365760040283203, 0.05418598556518555, 0.05393407821655274, 0.0537784309387207, 0.055259136199951174, 0.05583359909057617, 0.054785022735595705, 0.05391974258422851, 0.05372927856445313, 0.05347635269165039, 0.053302272796630856, 0.05392793655395508, 0.05400371170043945, 0.053910526275634765, 0.05382144165039063, 0.05403955078125, 0.05434777450561523, 0.05505945587158203, 0.055403518676757815, 0.05427711868286133, 0.054051841735839844, 0.05424435043334961, 0.05347840118408203, 0.05724774551391602, 0.05498777770996094, 0.05454131317138672, 0.05464371109008789, 0.05400883102416992, 0.0541102066040039, 0.055051265716552736, 0.054201343536376956, 0.05469081497192383, 0.05418188858032227, 0.05472153472900391, 0.054365184783935545, 0.05429145431518555, 0.05400678253173828, 0.054100990295410156, 0.055823360443115234, 0.05458534240722656, 0.054076416015625, 0.05432524871826172, 0.053838848114013675, 0.0550645751953125, 0.05464473724365235, 0.053926910400390625, 0.05377740859985351, 0.053754878997802735, 0.05375795364379883, 0.05400371170043945, 0.05308620834350586, 0.056809471130371096, 0.05468979263305664, 0.0545873908996582, 0.05463654327392578, 0.05337702560424805, 0.05382656097412109, 0.054354942321777344, 0.054100990295410156, 0.05455769729614258, 0.05529600143432617, 0.054150142669677735, 0.05392281723022461, 0.05387980651855469, 0.053703678131103515, 0.0544450569152832, 0.0546324462890625, 0.05442457580566406, 0.05476761627197266, 0.054932479858398435, 0.05383782577514649, 0.053978111267089846, 0.05513113784790039, 0.054607872009277345, 0.054419456481933595, 0.0535551986694336, 0.053806079864501956, 0.05387776184082031, 0.054179840087890625, 0.054441982269287106, 0.0537968635559082, 0.05363302230834961, 0.053471233367919924, 0.054095870971679685, 0.05448191833496094, 0.053891071319580076, 0.054046718597412106, 0.05414604949951172, 0.054363136291503904, 0.057662464141845705, 0.05526220703125, 0.056638465881347654, 0.054409217834472653, 0.05406617736816406, 0.05396684646606445, 0.05411840057373047, 0.05399859237670898, 0.05550387191772461, 0.054022144317626954, 0.053781505584716796, 0.05383270263671875, 0.054561790466308595, 0.05584281539916992, 0.0574832649230957, 0.054591487884521485, 0.053953536987304686, 0.05570560073852539, 0.053905406951904294, 0.05448191833496094, 0.05558169555664062, 0.0568350715637207, 0.055122943878173826, 0.05464371109008789, 0.05645107269287109, 0.05411840057373047, 0.054106113433837894, 0.05457408142089844, 0.05530112075805664, 0.05406719970703125, 0.05455257415771484, 0.05531340789794922, 0.05604249572753906, 0.05543423843383789, 0.055299072265625, 0.05383475112915039, 0.053817344665527345, 0.054079486846923826, 0.05377740859985351, 0.05409075164794922, 0.05389823913574219, 0.053795841217041014, 0.05385830307006836, 0.05461196899414063, 0.057603073120117185, 0.05620121765136719, 0.05588479995727539, 0.05450342559814453, 0.05528371047973633, 0.05470003128051758, 0.05471846389770508, 0.05403647994995117, 0.05388288116455078, 0.05413683319091797, 0.05389619064331055, 0.05383168029785156, 0.054012928009033206, 0.053991424560546876, 0.0545904655456543, 0.055801856994628904, 0.053885025024414064, 0.05429135894775391, 0.05364940643310547, 0.05389311981201172, 0.054604801177978515, 0.055831550598144535, 0.05380812835693359, 0.053795841217041014, 0.0544266242980957, 0.056406017303466796, 0.05479423904418945, 0.05385523223876953, 
0.05367603302001953, 0.054834175109863284, 0.05388288116455078, 0.053901313781738285, 0.053663745880126956, 0.05345075225830078, 0.05379481506347656, 0.05366988754272461, 0.053651454925537106, 0.053714942932128903, 0.05379481506347656, 0.05393612670898437, 0.05363814544677734, 0.05383065414428711, 0.053797889709472656, 0.05494374465942383, 0.0545269775390625, 0.05396684646606445, 0.054163455963134766, 0.05397094345092773, 0.05388083267211914, 0.0575098876953125, 0.056471553802490235, 0.05490073776245117, 0.054196224212646485, 0.055728126525878906, 0.054711296081542966, 0.05613568115234375, 0.05590528106689453, 0.054401023864746094, 0.05408563232421875, 0.05586329650878906, 0.05576396942138672, 0.05400067138671875, 0.053881824493408205, 0.055027713775634764, 0.053935104370117185, 0.05380198287963867, 0.05552435302734375, 0.053716991424560545, 0.055818241119384764, 0.0540948486328125, 0.05386240005493164, 0.05367705535888672, 0.05528268814086914, 0.05561753463745117, 0.05429043197631836, 0.05387468719482422, 0.053961727142333986, 0.05386038589477539, 0.0539903678894043, 0.054566913604736325, 0.056994815826416016, 0.05548543930053711, 0.0540313606262207, 0.054324222564697267, 0.0540313606262207, 0.05412044906616211, 0.0551383056640625, 0.05566668701171875, 0.053981182098388675, 0.05312716674804688, 0.05379379272460937, 0.05481062316894531, 0.056215553283691405, 0.05415935897827148, 0.053550079345703126, 0.05339033508300781, 0.05379174423217774, 0.05441228866577148, 0.05573529434204102, 0.055430145263671876, 0.054508544921875, 0.05374771118164062, 0.05400883102416992, 0.05376716613769531, 0.05506252670288086, 0.05549363327026367, 0.05407027053833008, 0.053951488494873044, 0.053746688842773435, 0.05381119918823242, 0.05486489486694336, 0.0537968635559082, 0.0535880012512207, 0.053778400421142576, 0.05385113525390625, 0.053784576416015625, 0.05666304016113281, 0.05638041687011719, 0.056036350250244144, 0.05623910522460938, 0.0566927375793457, 0.05579980850219726, 0.055610366821289066, 0.055826431274414064, 0.05587558364868164, 0.05567692947387695, 0.05580083084106445, 0.05575372695922851, 0.05609164810180664, 0.05402828979492187, 0.05369244766235352, 0.05506351852416992, 0.055081985473632813, 0.0536995849609375, 0.0553994255065918, 0.05390335845947265, 0.05330636978149414, 0.053781505584716796, 0.05447270584106445, 0.05411328125, 0.05393305587768555, 0.05386444854736328, 0.055229438781738284, 0.05423513412475586, 0.053967872619628904, 0.05449728012084961, 0.05507788848876953, 0.056302593231201174, 0.05652889633178711, 0.05432729721069336, 0.053997566223144534, 0.05490176010131836, 0.05604044723510742, 0.055638015747070314, 0.05526425552368164, 0.055700481414794924, 0.055539710998535156, 0.05653606414794922, 0.05731327819824219, 0.05739519882202149, 0.05509222412109375, 0.056272895812988284, 0.0559554557800293, 0.055137279510498044, 0.0551383056640625, 0.05595852661132812, 0.05588991928100586, 0.053868545532226565, 0.05535334396362305, 0.05389823913574219, 0.05390233612060547, 0.05504819107055664, 0.05391462326049805, 0.053784576416015625, 0.055940097808837894, 0.05405388641357422, 0.055395328521728515, 0.05625241470336914, 0.05582438278198242, 0.055757823944091796, 0.056097793579101565, 0.05614591979980469, 0.05627801513671875, 0.056164352416992185, 0.055585792541503906, 0.05674291229248047, 0.056632320404052736, 0.05590118408203125, 0.05612851333618164, 0.05571583938598633, 0.05624524688720703, 0.056005630493164066, 0.05569331359863281, 0.0558766098022461, 0.05601279830932617, 
0.05586022567749024, 0.05587148666381836, 0.055578624725341794, 0.05577523040771484, 0.055943168640136716, 0.055777278900146485, 0.05507583999633789, 0.055444480895996094, 0.05560422515869141, 0.057262081146240235, 0.056097793579101565, 0.05595647811889649, 0.056097793579101565, 0.05567488098144531, 0.056166400909423826, 0.05672243118286133, 0.05630054473876953, 0.05588582229614258, 0.056204288482666016, 0.05580492782592773, 0.05587148666381836, 0.0556492805480957, 0.055771137237548826, 0.05564518356323242, 0.055602176666259766, 0.05571788787841797, 0.05518438339233398, 0.05657702255249023, 0.05618483352661133, 0.055731201171875, 0.05586227035522461, 0.05598003387451172, 0.055771137237548826, 0.05573017501831055, 0.055378944396972656, 0.05563699340820313, 0.05564313507080078, 0.05565235137939453, 0.05568921661376953, 0.05574553680419922, 0.055700481414794924, 0.055927806854248044, 0.05653094482421875, 0.05621145629882812, 0.05581619262695312, 0.05709823989868164, 0.05779251098632812, 0.05540966415405273, 0.05508403015136719, 0.05588684844970703, 0.05492019271850586, 0.055087104797363284, 0.05370675277709961, 0.05372825622558594, 0.054280193328857425, 0.05427302551269531, 0.05519974517822265, 0.054128639221191405, 0.05376102447509765, 0.0539156494140625, 0.054180896759033204, 0.05410915374755859, 0.054002689361572265, 0.05497958374023437, 0.05754265594482422, 0.05429350280761719, 0.05352755355834961, 0.05395251083374023, 0.05403955078125, 0.054145023345947264, 0.053969921112060545, 0.05404774475097656, 0.05410508728027344, 0.05428326416015625, 0.05463142395019531, 0.05482086563110351, 0.054117374420166016, 0.05485670471191406, 0.054588417053222656, 0.05407436752319336, 0.053664768218994144, 0.05376409530639648, 0.05406924819946289, 0.05387980651855469, 0.05380198287963867, 0.05398015975952149, 0.053550079345703126, 0.05382758331298828, 0.05368832015991211, 0.05396889495849609, 0.053989376068115234, 0.05441024017333984, 0.05386956787109375, 0.054232063293457033, 0.054796287536621094, 0.05456486511230469, 0.053937152862548826, 0.05402828979492187, 0.05374054336547852, 0.05381324768066406, 0.054013950347900394, 0.05386751937866211, 0.053997566223144534, 0.053743614196777346, 0.053738494873046876, 0.053989376068115234, 0.05398323059082031, 0.053921791076660154, 0.053648384094238284, 0.0541317138671875, 0.05803519821166992, 0.05958041763305664, 0.056286209106445315, 0.05372723388671875, 0.05359308624267578, 0.05381119918823242, 0.05406208038330078, 0.05598515319824219, 0.055973888397216794, 0.05430476760864258, 0.05591449737548828, 0.05610598373413086, 0.05656576156616211, 0.056443904876708986, 0.05533388900756836, 0.0558766098022461, 0.056048641204833986, 0.05581721496582031, 0.055897087097167966, 0.05599846267700195, 0.05588889694213867, 0.05589606475830078, 0.05568716812133789, 0.05600972747802734, 0.05627801513671875, 0.05612646484375, 0.05599641418457031, 0.055777278900146485, 0.05568204879760742, 0.05604249572753906, 0.056043521881103515, 0.05696614456176758, 0.05600358581542969, 0.055951358795166016, 0.05581619262695312, 0.055330814361572264, 0.05535232162475586, 0.05584896087646484, 0.056008705139160155, 0.056114177703857425, 0.056027137756347656, 0.05576396942138672, 0.05598003387451172, 0.05590835189819336, 0.05607014465332031, 0.055907329559326174, 0.05588889694213867, 0.056182785034179686, 0.056114177703857425, 0.057062400817871096, 0.05446656036376953, 0.05385932922363281, 0.0539607048034668, 0.053961727142333986, 0.053907455444335936, 0.05393203353881836, 0.0537968635559082, 
0.05354086303710937, 0.05472256088256836, 0.053596160888671876, 0.05422182464599609, 0.05444300842285156, 0.054945793151855465]",tokens/s,18.260353716475926,,,1,64,1,,, -4bit-awq-exllama-v1-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,openai-community/gpt2,openai-community/gpt2,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.42.3,,0.31.0,,,,1.20.0,,,,0.11.1,,,,,,,,,,,,,,,,,,,,,,,,,,,MB,1222.918144,1002.962944,0.0,356.51584,319.013888,s,24,0.17108643198013304,0.007128601332505545,0.00023506038056566344,0.0071254239082336425,0.00730543360710144,0.007380452966690063,0.007901603527069092,"[0.008055839538574219, 0.0070044159889221195, 0.006933440208435058, 0.006980544090270996, 0.007076032161712646, 0.007130688190460205, 0.006901663780212402, 0.00719379186630249, 0.006775263786315918, 0.007147552013397217, 0.006987008094787597, 0.007127007961273193, 0.007123839855194092, 0.0069337282180786134, 0.006967872142791748, 0.007136064052581787, 0.007171455860137939, 0.007136960029602051, 0.007385248184204101, 0.0071905279159545895, 0.007149151802062988, 0.007353280067443848, 0.00712220811843872, 0.007102848052978516]",tokens/s,35911.67299995744,kWh,8.20515813007031e-08,4.496026881836977e-08,1.723284557638797e-07,2.9934030588295257e-07,tokens/kWh,855213931.9991895,MB,1222.918144,1002.962944,0.0,356.51584,319.016448,s,24,10.024570068359376,0.417690419514974,0.009639100573855391,0.4168327026367188,0.4219854522705078,0.42297065277099605,0.4497069650268555,"[0.4576481628417969, 0.4167873229980469, 0.4013796691894531, 0.41602001953125, 0.4197399597167969, 0.41845706176757813, 0.41359945678710935, 0.41306964111328126, 0.41490817260742185, 0.4164496765136719, 0.41565142822265627, 0.413171875, 0.417815185546875, 0.41687808227539064, 0.404253173828125, 0.4180263671875, 0.415652587890625, 0.4194432373046875, 0.4231212158203125, 0.4190736083984375, 0.42167742919921875, 0.4221174621582031, 0.4120869445800781, 0.4175423278808594]",tokens/s,150.82941110585247,kWh,4.878610027839004e-06,2.673259573047272e-06,8.213615752983987e-06,1.5765485353870266e-05,tokens/kWh,3996071.074623411,,s,1511,10.180687867164602,0.0067377153323392525,0.0009229027913049718,0.006619135856628418,0.006779903888702392,0.007192575931549072,0.013923225975036629,"[0.007412735939025879, 0.00744755220413208, 0.007483391761779785, 0.0073994240760803225, 0.007536640167236328, 0.007288832187652588, 0.007184383869171143, 0.007219200134277344, 0.007250944137573242, 0.0071495680809020995, 0.007094272136688232, 0.007154687881469727, 0.007214079856872559, 0.007896063804626464, 0.007287807941436767, 0.007271423816680909, 0.007174208164215088, 0.007232448101043701, 0.007291903972625732, 0.007258111953735351, 0.007313439846038819, 0.007170015811920166, 0.007235583782196045, 0.0073175039291381834, 0.007245823860168457, 0.0071198720932006835, 0.007203839778900147, 0.007159808158874512, 0.007173120021820068, 0.007256063938140869, 0.0072540159225463864, 0.007328767776489258, 0.007588863849639893, 0.007350272178649903, 0.007271423816680909, 0.0072325119972229, 0.007336959838867187, 
0.007259136199951172, 0.007278592109680176, 0.007124000072479248, 0.007150559902191162, 0.0071905279159545895, 0.007312384128570557, 0.007115839958190918, 0.00720684814453125, 0.007279615879058838, 0.007194623947143554, 0.0072325119972229, 0.007303167819976806, 0.007305215835571289, 0.007370751857757568, 0.0073471999168395995, 0.007316480159759522, 0.00733900785446167, 0.007209983825683594, 0.0071833600997924804, 0.007266304016113281, 0.007063551902770996, 0.007123968124389648, 0.006914048194885254, 0.007126016139984131, 0.006872064113616944, 0.013767680168151856, 0.006612991809844971, 0.0068055357933044435, 0.0070184640884399414, 0.007288832187652588, 0.007051263809204102, 0.0071526398658752445, 0.00723967981338501, 0.006957056045532227, 0.00682700777053833, 0.006904831886291504, 0.006914048194885254, 0.006833151817321777, 0.0074291200637817386, 0.006706175804138184, 0.006618112087249756, 0.006719488143920899, 0.0067358717918396, 0.006503424167633057, 0.006547520160675049, 0.0065924482345581055, 0.006811647891998291, 0.006649856090545654, 0.006663167953491211, 0.006638591766357422, 0.006681600093841553, 0.006639616012573242, 0.00662937593460083, 0.006624288082122802, 0.006625279903411865, 0.006634463787078857, 0.006653952121734619, 0.006660096168518067, 0.006622208118438721, 0.006669312000274658, 0.006585343837738037, 0.006519807815551758, 0.006428671836853027, 0.0065064959526062015, 0.0065075201988220215, 0.006525951862335205, 0.006609920024871826, 0.006543360233306885, 0.006285376071929931, 0.006284224033355713, 0.0062945599555969236, 0.0063159999847412105, 0.006336575984954834, 0.006356927871704101, 0.006312960147857666, 0.006337535858154297, 0.006352896213531494, 0.006377471923828125, 0.006362112045288086, 0.006487040042877197, 0.00638976001739502, 0.006368256092071533, 0.006372352123260498, 0.006360064029693604, 0.006396927833557129, 0.006378560066223144, 0.006458303928375244, 0.006375423908233643, 0.013642751693725585, 0.006362112045288086, 0.006369279861450195, 0.006368256092071533, 0.006377471923828125, 0.006344704151153564, 0.006386688232421875, 0.006358016014099121, 0.0064133119583129885, 0.006368256092071533, 0.006345727920532227, 0.0063569917678833006, 0.006347775936126709, 0.0063508481979370115, 0.006388735771179199, 0.006345727920532227, 0.0063610877990722655, 0.006331391811370849, 0.006377503871917724, 0.006363103866577149, 0.00642252779006958, 0.006384640216827392, 0.006384640216827392, 0.00637440013885498, 0.006342656135559082, 0.006366208076477051, 0.006367231845855713, 0.006340608119964599, 0.006343679904937744, 0.006337535858154297, 0.0064737281799316405, 0.006309887886047363, 0.006277120113372803, 0.006291456222534179, 0.006271999835968017, 0.006305791854858398, 0.006260735988616943, 0.0063539199829101565, 0.006333439826965332, 0.006277120113372803, 0.006294591903686524, 0.006271935939788818, 0.006261760234832763, 0.006304768085479737, 0.006252543926239014, 0.006292479991912842, 0.006278143882751465, 0.006305791854858398, 0.006262784004211426, 0.006259712219238281, 0.0062740478515625, 0.006270976066589356, 0.006278143882751465, 0.006260735988616943, 0.0062576642036437985, 0.0064849920272827145, 0.006639616012573242, 0.006626304149627686, 0.006595583915710449, 0.006602752208709717, 0.006615039825439453, 0.0066007041931152345, 0.006666240215301514, 0.013848575592041015, 0.006554624080657959, 0.006558720111846924, 0.0065382399559021, 0.006540287971496582, 0.006557695865631104, 0.006494207859039307, 0.006524928092956543, 0.0065146880149841305, 0.006554624080657959, 
0.00653926420211792, 0.00653004789352417, 0.006576128005981445, 0.006567935943603515, 0.00653004789352417, 0.006549503803253174, 0.00658022403717041, 0.006554624080657959, 0.006533120155334473, 0.006550528049468994, 0.006508543968200684, 0.006524928092956543, 0.006527999877929688, 0.006545407772064209, 0.006617087841033936, 0.0065812478065490725, 0.006529024124145508, 0.006592512130737305, 0.006603775978088379, 0.0066416640281677245, 0.006628352165222168, 0.006606847763061524, 0.006625279903411865, 0.006633535861968994, 0.006624192237854004, 0.006643743991851807, 0.006575071811676025, 0.006703135967254639, 0.006697951793670654, 0.006589439868927002, 0.006616064071655273, 0.006540287971496582, 0.006535168170928955, 0.006783999919891357, 0.00693452787399292, 0.006716415882110595, 0.006651904106140137, 0.006653952121734619, 0.006609983921051025, 0.006612927913665772, 0.006668320178985596, 0.0065965762138366696, 0.006608895778656006, 0.006614016056060791, 0.006594560146331787, 0.006658048152923584, 0.0066119680404663084, 0.006605823993682861, 0.006628352165222168, 0.006657023906707763, 0.006617119789123535, 0.006628320217132568, 0.006690815925598144, 0.01406771183013916, 0.006669312000274658, 0.006616064071655273, 0.006605823993682861, 0.006593535900115967, 0.006903808116912841, 0.0066826238632202144, 0.006944767951965332, 0.006823935985565186, 0.007096320152282715, 0.007384064197540283, 0.006724607944488525, 0.00667955207824707, 0.006643712043762207, 0.0066744318008422855, 0.006673471927642822, 0.006619103908538818, 0.006709216117858886, 0.006681600093841553, 0.006652927875518798, 0.0066641921997070315, 0.006657023906707763, 0.006631423950195312, 0.006589439868927002, 0.0065177597999572755, 0.006503424167633057, 0.00653004789352417, 0.006574143886566162, 0.006555583953857422, 0.006545407772064209, 0.006633471965789795, 0.006624256134033203, 0.006626304149627686, 0.0065781760215759275, 0.006595583915710449, 0.006606912136077881, 0.006654911994934082, 0.006585343837738037, 0.00663046407699585, 0.0065924482345581055, 0.006612991809844971, 0.006606847763061524, 0.0065771517753601075, 0.0066119680404663084, 0.0066007041931152345, 0.006591487884521485, 0.006599679946899414, 0.006575104236602783, 0.006628352165222168, 0.006615039825439453, 0.0068321280479431154, 0.006767615795135498, 0.0066109437942504885, 0.006635519981384277, 0.006599679946899414, 0.006711296081542969, 0.006590464115142822, 0.0067123517990112305, 0.006628320217132568, 0.006584320068359375, 0.006609920024871826, 0.006616064071655273, 0.0065710082054138185, 0.014100480079650878, 0.0066447358131408694, 0.006599679946899414, 0.006623231887817383, 0.006631423950195312, 0.0066304001808166506, 0.006638591766357422, 0.006631423950195312, 0.006601727962493896, 0.006616064071655273, 0.00684441614151001, 0.006636544227600098, 0.006599679946899414, 0.006639616012573242, 0.0066375679969787596, 0.006628352165222168, 0.00657919979095459, 0.006729728221893311, 0.006616064071655273, 0.006595583915710449, 0.006638591766357422, 0.006619135856628418, 0.0065771517753601075, 0.006623231887817383, 0.006595583915710449, 0.006585343837738037, 0.006602752208709717, 0.0066375679969787596, 0.006621183872222901, 0.006656000137329102, 0.006621183872222901, 0.0066304001808166506, 0.006622208118438721, 0.006920191764831543, 0.006619135856628418, 0.006656000137329102, 0.006635519981384277, 0.0066109437942504885, 0.006660096168518067, 0.0066304001808166506, 0.0066007041931152345, 0.006620160102844238, 0.006638591766357422, 0.006605823993682861, 
0.006626304149627686, 0.006624256134033203, 0.006559743881225586, 0.006598656177520752, 0.006622208118438721, 0.006592512130737305, 0.006656000137329102, 0.0066007041931152345, 0.006809599876403808, 0.00676966381072998, 0.006660096168518067, 0.006616064071655273, 0.006612991809844971, 0.006670335769653321, 0.006624288082122802, 0.00667952013015747, 0.006633471965789795, 0.0066119680404663084, 0.006625343799591064, 0.01360582447052002, 0.006369279861450195, 0.006322175979614258, 0.006368256092071533, 0.006367231845855713, 0.006349855899810791, 0.006337503910064698, 0.006335487842559814, 0.0063539199829101565, 0.006343679904937744, 0.0063498239517211915, 0.006377471923828125, 0.006339583873748779, 0.006369279861450195, 0.006277120113372803, 0.0062863359451293946, 0.0063632001876831054, 0.006559679985046387, 0.006557695865631104, 0.00653004789352417, 0.006723584175109864, 0.006628352165222168, 0.006583295822143555, 0.006676479816436768, 0.006598656177520752, 0.006603775978088379, 0.00667955207824707, 0.00662937593460083, 0.0066826238632202144, 0.007476223945617676, 0.006923264026641846, 0.00662937593460083, 0.006625279903411865, 0.00662937593460083, 0.006648831844329834, 0.006622208118438721, 0.006627327919006347, 0.006619135856628418, 0.006661119937896728, 0.006663167953491211, 0.006616064071655273, 0.006624256134033203, 0.006542335987091064, 0.00652396821975708, 0.0065054078102111815, 0.0066826238632202144, 0.006606847763061524, 0.006606847763061524, 0.006627327919006347, 0.006651904106140137, 0.00658022403717041, 0.006604800224304199, 0.006597631931304931, 0.006625279903411865, 0.006626304149627686, 0.006617087841033936, 0.0066119680404663084, 0.006606847763061524, 0.006595583915710449, 0.006601727962493896, 0.0066007041931152345, 0.006569983959197998, 0.006602752208709717, 0.014182463645935058, 0.00659449577331543, 0.006605823993682861, 0.006616064071655273, 0.006615039825439453, 0.006638591766357422, 0.006428671836853027, 0.006589439868927002, 0.0065781760215759275, 0.0066119680404663084, 0.006597663879394531, 0.006642655849456787, 0.006558720111846924, 0.0066344962120056155, 0.006590464115142822, 0.006589439868927002, 0.006726655960083008, 0.0066007041931152345, 0.006597631931304931, 0.006614016056060791, 0.006599679946899414, 0.006591519832611084, 0.006626272201538086, 0.006591487884521485, 0.006618112087249756, 0.006617087841033936, 0.006589439868927002, 0.006617087841033936, 0.006601727962493896, 0.006565887928009034, 0.006592512130737305, 0.006591487884521485, 0.006586368083953857, 0.006599679946899414, 0.006622208118438721, 0.006667263984680176, 0.0065771517753601075, 0.006598656177520752, 0.006585343837738037, 0.0065075201988220215, 0.0064767999649047855, 0.006506559848785401, 0.006499263763427734, 0.0064880638122558594, 0.006511616230010986, 0.006495232105255127, 0.006471680164337158, 0.0065177597999572755, 0.006503424167633057, 0.006497280120849609, 0.006503424167633057, 0.006499328136444092, 0.006496255874633789, 0.006584320068359375, 0.006481919765472412, 0.006559743881225586, 0.006508543968200684, 0.006489088058471679, 0.006508543968200684, 0.006358016014099121, 0.006275072097778321, 0.00626585578918457, 0.006271999835968017, 0.01334169578552246, 0.0062494721412658695, 0.006322175979614258, 0.006260735988616943, 0.006278143882751465, 0.006286399841308594, 0.006353856086730957, 0.006494207859039307, 0.006524928092956543, 0.006524928092956543, 0.006515711784362793, 0.0065136637687683106, 0.006543360233306885, 0.0065372161865234375, 0.0065136637687683106, 0.006545407772064209, 
0.006558720111846924, 0.006500351905822754, 0.006492159843444824, 0.006520832061767578, 0.006516736030578613, 0.006515711784362793, 0.006518784046173095, 0.00653926420211792, 0.006526976108551025, 0.006503424167633057, 0.007187456130981445, 0.006808576107025147, 0.006639616012573242, 0.006661119937896728, 0.00658841609954834, 0.006639616012573242, 0.006702079772949219, 0.006563839912414551, 0.006907904148101806, 0.006646783828735352, 0.006625279903411865, 0.006619135856628418, 0.006623231887817383, 0.006564864158630371, 0.007068672180175781, 0.006636544227600098, 0.006612991809844971, 0.006597631931304931, 0.006617087841033936, 0.006592512130737305, 0.0066344962120056155, 0.006673408031463623, 0.006559743881225586, 0.006606847763061524, 0.006609920024871826, 0.006573056221008301, 0.00659660816192627, 0.006716415882110595, 0.006948863983154297, 0.0066826238632202144, 0.006673408031463623, 0.0066375679969787596, 0.0066713600158691405, 0.006602752208709717, 0.0065136637687683106, 0.00657919979095459, 0.0064880638122558594, 0.014107647895812989, 0.006604800224304199, 0.006589439868927002, 0.00659660816192627, 0.006616096019744873, 0.0065965762138366696, 0.006593535900115967, 0.006632448196411133, 0.006594560146331787, 0.006598656177520752, 0.006585343837738037, 0.006618112087249756, 0.0066119680404663084, 0.006623231887817383, 0.006659071922302246, 0.006657023906707763, 0.006614016056060791, 0.006620160102844238, 0.006605823993682861, 0.006597631931304931, 0.006626304149627686, 0.006633471965789795, 0.006767615795135498, 0.006643712043762207, 0.006583295822143555, 0.00658739185333252, 0.006628352165222168, 0.006645760059356689, 0.00662224006652832, 0.006647808074951172, 0.006631392002105713, 0.006622208118438721, 0.006614016056060791, 0.006670335769653321, 0.006593535900115967, 0.006608895778656006, 0.006608895778656006, 0.00658841609954834, 0.0066119680404663084, 0.006653952121734619, 0.006590464115142822, 0.006606847763061524, 0.006604800224304199, 0.006606847763061524, 0.006592512130737305, 0.006573056221008301, 0.00673689603805542, 0.006714367866516113, 0.006563839912414551, 0.006609920024871826, 0.006626304149627686, 0.006592512130737305, 0.006608895778656006, 0.00657919979095459, 0.006510591983795166, 0.006554624080657959, 0.006527999877929688, 0.006492159843444824, 0.0065136637687683106, 0.006621183872222901, 0.006511616230010986, 0.006520832061767578, 0.0065064959526062015, 0.013931520462036133, 0.0066119680404663084, 0.006621183872222901, 0.006615039825439453, 0.006592512130737305, 0.0066406397819519045, 0.006624256134033203, 0.006606847763061524, 0.006616064071655273, 0.006636544227600098, 0.006585343837738037, 0.006627327919006347, 0.00662937593460083, 0.006596640110015869, 0.006647776126861573, 0.006661119937896728, 0.00656489610671997, 0.006495200157165528, 0.006501376152038574, 0.00658841609954834, 0.006501376152038574, 0.006496255874633789, 0.0065136637687683106, 0.006508543968200684, 0.006564864158630371, 0.006609951972961426, 0.006609888076782227, 0.00658841609954834, 0.006608895778656006, 0.006618112087249756, 0.0066109437942504885, 0.006592512130737305, 0.006608895778656006, 0.006608895778656006, 0.00658841609954834, 0.0066744318008422855, 0.006743040084838867, 0.006790143966674805, 0.006713344097137451, 0.0066109437942504885, 0.006657023906707763, 0.0066119680404663084, 0.006625311851501465, 0.006631392002105713, 0.006659071922302246, 0.006608895778656006, 0.006642687797546387, 0.006816768169403077, 0.006625279903411865, 0.0065372161865234375, 0.006508543968200684, 
0.006494207859039307, 0.006550528049468994, 0.006509568214416504, 0.0065075201988220215, 0.006590464115142822, 0.006479872226715088, 0.006499328136444092, 0.006538271903991699, 0.00648905611038208, 0.006513728141784668, 0.006510528087615966, 0.006516736030578613, 0.014094335556030273, 0.006583295822143555, 0.006668288230895996, 0.0066498880386352535, 0.006615007877349853, 0.006656000137329102, 0.006551551818847656, 0.006372352123260498, 0.006388768196105957, 0.006356959819793701, 0.0063805441856384275, 0.006368256092071533, 0.006385663986206055, 0.006379519939422608, 0.006598656177520752, 0.0065443840026855465, 0.006545407772064209, 0.00672051191329956, 0.006673408031463623, 0.006669312000274658, 0.006614016056060791, 0.0066416640281677245, 0.006662144184112549, 0.006608895778656006, 0.006653952121734619, 0.006667263984680176, 0.006623231887817383, 0.006631423950195312, 0.0066375679969787596, 0.006616064071655273, 0.00667852783203125, 0.006632448196411133, 0.006620160102844238, 0.0066416640281677245, 0.006661119937896728, 0.006612991809844971, 0.006647808074951172, 0.006628352165222168, 0.006639616012573242, 0.006624256134033203, 0.006603775978088379, 0.006643712043762207, 0.006624256134033203, 0.006603775978088379, 0.006683648109436035, 0.006377471923828125, 0.006328320026397705, 0.006337535858154297, 0.006367231845855713, 0.0063805761337280276, 0.006385632038116455, 0.006344704151153564, 0.006343679904937744, 0.006366208076477051, 0.006387712001800537, 0.006379519939422608, 0.006369279861450195, 0.006379519939422608, 0.00683622407913208, 0.006717440128326416, 0.006616064071655273, 0.006659071922302246, 0.006650879859924317, 0.01406156826019287, 0.006552576065063476, 0.00662937593460083, 0.006673408031463623, 0.0066344962120056155, 0.0066641921997070315, 0.006643712043762207, 0.006604800224304199, 0.006687776088714599, 0.006708191871643066, 0.0066744318008422855, 0.006726655960083008, 0.0067051520347595215, 0.0066447358131408694, 0.0067041277885437015, 0.006714367866516113, 0.006649856090545654, 0.006677504062652588, 0.0066908798217773435, 0.0066221442222595215, 0.006620160102844238, 0.0066375679969787596, 0.006619135856628418, 0.006658048152923584, 0.006638591766357422, 0.006656000137329102, 0.006762495994567871, 0.0066457920074462894, 0.006626272201538086, 0.006645760059356689, 0.006651904106140137, 0.006598656177520752, 0.006618112087249756, 0.006621183872222901, 0.006632448196411133, 0.006625279903411865, 0.006651904106140137, 0.006796288013458252, 0.006672383785247803, 0.006635519981384277, 0.006540287971496582, 0.006524928092956543, 0.006523903846740723, 0.0065669121742248536, 0.0065443840026855465, 0.006558720111846924, 0.006542335987091064, 0.0066416640281677245, 0.006523903846740723, 0.006565887928009034, 0.006543360233306885, 0.006543360233306885, 0.006519807815551758, 0.006540287971496582, 0.006635519981384277, 0.006616064071655273, 0.006602752208709717, 0.006643775939941406, 0.006604735851287842, 0.006605823993682861, 0.0066344962120056155, 0.006631423950195312, 0.006632448196411133, 0.01363046360015869, 0.006363135814666748, 0.006354944229125976, 0.0063610877990722655, 0.006343679904937744, 0.006358016014099121, 0.007881728172302246, 0.006760447978973389, 0.006873087882995605, 0.006906879901885986, 0.006669312000274658, 0.006677504062652588, 0.00667955207824707, 0.006677504062652588, 0.006649856090545654, 0.0066375679969787596, 0.006605823993682861, 0.006631423950195312, 0.006782976150512696, 0.006667263984680176, 0.006597631931304931, 0.006651904106140137, 
0.0066375679969787596, 0.0066344962120056155, 0.006649856090545654, 0.0066304001808166506, 0.006657023906707763, 0.0066713600158691405, 0.006625279903411865, 0.006691840171813965, 0.006626304149627686, 0.0066406397819519045, 0.006662144184112549, 0.006652927875518798, 0.006617087841033936, 0.0066406397819519045, 0.006691840171813965, 0.006864895820617676, 0.006680575847625733, 0.006628352165222168, 0.0066406397819519045, 0.006685696125030518, 0.006675456047058105, 0.006592512130737305, 0.006623231887817383, 0.006605823993682861, 0.0065894718170166015, 0.006627295970916748, 0.006621183872222901, 0.006617087841033936, 0.0066375679969787596, 0.006602752208709717, 0.00657919979095459, 0.006604800224304199, 0.006534143924713135, 0.00636518383026123, 0.0063508481979370115, 0.006352896213531494, 0.006369279861450195, 0.006419456005096436, 0.006345727920532227, 0.006393856048583985, 0.006351871967315674, 0.013632512092590332, 0.006351871967315674, 0.006325247764587402, 0.006335487842559814, 0.006329343795776367, 0.006355967998504639, 0.006363135814666748, 0.006375423908233643, 0.006378496170043945, 0.006376448154449463, 0.006355967998504639, 0.006329343795776367, 0.006344704151153564, 0.006337535858154297, 0.006375423908233643, 0.0063569917678833006, 0.006333439826965332, 0.006346752166748047, 0.006351871967315674, 0.0063569917678833006, 0.006367231845855713, 0.00632422399520874, 0.0063539199829101565, 0.006363135814666748, 0.006352896213531494, 0.006458367824554443, 0.006360064029693604, 0.006363135814666748, 0.006342656135559082, 0.00628227186203003, 0.006288352012634277, 0.0062679038047790524, 0.006275072097778321, 0.006255616188049316, 0.006262784004211426, 0.0062638077735900875, 0.00628223991394043, 0.006231040000915527, 0.0062975997924804685, 0.006253568172454834, 0.006287424087524414, 0.006264768123626709, 0.006277120113372803, 0.0062638077735900875, 0.006675456047058105, 0.006616064071655273, 0.00658022403717041, 0.0066007041931152345, 0.006614016056060791, 0.006746111869812011, 0.006563839912414551, 0.006576128005981445, 0.006606847763061524, 0.0065669121742248536, 0.006586368083953857, 0.006594560146331787, 0.006612991809844971, 0.006656000137329102, 0.006606847763061524, 0.006591487884521485, 0.006604800224304199, 0.006606847763061524, 0.006589439868927002, 0.014072832107543945, 0.006594560146331787, 0.006690815925598144, 0.0066406397819519045, 0.006585343837738037, 0.006612991809844971, 0.006654975891113281, 0.006593535900115967, 0.006582272052764892, 0.006615039825439453, 0.006591487884521485, 0.00663046407699585, 0.006618048191070557, 0.0066007041931152345, 0.006575104236602783, 0.0066119680404663084, 0.006660096168518067, 0.00660588788986206, 0.006574016094207763, 0.006597631931304931, 0.006848512172698974, 0.0068321280479431154, 0.006599679946899414, 0.006649856090545654, 0.006628352165222168, 0.006632448196411133, 0.006633471965789795, 0.006657055854797363, 0.006719456195831299, 0.006617087841033936, 0.006602752208709717, 0.006632448196411133, 0.006656000137329102, 0.00658841609954834, 0.00662937593460083, 0.006636544227600098, 0.006595583915710449, 0.006631423950195312, 0.006615039825439453, 0.006595583915710449, 0.006667263984680176, 0.006670335769653321, 0.006564864158630371, 0.006623231887817383, 0.006683648109436035, 0.0066078720092773435, 0.006621183872222901, 0.0066826238632202144, 0.0066304001808166506, 0.006648831844329834, 0.006744063854217529, 0.00675328016281128, 0.006692863941192627, 0.006601727962493896, 0.006508543968200684, 0.0065771517753601075, 
0.006516736030578613, 0.006511616230010986, 0.006556672096252441, 0.006564864158630371, 0.006582272052764892, 0.006591487884521485, 0.006819839954376221, 0.014217215538024902, 0.006583295822143555, 0.006612991809844971, 0.006651904106140137, 0.006640704154968262, 0.00665388822555542, 0.006659071922302246, 0.006606847763061524, 0.006609920024871826, 0.006620160102844238, 0.00662937593460083, 0.006650879859924317, 0.00673689603805542, 0.006623231887817383, 0.006650879859924317, 0.0066795840263366695, 0.006654943943023682, 0.006602752208709717, 0.0065812478065490725, 0.00653107213973999, 0.0065669121742248536, 0.006585343837738037, 0.006500351905822754, 0.0065443840026855465, 0.006519807815551758, 0.006553599834442139, 0.006525951862335205, 0.006533120155334473, 0.006670335769653321, 0.006550528049468994, 0.0065177597999572755, 0.006542399883270263, 0.006552512168884277, 0.006475776195526123, 0.006536191940307618, 0.00653926420211792, 0.006557695865631104, 0.006645760059356689, 0.006582272052764892, 0.006617087841033936, 0.00666323184967041, 0.006596543788909912, 0.006612991809844971, 0.006688767910003662, 0.006617087841033936, 0.006604800224304199, 0.006612991809844971, 0.006576128005981445, 0.006593535900115967, 0.006658112049102783, 0.0064757118225097655, 0.006543360233306885, 0.006564864158630371, 0.006467584133148193, 0.006532095909118653, 0.0066416640281677245, 0.0065372161865234375, 0.006532095909118653, 0.006549503803253174, 0.006604800224304199, 0.0066375679969787596, 0.006618112087249756, 0.006680575847625733, 0.014107647895812989, 0.0066007041931152345, 0.006592512130737305, 0.006648831844329834, 0.0066304001808166506, 0.006669312000274658, 0.006659071922302246, 0.006652927875518798, 0.006677504062652588, 0.006639616012573242, 0.006676511764526367, 0.00667952013015747, 0.006652927875518798, 0.006651904106140137, 0.006688767910003662, 0.006649856090545654, 0.006646783828735352, 0.0066202239990234375, 0.006640575885772705, 0.006663167953491211, 0.006636544227600098, 0.006635519981384277, 0.006680575847625733, 0.0066119680404663084, 0.006713344097137451, 0.0066416640281677245, 0.006515711784362793, 0.006573056221008301, 0.0065781760215759275, 0.00658739185333252, 0.006589439868927002, 0.006575104236602783, 0.006510591983795166, 0.006624256134033203, 0.006673408031463623, 0.006606847763061524, 0.006624256134033203, 0.0066416640281677245, 0.006667263984680176, 0.0066713600158691405, 0.006715392112731934, 0.006684671878814697, 0.006702079772949219, 0.006626304149627686, 0.00657919979095459, 0.006594560146331787, 0.006558720111846924, 0.006585343837738037, 0.006598656177520752, 0.006559743881225586, 0.006586368083953857, 0.006612991809844971, 0.00662937593460083, 0.00674508810043335, 0.006699007987976074, 0.006650879859924317, 0.0071015038490295414, 0.006799295902252197, 0.00669593620300293, 0.007017471790313721, 0.006725632190704346, 0.006669312000274658, 0.006666240215301514, 0.014368767738342286, 0.006797311782836914, 0.006669312000274658, 0.006615071773529053, 0.006666207790374756, 0.006676479816436768, 0.006661119937896728, 0.0067051520347595215, 0.00669593620300293, 0.006633471965789795, 0.006677504062652588, 0.006674496173858643, 0.0066211199760437016, 0.006677504062652588, 0.006703104019165039, 0.006627327919006347, 0.006699007987976074, 0.00667955207824707, 0.006642687797546387, 0.006683648109436035, 0.0066641921997070315, 0.007554048061370849, 0.0068249602317810056, 0.006938623905181885, 0.007472127914428711, 0.0069212160110473635, 0.006665215969085693, 0.006681600093841553, 
0.006743040084838867, 0.0066754879951477055, 0.006725599765777588, 0.006972415924072266, 0.006754303932189941, 0.006662144184112549, 0.0067276802062988285, 0.006739967823028564, 0.006623231887817383, 0.006656000137329102, 0.006686719894409179, 0.006612031936645508, 0.006796224117279053, 0.0067010560035705566, 0.006658048152923584, 0.006667263984680176, 0.006719488143920899, 0.006653952121734619, 0.006708223819732666, 0.006681663990020752, 0.006625216007232666, 0.006688767910003662, 0.006691840171813965, 0.0065781760215759275, 0.006618112087249756, 0.006604800224304199, 0.0065413122177124024, 0.0065781760215759275, 0.006559743881225586, 0.006562816143035889, 0.006625279903411865, 0.006624256134033203, 0.006686719894409179, 0.0066938881874084475, 0.006770688056945801, 0.014256128311157227, 0.006601727962493896, 0.0067123198509216305, 0.006761472225189209, 0.006686719894409179, 0.0067041277885437015, 0.006691840171813965, 0.006648831844329834, 0.006724607944488525, 0.006722559928894043, 0.006683648109436035, 0.0066641921997070315, 0.006699007987976074, 0.006687744140625, 0.006688767910003662, 0.00672870397567749, 0.006650879859924317, 0.006647808074951172, 0.006594560146331787, 0.0065484800338745115, 0.006557695865631104, 0.006603775978088379, 0.006592512130737305, 0.006557695865631104, 0.006594560146331787, 0.006574079990386963, 0.00653926420211792, 0.0065771517753601075, 0.006700032234191895, 0.006551551818847656, 0.006543360233306885, 0.00658739185333252, 0.0066304001808166506, 0.006604800224304199, 0.006622208118438721, 0.006689792156219483, 0.0066713600158691405, 0.006699007987976074, 0.006731776237487793, 0.006668288230895996, 0.006703104019165039, 0.006722559928894043, 0.006645760059356689, 0.006675456047058105, 0.00667955207824707, 0.006624256134033203, 0.006686719894409179, 0.006707200050354004, 0.006656000137329102, 0.0066109437942504885, 0.00658841609954834, 0.0065443840026855465, 0.006564864158630371, 0.006584320068359375, 0.006560768127441406, 0.006614016056060791, 0.006668288230895996, 0.006685696125030518, 0.006688767910003662, 0.00673689603805542, 0.006662144184112549, 0.006684671878814697, 0.006643712043762207, 0.014236672401428223, 0.006684671878814697, 0.006696959972381592, 0.006686719894409179, 0.0066406397819519045, 0.006670335769653321, 0.006689792156219483, 0.006631455898284912, 0.00667849588394165, 0.006680575847625733, 0.006628352165222168, 0.00673689603805542, 0.006723584175109864, 0.006696959972381592, 0.006719488143920899, 0.006743040084838867, 0.0066416640281677245, 0.006887423992156983, 0.00684441614151001, 0.0066713600158691405, 0.006700032234191895, 0.0066826238632202144, 0.00666323184967041, 0.006700992107391357, 0.006660096168518067, 0.006643712043762207, 0.006717440128326416, 0.006726655960083008, 0.006665215969085693, 0.006723584175109864, 0.006663167953491211, 0.006603807926177979, 0.006680543899536133, 0.006645760059356689, 0.006585343837738037, 0.006866943836212158, 0.006668288230895996, 0.006576128005981445, 0.0066713600158691405, 0.006624256134033203, 0.006639616012573242, 0.006696959972381592, 0.0066375679969787596, 0.006638591766357422, 0.0066826558113098145, 0.00662937593460083, 0.006691808223724365, 0.006779903888702392, 0.006768703937530518, 0.00694163179397583, 0.0067717118263244626, 0.006684671878814697, 0.006723584175109864, 0.006661119937896728, 0.006642687797546387, 0.0067123198509216305, 0.006628352165222168, 0.006646783828735352, 0.006667263984680176, 0.006606847763061524, 0.006631423950195312, 0.0067041277885437015, 0.006604800224304199, 
0.01434931182861328, 0.006623231887817383, 0.006648831844329834, 0.006688767910003662, 0.006618112087249756, 0.00667955207824707, 0.006669312000274658, 0.006657023906707763, 0.006662144184112549, 0.006690815925598144, 0.006612991809844971, 0.006651904106140137, 0.006677504062652588, 0.006643712043762207, 0.006601727962493896, 0.006650879859924317, 0.006628352165222168, 0.006595583915710449, 0.006665215969085693, 0.006627327919006347, 0.006631423950195312, 0.006663167953491211, 0.0066109437942504885, 0.0066119680404663084, 0.006681600093841553, 0.006603775978088379, 0.0066416640281677245, 0.006664224147796631, 0.006680543899536133, 0.007458816051483155, 0.0073820161819458, 0.007127039909362793, 0.0068249602317810056, 0.006905856132507324, 0.006700032234191895, 0.006703104019165039, 0.006656000137329102, 0.006643712043762207, 0.006624256134033203, 0.006626304149627686, 0.0066406397819519045, 0.006659071922302246, 0.0066447358131408694, 0.006676479816436768, 0.006658048152923584, 0.006649856090545654, 0.006681600093841553, 0.006667263984680176, 0.006649856090545654, 0.006666240215301514, 0.0066713600158691405, 0.006657023906707763, 0.006652927875518798, 0.006685696125030518, 0.006657023906707763, 0.006683648109436035, 0.006660096168518067, 0.006617087841033936, 0.006669312000274658, 0.006709248065948486, 0.00662937593460083, 0.006684671878814697, 0.00684441614151001, 0.014042112350463867, 0.006527999877929688, 0.006525951862335205, 0.0065146880149841305, 0.00653107213973999, 0.0065372161865234375, 0.006565887928009034, 0.006639616012573242, 0.006625279903411865, 0.0066304001808166506, 0.006650879859924317, 0.006643712043762207, 0.00667852783203125, 0.006645760059356689, 0.006628352165222168, 0.006640704154968262, 0.006639552116394043, 0.006526976108551025, 0.006576128005981445, 0.006633471965789795, 0.006534143924713135, 0.006568960189819336, 0.00662937593460083, 0.0066416640281677245, 0.006621183872222901, 0.0066375679969787596, 0.006652927875518798, 0.006638591766357422, 0.00662937593460083, 0.006626304149627686, 0.006639616012573242, 0.006625279903411865, 0.006652927875518798, 0.006561791896820069, 0.006331391811370849, 0.006592512130737305, 0.006418432235717773, 0.006364160060882569, 0.006404096126556396, 0.006343679904937744, 0.006371327877044678, 0.006340608119964599, 0.00636518383026123, 0.006366208076477051, 0.006364160060882569, 0.006371327877044678, 0.006339583873748779, 0.006410304069519043, 0.006392767906188965, 0.006396927833557129, 0.006333439826965332, 0.006336512088775635, 0.006355967998504639, 0.006423552036285401, 0.006400000095367431, 0.006660096168518067, 0.0066109437942504885, 0.006874112129211426, 0.006673408031463623, 0.006654975891113281, 0.006623231887817383, 0.006549503803253174, 0.0065064959526062015, 0.014049280166625976, 0.0066304001808166506, 0.006616064071655273, 0.006620160102844238, 0.006614016056060791, 0.006619135856628418, 0.006598656177520752, 0.006657023906707763, 0.006594560146331787, 0.0066744318008422855, 0.006616064071655273, 0.00672870397567749, 0.006714367866516113, 0.006683648109436035, 0.006619135856628418, 0.00662937593460083, 0.006631423950195312, 0.006602752208709717, 0.006620160102844238, 0.006619135856628418, 0.006616064071655273, 0.006625279903411865, 0.00662937593460083, 0.006621183872222901, 0.006684671878814697, 0.006653952121734619, 0.006656032085418701, 0.0066303682327270505, 0.006614016056060791, 0.006626304149627686, 0.006622208118438721, 0.006599679946899414, 0.006632448196411133, 0.006633471965789795, 0.006592512130737305, 
0.006820864200592041, 0.0066406397819519045, 0.006590464115142822, 0.006585343837738037, 0.0066109437942504885, 0.006575104236602783, 0.006614016056060791, 0.006603775978088379, 0.006601727962493896, 0.006603775978088379, 0.006623231887817383, 0.006589439868927002, 0.006598656177520752, 0.006599679946899414, 0.006605823993682861, 0.006604800224304199, 0.00658022403717041, 0.006605823993682861, 0.006590464115142822, 0.00658131217956543, 0.006609856128692627, 0.006622208118438721, 0.006593535900115967, 0.006598656177520752, 0.006688767910003662, 0.006590464115142822, 0.006602752208709717, 0.00659660816192627]",tokens/s,148.4182620776904,,,1,64,1,,, -4bit-awq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,Qwen/Qwen1.5-4B,Qwen/Qwen1.5-4B,cuda,0,42,,,True,,,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,qwen2,MB,3154.80064,4836.5568,0.0,4206.886912,4087.771648,s,1,9.9669638671875,9.9669638671875,0.0,9.9669638671875,9.9669638671875,9.9669638671875,9.9669638671875,[9.9669638671875],,kWh,3.6878985546523936e-05,2.0177664651374957e-05,6.230616095603825e-05,0.00011936281115393715,,MB,3164.069888,5012.717568,0.0,4366.270464,4273.705984,s,10,6.158258300781251,0.615825830078125,0.0001394564881974286,0.6158218383789062,0.6160140686035156,0.616033706665039,0.6160494171142578,"[0.6160533447265625, 0.6157760620117188, 0.6160097045898437, 0.6156141357421875, 0.61560302734375, 0.6157960815429687, 0.61578076171875, 0.6158706665039062, 0.6159069213867188, 0.6158475952148438]",tokens/s,415.7019525594165,kWh,7.278747485416817e-06,3.988431284971269e-06,4.346568836729524e-05,5.4732867137683324e-05,tokens/kWh,4677262.737872272,MB,2994.802688,5014.81472,0.0,4368.367616,4274.363904,s,10,19.636390991210938,1.9636390991210937,0.010196123203221615,1.9596771240234374,1.9754818847656248,1.98269638671875,1.98846798828125,"[1.957086669921875, 1.959321533203125, 1.95429345703125, 1.9573751220703124, 1.9643192138671874, 1.96003271484375, 1.989910888671875, 1.973878662109375, 1.957112548828125, 1.9630601806640624]",tokens/s,32.083288639036674,kWh,2.3724367531526445e-05,1.2999708943792967e-05,5.64071693087025e-05,9.313124578402189e-05,tokens/kWh,676464.6974239083,,s,630,19.634412553787243,0.031165734212360685,0.0005077507328764034,0.03099545669555664,0.03184168910980224,0.03211084861755371,0.03313218482971191,"[0.03170918464660644, 0.030904319763183592, 0.030921728134155273, 0.030900224685668946, 0.030561279296875, 0.030818304061889647, 0.030892032623291016, 0.031006719589233397, 0.030991359710693358, 0.03096063995361328, 0.03162112045288086, 0.03150233650207519, 0.03160063934326172, 0.03153919982910156, 0.03138457679748535, 0.031060991287231447, 0.03100569534301758, 0.031015935897827147, 0.031040512084960937, 0.031113216400146484, 0.031137792587280274, 0.030875648498535156, 0.030845951080322266, 0.031032320022583007, 0.031406080245971676, 0.03091967964172363, 0.0310118408203125, 0.03100979232788086, 0.030993408203125, 0.030940160751342774, 0.03099852752685547, 0.030665727615356447, 0.030915584564208985, 0.030971904754638672, 
0.031047679901123046, 0.030996480941772462, 0.030930944442749023, 0.030906368255615234, 0.030847999572753908, 0.030891008377075195, 0.03121151924133301, 0.030894079208374024, 0.031188991546630858, 0.030947328567504883, 0.03101798439025879, 0.03100569534301758, 0.031094783782958983, 0.03167334365844727, 0.030875648498535156, 0.030892032623291016, 0.031006719589233397, 0.030892032623291016, 0.031441919326782225, 0.03236761474609375, 0.030909439086914063, 0.030922752380371094, 0.03116339111328125, 0.030875648498535156, 0.030910463333129884, 0.030942207336425782, 0.03099238395690918, 0.03095961570739746, 0.03081625556945801, 0.03192831993103027, 0.030922752380371094, 0.03096883201599121, 0.030707712173461913, 0.030860288619995117, 0.030879743576049806, 0.030924800872802735, 0.030966783523559572, 0.03097395133972168, 0.03093606376647949, 0.030946304321289062, 0.03101081657409668, 0.03179929542541504, 0.032922622680664065, 0.031867904663085936, 0.03134873580932617, 0.03097599983215332, 0.030834688186645507, 0.03078860855102539, 0.030711807250976563, 0.03101388740539551, 0.030795808792114257, 0.031161312103271485, 0.030462976455688476, 0.03094937515258789, 0.031014911651611327, 0.030911487579345705, 0.030900224685668946, 0.030842880249023437, 0.03118694305419922, 0.03149926376342774, 0.03144908714294434, 0.03103436851501465, 0.03081523132324219, 0.03192422485351563, 0.03179212760925293, 0.0309749755859375, 0.03099750328063965, 0.03118182373046875, 0.030852096557617188, 0.031575040817260744, 0.03136614418029785, 0.031014911651611327, 0.030885887145996094, 0.031031295776367186, 0.030893056869506837, 0.0310118408203125, 0.03133030319213867, 0.03119513511657715, 0.03098419189453125, 0.03100262451171875, 0.03100467109680176, 0.030854143142700196, 0.030956544876098634, 0.03128422355651855, 0.030935039520263673, 0.031154176712036134, 0.03103539276123047, 0.030871551513671876, 0.03101081657409668, 0.030869504928588868, 0.030908416748046875, 0.030907392501831055, 0.031909887313842776, 0.031066144943237305, 0.030971872329711915, 0.031124479293823244, 0.030955520629882813, 0.03092787170410156, 0.031067136764526368, 0.031031295776367186, 0.030980096817016602, 0.03079167938232422, 0.030502912521362304, 0.030866432189941406, 0.030744575500488282, 0.031049728393554688, 0.031409151077270506, 0.03133030319213867, 0.03273523330688476, 0.03143475151062012, 0.03102003288269043, 0.03168767929077149, 0.030519296646118164, 0.03083673667907715, 0.03100057601928711, 0.030922752380371094, 0.030893056869506837, 0.030619647979736327, 0.031077375411987306, 0.031097856521606446, 0.030875648498535156, 0.030873600006103515, 0.030840831756591795, 0.03094528007507324, 0.03081216049194336, 0.0309616641998291, 0.030765056610107422, 0.031029247283935548, 0.030889984130859374, 0.030929920196533203, 0.03101081657409668, 0.03132723236083984, 0.030988288879394532, 0.03151260757446289, 0.030962656021118164, 0.03081318473815918, 0.0305664005279541, 0.030872575759887694, 0.030907392501831055, 0.03138764762878418, 0.03125964736938477, 0.030996480941772462, 0.030714879989624022, 0.031088640213012695, 0.030903295516967775, 0.03100467109680176, 0.03098214340209961, 0.030889984130859374, 0.030964736938476563, 0.03098931121826172, 0.03082137680053711, 0.030877695083618165, 0.030866432189941406, 0.030962688446044922, 0.03092787170410156, 0.03194367980957031, 0.031152128219604492, 0.03096985626220703, 0.030892032623291016, 0.03095244789123535, 0.03097292709350586, 0.030996480941772462, 0.03100979232788086, 0.031044607162475587, 
0.03391692733764649, 0.03218431854248047, 0.031110143661499022, 0.030856191635131838, 0.030859264373779297, 0.03101081657409668, 0.030915584564208985, 0.030867456436157226, 0.03156070327758789, 0.030862335205078126, 0.03158425521850586, 0.031718399047851564, 0.030860288619995117, 0.03114291191101074, 0.031097856521606446, 0.03082342338562012, 0.03099750328063965, 0.030884864807128907, 0.030857215881347655, 0.030697471618652345, 0.031062015533447264, 0.030864383697509764, 0.031575040817260744, 0.031112192153930664, 0.03149619293212891, 0.030849023818969725, 0.030921728134155273, 0.03099545669555664, 0.03100467109680176, 0.030895103454589845, 0.030955520629882813, 0.030717952728271485, 0.030930944442749023, 0.030734336853027344, 0.03098111915588379, 0.030857215881347655, 0.030861312866210938, 0.030814207077026368, 0.03082444763183594, 0.030636032104492186, 0.031369216918945314, 0.03079680061340332, 0.031285247802734374, 0.031070207595825194, 0.03100364875793457, 0.030833663940429686, 0.030955520629882813, 0.03058380889892578, 0.030893056869506837, 0.03080601692199707, 0.03055308723449707, 0.03079475212097168, 0.031471616744995115, 0.030926847457885744, 0.03148902320861816, 0.03213312149047851, 0.03155251121520996, 0.031025152206420898, 0.030922752380371094, 0.03077631950378418, 0.030793727874755858, 0.030862335205078126, 0.031007743835449218, 0.030873600006103515, 0.03098521614074707, 0.030877695083618165, 0.030896127700805662, 0.030908416748046875, 0.03101286315917969, 0.03218431854248047, 0.031045631408691408, 0.031036415100097657, 0.03183923149108887, 0.030894079208374024, 0.030880767822265624, 0.030851072311401367, 0.03081625556945801, 0.030897151947021483, 0.031108095169067384, 0.03129548835754394, 0.031006719589233397, 0.030895103454589845, 0.030827520370483398, 0.030892032623291016, 0.030736383438110353, 0.0315545597076416, 0.03078963279724121, 0.03186380767822266, 0.0319815673828125, 0.031055871963500976, 0.03101388740539551, 0.031023103713989256, 0.030877695083618165, 0.031076351165771485, 0.031078399658203124, 0.030930944442749023, 0.03096985626220703, 0.0320552978515625, 0.033121280670166016, 0.032655361175537106, 0.031648767471313476, 0.03176755142211914, 0.03158323287963867, 0.03122585678100586, 0.031072256088256835, 0.031111167907714843, 0.031058944702148438, 0.031239168167114258, 0.030881792068481444, 0.031077375411987306, 0.03095961570739746, 0.03097395133972168, 0.031029247283935548, 0.030930944442749023, 0.03103436851501465, 0.030660608291625976, 0.030501888275146483, 0.03200511932373047, 0.03095347213745117, 0.032709632873535156, 0.03355750274658203, 0.031663103103637694, 0.03174195289611816, 0.031033344268798828, 0.030887935638427736, 0.030833663940429686, 0.03077836799621582, 0.03171225547790527, 0.031007743835449218, 0.03128934478759766, 0.030915584564208985, 0.03131289672851562, 0.031077375411987306, 0.030861312866210938, 0.03080089569091797, 0.030874624252319335, 0.03076300811767578, 0.030915584564208985, 0.030917631149291993, 0.030871551513671876, 0.030916608810424805, 0.0318023681640625, 0.031062015533447264, 0.03157811164855957, 0.0319682559967041, 0.032024574279785153, 0.03055820846557617, 0.030664703369140626, 0.03124838447570801, 0.031352832794189454, 0.03096575927734375, 0.030840831756591795, 0.03081625556945801, 0.030921728134155273, 0.030947328567504883, 0.031119359970092773, 0.030874624252319335, 0.030693376541137695, 0.031369216918945314, 0.03083673667907715, 0.030718975067138672, 0.031088640213012695, 0.03079475212097168, 0.030883840560913086, 
0.030817279815673827, 0.03078656005859375, 0.03059916877746582, 0.030745599746704103, 0.030910463333129884, 0.03080806350708008, 0.030955520629882813, 0.030748672485351562, 0.030734336853027344, 0.03187302398681641, 0.031170560836791993, 0.030637056350708007, 0.03078451156616211, 0.03094118309020996, 0.03125657653808594, 0.030526464462280273, 0.03179929542541504, 0.03075584030151367, 0.030841856002807616, 0.03095244789123535, 0.030954496383666992, 0.030884864807128907, 0.033535999298095705, 0.032707584381103515, 0.031440895080566404, 0.031123455047607423, 0.0316364803314209, 0.03094425582885742, 0.030909439086914063, 0.03168563270568848, 0.031085567474365236, 0.03167231941223145, 0.03287039947509766, 0.031049728393554688, 0.03105177688598633, 0.031094783782958983, 0.031253503799438476, 0.03094937515258789, 0.031733760833740236, 0.03077939224243164, 0.030840831756591795, 0.030955520629882813, 0.03114291191101074, 0.03099852752685547, 0.03202560043334961, 0.031107072830200196, 0.030903295516967775, 0.03182489585876465, 0.03154841613769531, 0.030868480682373047, 0.031052799224853517, 0.03155046463012695, 0.03096575927734375, 0.031937536239624024, 0.03099238395690918, 0.030831615447998048, 0.03095142364501953, 0.03307929611206055, 0.03215359878540039, 0.03160063934326172, 0.03219660949707031, 0.03266048049926758, 0.03183718490600586, 0.03211775970458984, 0.032715774536132815, 0.03199180793762207, 0.031780864715576174, 0.032102401733398435, 0.03198566436767578, 0.031749120712280275, 0.03214438247680664, 0.03177881622314453, 0.031471616744995115, 0.0317573127746582, 0.0320184326171875, 0.032247806549072264, 0.032075775146484374, 0.03198873519897461, 0.03204915237426758, 0.03252326583862305, 0.031270912170410156, 0.03151158332824707, 0.031501279830932614, 0.03165286445617676, 0.03200921630859375, 0.03187507247924805, 0.03179212760925293, 0.032696319580078126, 0.03430604934692383, 0.032140289306640625, 0.031925247192382815, 0.031062015533447264, 0.03099545669555664, 0.030847999572753908, 0.031045631408691408, 0.03096780776977539, 0.030991359710693358, 0.0310118408203125, 0.03115519905090332, 0.030840831756591795, 0.030939136505126953, 0.03181158447265625, 0.031124479293823244, 0.03082956886291504, 0.03192934417724609, 0.03191500854492187, 0.031067136764526368, 0.03080499267578125, 0.031177728652954102, 0.03101286315917969, 0.0315043830871582, 0.031067136764526368, 0.03094937515258789, 0.03121049690246582, 0.03099545669555664, 0.03079680061340332, 0.030505983352661133, 0.030954496383666992, 0.031085567474365236, 0.031462432861328125, 0.031399904251098634, 0.030917631149291993, 0.03088899230957031, 0.03088380813598633, 0.031124479293823244, 0.03080806350708008, 0.030906368255615234, 0.03098214340209961, 0.031072256088256835, 0.03099852752685547, 0.031048704147338867, 0.03114291191101074, 0.0312729606628418, 0.03143987274169922, 0.03099545669555664, 0.0309749755859375, 0.031047679901123046, 0.031075328826904298, 0.03196518325805664, 0.032287742614746096, 0.03178598403930664, 0.03139174461364746, 0.03179520034790039, 0.03099033546447754, 0.030898176193237304, 0.0315361270904541, 0.03094528007507324, 0.03083673667907715, 0.03075481605529785, 0.030697471618652345, 0.030886911392211915, 0.03099033546447754, 0.030923776626586914, 0.031054847717285155, 0.03039129638671875, 0.03096575927734375, 0.0313436164855957, 0.03133337593078613, 0.03098726463317871, 0.03118694305419922, 0.031044607162475587, 0.031235071182250978, 0.0310118408203125, 0.03058176040649414, 0.03105075263977051, 0.030903295516967775, 
0.030889984130859374, 0.031497215270996096, 0.032036865234375, 0.031185920715332032, 0.030511104583740234, 0.030879743576049806, 0.03095961570739746, 0.03093708801269531, 0.030793727874755858, 0.03056435203552246, 0.033751041412353515, 0.032347137451171876, 0.030907392501831055, 0.030859264373779297, 0.030840831756591795, 0.030668800354003906, 0.031007743835449218, 0.030971904754638672, 0.031007743835449218, 0.03100364875793457, 0.03077529525756836, 0.030922752380371094, 0.030950399398803712, 0.031091712951660157, 0.031441919326782225, 0.0311910400390625, 0.030910463333129884, 0.03098624038696289, 0.031902719497680664, 0.030938112258911132, 0.031148031234741212, 0.030887935638427736, 0.030675968170166015, 0.030835712432861328, 0.03105177688598633, 0.030727167129516602, 0.03074662399291992, 0.030712831497192384, 0.031056896209716797, 0.03194675254821777, 0.03126272010803223, 0.03120947265625, 0.03159040069580078, 0.03096575927734375, 0.031021055221557618, 0.030882816314697265, 0.03078860855102539, 0.030487552642822265, 0.030640127182006836, 0.03092787170410156, 0.03097395133972168, 0.03096063995361328, 0.030717952728271485, 0.03118489646911621, 0.03135385513305664, 0.03099545669555664, 0.030950399398803712, 0.03099238395690918, 0.031114240646362305, 0.030876672744750977, 0.030940160751342774, 0.030877695083618165, 0.031060991287231447, 0.03138047981262207, 0.030656511306762696, 0.030955520629882813, 0.031048704147338867, 0.03096575927734375, 0.030940160751342774, 0.030932992935180665, 0.031562751770019534, 0.03136000061035156, 0.03144396781921387, 0.031088640213012695, 0.03167027282714844, 0.03180339241027832, 0.030867456436157226, 0.03100467109680176, 0.03078656005859375, 0.03073023986816406, 0.030898176193237304, 0.030842880249023437, 0.030864383697509764, 0.031040512084960937, 0.031903743743896484, 0.031649791717529296, 0.031156223297119142, 0.03119206428527832, 0.031286272048950195, 0.03101286315917969, 0.03104256057739258, 0.03098214340209961, 0.033271808624267575, 0.03313663864135742, 0.03236556625366211, 0.031096832275390625, 0.030921728134155273, 0.030940160751342774, 0.03096883201599121, 0.030891008377075195, 0.030963712692260743, 0.030513151168823242]",tokens/s,32.086521472142586,,,1,64,1,,, -4bit-awq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,Qwen/Qwen1.5-0.5B,Qwen/Qwen1.5-0.5B,cuda,0,42,,,True,,,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,qwen2,MB,1326.03904,1361.575936,0.0,731.906048,703.86944,s,1,7.79654541015625,7.79654541015625,0.0,7.79654541015625,7.79654541015625,7.79654541015625,7.79654541015625,[7.79654541015625],,kWh,1.1280419570122528e-05,6.146386480828162e-06,1.6709180033935844e-05,3.413598608488653e-05,,MB,1544.208384,1644.691456,0.0,998.244352,942.610432,s,10,0.6653628158569337,0.06653628158569337,5.659954334923705e-05,0.0665267677307129,0.06656996002197266,0.06662890167236328,0.06667605499267577,"[0.06653401947021484, 0.0665568618774414, 0.06650665283203125, 0.06650559997558594, 0.06648870086669922, 0.0666878433227539, 0.06647344207763672, 0.06653046417236329, 
0.0665230712890625, 0.06655615997314453]",tokens/s,3847.5248976799617,kWh,7.856900327905564e-07,4.305215519469161e-07,4.665230001496601e-06,5.8814415862340725e-06,tokens/kWh,43526743.61319613,MB,1568.575488,1653.080064,0.0,1006.63296,942.612992,s,10,12.171869262695312,1.2171869262695312,0.013856863879426303,1.2206013793945312,1.230363671875,1.2316453369140625,1.2326706689453126,"[1.1865623779296874, 1.216954345703125, 1.22931201171875, 1.2242484130859375, 1.2164495849609376, 1.2068826904296874, 1.230078857421875, 1.2031109619140625, 1.232927001953125, 1.225343017578125]",tokens/s,51.75868935191752,kWh,1.456183573859857e-05,7.979633393721754e-06,2.6286294759705842e-05,4.882776389202618e-05,tokens/kWh,1290249.5420292679,,s,630,12.169670648574828,0.019316937537420364,0.00043083679187514654,0.01943142318725586,0.019630386543273926,0.019816908931732175,0.020307343902587892,"[0.018611200332641603, 0.018563072204589845, 0.018685951232910156, 0.018572288513183592, 0.018494464874267577, 0.018537471771240235, 0.018685951232910156, 0.01907711982727051, 0.019749887466430666, 0.0218603515625, 0.019974143981933593, 0.019408895492553712, 0.01922047996520996, 0.0194201602935791, 0.019693567276000978, 0.019487743377685548, 0.019527679443359376, 0.01965158462524414, 0.019568639755249022, 0.019594240188598632, 0.019180543899536134, 0.01837059211730957, 0.018216928482055663, 0.01843507194519043, 0.018523136138916017, 0.01845043182373047, 0.01840742492675781, 0.018465791702270508, 0.018405376434326173, 0.018354175567626953, 0.018540544509887694, 0.01845248031616211, 0.018546688079833985, 0.01877299118041992, 0.018964479446411133, 0.018791423797607423, 0.018498559951782227, 0.018553855895996094, 0.01863577651977539, 0.018489343643188477, 0.018496511459350586, 0.018537471771240235, 0.018537471771240235, 0.019366912841796875, 0.018593791961669923, 0.018251775741577148, 0.018541568756103514, 0.018534400939941405, 0.018534400939941405, 0.018329599380493163, 0.018669567108154296, 0.018497535705566406, 0.01864806365966797, 0.018569215774536133, 0.018481151580810547, 0.01866547203063965, 0.018565120697021483, 0.01846067237854004, 0.01860915184020996, 0.01844121551513672, 0.018490367889404297, 0.018915327072143554, 0.02007859230041504, 0.01859891128540039, 0.01946112060546875, 0.019351551055908203, 0.019376127243041993, 0.019507200241088866, 0.019366912841796875, 0.019323904037475585, 0.019174400329589843, 0.018961408615112304, 0.0192993278503418, 0.019322879791259767, 0.019294208526611328, 0.019358720779418945, 0.019388416290283202, 0.019349504470825195, 0.019478527069091797, 0.019520511627197267, 0.019380224227905272, 0.01939558410644531, 0.01943756866455078, 0.0194334716796875, 0.01940787124633789, 0.019397632598876953, 0.01943654441833496, 0.01945599937438965, 0.019385343551635743, 0.019349567413330077, 0.019270591735839844, 0.018487295150756835, 0.018319360733032225, 0.019330047607421876, 0.019353599548339845, 0.019369983673095705, 0.019382272720336914, 0.019359743118286133, 0.019317760467529296, 0.0194467830657959, 0.01940787124633789, 0.019382272720336914, 0.019330047607421876, 0.019367935180664063, 0.019619840621948242, 0.01940377616882324, 0.019409919738769533, 0.019380224227905272, 0.01824563217163086, 0.018373632431030275, 0.018973695755004884, 0.019501056671142578, 0.02001408004760742, 0.019681280136108398, 0.019281919479370118, 0.01938739204406738, 0.019500032424926757, 0.019405824661254883, 0.019393535614013673, 0.019508224487304687, 0.01942527961730957, 0.019314687728881837, 0.01947648048400879, 
0.01943142318725586, 0.01930342483520508, 0.019404800415039062, 0.0184453125, 0.019313663482666017, 0.0194201602935791, 0.01946316719055176, 0.019503103256225587, 0.019533824920654298, 0.019405824661254883, 0.0196495361328125, 0.019312639236450196, 0.01945907211303711, 0.019532800674438477, 0.019546112060546874, 0.01908531188964844, 0.019345407485961915, 0.019396608352661132, 0.019397632598876953, 0.0190515193939209, 0.019369983673095705, 0.019521535873413084, 0.019422208786010742, 0.01942732810974121, 0.019087360382080077, 0.019184640884399414, 0.020124671936035156, 0.020370431900024414, 0.019320831298828126, 0.019894271850585937, 0.019587072372436523, 0.019478527069091797, 0.0192542724609375, 0.01920921516418457, 0.019538944244384765, 0.019408895492553712, 0.019569664001464843, 0.019288063049316406, 0.01967820739746094, 0.020214784622192384, 0.01987276840209961, 0.019504127502441407, 0.019697664260864257, 0.0194703369140625, 0.019717119216918946, 0.0195020809173584, 0.019591167449951173, 0.01986662483215332, 0.01940377616882324, 0.019552255630493166, 0.019359743118286133, 0.019467264175415038, 0.019932159423828123, 0.019518463134765626, 0.019519487380981446, 0.01960038375854492, 0.019579904556274414, 0.01958502388000488, 0.019597312927246095, 0.019581951141357423, 0.019548160552978516, 0.019558399200439454, 0.01946009635925293, 0.019697664260864257, 0.019551231384277345, 0.019588096618652344, 0.01858252716064453, 0.019506175994873046, 0.019535871505737306, 0.019111936569213867, 0.01903308868408203, 0.019087360382080077, 0.019356672286987304, 0.01941196823120117, 0.019560447692871095, 0.019050495147705078, 0.018762752532958983, 0.0192993278503418, 0.01947238349914551, 0.019479551315307618, 0.019542015075683594, 0.019382272720336914, 0.01944473648071289, 0.01942323112487793, 0.019558399200439454, 0.0193832950592041, 0.019298303604125978, 0.019414016723632813, 0.02004275131225586, 0.020007936477661133, 0.019517440795898438, 0.01942527961730957, 0.01958502388000488, 0.01946009635925293, 0.019418111801147463, 0.01942323112487793, 0.019474431991577147, 0.019390464782714844, 0.01941094398498535, 0.019520511627197267, 0.019483648300170898, 0.019486719131469727, 0.0194652156829834, 0.019531776428222656, 0.019518463134765626, 0.01941196823120117, 0.01948467254638672, 0.01947648048400879, 0.019536895751953123, 0.019571712493896484, 0.019506175994873046, 0.01922662353515625, 0.01923993682861328, 0.01945088005065918, 0.019475456237792968, 0.019539968490600586, 0.01965465545654297, 0.019573759078979493, 0.019521535873413084, 0.01961676788330078, 0.01903513526916504, 0.019167232513427734, 0.019558399200439454, 0.01946316719055176, 0.019504127502441407, 0.01962598419189453, 0.019531776428222656, 0.019479551315307618, 0.019535871505737306, 0.01839411163330078, 0.018327552795410155, 0.018506752014160157, 0.018518047332763674, 0.018249696731567382, 0.018539520263671876, 0.018288639068603514, 0.018695167541503906, 0.01944268798828125, 0.019771392822265626, 0.02008166313171387, 0.01962393569946289, 0.019555328369140625, 0.0194969596862793, 0.01962495994567871, 0.019273727416992188, 0.01923686408996582, 0.020051967620849608, 0.019353599548339845, 0.019190784454345702, 0.01940787124633789, 0.019349504470825195, 0.01943142318725586, 0.019361791610717775, 0.019392511367797852, 0.01943449592590332, 0.019390464782714844, 0.019408895492553712, 0.019094528198242186, 0.01903718376159668, 0.01882316780090332, 0.019317760467529296, 0.019515392303466796, 0.01946419143676758, 0.019414016723632813, 0.0194334716796875, 
0.01945497512817383, 0.01942323112487793, 0.019534847259521485, 0.01927475166320801, 0.01939148712158203, 0.01942118453979492, 0.01944063949584961, 0.019353599548339845, 0.019409919738769533, 0.01944166374206543, 0.01948569679260254, 0.019319807052612305, 0.019537919998168944, 0.01943654441833496, 0.01943756866455078, 0.01949388885498047, 0.01944883155822754, 0.019195903778076173, 0.0193832950592041, 0.019590143203735352, 0.019260416030883788, 0.019397632598876953, 0.01945292854309082, 0.01946419143676758, 0.019317760467529296, 0.0194201602935791, 0.019715072631835938, 0.01869004821777344, 0.019489791870117186, 0.019463232040405273, 0.019475391387939453, 0.019517440795898438, 0.019700735092163087, 0.019524608612060547, 0.019579904556274414, 0.019177471160888672, 0.018899967193603515, 0.019384319305419923, 0.019497983932495116, 0.01906892776489258, 0.019354623794555666, 0.01946316719055176, 0.01927577590942383, 0.019471359252929688, 0.019458047866821288, 0.019483648300170898, 0.01945088005065918, 0.0194150390625, 0.019535871505737306, 0.019347455978393553, 0.019516416549682617, 0.01945395278930664, 0.01942425537109375, 0.01939558410644531, 0.019466239929199217, 0.019260416030883788, 0.01946931266784668, 0.01941606330871582, 0.019588096618652344, 0.019404800415039062, 0.01957683181762695, 0.019522560119628905, 0.019512319564819337, 0.019556352615356445, 0.019538944244384765, 0.01946419143676758, 0.019529727935791014, 0.01960960006713867, 0.019353599548339845, 0.019506175994873046, 0.019530752182006835, 0.019268608093261717, 0.01827123260498047, 0.01847091293334961, 0.01845043182373047, 0.018661376953125, 0.018739200592041014, 0.018967552185058592, 0.018553855895996094, 0.018498559951782227, 0.018486272811889647, 0.01821696090698242, 0.018310144424438478, 0.018259967803955078, 0.018282495498657226, 0.018530303955078126, 0.01855897521972656, 0.018456575393676757, 0.01844326400756836, 0.0184586238861084, 0.018759679794311524, 0.01939148712158203, 0.01947750473022461, 0.019500032424926757, 0.01943961524963379, 0.019540992736816407, 0.019398656845092774, 0.01946931266784668, 0.019571712493896484, 0.01960038375854492, 0.019525632858276368, 0.01945907211303711, 0.0194703369140625, 0.020368383407592772, 0.02031820869445801, 0.02026188850402832, 0.019573759078979493, 0.019529727935791014, 0.01943552017211914, 0.01963007926940918, 0.019581951141357423, 0.01960550308227539, 0.019545087814331053, 0.019487743377685548, 0.019581951141357423, 0.01947238349914551, 0.019764223098754884, 0.019400703430175782, 0.019522560119628905, 0.019629056930541993, 0.019580928802490235, 0.01924505615234375, 0.01926144027709961, 0.01960038375854492, 0.019490816116333007, 0.019121152877807617, 0.01906175994873047, 0.0202608642578125, 0.019729408264160156, 0.01944371223449707, 0.019376127243041993, 0.01904640007019043, 0.01944576072692871, 0.019409919738769533, 0.019749887466430666, 0.019603456497192383, 0.019503103256225587, 0.019506175994873046, 0.01944576072692871, 0.0192542724609375, 0.01964031982421875, 0.019766271591186522, 0.019520511627197267, 0.01943756866455078, 0.01947238349914551, 0.019572736740112305, 0.01947340774536133, 0.019451904296875, 0.019351551055908203, 0.019474431991577147, 0.01943961524963379, 0.01948467254638672, 0.019365888595581054, 0.018502656936645507, 0.018264064788818358, 0.018395135879516602, 0.0184770565032959, 0.01843404769897461, 0.018473983764648438, 0.01844121551513672, 0.01840640068054199, 0.01848422431945801, 0.018437120437622072, 0.01844326400756836, 0.018538496017456055, 
0.018455551147460936, 0.018365440368652345, 0.018947071075439453, 0.019133440017700197, 0.018937856674194335, 0.01942118453979492, 0.019570688247680663, 0.019577856063842772, 0.019518463134765626, 0.019489791870117186, 0.019406848907470704, 0.019551231384277345, 0.020065280914306642, 0.01980620765686035, 0.018340864181518556, 0.018473983764648438, 0.018479103088378905, 0.01846067237854004, 0.018727935791015626, 0.01840742492675781, 0.019200000762939453, 0.019310592651367187, 0.019731456756591798, 0.019544063568115236, 0.019389440536499023, 0.019503103256225587, 0.01948467254638672, 0.01927577590942383, 0.01942835235595703, 0.0194334716796875, 0.01939967918395996, 0.01942732810974121, 0.019366912841796875, 0.01923481559753418, 0.018939903259277344, 0.018962432861328125, 0.019495935440063478, 0.019422208786010742, 0.019348480224609374, 0.01944780731201172, 0.019327999114990235, 0.01943961524963379, 0.01943552017211914, 0.019501056671142578, 0.019482624053955077, 0.019400703430175782, 0.01943142318725586, 0.01942323112487793, 0.019364864349365234, 0.018933759689331055, 0.019408895492553712, 0.02030080032348633, 0.019825664520263672, 0.0194703369140625, 0.019509248733520508, 0.01966694450378418, 0.01942835235595703, 0.019355648040771483, 0.019458047866821288, 0.019536895751953123, 0.019497983932495116, 0.019194879531860352, 0.01987481689453125, 0.01988198471069336, 0.01948876762390137, 0.01946931266784668, 0.019580928802490235, 0.01941094398498535, 0.01941913604736328, 0.019483648300170898, 0.02000383949279785, 0.02031001663208008, 0.020163583755493163, 0.01984921646118164, 0.019569664001464843, 0.01927884864807129, 0.019519487380981446, 0.01945292854309082, 0.019489791870117186, 0.019409919738769533, 0.019466239929199217, 0.0194150390625, 0.01942118453979492, 0.019494911193847657, 0.019378175735473634, 0.019430400848388672, 0.019505151748657225, 0.01945395278930664, 0.01943961524963379, 0.019489791870117186, 0.01945088005065918, 0.01946931266784668, 0.019594240188598632, 0.019506175994873046, 0.019571712493896484, 0.019562496185302734, 0.019687423706054686, 0.01969049644470215, 0.01946112060546875, 0.01966182327270508, 0.0194969596862793, 0.0194150390625, 0.01963315200805664, 0.019579904556274414, 0.019491840362548828, 0.01944883155822754, 0.019753984451293945, 0.019573759078979493, 0.019513343811035155, 0.01945497512817383, 0.019458047866821288, 0.019708927154541016, 0.01960960006713867, 0.019559423446655275, 0.0194334716796875, 0.02047488021850586, 0.01944268798828125, 0.020374528884887694, 0.019512319564819337, 0.01965977668762207, 0.019366912841796875, 0.019984384536743165, 0.019497983932495116, 0.019354623794555666, 0.019508224487304687, 0.019528703689575197, 0.019727359771728514, 0.019380224227905272, 0.019375104904174805, 0.019533824920654298, 0.019371007919311522, 0.01945088005065918, 0.019384319305419923, 0.01944371223449707, 0.019486719131469727, 0.019412992477416992, 0.01923891258239746, 0.019365888595581054, 0.019422239303588867, 0.01946723175048828, 0.019508224487304687, 0.019347455978393553, 0.019301376342773437, 0.01947238349914551, 0.019389440536499023, 0.019417087554931642, 0.0194201602935791, 0.019534847259521485, 0.019384319305419923, 0.019366912841796875, 0.019533824920654298, 0.019514368057250975, 0.019557376861572266, 0.019409919738769533, 0.019506175994873046, 0.01942527961730957, 0.019564544677734375, 0.01942732810974121, 0.01943756866455078, 0.019458047866821288, 0.01941196823120117, 0.019688447952270507, 0.019542015075683594, 0.019503103256225587, 
0.01979903984069824, 0.019989503860473632, 0.01943552017211914, 0.019491840362548828, 0.019184640884399414, 0.018770944595336913, 0.018437120437622072, 0.018535423278808593, 0.01861734390258789, 0.019458047866821288, 0.019382272720336914, 0.019310592651367187, 0.019430400848388672]",tokens/s,51.768040252903496,,,1,64,1,,, -4bit-awq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,EleutherAI/pythia-1.3b,EleutherAI/pythia-1.3b,cuda,0,42,,,True,,,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,gpt_neox,MB,1498.578944,2057.8304,0.0,1428.160512,1322.516992,s,1,8.043470703125,8.043470703125,0.0,8.043470703125,8.043470703125,8.043470703125,8.043470703125,[8.043470703125],,kWh,1.4869393246528439e-05,8.128364014095185e-06,2.4487797367966913e-05,4.748555462859054e-05,,MB,1585.139712,2080.899072,0.0,1434.451968,1322.072064,s,10,2.3700211486816407,0.23700211486816408,0.00014628120418445564,0.23697551727294922,0.23720361175537108,0.23721438522338867,0.23722300399780274,"[0.23679385375976564, 0.23691746520996093, 0.23683987426757813, 0.23686026000976562, 0.23722515869140626, 0.23698191833496093, 0.23712345886230468, 0.23720121765136717, 0.23710882568359376, 0.2369691162109375]",tokens/s,1080.1591375773326,kWh,2.8012727609818925e-06,1.534958349451334e-06,1.5669883336929253e-05,2.0006114447362477e-05,tokens/kWh,12796087.9496893,MB,1589.735424,2080.899072,0.0,1434.451968,1374.944768,s,10,10.550449584960937,1.055044958496094,0.007495976780818197,1.051786376953125,1.0656725952148438,1.0665871520996095,1.0673187976074219,"[1.05142919921875, 1.0503306884765624, 1.067501708984375, 1.0652364501953124, 1.0654693603515626, 1.053809326171875, 1.05025244140625, 1.0478289794921876, 1.0521435546875, 1.0464478759765625]",tokens/s,59.7130951554926,kWh,1.2396601371726552e-05,6.793206260861025e-06,2.4594482207877248e-05,4.3784289840464834e-05,tokens/kWh,1438872.2582814686,,s,630,10.546668542861939,0.016740743718828475,0.0002903452044736156,0.01662105655670166,0.01717862319946289,0.017269299221038818,0.017798635387420658,"[0.016670719146728515, 0.01660825538635254, 0.01664102363586426, 0.016664575576782227, 0.0165980167388916, 0.016545791625976563, 0.01663385581970215, 0.016552959442138672, 0.016571392059326173, 0.0165928955078125, 0.01664614486694336, 0.016664575576782227, 0.016859136581420898, 0.01661235237121582, 0.01666662406921387, 0.01660518455505371, 0.0166748161315918, 0.01664204788208008, 0.01663385581970215, 0.01658367919921875, 0.016630783081054687, 0.01665843200683594, 0.016718847274780273, 0.016688127517700196, 0.01660416030883789, 0.01660518455505371, 0.016680959701538087, 0.016611328125, 0.016763904571533202, 0.016667648315429686, 0.01763020706176758, 0.016929792404174804, 0.0167956485748291, 0.017460224151611328, 0.016990207672119142, 0.016703487396240235, 0.016638975143432617, 0.016662527084350585, 0.01662873649597168, 0.016635904312133788, 0.016638975143432617, 0.01661030387878418, 0.01661030387878418, 0.016683008193969725, 0.0165928955078125, 0.0166297607421875, 0.016653312683105468, 0.016623615264892578, 
0.0166748161315918, 0.016639999389648438, 0.01661747169494629, 0.01658572769165039, 0.0165980167388916, 0.01656729507446289, 0.016706560134887697, 0.016827392578125, 0.01660006332397461, 0.01658163261413574, 0.016625696182250977, 0.016603103637695314, 0.016665599822998048, 0.016668672561645507, 0.016688127517700196, 0.016514047622680664, 0.01660825538635254, 0.01659391975402832, 0.016767999649047852, 0.016733184814453125, 0.01662873649597168, 0.01657753562927246, 0.01663385581970215, 0.016924671173095703, 0.01666662406921387, 0.01658880043029785, 0.01658163261413574, 0.01663488006591797, 0.016900096893310547, 0.017294336318969726, 0.017145856857299805, 0.017117183685302736, 0.01678950309753418, 0.016648223876953125, 0.01665430450439453, 0.01661440086364746, 0.01662054443359375, 0.016630783081054687, 0.01656729507446289, 0.01658572769165039, 0.016635904312133788, 0.016685056686401366, 0.016536575317382812, 0.01659699249267578, 0.016562175750732423, 0.016695295333862305, 0.016590848922729492, 0.01657344055175781, 0.01661644744873047, 0.016582656860351562, 0.01654374313354492, 0.016631807327270508, 0.01660825538635254, 0.016639999389648438, 0.016555007934570314, 0.01661030387878418, 0.016541696548461913, 0.01657344055175781, 0.016571392059326173, 0.0165928955078125, 0.01659187126159668, 0.016550912857055664, 0.01657548713684082, 0.016566272735595702, 0.016548864364624022, 0.016562175750732423, 0.016562175750732423, 0.01664204788208008, 0.01657241630554199, 0.016858112335205077, 0.017064960479736328, 0.016895999908447267, 0.01662054443359375, 0.01661337661743164, 0.016750591278076172, 0.016746496200561522, 0.01665433692932129, 0.01660006332397461, 0.01676595115661621, 0.01719398307800293, 0.017117183685302736, 0.01719808006286621, 0.017310720443725586, 0.016645120620727538, 0.01657651138305664, 0.017035263061523438, 0.016703487396240235, 0.016718847274780273, 0.016652288436889647, 0.016559104919433593, 0.0168724479675293, 0.017252351760864256, 0.017099775314331055, 0.016863231658935548, 0.017282047271728516, 0.01663692855834961, 0.016937984466552734, 0.017201152801513672, 0.01680793571472168, 0.016749568939208984, 0.01660723114013672, 0.01658163261413574, 0.01660927963256836, 0.01701785659790039, 0.016876544952392578, 0.016695295333862305, 0.017130495071411133, 0.016945152282714843, 0.016693248748779296, 0.017116159439086915, 0.017128448486328125, 0.017082368850708008, 0.016951295852661134, 0.01659699249267578, 0.016947200775146484, 0.01717452812194824, 0.016705535888671876, 0.016755712509155272, 0.01695232009887695, 0.0165928955078125, 0.016870399475097657, 0.017238016128540038, 0.017201152801513672, 0.01720217514038086, 0.01724313545227051, 0.017082368850708008, 0.01696460723876953, 0.017145856857299805, 0.017318912506103516, 0.01703014373779297, 0.016869375228881836, 0.01660518455505371, 0.01664102363586426, 0.017188863754272463, 0.01696051216125488, 0.017100799560546876, 0.01721651268005371, 0.016961536407470702, 0.016738304138183592, 0.017126399993896483, 0.017062911987304686, 0.01782476806640625, 0.01741107177734375, 0.01738137626647949, 0.017105920791625977, 0.017192960739135742, 0.01658060836791992, 0.01661337661743164, 0.017217536926269532, 0.01702400016784668, 0.017056768417358398, 0.017458175659179686, 0.01721855926513672, 0.0172359676361084, 0.01723391914367676, 0.01685196876525879, 0.016880640029907225, 0.01719808006286621, 0.016647167205810547, 0.016885791778564453, 0.017127391815185546, 0.017163263320922852, 0.0172042236328125, 0.017160192489624023, 0.01700864028930664, 
0.017298431396484376, 0.017283071517944337, 0.01719398307800293, 0.01744076728820801, 0.017228799819946287, 0.016720895767211915, 0.016739328384399413, 0.01717862319946289, 0.01723289680480957, 0.017135616302490234, 0.01705881690979004, 0.016659456253051756, 0.016541696548461913, 0.016566272735595702, 0.01658572769165039, 0.016700416564941405, 0.01675881576538086, 0.016518112182617187, 0.016570367813110352, 0.016544767379760742, 0.016528383255004882, 0.016496639251708984, 0.016514047622680664, 0.01656012725830078, 0.016525312423706053, 0.016562175750732423, 0.016627712249755858, 0.016546815872192384, 0.016792575836181642, 0.017067007064819336, 0.017104896545410156, 0.016753664016723634, 0.01655193519592285, 0.01656524848937988, 0.016665599822998048, 0.01660211181640625, 0.01662566375732422, 0.016570367813110352, 0.01657344055175781, 0.016547840118408205, 0.017054719924926756, 0.01662873649597168, 0.016578559875488282, 0.01664614486694336, 0.016529407501220703, 0.016627712249755858, 0.016540672302246092, 0.01663692855834961, 0.016623615264892578, 0.016590848922729492, 0.01662156867980957, 0.01700966453552246, 0.017172479629516603, 0.017488895416259767, 0.01725644874572754, 0.017076223373413087, 0.01701171112060547, 0.016733184814453125, 0.016648191452026367, 0.017269760131835937, 0.017539072036743163, 0.017447935104370118, 0.017229824066162108, 0.01700864028930664, 0.01679052734375, 0.016638975143432617, 0.016747520446777343, 0.016623680114746093, 0.016587711334228514, 0.016630783081054687, 0.017171455383300782, 0.017179647445678712, 0.016876544952392578, 0.01738035202026367, 0.017253376007080077, 0.017148927688598634, 0.0166430721282959, 0.016618528366088868, 0.016729055404663087, 0.017262592315673828, 0.017114112854003907, 0.016679935455322266, 0.016578559875488282, 0.016672767639160157, 0.016722944259643553, 0.01659699249267578, 0.01662156867980957, 0.01660211181640625, 0.017879039764404296, 0.01879347229003906, 0.01766912078857422, 0.017369087219238282, 0.017084415435791016, 0.017035263061523438, 0.01661644744873047, 0.016684032440185546, 0.016625696182250977, 0.016561151504516602, 0.016578527450561525, 0.01662668800354004, 0.01661337661743164, 0.0165928955078125, 0.017970176696777345, 0.01735577583312988, 0.01773465538024902, 0.016689151763916017, 0.017133567810058595, 0.017056768417358398, 0.01703321647644043, 0.016894975662231446, 0.016582656860351562, 0.016550912857055664, 0.016582656860351562, 0.01657753562927246, 0.01662259292602539, 0.016579584121704103, 0.016649215698242188, 0.01655705642700195, 0.01659903907775879, 0.01659699249267578, 0.01661337661743164, 0.016673824310302735, 0.016720863342285158, 0.016590848922729492, 0.01639116859436035, 0.01634611129760742, 0.016280576705932616, 0.016363519668579102, 0.01638297653198242, 0.017179647445678712, 0.0172728328704834, 0.017031167984008787, 0.01664204788208008, 0.016571392059326173, 0.016574464797973632, 0.016587776184082033, 0.01663488006591797, 0.016608287811279297, 0.016586719512939455, 0.01657753562927246, 0.01659391975402832, 0.01656729507446289, 0.016657407760620118, 0.01658060836791992, 0.016638975143432617, 0.016549888610839843, 0.017257471084594727, 0.01701375961303711, 0.01657651138305664, 0.016566272735595702, 0.016719871520996094, 0.0168724479675293, 0.016587776184082033, 0.01661235237121582, 0.01662566375732422, 0.016563199996948243, 0.01660108757019043, 0.016552959442138672, 0.016859136581420898, 0.01679667282104492, 0.0166430721282959, 0.01662668800354004, 0.016615423202514648, 0.01656012725830078, 
0.016749568939208984, 0.01658163261413574, 0.016631807327270508, 0.016563199996948243, 0.016556032180786134, 0.01662873649597168, 0.016688127517700196, 0.016586751937866212, 0.016638975143432617, 0.0165980167388916, 0.016647167205810547, 0.01660006332397461, 0.016578559875488282, 0.016615423202514648, 0.01663283157348633, 0.016503807067871093, 0.01660006332397461, 0.01660211181640625, 0.01658880043029785, 0.01660211181640625, 0.01662873649597168, 0.016561151504516602, 0.016724992752075195, 0.016544767379760742, 0.016653312683105468, 0.0165980167388916, 0.01661235237121582, 0.01662668800354004, 0.01658470344543457, 0.016553983688354493, 0.016540672302246092, 0.016571392059326173, 0.016561151504516602, 0.016536575317382812, 0.016563199996948243, 0.016564224243164064, 0.016564224243164064, 0.016578559875488282, 0.01657753562927246, 0.01659187126159668, 0.016587776184082033, 0.01659596824645996, 0.016566272735595702, 0.016553983688354493, 0.016582687377929686, 0.016582624435424805, 0.016891904830932617, 0.01662668800354004, 0.01660313606262207, 0.01660313606262207, 0.01657651138305664, 0.016574464797973632, 0.01660723114013672, 0.01659699249267578, 0.016587776184082033, 0.01661849594116211, 0.01661337661743164, 0.0165980167388916, 0.01662054443359375, 0.01659699249267578, 0.016582656860351562, 0.017528831481933595, 0.018757631301879883, 0.01757798385620117, 0.0166297607421875, 0.016645120620727538, 0.01658060836791992, 0.016553983688354493, 0.01655193519592285, 0.01660518455505371, 0.016527360916137695, 0.016555007934570314, 0.01664102363586426, 0.016558080673217773, 0.016552959442138672, 0.01656934356689453, 0.0166297607421875, 0.016509952545166014, 0.016544767379760742, 0.016523263931274415, 0.016541696548461913, 0.016525312423706053, 0.01656934356689453, 0.01660108757019043, 0.016579584121704103, 0.0169932804107666, 0.01717862319946289, 0.016951295852661134, 0.01657651138305664, 0.01660620880126953, 0.01660927963256836, 0.016718847274780273, 0.016714752197265623, 0.016561151504516602, 0.016656383514404297, 0.016526336669921874, 0.01662054443359375, 0.016582656860351562, 0.016579584121704103, 0.016582656860351562, 0.01657548713684082, 0.016587776184082033, 0.016552959442138672, 0.016513023376464844, 0.016538623809814454, 0.01653555107116699, 0.016655359268188476, 0.016710655212402344, 0.01726361656188965, 0.016780288696289062, 0.016566272735595702, 0.016729087829589845, 0.016570367813110352, 0.01661235237121582, 0.01658880043029785, 0.016594944000244142, 0.01660108757019043, 0.016537599563598633, 0.016693248748779296, 0.016578559875488282, 0.0165980167388916, 0.01661235237121582, 0.01658163261413574, 0.0165928955078125, 0.01661952018737793, 0.01662566375732422, 0.01660825538635254, 0.0164136962890625, 0.01660211181640625, 0.01655705642700195, 0.01657753562927246, 0.01664102363586426, 0.016563199996948243, 0.016664575576782227, 0.01661440086364746, 0.01659596824645996, 0.01655705642700195, 0.01721958351135254, 0.016990207672119142, 0.016657407760620118, 0.016672767639160157, 0.01664204788208008, 0.016635904312133788, 0.016588832855224608, 0.018375648498535156, 0.017912832260131836, 0.017268735885620116, 0.017099775314331055, 0.016927743911743166, 0.016635904312133788, 0.01658163261413574, 0.01660620880126953, 0.016564224243164064, 0.016649215698242188, 0.01699635124206543, 0.016865280151367186, 0.01659903907775879, 0.01665843200683594, 0.01662566375732422, 0.01663385581970215, 0.0165928955078125, 0.01658470344543457, 0.016539648056030275, 0.01657753562927246, 0.01662054443359375, 
0.016559104919433593, 0.01656934356689453, 0.01659699249267578, 0.01656524848937988, 0.016556032180786134, 0.016548864364624022, 0.01660927963256836, 0.01661644744873047, 0.01659596824645996, 0.01656831932067871, 0.016553983688354493, 0.016550912857055664, 0.016644096374511717, 0.016564224243164064, 0.0166297607421875, 0.01657548713684082, 0.016587776184082033, 0.016518144607543944, 0.016562175750732423, 0.016537599563598633, 0.016590848922729492, 0.01680998420715332, 0.016639999389648438, 0.016587776184082033, 0.016541696548461913, 0.017333248138427734, 0.016764928817749023, 0.01658470344543457, 0.01658367919921875, 0.016571392059326173, 0.016582656860351562, 0.01657548713684082, 0.016562175750732423, 0.016578559875488282, 0.01659391975402832, 0.016574464797973632, 0.01659903907775879, 0.016611328125, 0.01659596824645996, 0.016571392059326173, 0.016605215072631838, 0.016531423568725587, 0.01659903907775879, 0.016715776443481444, 0.016364543914794923, 0.01639833641052246, 0.01641472053527832, 0.016534528732299804, 0.01662156867980957, 0.016566272735595702, 0.016544767379760742, 0.016574464797973632, 0.016564224243164064, 0.0166246395111084, 0.01660620880126953, 0.01657548713684082, 0.016555007934570314, 0.016648191452026367, 0.016673791885375978, 0.01665126419067383, 0.016582656860351562, 0.01664102363586426, 0.01659699249267578, 0.016582656860351562, 0.01657344055175781, 0.01662259292602539, 0.01662668800354004, 0.016736255645751954, 0.016574464797973632, 0.016564224243164064, 0.016550912857055664, 0.01660620880126953, 0.01656729507446289, 0.016671743392944336, 0.01658367919921875, 0.01660620880126953, 0.01658060836791992, 0.016578559875488282, 0.016570367813110352, 0.016708608627319335, 0.01660313606262207, 0.016660480499267577, 0.01659187126159668, 0.01661337661743164, 0.016571392059326173, 0.01661644744873047, 0.016667648315429686, 0.01659187126159668]",tokens/s,59.734502647889556,,,1,64,1,,, -4bit-awq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,Qwen/Qwen1.5-72B,Qwen/Qwen1.5-72B,cuda,0,42,,,True,,,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run - report = scenario.run(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run - self.run_model_loading_tracking(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", 
line 182, in run_model_loading_tracking - backend.load() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load - self.load_transformers_model() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model - self.load_transformers_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights - self.load_transformers_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained - self.pretrained_model = self.automodel_loader.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4034, in from_pretrained - dispatch_model(model, **device_map_kwargs) - File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 494, in dispatch_model - model.to(device) - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 2905, in to - return super().to(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1174, in to - return self._apply(convert) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply - module._apply(fn) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply - module._apply(fn) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply - module._apply(fn) - [Previous line repeated 2 more times] - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 854, in _apply - self._buffers[key] = fn(buf) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1160, in convert - return t.to( -torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 96.00 MiB. GPU 0 has a total capacity of 22.18 GiB of which 68.50 MiB is free. Process 69057 has 22.11 GiB memory in use. Of the allocated memory 21.86 GiB is allocated by PyTorch, and 10.74 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) - -",qwen2,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,1,64,1,,, -4bit-awq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,EleutherAI/pythia-1.4b,EleutherAI/pythia-1.4b,cuda,0,42,,,True,,,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,gpt_neox,MB,1576.185856,2057.8304,0.0,1428.160512,1322.516992,s,1,8.2050634765625,8.2050634765625,0.0,8.2050634765625,8.2050634765625,8.2050634765625,8.2050634765625,[8.2050634765625],,kWh,1.6170524206247846e-05,8.846551147715235e-06,2.543168701196752e-05,5.04487623659306e-05,,MB,1640.09984,2080.899072,0.0,1434.451968,1322.072064,s,10,2.3696000061035156,0.23696000061035155,0.00010204711414666431,0.23693441772460938,0.23710650482177736,0.23712297744750976,0.2371361555480957,"[0.2369820098876953, 0.23690130615234375, 0.23684857177734375, 0.23687126159667968, 0.23705215454101564, 0.23696202087402343, 0.2369068145751953, 0.23710284423828126, 0.2371394500732422, 0.2368335723876953]",tokens/s,1080.3511113293637,kWh,2.7996959028102647e-06,1.5340947183461622e-06,1.5943682005581383e-05,2.027747262673781e-05,tokens/kWh,12624847.519825485,MB,1642.029056,2080.899072,0.0,1434.451968,1374.944768,s,10,10.558951904296874,1.0558951904296876,0.012972447054749056,1.0547797241210937,1.0668655639648437,1.075362225341797,1.0821595544433593,"[1.08385888671875, 1.0628895263671876, 1.0649774169921875, 1.04964453125, 1.04729541015625, 1.0371549072265625, 1.040891357421875, 1.062680419921875, 1.058754638671875, 1.0508048095703124]",tokens/s,59.66501275033053,kWh,1.2636025965383839e-05,6.924405764130217e-06,2.595512929121635e-05,4.5515561020730395e-05,tokens/kWh,1384142.0074182143,,s,630,10.555245550155629,0.016754358016120064,0.0003640637733219087,0.01688422393798828,0.01713940410614014,0.01723857946395874,0.01761313835144043,"[0.01744179153442383, 0.017340415954589843, 0.01720524787902832, 0.017067007064819336, 0.017122304916381836, 0.017064960479736328, 0.017238016128540038, 0.017145856857299805, 0.01724006462097168, 0.017089536666870117, 0.01723904037475586, 0.017524736404418945, 0.0172677116394043, 0.017131519317626954, 0.017238016128540038, 0.017022975921630858, 0.017177600860595704, 0.017092607498168946, 0.017526784896850587, 0.017071104049682616, 0.01722060775756836, 0.017160192489624023, 0.017094655990600584, 0.01723494338989258, 0.017138687133789063, 0.017091583251953125, 0.017123327255249024, 0.017098751068115235, 0.01743257522583008, 0.017353727340698243, 0.01721343994140625, 0.01721036720275879, 0.017111040115356444, 0.01720524787902832, 0.01704960060119629, 0.017177600860595704, 0.017184768676757813, 0.017117183685302736, 0.017217567443847656, 0.017112031936645507, 0.017147903442382813, 0.017091583251953125, 0.017338367462158204, 0.017346559524536134, 0.01714995193481445, 0.01716531181335449, 0.01706598472595215, 0.01699737548828125, 0.017336320877075196, 0.01702911949157715, 0.017117183685302736, 0.01699942398071289, 
0.017126399993896483, 0.018530303955078126, 0.01742438316345215, 0.017064960479736328, 0.017083391189575196, 0.01707827186584473, 0.017129472732543945, 0.017101823806762697, 0.017172479629516603, 0.01721241569519043, 0.016945152282714843, 0.016710655212402344, 0.01659596824645996, 0.016473087310791015, 0.01703321647644043, 0.016876544952392578, 0.016321535110473632, 0.016272384643554686, 0.016342016220092775, 0.016470016479492186, 0.016941055297851563, 0.017596416473388672, 0.017306623458862306, 0.01721139144897461, 0.01699737548828125, 0.01701888084411621, 0.01696460723876953, 0.016931840896606445, 0.016953344345092772, 0.016991231918334963, 0.016945152282714843, 0.016943103790283204, 0.01704652786254883, 0.016958463668823243, 0.0169881591796875, 0.01724825668334961, 0.01746227264404297, 0.016753664016723634, 0.016259071350097656, 0.017167360305786132, 0.01706188774108887, 0.01699839973449707, 0.016886783599853517, 0.016957439422607423, 0.016935935974121095, 0.017081344604492187, 0.016990207672119142, 0.016954368591308593, 0.016884735107421875, 0.016738304138183592, 0.016918527603149415, 0.016661504745483398, 0.016315391540527344, 0.016372735977172852, 0.016344064712524413, 0.016711679458618164, 0.016920576095581053, 0.016881664276123046, 0.016680959701538087, 0.01698099136352539, 0.01699942398071289, 0.01698508834838867, 0.017476608276367187, 0.016977920532226562, 0.016417791366577148, 0.01639321517944336, 0.016442367553710938, 0.017113088607788086, 0.016969728469848632, 0.017076223373413087, 0.01719603157043457, 0.0170199031829834, 0.01703219223022461, 0.0163768310546875, 0.017054719924926756, 0.016631807327270508, 0.0163768310546875, 0.01680998420715332, 0.01704652786254883, 0.016942079544067384, 0.016955392837524414, 0.01700966453552246, 0.01698099136352539, 0.016950271606445313, 0.01696051216125488, 0.016927743911743166, 0.017789951324462892, 0.01716531181335449, 0.01698918342590332, 0.016931840896606445, 0.01698918342590332, 0.016873472213745116, 0.016645120620727538, 0.016504831314086914, 0.016909311294555664, 0.016915456771850586, 0.016955392837524414, 0.016720895767211915, 0.01639423942565918, 0.01635430335998535, 0.016497663497924805, 0.016334848403930666, 0.016918527603149415, 0.017022975921630858, 0.016962560653686523, 0.016898048400878905, 0.016916479110717773, 0.017081344604492187, 0.017301504135131835, 0.016516096115112306, 0.016703487396240235, 0.016925695419311524, 0.017047552108764647, 0.0169932804107666, 0.017148927688598634, 0.01702400016784668, 0.01739263916015625, 0.01705062484741211, 0.016449535369873047, 0.01642905616760254, 0.01696460723876953, 0.016940031051635742, 0.01696051216125488, 0.016894975662231446, 0.016890880584716796, 0.01716223907470703, 0.017115135192871094, 0.01684787178039551, 0.01640755271911621, 0.01696767997741699, 0.016929792404174804, 0.016977920532226562, 0.01701785659790039, 0.01718169593811035, 0.01699839973449707, 0.0169564151763916, 0.01698099136352539, 0.016355327606201172, 0.016424959182739257, 0.0164136962890625, 0.016390144348144533, 0.01646899223327637, 0.0164003849029541, 0.016821247100830078, 0.01681100845336914, 0.017006591796875, 0.017055744171142577, 0.01701171112060547, 0.01701068878173828, 0.01703014373779297, 0.016892927169799805, 0.016944128036499022, 0.017304576873779298, 0.017135616302490234, 0.01697689628601074, 0.016891904830932617, 0.017105920791625977, 0.01700351905822754, 0.01645260810852051, 0.016348159790039063, 0.01665126419067383, 0.016949247360229493, 0.016947200775146484, 0.016883712768554687, 
0.016845823287963867, 0.01676288032531738, 0.016881664276123046, 0.016898048400878905, 0.016846847534179688, 0.016899072647094726, 0.016931840896606445, 0.017114112854003907, 0.016941055297851563, 0.016897024154663084, 0.016941055297851563, 0.016886783599853517, 0.016862207412719727, 0.016536575317382812, 0.016305152893066405, 0.016318464279174806, 0.016315391540527344, 0.016289791107177733, 0.016316415786743164, 0.016315391540527344, 0.016309247970581055, 0.016318464279174806, 0.016358400344848634, 0.016343040466308592, 0.016305152893066405, 0.016301055908203126, 0.016249856948852538, 0.016338943481445312, 0.01640345573425293, 0.0165928955078125, 0.016471040725708007, 0.016360448837280273, 0.016316415786743164, 0.016286720275878908, 0.016273408889770507, 0.016274431228637695, 0.018135040283203126, 0.017925119400024413, 0.017133567810058595, 0.0169932804107666, 0.016863231658935548, 0.016760831832885743, 0.01644339179992676, 0.01622425651550293, 0.016314367294311523, 0.016430080413818358, 0.016275455474853515, 0.016333824157714845, 0.016307199478149414, 0.016273408889770507, 0.016301055908203126, 0.016284671783447266, 0.01632972717285156, 0.016350208282470705, 0.016307199478149414, 0.016308223724365235, 0.016664575576782227, 0.016954368591308593, 0.01697177505493164, 0.016910335540771485, 0.016887807846069337, 0.01697587203979492, 0.016919551849365236, 0.016917503356933594, 0.016933887481689454, 0.01699225616455078, 0.01699430465698242, 0.016911359786987306, 0.016929792404174804, 0.016880640029907225, 0.016875520706176757, 0.01639321517944336, 0.016331775665283203, 0.016321535110473632, 0.016363519668579102, 0.016301055908203126, 0.016302080154418946, 0.016236543655395508, 0.01635327911376953, 0.01643724822998047, 0.01640550422668457, 0.016306175231933593, 0.016291839599609375, 0.016273408889770507, 0.016313343048095702, 0.016283647537231445, 0.016322559356689453, 0.016336896896362304, 0.016292863845825196, 0.01627136039733887, 0.016564224243164064, 0.016355327606201172, 0.016358400344848634, 0.0163768310546875, 0.01697587203979492, 0.017105920791625977, 0.01700454330444336, 0.01700249671936035, 0.01701171112060547, 0.01659596824645996, 0.01654374313354492, 0.016278528213500978, 0.0166430721282959, 0.01700147247314453, 0.016484352111816408, 0.016336896896362304, 0.016322559356689453, 0.016319488525390623, 0.016304128646850585, 0.016336896896362304, 0.01637887954711914, 0.017367040634155274, 0.01662054443359375, 0.01640447998046875, 0.01643622398376465, 0.016327680587768553, 0.016311296463012694, 0.016294912338256837, 0.01627136039733887, 0.016320512771606444, 0.016373760223388673, 0.016358400344848634, 0.016309247970581055, 0.016295936584472655, 0.016275455474853515, 0.016267263412475585, 0.016240640640258788, 0.016293888092041017, 0.016365568161010743, 0.016283647537231445, 0.016306175231933593, 0.016318464279174806, 0.016297983169555663, 0.016322559356689453, 0.016320512771606444, 0.016363519668579102, 0.016335872650146483, 0.01637580871582031, 0.016281600952148437, 0.016302080154418946, 0.016319488525390623, 0.017138687133789063, 0.016781312942504883, 0.01639936065673828, 0.016482303619384766, 0.01700044822692871, 0.016931840896606445, 0.016886783599853517, 0.016679935455322266, 0.016390144348144533, 0.016318464279174806, 0.016451583862304688, 0.016321535110473632, 0.01639321517944336, 0.016324607849121094, 0.01637171173095703, 0.0169564151763916, 0.01659903907775879, 0.01658060836791992, 0.01639321517944336, 0.01646899223327637, 0.01641267204284668, 0.016513023376464844, 
0.01640345573425293, 0.016303104400634767, 0.016286720275878908, 0.016283647537231445, 0.016311296463012694, 0.016286720275878908, 0.016323583602905273, 0.016307199478149414, 0.0164003849029541, 0.016293888092041017, 0.016380928039550782, 0.01638707160949707, 0.016358400344848634, 0.016305152893066405, 0.016395263671875, 0.016305152893066405, 0.016357376098632814, 0.01637171173095703, 0.016336896896362304, 0.01638809585571289, 0.016339967727661133, 0.016300031661987305, 0.016324607849121094, 0.016299007415771484, 0.016704511642456055, 0.016538623809814454, 0.01639936065673828, 0.016380928039550782, 0.01640652847290039, 0.01640243148803711, 0.016360448837280273, 0.01698508834838867, 0.016953344345092772, 0.016901119232177735, 0.016729087829589845, 0.01640959930419922, 0.016318464279174806, 0.016328704833984374, 0.016294912338256837, 0.016327680587768553, 0.016296960830688476, 0.016286720275878908, 0.016380928039550782, 0.016505855560302735, 0.01646080017089844, 0.016314367294311523, 0.016303104400634767, 0.01657241630554199, 0.016892927169799805, 0.01702604866027832, 0.01686016082763672, 0.016891904830932617, 0.01680384063720703, 0.01685196876525879, 0.01683456039428711, 0.016940031051635742, 0.016857088088989256, 0.016911359786987306, 0.016858112335205077, 0.016940031051635742, 0.016869375228881836, 0.016876544952392578, 0.01617817687988281, 0.016331775665283203, 0.016874496459960937, 0.016389120101928712, 0.016350208282470705, 0.01638400077819824, 0.016321535110473632, 0.016235519409179687, 0.016735231399536133, 0.016922624588012695, 0.016965631484985352, 0.016946176528930663, 0.017062911987304686, 0.01704652786254883, 0.01701683235168457, 0.01698508834838867, 0.01699430465698242, 0.016949247360229493, 0.01700556755065918, 0.017067007064819336, 0.016928768157958983, 0.017330175399780275, 0.01702400016784668, 0.01703628730773926, 0.017006591796875, 0.0170199031829834, 0.017079296112060546, 0.017094655990600584, 0.01717043113708496, 0.017067007064819336, 0.0170199031829834, 0.016897024154663084, 0.01704140853881836, 0.016982080459594727, 0.017012672424316408, 0.016925695419311524, 0.016950271606445313, 0.01698508834838867, 0.016313343048095702, 0.016288768768310546, 0.016319488525390623, 0.016344064712524413, 0.016380928039550782, 0.01637171173095703, 0.017110015869140623, 0.017460224151611328, 0.018122751235961913, 0.01723494338989258, 0.017084415435791016, 0.016928768157958983, 0.016935935974121095, 0.016570367813110352, 0.016349184036254884, 0.016862207412719727, 0.016886783599853517, 0.016962560653686523, 0.016947200775146484, 0.016941055297851563, 0.016911359786987306, 0.016846847534179688, 0.016954368591308593, 0.016916479110717773, 0.016928768157958983, 0.016733184814453125, 0.016562175750732423, 0.016236543655395508, 0.016289791107177733, 0.01637785530090332, 0.016295936584472655, 0.016274431228637695, 0.016323583602905273, 0.01663385581970215, 0.016550912857055664, 0.01639116859436035, 0.016320512771606444, 0.016660480499267577, 0.01684889602661133, 0.016944128036499022, 0.01686016082763672, 0.01696051216125488, 0.016919551849365236, 0.016900096893310547, 0.016871423721313478, 0.016918527603149415, 0.016926719665527345, 0.016925695419311524, 0.016902143478393555, 0.017108991622924806, 0.016939008712768554, 0.01696870422363281, 0.016891904830932617, 0.016978944778442383, 0.016945152282714843, 0.016948223114013672, 0.016764928817749023, 0.016317440032958985, 0.016306175231933593, 0.016332799911499024, 0.016265216827392577, 0.016949247360229493, 0.016886783599853517, 
0.016946176528930663, 0.016869375228881836, 0.016917503356933594, 0.016874496459960937, 0.016928768157958983, 0.016899072647094726, 0.016969728469848632, 0.016940031051635742, 0.016936960220336913, 0.016883712768554687, 0.016880640029907225, 0.016982015609741212, 0.0170383358001709, 0.016918527603149415, 0.016957439422607423, 0.01699737548828125, 0.017079296112060546, 0.016911359786987306, 0.017080320358276366, 0.016932863235473633, 0.01719193649291992, 0.01725132751464844, 0.016921600341796874, 0.016963584899902344, 0.016873472213745116, 0.016861183166503906, 0.01644441604614258, 0.016327680587768553, 0.01787494468688965, 0.01761996841430664, 0.01723391914367676, 0.01723187255859375, 0.016932863235473633, 0.016983039855957033, 0.01702911949157715, 0.01702604866027832, 0.016845823287963867, 0.01697689628601074, 0.016950271606445313, 0.016866304397583007, 0.016945152282714843, 0.016947200775146484, 0.016894975662231446, 0.016918527603149415, 0.016320512771606444, 0.016558080673217773, 0.016631807327270508, 0.016159744262695314, 0.016308223724365235, 0.016319488525390623, 0.016291839599609375, 0.016267263412475585, 0.016285696029663087, 0.016327680587768553, 0.016348159790039063, 0.016447488784790038, 0.016870399475097657, 0.016950271606445313, 0.016966655731201173, 0.017071104049682616, 0.017117183685302736, 0.016977920532226562, 0.016927743911743166, 0.01698918342590332, 0.01664102363586426, 0.01701273536682129, 0.01701580810546875, 0.01665023994445801, 0.01617817687988281, 0.016234495162963866, 0.016307199478149414, 0.016282623291015624, 0.01643212890625, 0.016299007415771484, 0.01640447998046875, 0.01663283157348633, 0.01663283157348633, 0.01698508834838867, 0.016733184814453125, 0.016478208541870116, 0.016408575057983397, 0.016336896896362304, 0.016314367294311523, 0.016259071350097656, 0.016242687225341796, 0.016256000518798826, 0.016330751419067382, 0.016335872650146483]",tokens/s,59.68596343935461,,,1,64,1,,, -4bit-awq-exllama-v1-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,facebook/xglm-7.5B,facebook/xglm-7.5B,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File 
""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - self.load_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3704, in from_pretrained - config = cls._autoset_attn_implementation( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1490, in _autoset_attn_implementation - config = cls._check_and_enable_sdpa( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1656, in _check_and_enable_sdpa - raise ValueError( -ValueError: XGLMForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,1,64,1,,, -4bit-awq-exllama-v1-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,huggyllama/llama-30b,huggyllama/llama-30b,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - self.load_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, 
in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3907, in from_pretrained - hf_quantizer.postprocess_model(model) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/base.py"", line 195, in postprocess_model - return self._process_model_after_weight_loading(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/quantizer_awq.py"", line 107, in _process_model_after_weight_loading - model = post_init_awq_exllama_modules(model, self.quantization_config.exllama_config) - File ""/usr/local/lib/python3.10/dist-packages/transformers/integrations/awq.py"", line 462, in post_init_awq_exllama_modules - model = exllama_post_init(model) - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllama.py"", line 144, in exllama_post_init - submodule.post_init() - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllama.py"", line 77, in post_init - self.q4 = exl_ext.make_q4( -NameError: name 'exl_ext' is not defined - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,1,64,1,,, -4bit-awq-exllama-v1-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,openai-community/gpt2-large,openai-community/gpt2-large,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.42.3,,0.31.0,,,,1.20.0,,,,0.11.1,,,,,,,,,,,,,,,,,,,,,,,,,,,MB,1240.674304,2645.03296,0.0,1998.585856,1692.386816,s,10,0.18838963317871094,0.018838963317871094,0.000668527770232222,0.01876473617553711,0.019819794464111327,0.020010393524169924,0.0201628727722168,"[0.020200992584228517, 0.018689184188842772, 0.018129568099975586, 0.018872800827026366, 0.018840288162231444, 0.018487136840820314, 0.01897756767272949, 0.018571807861328126, 0.01977743911743164, 0.01784284782409668]",tokens/s,13588.858138343114,kWh,2.1014254247072237e-07,1.1514753753080715e-07,6.21637013718738e-07,9.469270937202676e-07,tokens/kWh,270348162.70198005,MB,1241.542656,2645.03296,0.0,1998.585856,1714.454528,s,10,11.323864135742188,1.1323864135742188,0.011448048499887543,1.1325510864257813,1.1412751220703123,1.1484743530273438,1.1542337377929688,"[1.155673583984375, 1.1301966552734375, 1.125267822265625, 1.13967529296875, 1.1369483642578124, 1.1275675048828124, 1.134905517578125, 1.137067626953125, 1.128013916015625, 1.1085478515625]",tokens/s,55.634719071866414,kWh,1.3173522658266065e-05,7.218674734789249e-06,2.631794009349133e-05,4.6710137486546655e-05,tokens/kWh,1348743.6216205768,,s,629,11.47224270248413,0.018238859622391305,0.0022825385783415146,0.017979391098022462,0.018243788528442384,0.01849487419128418,0.036483277893066435,"[0.01969254493713379, 0.019286016464233398, 0.018386943817138672, 0.01817087936401367, 0.018189311981201172, 0.018092031478881835, 0.017952768325805665, 0.01804902458190918, 0.01804287910461426, 0.018106367111206053, 0.018114559173583983, 0.018313215255737304, 0.01825279998779297, 0.018108415603637695, 0.018390016555786134, 0.018233343124389647, 0.01801215934753418, 
0.018094079971313477, 0.018000896453857423, 0.018353151321411132, 0.018100223541259765, 0.018144256591796876, 0.01808896064758301, 0.018141183853149414, 0.018148351669311523, 0.0180633602142334, 0.01801625633239746, 0.018678783416748047, 0.021622783660888673, 0.02020147132873535, 0.01885593605041504, 0.018364416122436524, 0.01823744010925293, 0.018144287109375, 0.01806947135925293, 0.018033664703369142, 0.01805619239807129, 0.018111488342285157, 0.01805721664428711, 0.01799782371520996, 0.018125823974609375, 0.018130943298339842, 0.018159616470336915, 0.018207744598388673, 0.01799065589904785, 0.01802956771850586, 0.017930240631103517, 0.018335744857788085, 0.018184192657470705, 0.01800294494628906, 0.018115583419799804, 0.01796403121948242, 0.017979391098022462, 0.01794767951965332, 0.017952735900878907, 0.01824358367919922, 0.01883443260192871, 0.018513919830322266, 0.019216384887695313, 0.018311168670654295, 0.018207744598388673, 0.018132991790771484, 0.03711590576171875, 0.018126848220825196, 0.018250751495361327, 0.018081792831420897, 0.01820057678222656, 0.018028543472290038, 0.018085887908935547, 0.01806540870666504, 0.018287616729736327, 0.01806438446044922, 0.018077695846557617, 0.01805721664428711, 0.01807257652282715, 0.01805619239807129, 0.018027519226074217, 0.01808793640136719, 0.01816268730163574, 0.017923072814941408, 0.017959936141967774, 0.017911808013916015, 0.017880064010620117, 0.017904640197753906, 0.017917951583862304, 0.017812480926513673, 0.017854463577270507, 0.017885183334350584, 0.01789952087402344, 0.017932287216186525, 0.017929216384887696, 0.017934335708618163, 0.017915903091430666, 0.017862655639648437, 0.017945600509643556, 0.01789952087402344, 0.017789951324462892, 0.01761689567565918, 0.017846271514892577, 0.017707008361816406, 0.01761484718322754, 0.01819443130493164, 0.017971200942993162, 0.017912832260131836, 0.018914304733276367, 0.018008064270019532, 0.017911808013916015, 0.01794867134094238, 0.017984512329101563, 0.017914880752563478, 0.01775923156738281, 0.01762611198425293, 0.017617919921875, 0.017911808013916015, 0.017870847702026366, 0.017944576263427735, 0.017878015518188475, 0.01788313674926758, 0.01760358428955078, 0.017565696716308594, 0.017853439331054686, 0.017912832260131836, 0.017679359436035155, 0.01764352035522461, 0.017670143127441407, 0.0357386245727539, 0.01742438316345215, 0.017318912506103516, 0.017556480407714844, 0.018902015686035157, 0.018452512741088868, 0.018180063247680664, 0.018086912155151368, 0.017971200942993162, 0.01794047927856445, 0.017310720443725586, 0.017369087219238282, 0.017303552627563477, 0.017358848571777344, 0.017308671951293944, 0.017299455642700197, 0.017254400253295898, 0.017160192489624023, 0.017280000686645508, 0.017236991882324217, 0.01718681526184082, 0.01723494338989258, 0.017777664184570312, 0.018258943557739257, 0.017957887649536132, 0.018033664703369142, 0.01795686340332031, 0.018017280578613282, 0.017992704391479493, 0.018125823974609375, 0.018062335968017578, 0.01807257652282715, 0.018371583938598633, 0.018070528030395508, 0.01819340705871582, 0.01804902458190918, 0.017950719833374023, 0.01796505546569824, 0.01794047927856445, 0.01800601577758789, 0.01795996856689453, 0.017928159713745118, 0.017942527770996093, 0.01794047927856445, 0.017959936141967774, 0.017950719833374023, 0.017957887649536132, 0.01839414405822754, 0.01804489517211914, 0.01803468894958496, 0.018081792831420897, 0.01798246383666992, 0.01798963165283203, 0.017976320266723633, 0.018009088516235353, 0.017979391098022462, 
0.017935359954833984, 0.01799577522277832, 0.0180633602142334, 0.017704959869384765, 0.017729536056518554, 0.017947647094726564, 0.01823539161682129, 0.03723571014404297, 0.01800499153137207, 0.01802137565612793, 0.018008064270019532, 0.01804800033569336, 0.01798041534423828, 0.018050048828125, 0.018017280578613282, 0.018017280578613282, 0.01801625633239746, 0.01802649688720703, 0.017946624755859376, 0.018349056243896485, 0.018127872467041017, 0.018093088150024413, 0.018162656784057617, 0.018086912155151368, 0.017929216384887696, 0.018070528030395508, 0.018010112762451173, 0.018086912155151368, 0.018000896453857423, 0.018121728897094725, 0.01808896064758301, 0.01804800033569336, 0.018101247787475586, 0.01803775978088379, 0.01799884796142578, 0.018012191772460936, 0.018047967910766603, 0.01800704002380371, 0.01801420783996582, 0.018051071166992186, 0.01809516716003418, 0.018114559173583983, 0.018010047912597655, 0.017934335708618163, 0.018033664703369142, 0.018043903350830077, 0.01803980827331543, 0.01802137565612793, 0.01807360076904297, 0.01805619239807129, 0.01800294494628906, 0.01850060844421387, 0.01800294494628906, 0.018044927597045898, 0.01801215934753418, 0.01780735969543457, 0.017903615951538086, 0.018132991790771484, 0.01822105598449707, 0.018119680404663087, 0.018314239501953124, 0.018135040283203126, 0.019092479705810548, 0.018257919311523436, 0.01820057678222656, 0.018122751235961913, 0.018153472900390624, 0.018118656158447266, 0.018137088775634767, 0.018069503784179687, 0.03716403198242187, 0.018043903350830077, 0.018045984268188476, 0.0180930233001709, 0.01803059196472168, 0.018078720092773438, 0.018091007232666014, 0.01803468894958496, 0.01803264045715332, 0.01808896064758301, 0.018115583419799804, 0.01807974433898926, 0.01805721664428711, 0.0182609920501709, 0.01886720085144043, 0.01839411163330078, 0.018025472640991212, 0.018324480056762696, 0.018110464096069336, 0.017944576263427735, 0.019140607833862306, 0.018117631912231445, 0.01798963165283203, 0.0179814395904541, 0.01799782371520996, 0.017936384201049805, 0.017999872207641602, 0.017979391098022462, 0.01804902458190918, 0.018182144165039063, 0.01805721664428711, 0.017922048568725587, 0.017914880752563478, 0.01790771293640137, 0.017983488082885742, 0.017889280319213868, 0.017697792053222656, 0.017358848571777344, 0.017933311462402343, 0.017617919921875, 0.017758207321166994, 0.017855487823486327, 0.018168832778930662, 0.018528255462646484, 0.018096128463745118, 0.01825279998779297, 0.018225151062011717, 0.018009088516235353, 0.017928192138671875, 0.01790771293640137, 0.01789030456542969, 0.017947647094726564, 0.018000896453857423, 0.017963008880615236, 0.017904640197753906, 0.017999872207641602, 0.017941503524780272, 0.018561023712158203, 0.017903615951538086, 0.017975296020507812, 0.017901567459106444, 0.017920000076293945, 0.0176363525390625, 0.036772865295410156, 0.01801215934753418, 0.017957887649536132, 0.017976320266723633, 0.017951744079589844, 0.01795686340332031, 0.017892351150512697, 0.017918975830078124, 0.017987583160400392, 0.01798246383666992, 0.0179835205078125, 0.017826784133911134, 0.017147903442382813, 0.017548288345336914, 0.017847295761108398, 0.017969152450561524, 0.018092031478881835, 0.017920000076293945, 0.01825382423400879, 0.018135040283203126, 0.018070528030395508, 0.0178606071472168, 0.017378303527832033, 0.01724723243713379, 0.017333248138427734, 0.01743974494934082, 0.017967103958129883, 0.01782476806640625, 0.017868799209594728, 0.017937408447265626, 0.017921024322509766, 
0.01763737678527832, 0.017697792053222656, 0.017699840545654297, 0.01781657600402832, 0.01763942337036133, 0.0176680965423584, 0.017870847702026366, 0.018321407318115233, 0.017889280319213868, 0.017992704391479493, 0.01799065589904785, 0.017797119140625, 0.01782374382019043, 0.017903615951538086, 0.017903615951538086, 0.017949695587158202, 0.01796403121948242, 0.01800396728515625, 0.017991680145263672, 0.0180316162109375, 0.0180633602142334, 0.018183168411254884, 0.017985536575317384, 0.017979391098022462, 0.017993728637695314, 0.017938432693481447, 0.017954816818237306, 0.017943552017211914, 0.017901567459106444, 0.018130943298339842, 0.017933311462402343, 0.018486272811889647, 0.03722956848144531, 0.01803980827331543, 0.01794047927856445, 0.018132991790771484, 0.018092031478881835, 0.018223104476928712, 0.01792207908630371, 0.01806230354309082, 0.018544639587402344, 0.01904844856262207, 0.018386943817138672, 0.01824460792541504, 0.01803468894958496, 0.01803775978088379, 0.018050048828125, 0.018197504043579102, 0.01807974433898926, 0.017966079711914062, 0.017991680145263672, 0.017913856506347657, 0.017904640197753906, 0.017971200942993162, 0.017966079711914062, 0.018096128463745118, 0.01803775978088379, 0.01800396728515625, 0.018041856765747072, 0.01805414390563965, 0.01805721664428711, 0.018181119918823242, 0.01798246383666992, 0.01801318359375, 0.017979391098022462, 0.01784832000732422, 0.01763430404663086, 0.017954816818237306, 0.017957887649536132, 0.01799782371520996, 0.017955839157104494, 0.017955839157104494, 0.017897472381591797, 0.017986560821533205, 0.0179814395904541, 0.017994752883911135, 0.017902591705322265, 0.017938432693481447, 0.017884191513061524, 0.017903583526611328, 0.018058240890502928, 0.018147327423095702, 0.017924095153808595, 0.017313791275024415, 0.017262592315673828, 0.017283071517944337, 0.01724313545227051, 0.01841868782043457, 0.018120704650878908, 0.018076671600341796, 0.01804902458190918, 0.017914880752563478, 0.018092031478881835, 0.018143232345581056, 0.01803059196472168, 0.037408767700195314, 0.018696191787719727, 0.018634752273559572, 0.018241535186767577, 0.018098175048828127, 0.017975296020507812, 0.01807155227661133, 0.01803468894958496, 0.018076671600341796, 0.01801420783996582, 0.018050048828125, 0.01805516815185547, 0.018017280578613282, 0.01805516815185547, 0.01799884796142578, 0.018074623107910158, 0.018207744598388673, 0.017936384201049805, 0.017967103958129883, 0.01798041534423828, 0.01808896064758301, 0.018104320526123048, 0.017953792572021485, 0.017905664443969727, 0.01802444839477539, 0.01810534477233887, 0.018036735534667968, 0.017945600509643556, 0.018009088516235353, 0.017758207321166994, 0.017710079193115236, 0.01790771293640137, 0.017830911636352538, 0.017674240112304687, 0.017918975830078124, 0.017930240631103517, 0.017897472381591797, 0.017970176696777345, 0.017894399642944335, 0.017925119400024413, 0.01785753631591797, 0.017978368759155275, 0.01784524726867676, 0.01762713623046875, 0.01818623924255371, 0.0182476806640625, 0.017957887649536132, 0.01804595184326172, 0.01788313674926758, 0.018009088516235353, 0.01800294494628906, 0.017983488082885742, 0.0180633602142334, 0.017942527770996093, 0.017999872207641602, 0.01803980827331543, 0.01800601577758789, 0.01803468894958496, 0.01803980827331543, 0.017936384201049805, 0.017760255813598632, 0.01760870361328125, 0.020915199279785156, 0.03822387313842773, 0.017963008880615236, 0.017894399642944335, 0.017931264877319338, 0.018061311721801757, 0.017958911895751953, 0.017924095153808595, 
0.017893375396728514, 0.017904640197753906, 0.0178657283782959, 0.017979391098022462, 0.017985536575317384, 0.01798963165283203, 0.018050048828125, 0.017963008880615236, 0.017953792572021485, 0.017947647094726564, 0.018153472900390624, 0.017994752883911135, 0.017887231826782226, 0.017880064010620117, 0.017892351150512697, 0.017903615951538086, 0.017954816818237306, 0.017916927337646483, 0.017955839157104494, 0.017898496627807618, 0.017864704132080078, 0.01788211250305176, 0.017979391098022462, 0.01789132881164551, 0.017946624755859376, 0.017966079711914062, 0.017938432693481447, 0.017761280059814453, 0.01759539222717285, 0.01756876754760742, 0.017901567459106444, 0.017921024322509766, 0.01789030456542969, 0.017916927337646483, 0.017934335708618163, 0.017902591705322265, 0.017913856506347657, 0.018020351409912108, 0.017938432693481447, 0.017943552017211914, 0.01785958480834961, 0.017953792572021485, 0.017912832260131836, 0.01795686340332031, 0.01790771293640137, 0.017918975830078124, 0.01789952087402344, 0.017983488082885742, 0.01819443130493164, 0.017913856506347657, 0.017915903091430666, 0.01796505546569824, 0.017887231826782226, 0.01768448066711426, 0.017138687133789063, 0.017300479888916014, 0.03538022232055664, 0.017307647705078123, 0.017283071517944337, 0.01721241569519043, 0.017349632263183593, 0.017378303527832033, 0.017240095138549804, 0.017227743148803713, 0.01722777557373047, 0.017311744689941407, 0.017301504135131835, 0.017290239334106446, 0.017257471084594727, 0.017282047271728516, 0.01723904037475586, 0.017351680755615235, 0.017242111206054688, 0.017192960739135742, 0.017285120010375975, 0.01720832061767578, 0.017253376007080077, 0.017376256942749024, 0.017876991271972655, 0.017971200942993162, 0.017885183334350584, 0.01787392044067383, 0.01839206314086914, 0.01800704002380371, 0.017971200942993162, 0.018276351928710938, 0.017876991271972655, 0.017870847702026366, 0.017926143646240233, 0.017906688690185548, 0.01783500862121582, 0.017947647094726564, 0.017884159088134767, 0.017943552017211914, 0.01803878402709961, 0.018421760559082033, 0.01859993553161621, 0.018069503784179687, 0.017920000076293945, 0.017926143646240233, 0.01790771293640137, 0.017934335708618163, 0.017869855880737303, 0.017647584915161132, 0.01760358428955078, 0.0176312313079834, 0.01762816047668457, 0.017338367462158204, 0.017317888259887695, 0.017361919403076173, 0.017257471084594727, 0.017260543823242186, 0.017282047271728516, 0.017294336318969726, 0.017277952194213866, 0.01724825668334961, 0.017250303268432618, 0.01740492820739746, 0.017288192749023438]",tokens/s,54.82798928790097,,,1,64,1,,, -4bit-awq-exllama-v1-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,tiiuae/falcon-180B,tiiuae/falcon-180B,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in 
launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 304, in hf_raise_for_status - response.raise_for_status() - File ""/usr/local/lib/python3.10/dist-packages/requests/models.py"", line 1024, in raise_for_status - raise HTTPError(http_error_msg, response=self) -requests.exceptions.HTTPError: 403 Client Error: Forbidden for url: https://huggingface.co/tiiuae/falcon-180B/resolve/main/config.json - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1722, in _get_metadata_or_catch_error - metadata = get_hf_file_metadata(url=url, proxies=proxies, timeout=etag_timeout, headers=headers) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1645, in get_hf_file_metadata - r = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 372, in _request_wrapper - response = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 396, in _request_wrapper - hf_raise_for_status(response) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 367, in hf_raise_for_status - raise HfHubHTTPError(message, response=response) from e -huggingface_hub.utils._errors.HfHubHTTPError: (Request ID: Root=1-66949933-1b77766b705eb5a433c7f9ef;8b47ed0d-68a3-43c9-bc4d-82b9241e1b47) - -403 Forbidden: Please enable access to public gated repositories in your fine-grained token settings to view this repository.. -Cannot access content at: https://huggingface.co/tiiuae/falcon-180B/resolve/main/config.json. -If you are trying to create or update content,make sure you have a token with the `write` role. - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1221, in hf_hub_download - return _hf_hub_download_to_cache_dir( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1325, in _hf_hub_download_to_cache_dir - _raise_on_head_call_error(head_call_error, force_download, local_files_only) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1826, in _raise_on_head_call_error - raise LocalEntryNotFoundError( -huggingface_hub.utils._errors.LocalEntryNotFoundError: An error happened while trying to locate the file on the Hub and we cannot find the requested files in the local cache. Please check your connection and try again or make sure your Internet connection is on. 
- -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 445, in cached_file - raise EnvironmentError( -OSError: We couldn't connect to 'https://huggingface.co' to load this file, couldn't find it in the cached files and it looks like tiiuae/falcon-180B is not the path to a directory containing a file named config.json. -Checkout your internet connection or see how to run the library in offline mode at 'https://huggingface.co/docs/transformers/installation#offline-mode'. 
- -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,1,64,1,,, -4bit-awq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,Deci/DeciLM-7B,Deci/DeciLM-7B,cuda,0,42,,,True,,,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run - report = scenario.run(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run - self.run_model_loading_tracking(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking - backend.load() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load - self.load_transformers_model() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model - self.load_transformers_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights - self.load_transformers_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained - self.pretrained_model = self.automodel_loader.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 559, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3826, in from_pretrained - config = cls._autoset_attn_implementation( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1565, in _autoset_attn_implementation - config = cls._check_and_enable_sdpa( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1731, in _check_and_enable_sdpa - raise ValueError( -ValueError: DeciLMForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. 
Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` - -",deci,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,1,64,1,,, -4bit-awq-exllama-v1-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,facebook/opt-2.7b,facebook/opt-2.7b,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - self.load_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3704, in from_pretrained - config = cls._autoset_attn_implementation( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1490, in _autoset_attn_implementation - config = cls._check_and_enable_sdpa( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1656, in _check_and_enable_sdpa - raise ValueError( -ValueError: OPTForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. 
Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,1,64,1,,, -4bit-awq-exllama-v1-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,huggyllama/llama-7b,huggyllama/llama-7b,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - self.load_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3907, in from_pretrained - hf_quantizer.postprocess_model(model) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/base.py"", line 195, in postprocess_model - return self._process_model_after_weight_loading(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/quantizer_awq.py"", line 107, in _process_model_after_weight_loading - model = post_init_awq_exllama_modules(model, self.quantization_config.exllama_config) - File ""/usr/local/lib/python3.10/dist-packages/transformers/integrations/awq.py"", line 462, in post_init_awq_exllama_modules - model = exllama_post_init(model) - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllama.py"", line 144, in exllama_post_init - submodule.post_init() - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllama.py"", line 77, in post_init - self.q4 = exl_ext.make_q4( -NameError: name 'exl_ext' is not defined - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,1,64,1,,, 
-4bit-awq-exllama-v1-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,facebook/opt-13b,facebook/opt-13b,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - self.load_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3704, in from_pretrained - config = cls._autoset_attn_implementation( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1490, in _autoset_attn_implementation - config = cls._check_and_enable_sdpa( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1656, in _check_and_enable_sdpa - raise ValueError( -ValueError: OPTForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. 
Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,1,64,1,,, -4bit-awq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,EleutherAI/pythia-70m,EleutherAI/pythia-70m,cuda,0,42,,,True,,,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,gpt_neox,MB,876.511232,793.247744,0.0,163.577856,152.009216,s,1,7.22709912109375,7.22709912109375,0.0,7.22709912109375,7.22709912109375,7.22709912109375,7.22709912109375,[7.22709912109375],,kWh,4.8351331527884315e-06,2.6339678041624493e-06,6.498338532001657e-06,1.3967439488952538e-05,,MB,1380.950016,847.773696,0.0,201.326592,184.525824,s,33,0.2212764801979065,0.006705347884785044,6.0815449966103155e-05,0.0067051520347595215,0.00676639347076416,0.006827577590942383,0.006840730876922607,"[0.006711328029632568, 0.0068204479217529295, 0.006672095775604248, 0.00672108793258667, 0.006766240119934082, 0.0066358399391174315, 0.006754079818725586, 0.0067051520347595215, 0.0066130561828613285, 0.006630911827087403, 0.006713119983673096, 0.006640416145324707, 0.0066126399040222165, 0.006724607944488525, 0.006730463981628418, 0.0068418879508972165, 0.006609983921051025, 0.00676035213470459, 0.006725152015686035, 0.0067593598365783695, 0.006690080165863037, 0.00664243221282959, 0.006838272094726563, 0.006698272228240967, 0.006716512203216553, 0.006724607944488525, 0.006640096187591552, 0.00676643180847168, 0.006699840068817139, 0.006672832012176513, 0.006698400020599365, 0.006686048030853271, 0.006654431819915771]",tokens/s,38178.481474597895,kWh,7.861231471454641e-08,4.3067670746987305e-08,3.4505387255355943e-07,4.6673385801509314e-07,tokens/kWh,548492455.8263384,MB,1405.796352,847.773696,0.0,201.326592,184.528384,s,33,10.084137512207032,0.3055799246123342,0.002581748336679956,0.30583187866210937,0.30897452392578123,0.3100307495117187,0.3113549853515625,"[0.3114967041015625, 0.3110538330078125, 0.30520159912109374, 0.3064500122070313, 0.3025860595703125, 0.3042984313964844, 0.30754547119140624, 0.3029530639648437, 0.301517333984375, 0.305915771484375, 0.3028258361816406, 0.30220281982421876, 0.30383837890625, 0.3080930786132812, 0.30302947998046875, 0.3010217590332031, 0.3036041259765625, 0.30617422485351564, 0.30919488525390626, 0.3061013488769531, 0.3049837646484375, 0.3063575439453125, 0.30583187866210937, 0.30799481201171874, 0.3078143005371094, 0.30369378662109375, 0.3059189758300781, 0.3079864501953125, 0.30413653564453125, 0.30934869384765623, 0.3044425048828125, 0.30388568115234377, 0.30663836669921873]",tokens/s,206.16537581754844,kWh,3.563565198817129e-06,1.9526356886190295e-06,5.8105655300523585e-06,1.1326766417488521e-05,tokens/kWh,5562046.366801388,,s,2079,10.071015520095846,0.004844163309329401,0.0001262557046913419,0.004816895961761475,0.00497587194442749,0.005044223785400391,0.005350138168334955,"[0.004722752094268799, 0.004941760063171386, 0.004835328102111816, 0.004844543933868409, 0.0048148479461669925, 0.004874271869659424, 
0.00480457592010498, 0.0047964158058166504, 0.004817920207977295, 0.004806655883789063, 0.004841472148895264, 0.004800511837005615, 0.004801536083221435, 0.004810751914978028, 0.004798463821411133, 0.004904960155487061, 0.004834303855895996, 0.004867072105407715, 0.0048230400085449215, 0.004891647815704346, 0.004851712226867676, 0.004839424133300781, 0.004757503986358643, 0.0047523841857910155, 0.004771840095520019, 0.004754432201385498, 0.004733952045440673, 0.004772863864898682, 0.004810751914978028, 0.004842495918273926, 0.004834303855895996, 0.0048865280151367185, 0.005091328144073487, 0.005067776203155518, 0.005220352172851562, 0.0052008957862854, 0.005339136123657226, 0.005295104026794434, 0.00530841588973999, 0.005370880126953125, 0.005378047943115235, 0.005070847988128662, 0.004968448162078858, 0.005064703941345215, 0.004978687763214112, 0.0050094079971313476, 0.004936704158782959, 0.004934656143188477, 0.004891647815704346, 0.004901887893676758, 0.005037055969238281, 0.00501145601272583, 0.004999167919158935, 0.004964352130889893, 0.004969471931457519, 0.004998144149780274, 0.00502784013748169, 0.005021696090698242, 0.0049428482055664065, 0.004998144149780274, 0.004960288047790527, 0.004876255989074707, 0.0050165758132934574, 0.004741119861602783, 0.004975615978240967, 0.004897791862487793, 0.004800511837005615, 0.004843520164489746, 0.004867072105407715, 0.004984831809997559, 0.005201920032501221, 0.005449728012084961, 0.005306367874145508, 0.00517632007598877, 0.006053887844085694, 0.006146048069000244, 0.006065184116363525, 0.00510972785949707, 0.005021696090698242, 0.0048148479461669925, 0.0048158721923828125, 0.004830207824707031, 0.004843520164489746, 0.004827136039733886, 0.004783103942871094, 0.0047185921669006346, 0.004830207824707031, 0.004817920207977295, 0.0047964158058166504, 0.00481382417678833, 0.004807680130004883, 0.004837376117706299, 0.004838399887084961, 0.004817920207977295, 0.004907008171081543, 0.005079040050506592, 0.005017600059509277, 0.00496127986907959, 0.004956160068511963, 0.004831232070922851, 0.004784128189086914, 0.0047523841857910155, 0.004753407955169678, 0.0047513599395751956, 0.0047513599395751956, 0.004828159809112549, 0.0048158721923828125, 0.0048158721923828125, 0.004944896221160889, 0.004887551784515381, 0.004853759765625, 0.004865024089813232, 0.004824063777923584, 0.004851712226867676, 0.0047329277992248535, 0.00481279993057251, 0.004743167877197266, 0.004729856014251709, 0.0047513599395751956, 0.004857855796813965, 0.004848639965057373, 0.004817920207977295, 0.00481382417678833, 0.00481279993057251, 0.00481382417678833, 0.00486195182800293, 0.004630527973175049, 0.004835328102111816, 0.004829184055328369, 0.004833280086517334, 0.004817920207977295, 0.004903935909271241, 0.004767744064331054, 0.004772928237915039, 0.004798399925231934, 0.004711423873901367, 0.004748288154602051, 0.0049489917755126955, 0.004887551784515381, 0.004910079956054687, 0.004754432201385498, 0.004801536083221435, 0.004772863864898682, 0.0048261117935180665, 0.0048220157623291016, 0.004906015872955322, 0.004980703830718994, 0.0052899842262268066, 0.005064703941345215, 0.004980735778808594, 0.004892672061920166, 0.004749311923980713, 0.004747263908386231, 0.004757503986358643, 0.004787199974060059, 0.0048558077812194825, 0.0047636480331420894, 0.0047523841857910155, 0.004757503986358643, 0.004761600017547608, 0.0047820801734924315, 0.004737023830413818, 0.004760575771331787, 0.0048455681800842285, 0.0047964158058166504, 0.0049192957878112795, 0.0048496642112731934, 
0.004807680130004883, 0.004837376117706299, 0.004835328102111816, 0.00481279993057251, 0.0048230400085449215, 0.004800511837005615, 0.004806655883789063, 0.004842495918273926, 0.0048148479461669925, 0.004810751914978028, 0.004794367790222168, 0.004811776161193848, 0.004883456230163574, 0.004816895961761475, 0.004820991992950439, 0.004761600017547608, 0.0050022401809692385, 0.005054463863372802, 0.005032959938049316, 0.004960256099700928, 0.004744192123413086, 0.0048230400085449215, 0.004595776081085205, 0.00482809591293335, 0.004799488067626953, 0.004736000061035156, 0.004750336170196533, 0.0047820801734924315, 0.004828159809112549, 0.0048261117935180665, 0.004832255840301514, 0.004828159809112549, 0.004825088024139404, 0.005587967872619629, 0.00496230411529541, 0.004819968223571777, 0.004850687980651855, 0.004840447902679444, 0.004803584098815918, 0.004811776161193848, 0.004788224220275879, 0.004851712226867676, 0.004807680130004883, 0.004803584098815918, 0.0048056321144104, 0.004809728145599365, 0.004981760025024414, 0.00516812801361084, 0.005232639789581299, 0.00537395191192627, 0.005610496044158936, 0.005119999885559082, 0.004910079956054687, 0.004787231922149658, 0.004786143779754639, 0.004825088024139404, 0.004809728145599365, 0.004816895961761475, 0.004766719818115234, 0.004739071846008301, 0.004741119861602783, 0.004754432201385498, 0.004775936126708984, 0.004801536083221435, 0.004808767795562744, 0.004823999881744385, 0.004806655883789063, 0.004848639965057373, 0.004832255840301514, 0.0048189439773559575, 0.004677631855010986, 0.004716544151306152, 0.004769792079925537, 0.004761600017547608, 0.004754432201385498, 0.0047626237869262695, 0.004824063777923584, 0.004835328102111816, 0.004839424133300781, 0.004820991992950439, 0.0048158721923828125, 0.004807680130004883, 0.004847616195678711, 0.004853759765625, 0.00485478401184082, 0.004560895919799805, 0.0047513599395751956, 0.004745215892791748, 0.004773888111114502, 0.004776959896087647, 0.004764671802520752, 0.004747263908386231, 0.0047626237869262695, 0.004746240139007568, 0.0048455681800842285, 0.0048148479461669925, 0.0048158721923828125, 0.004803584098815918, 0.0048148479461669925, 0.0048455681800842285, 0.0048056321144104, 0.004802559852600098, 0.0048189439773559575, 0.004806687831878662, 0.004839392185211182, 0.004791296005249023, 0.004807680130004883, 0.004799488067626953, 0.004794367790222168, 0.004843520164489746, 0.004803584098815918, 0.004799488067626953, 0.0048158721923828125, 0.0048855037689208985, 0.004833280086517334, 0.004809760093688965, 0.004798431873321533, 0.004801536083221435, 0.004819968223571777, 0.004832255840301514, 0.0048056321144104, 0.0047288317680358885, 0.004795392036437988, 0.0048230400085449215, 0.004817920207977295, 0.004807680130004883, 0.004799488067626953, 0.004807680130004883, 0.004819007873535156, 0.00481276798248291, 0.004827104091644287, 0.0048056321144104, 0.004761600017547608, 0.004767744064331054, 0.0047513599395751956, 0.004753407955169678, 0.004748288154602051, 0.004766719818115234, 0.004741119861602783, 0.004894720077514648, 0.004803584098815918, 0.00479744005203247, 0.004801536083221435, 0.004791296005249023, 0.004825151920318603, 0.004825024127960205, 0.004832255840301514, 0.004810751914978028, 0.004560895919799805, 0.004807680130004883, 0.004834303855895996, 0.004825088024139404, 0.004795392036437988, 0.0048015999794006345, 0.004821951866149902, 0.004848639965057373, 0.004798463821411133, 0.004802559852600098, 0.0048056321144104, 0.004827136039733886, 0.0048895998001098635, 
0.0048148479461669925, 0.00480460786819458, 0.004761600017547608, 0.004744192123413086, 0.0047964158058166504, 0.004748320102691651, 0.004810719966888428, 0.004810751914978028, 0.004829184055328369, 0.004846591949462891, 0.004840447902679444, 0.004776959896087647, 0.004753407955169678, 0.00480460786819458, 0.004850687980651855, 0.004830207824707031, 0.004806687831878662, 0.004789216041564941, 0.0047626237869262695, 0.0047861762046813965, 0.0047523841857910155, 0.004744192123413086, 0.0047513599395751956, 0.0048056321144104, 0.004781055927276612, 0.004764704227447509, 0.004746208190917969, 0.004748288154602051, 0.004740096092224121, 0.004813888072967529, 0.0047799677848815916, 0.0047851519584655765, 0.0047216639518737795, 0.004736000061035156, 0.0047554559707641605, 0.004759552001953125, 0.004801536083221435, 0.004899839878082276, 0.00487116813659668, 0.005086207866668701, 0.004858880043029785, 0.004880383968353271, 0.005250048160552978, 0.005700607776641845, 0.0049459199905395506, 0.004824063777923584, 0.004790272235870361, 0.004830207824707031, 0.004772863864898682, 0.0048189439773559575, 0.004787199974060059, 0.004973567962646484, 0.004967423915863037, 0.0049725441932678225, 0.00491315221786499, 0.005148672103881836, 0.00501145601272583, 0.005008384227752686, 0.0049909758567810054, 0.004873216152191162, 0.004832255840301514, 0.004809792041778564, 0.004723648071289063, 0.005035007953643799, 0.005132287979125977, 0.005045248031616211, 0.004827136039733886, 0.004825088024139404, 0.0048189439773559575, 0.0048230400085449215, 0.004846591949462891, 0.005028863906860351, 0.004967423915863037, 0.004944896221160889, 0.004998144149780274, 0.00506060791015625, 0.0049387521743774416, 0.0047923197746276855, 0.004773888111114502, 0.004768767833709717, 0.004776959896087647, 0.005344287872314453, 0.004975584030151367, 0.004956160068511963, 0.00480460786819458, 0.004791296005249023, 0.004800511837005615, 0.004848639965057373, 0.004803584098815918, 0.004809728145599365, 0.004827136039733886, 0.004947968006134033, 0.004850687980651855, 0.004757503986358643, 0.004757503986358643, 0.0047523841857910155, 0.004832255840301514, 0.004777984142303467, 0.004783103942871094, 0.00481279993057251, 0.00481382417678833, 0.0048158721923828125, 0.004840447902679444, 0.004799488067626953, 0.004808703899383545, 0.00481382417678833, 0.004829216003417969, 0.004840415954589844, 0.004771840095520019, 0.004749311923980713, 0.0047636480331420894, 0.0047626237869262695, 0.004895743846893311, 0.004534272193908692, 0.004890624046325683, 0.004915200233459473, 0.004809728145599365, 0.004742144107818603, 0.0047923197746276855, 0.004753407955169678, 0.0047626237869262695, 0.004750336170196533, 0.0047523841857910155, 0.0047523841857910155, 0.004941855907440186, 0.004910048007965088, 0.0048220157623291016, 0.0048230400085449215, 0.0048496642112731934, 0.0048230400085449215, 0.004810751914978028, 0.004837376117706299, 0.004830207824707031, 0.004903935909271241, 0.00479744005203247, 0.0047523841857910155, 0.005552127838134766, 0.004938784122467041, 0.004816864013671875, 0.004806655883789063, 0.004830207824707031, 0.0048261117935180665, 0.004850687980651855, 0.00486297607421875, 0.0048158721923828125, 0.004820000171661377, 0.004850656032562256, 0.004789247989654541, 0.004728896141052246, 0.004757440090179444, 0.004748288154602051, 0.004773888111114502, 0.0047626237869262695, 0.00475648021697998, 0.004742144107818603, 0.004753407955169678, 0.004757503986358643, 0.00479744005203247, 0.004757503986358643, 0.004747263908386231, 
0.0047554559707641605, 0.004766719818115234, 0.004798463821411133, 0.004732992172241211, 0.004685760021209717, 0.004754432201385498, 0.004749311923980713, 0.004776959896087647, 0.0047851519584655765, 0.004820991992950439, 0.0047636480331420894, 0.0047626237869262695, 0.004742144107818603, 0.004783103942871094, 0.004749311923980713, 0.004749311923980713, 0.004511744022369385, 0.004750336170196533, 0.004739071846008301, 0.004746240139007568, 0.004774911880493164, 0.004808703899383545, 0.004819968223571777, 0.004810751914978028, 0.004824063777923584, 0.004769792079925537, 0.004748288154602051, 0.004737023830413818, 0.004753407955169678, 0.004746240139007568, 0.004772863864898682, 0.004754432201385498, 0.004733952045440673, 0.004746240139007568, 0.004734975814819336, 0.0048558077812194825, 0.004816895961761475, 0.004825088024139404, 0.004809728145599365, 0.004819968223571777, 0.004840479850769043, 0.0048230400085449215, 0.004828127861022949, 0.004819968223571777, 0.004817920207977295, 0.004857855796813965, 0.004834303855895996, 0.004820991992950439, 0.004820991992950439, 0.004833280086517334, 0.004858880043029785, 0.004817920207977295, 0.004835328102111816, 0.004835328102111816, 0.004767744064331054, 0.004715519905090332, 0.004753407955169678, 0.00475648021697998, 0.004834303855895996, 0.004827136039733886, 0.004806655883789063, 0.004799488067626953, 0.004749311923980713, 0.004738048076629638, 0.004740096092224121, 0.004779007911682129, 0.004727807998657227, 0.0047523841857910155, 0.004757503986358643, 0.004764671802520752, 0.004790272235870361, 0.004759552001953125, 0.004738048076629638, 0.004770815849304199, 0.0047626237869262695, 0.0047523841857910155, 0.004753407955169678, 0.004744192123413086, 0.004737023830413818, 0.004520959854125976, 0.004817920207977295, 0.0048158721923828125, 0.005112832069396973, 0.004967423915863037, 0.0049725441932678225, 0.004994048118591309, 0.0050104641914367675, 0.0053974719047546384, 0.004985856056213379, 0.004956160068511963, 0.005037055969238281, 0.004928512096405029, 0.004825088024139404, 0.004765696048736572, 0.004820991992950439, 0.0048220157623291016, 0.004828159809112549, 0.004788224220275879, 0.004790272235870361, 0.004932608127593994, 0.004747263908386231, 0.004741119861602783, 0.004750336170196533, 0.004746240139007568, 0.004731904029846192, 0.0046919679641723635, 0.0047626237869262695, 0.004737023830413818, 0.004780032157897949, 0.004740096092224121, 0.004824063777923584, 0.0047719039916992185, 0.004841407775878906, 0.004915200233459473, 0.005093376159667969, 0.005070847988128662, 0.004944896221160889, 0.0048148479461669925, 0.004809728145599365, 0.004876287937164306, 0.005030911922454834, 0.004807680130004883, 0.004824063777923584, 0.004843520164489746, 0.004817920207977295, 0.004803584098815918, 0.00479744005203247, 0.00485478401184082, 0.0047861762046813965, 0.004759552001953125, 0.004745215892791748, 0.004745215892791748, 0.004743167877197266, 0.00481279993057251, 0.0048220157623291016, 0.0047933440208435055, 0.004839424133300781, 0.0048056960105895994, 0.004871103763580322, 0.00480460786819458, 0.004836351871490479, 0.004899839878082276, 0.00455679988861084, 0.00480460786819458, 0.004809728145599365, 0.004820991992950439, 0.004827136039733886, 0.004829184055328369, 0.00481279993057251, 0.004842495918273926, 0.0048496642112731934, 0.00491315221786499, 0.0048158721923828125, 0.0048148479461669925, 0.004799488067626953, 0.004894720077514648, 0.004779007911682129, 0.0047513599395751956, 0.004753407955169678, 0.004774911880493164, 
0.004790272235870361, 0.004842495918273926, 0.0047964158058166504, 0.004808703899383545, 0.004839424133300781, 0.004795392036437988, 0.004809728145599365, 0.0048455681800842285, 0.005171199798583984, 0.005017600059509277, 0.004955135822296143, 0.004973567962646484, 0.0048056321144104, 0.004841472148895264, 0.004817920207977295, 0.00481382417678833, 0.004799488067626953, 0.004746240139007568, 0.004837376117706299, 0.004746240139007568, 0.004742144107818603, 0.004737023830413818, 0.004777984142303467, 0.004776959896087647, 0.004745215892791748, 0.004759552001953125, 0.004742144107818603, 0.004740096092224121, 0.004777984142303467, 0.004741119861602783, 0.004758528232574463, 0.004753407955169678, 0.004757503986358643, 0.004783103942871094, 0.004768767833709717, 0.004750336170196533, 0.004738048076629638, 0.0047636480331420894, 0.004747263908386231, 0.004784128189086914, 0.004765696048736572, 0.0047513599395751956, 0.004754432201385498, 0.004745215892791748, 0.004775936126708984, 0.004588543891906738, 0.004741119861602783, 0.0047626237869262695, 0.0047851519584655765, 0.004730879783630371, 0.004747263908386231, 0.004798463821411133, 0.004743167877197266, 0.004761600017547608, 0.004750336170196533, 0.004749311923980713, 0.004783103942871094, 0.004745215892791748, 0.004760575771331787, 0.004769792079925537, 0.0047636480331420894, 0.00487116813659668, 0.0047626237869262695, 0.0047513599395751956, 0.004749311923980713, 0.004720640182495117, 0.004779007911682129, 0.004754432201385498, 0.004747263908386231, 0.0047636480331420894, 0.004733952045440673, 0.004798463821411133, 0.004787199974060059, 0.00475648021697998, 0.004824063777923584, 0.004839424133300781, 0.004808703899383545, 0.004841472148895264, 0.0048189439773559575, 0.004800511837005615, 0.004817920207977295, 0.004828159809112549, 0.0048261117935180665, 0.004816895961761475, 0.0048158721923828125, 0.0048455681800842285, 0.004833280086517334, 0.004829184055328369, 0.004833280086517334, 0.004816895961761475, 0.004824063777923584, 0.004843520164489746, 0.004819968223571777, 0.004868095874786377, 0.0048230400085449215, 0.004832255840301514, 0.004863999843597412, 0.004816895961761475, 0.004788224220275879, 0.004726784229278564, 0.00480460786819458, 0.00486297607421875, 0.00480460786819458, 0.0048220157623291016, 0.0048230400085449215, 0.00481382417678833, 0.004775936126708984, 0.004753407955169678, 0.004570112228393554, 0.004819968223571777, 0.004894720077514648, 0.00481279993057251, 0.004838399887084961, 0.00481279993057251, 0.004803584098815918, 0.0047933440208435055, 0.0047513599395751956, 0.0047923197746276855, 0.004761600017547608, 0.004757503986358643, 0.004754464149475098, 0.00475542402267456, 0.004921343803405762, 0.005062655925750732, 0.004869120121002197, 0.004843520164489746, 0.004820991992950439, 0.004876287937164306, 0.00480460786819458, 0.004799551963806152, 0.004825024127960205, 0.004832255840301514, 0.004784128189086914, 0.004749375820159912, 0.004745151996612549, 0.004761600017547608, 0.004754432201385498, 0.004794367790222168, 0.004806655883789063, 0.004809728145599365, 0.004798463821411133, 0.004781055927276612, 0.004784128189086914, 0.004743167877197266, 0.0047923197746276855, 0.004766719818115234, 0.004760575771331787, 0.0047933759689331055, 0.004862944126129151, 0.004770815849304199, 0.004783103942871094, 0.004743167877197266, 0.004766719818115234, 0.004748288154602051, 0.004742144107818603, 0.004743167877197266, 0.004739071846008301, 0.0048568320274353025, 0.004759552001953125, 0.0048558077812194825, 0.004843520164489746, 
0.004803584098815918, 0.004921343803405762, 0.004893695831298828, 0.0048158721923828125, 0.0051066880226135255, 0.005183487892150879, 0.004984831809997559, 0.004834303855895996, 0.0047964158058166504, 0.004860928058624267, 0.004622367858886719, 0.004974559783935547, 0.004817920207977295, 0.004790272235870361, 0.004807680130004883, 0.0048056321144104, 0.004810751914978028, 0.0047636480331420894, 0.0047513599395751956, 0.004753407955169678, 0.004739071846008301, 0.004772863864898682, 0.00475648021697998, 0.004741119861602783, 0.004746240139007568, 0.004746240139007568, 0.004935679912567138, 0.004921343803405762, 0.004840447902679444, 0.004839424133300781, 0.00491212797164917, 0.004917247772216797, 0.004831232070922851, 0.005248000144958496, 0.00553984022140503, 0.005087232112884522, 0.00497049617767334, 0.0049725441932678225, 0.004882431983947754, 0.004723711967468262, 0.0048496642112731934, 0.004834303855895996, 0.0048455681800842285, 0.004881408214569092, 0.004843520164489746, 0.004801536083221435, 0.004817920207977295, 0.0048230400085449215, 0.004851712226867676, 0.004765696048736572, 0.0050022401809692385, 0.004910079956054687, 0.004860928058624267, 0.0048261117935180665, 0.004811808109283448, 0.004810719966888428, 0.004800511837005615, 0.004833280086517334, 0.004819968223571777, 0.0048148479461669925, 0.004878335952758789, 0.004808703899383545, 0.004974592208862305, 0.005188608169555664, 0.00545692777633667, 0.005017568111419677, 0.004998144149780274, 0.004980735778808594, 0.004975679874420166, 0.005012415885925293, 0.004966400146484375, 0.00481279993057251, 0.00480460786819458, 0.004571135997772217, 0.004759552001953125, 0.004745215892791748, 0.004795392036437988, 0.0047513599395751956, 0.004757503986358643, 0.004733952045440673, 0.004764671802520752, 0.004830207824707031, 0.004800511837005615, 0.004819968223571777, 0.0048158721923828125, 0.004859903812408447, 0.004847616195678711, 0.0048189439773559575, 0.004786240100860596, 0.004735936164855957, 0.004802559852600098, 0.004848639965057373, 0.004835328102111816, 0.00481279993057251, 0.004830207824707031, 0.004810751914978028, 0.004858880043029785, 0.004827136039733886, 0.0048230400085449215, 0.004816895961761475, 0.004832255840301514, 0.0048261117935180665, 0.004808703899383545, 0.004825088024139404, 0.004810751914978028, 0.004841472148895264, 0.004817920207977295, 0.004801536083221435, 0.0048148479461669925, 0.004833280086517334, 0.004839424133300781, 0.004819968223571777, 0.004828159809112549, 0.0048158721923828125, 0.0047626237869262695, 0.004799488067626953, 0.0047523841857910155, 0.004776959896087647, 0.004761600017547608, 0.004773888111114502, 0.004830207824707031, 0.004771840095520019, 0.004764671802520752, 0.004800511837005615, 0.0047626237869262695, 0.0047964158058166504, 0.004760575771331787, 0.004753407955169678, 0.004767744064331054, 0.004744192123413086, 0.004966400146484375, 0.004969471931457519, 0.0049500160217285155, 0.004859903812408447, 0.004746240139007568, 0.004777984142303467, 0.004583424091339112, 0.00481279993057251, 0.004872288227081299, 0.004758431911468506, 0.0047554559707641605, 0.004867072105407715, 0.00475648021697998, 0.004746240139007568, 0.004791296005249023, 0.0048793601989746095, 0.004787199974060059, 0.00475648021697998, 0.004753407955169678, 0.004750336170196533, 0.004772863864898682, 0.004877312183380127, 0.0048158721923828125, 0.004801536083221435, 0.004790272235870361, 0.004753407955169678, 0.004776959896087647, 0.004748288154602051, 0.004745215892791748, 0.00475648021697998, 
0.0047523841857910155, 0.004744224071502686, 0.004786143779754639, 0.004769792079925537, 0.0047523841857910155, 0.004759552001953125, 0.004758528232574463, 0.004810751914978028, 0.004759552001953125, 0.004767744064331054, 0.00479744005203247, 0.004827136039733886, 0.004810751914978028, 0.004745215892791748, 0.004816895961761475, 0.004759552001953125, 0.004884479999542236, 0.004918272018432617, 0.004843520164489746, 0.004747263908386231, 0.004747263908386231, 0.004745215892791748, 0.004779007911682129, 0.004726784229278564, 0.0047523841857910155, 0.004729856014251709, 0.0047523841857910155, 0.004758528232574463, 0.004765696048736572, 0.004733952045440673, 0.004746240139007568, 0.004711423873901367, 0.004654079914093018, 0.004766719818115234, 0.0047329277992248535, 0.0047513599395751956, 0.004753407955169678, 0.004731904029846192, 0.0047820801734924315, 0.004541440010070801, 0.004835328102111816, 0.004780032157897949, 0.004774943828582763, 0.004726751804351807, 0.004734975814819336, 0.004753407955169678, 0.004744192123413086, 0.004771840095520019, 0.0048158721923828125, 0.0047933440208435055, 0.004784128189086914, 0.004745215892791748, 0.005083136081695557, 0.005111807823181152, 0.0050421757698059086, 0.0048230400085449215, 0.004817920207977295, 0.0048056321144104, 0.004829184055328369, 0.004828159809112549, 0.004808703899383545, 0.0048056321144104, 0.004799488067626953, 0.004811776161193848, 0.004839424133300781, 0.004791327953338623, 0.0048557758331298825, 0.004799488067626953, 0.00480460786819458, 0.0048855037689208985, 0.00481279993057251, 0.004803584098815918, 0.00481279993057251, 0.0047861762046813965, 0.004842495918273926, 0.0048148479461669925, 0.004816895961761475, 0.004761600017547608, 0.004738048076629638, 0.004789247989654541, 0.004758528232574463, 0.004760575771331787, 0.0047329277992248535, 0.0046561279296875, 0.004767744064331054, 0.004737023830413818, 0.004739071846008301, 0.004774911880493164, 0.004806655883789063, 0.004825088024139404, 0.004810751914978028, 0.004810751914978028, 0.0047820801734924315, 0.004748288154602051, 0.005090303897857666, 0.005029888153076172, 0.004937727928161621, 0.0048220157623291016, 0.0048455681800842285, 0.004830207824707031, 0.004816895961761475, 0.0048056321144104, 0.004554751873016357, 0.004836351871490479, 0.004816895961761475, 0.0048220157623291016, 0.004725823879241944, 0.004747200012207031, 0.0049827837944030765, 0.005197824001312256, 0.004881408214569092, 0.004872191905975342, 0.00481382417678833, 0.004808703899383545, 0.004839424133300781, 0.004810751914978028, 0.004800511837005615, 0.004809728145599365, 0.004806655883789063, 0.005258304119110107, 0.004974527835845947, 0.004977663993835449, 0.004984831809997559, 0.005071872234344482, 0.004904960155487061, 0.004820064067840576, 0.004818848133087158, 0.004960256099700928, 0.005028863906860351, 0.004905983924865722, 0.00481382417678833, 0.004853759765625, 0.004802559852600098, 0.0047216639518737795, 0.004744192123413086, 0.004764671802520752, 0.004779007911682129, 0.004757503986358643, 0.004745215892791748, 0.0050063362121582035, 0.004868095874786377, 0.004795392036437988, 0.004761600017547608, 0.004743167877197266, 0.004847616195678711, 0.004770815849304199, 0.004767744064331054, 0.00480460786819458, 0.004811776161193848, 0.004969471931457519, 0.004867072105407715, 0.004865024089813232, 0.004835328102111816, 0.0048230400085449215, 0.004835328102111816, 0.004874239921569825, 0.0048568320274353025, 0.0048056321144104, 0.0047964158058166504, 0.0047933440208435055, 0.0048056321144104, 
0.004994048118591309, 0.004851712226867676, 0.00487014389038086, 0.004941823959350586, 0.004717567920684815, 0.0049909758567810054, 0.004890624046325683, 0.004947968006134033, 0.004959231853485108, 0.00487014389038086, 0.0048220157623291016, 0.004798495769500732, 0.004802527904510498, 0.0048015999794006345, 0.0048516798019409176, 0.004963295936584473, 0.004945951938629151, 0.004795360088348389, 0.004798463821411133, 0.0048261117935180665, 0.004839424133300781, 0.0047923197746276855, 0.00481382417678833, 0.004963327884674072, 0.0049725441932678225, 0.004900864124298096, 0.004806655883789063, 0.004867072105407715, 0.004893695831298828, 0.004834303855895996, 0.004867072105407715, 0.004890624046325683, 0.0049489917755126955, 0.00502784013748169, 0.0048895998001098635, 0.004794367790222168, 0.004745215892791748, 0.004890624046325683, 0.0048793601989746095, 0.004926464080810547, 0.0049725441932678225, 0.004987904071807861, 0.004997119903564453, 0.004993023872375488, 0.004980735778808594, 0.005054463863372802, 0.005054463863372802, 0.005078015804290771, 0.005434368133544922, 0.005053440093994141, 0.005004288196563721, 0.0049530878067016604, 0.00501145601272583, 0.004914175987243652, 0.004829184055328369, 0.004808703899383545, 0.0048220157623291016, 0.0048056321144104, 0.004937727928161621, 0.0047626237869262695, 0.004825088024139404, 0.004772863864898682, 0.004881408214569092, 0.004873216152191162, 0.004869120121002197, 0.004874239921569825, 0.004904960155487061, 0.004679679870605469, 0.004836351871490479, 0.0048056321144104, 0.004816895961761475, 0.004797599792480469, 0.004715392112731934, 0.00480457592010498, 0.00481382417678833, 0.004799520015716553, 0.004815839767456055, 0.0047851519584655765, 0.004834303855895996, 0.004810751914978028, 0.004810751914978028, 0.004795392036437988, 0.004802559852600098, 0.004859903812408447, 0.00496127986907959, 0.004956160068511963, 0.0049530878067016604, 0.004996096134185791, 0.004963327884674072, 0.004985856056213379, 0.004880383968353271, 0.004810751914978028, 0.004825088024139404, 0.0048056321144104, 0.00484764814376831, 0.004813792228698731, 0.004824063777923584, 0.004884479999542236, 0.0048230400085449215, 0.005083136081695557, 0.005155839920043945, 0.005033984184265137, 0.004827136039733886, 0.004831232070922851, 0.004869120121002197, 0.004998144149780274, 0.004899839878082276, 0.004880383968353271, 0.004947968006134033, 0.004963327884674072, 0.004956160068511963, 0.004816895961761475, 0.004833280086517334, 0.004807680130004883, 0.0048527359962463375, 0.00481279993057251, 0.004819968223571777, 0.004809728145599365, 0.004810751914978028, 0.004847616195678711, 0.004800511837005615, 0.004811776161193848, 0.004771840095520019, 0.004748288154602051, 0.0047185921669006346, 0.004799488067626953, 0.004788224220275879, 0.004798495769500732, 0.004827104091644287, 0.004875328063964844, 0.0046254081726074215, 0.004841472148895264, 0.00485478401184082, 0.004846591949462891, 0.00487116813659668, 0.004835328102111816, 0.004817920207977295, 0.0048158721923828125, 0.0048056321144104, 0.00479744005203247, 0.004831232070922851, 0.00481279993057251, 0.004829184055328369, 0.004848639965057373, 0.004829216003417969, 0.004864992141723633, 0.004848639965057373, 0.004832255840301514, 0.004901887893676758, 0.005020736217498779, 0.004946879863739014, 0.0048752641677856446, 0.004798463821411133, 0.0048158721923828125, 0.004850687980651855, 0.004817920207977295, 0.004809728145599365, 0.004816895961761475, 0.004817920207977295, 0.004840447902679444, 0.0048261117935180665, 
0.004930560111999512, 0.0047933759689331055, 0.004863967895507812, 0.004759552001953125, 0.0048220157623291016, 0.004888576030731201, 0.004828159809112549, 0.004888576030731201, 0.004975615978240967, 0.00487014389038086, 0.004859903812408447, 0.004758528232574463, 0.004759552001953125, 0.004661248207092285, 0.004741119861602783, 0.004753407955169678, 0.004754432201385498, 0.004770815849304199, 0.004767744064331054, 0.0048230400085449215, 0.004873216152191162, 0.00485478401184082, 0.0047933440208435055, 0.0047861762046813965, 0.004881408214569092, 0.004937727928161621, 0.004976640224456787, 0.004905983924865722, 0.004795392036437988, 0.004842495918273926, 0.00481279993057251, 0.004904960155487061, 0.004578303813934326, 0.00487116813659668, 0.004832255840301514, 0.0048230400085449215, 0.004758528232574463, 0.004768767833709717, 0.004800511837005615, 0.00475648021697998, 0.004759552001953125, 0.004761600017547608, 0.0047554559707641605, 0.004798463821411133, 0.004777984142303467, 0.004744192123413086, 0.00481382417678833, 0.004774911880493164, 0.004808767795562744, 0.0047738242149353025, 0.004765696048736572, 0.0047820801734924315, 0.0047513599395751956, 0.004770815849304199, 0.004744192123413086, 0.004760575771331787, 0.0047626237869262695, 0.00480460786819458, 0.004848639965057373, 0.004830207824707031, 0.0048455681800842285, 0.004859903812408447, 0.004838399887084961, 0.004996096134185791, 0.004894720077514648, 0.004981760025024414, 0.004971519947052002, 0.004882431983947754, 0.004902912139892578, 0.004878335952758789, 0.004767744064331054, 0.004874239921569825, 0.004911104202270508, 0.0048895998001098635, 0.00487116813659668, 0.004868095874786377, 0.004784128189086914, 0.004811776161193848, 0.00475648021697998, 0.004769792079925537, 0.004819007873535156, 0.004904895782470703, 0.004865024089813232, 0.004742176055908203, 0.00482096004486084, 0.004876287937164306, 0.005015552043914795, 0.0049797120094299315, 0.005005311965942383, 0.005193727970123291, 0.00530841588973999, 0.005014527797698975, 0.00496230411529541, 0.005001215934753418, 0.005280767917633057, 0.00475648021697998, 0.005001215934753418, 0.004904960155487061, 0.004921343803405762, 0.004883456230163574, 0.0047636480331420894, 0.004767744064331054, 0.004780032157897949, 0.0047851519584655765, 0.004801536083221435, 0.004773888111114502, 0.0047523841857910155, 0.004767744064331054, 0.004749311923980713, 0.004838399887084961, 0.004744192123413086, 0.0048455681800842285, 0.004794367790222168, 0.004702208042144776, 0.004696063995361328, 0.004748288154602051, 0.004830207824707031, 0.004941823959350586, 0.004946944236755371, 0.0049162240028381345, 0.004874239921569825, 0.004801536083221435, 0.0047636480331420894, 0.0047933440208435055, 0.004758528232574463, 0.004789247989654541, 0.004958208084106445, 0.004975647926330567, 0.005066720008850098, 0.004944896221160889, 0.004927487850189209, 0.004951039791107178, 0.0048865280151367185, 0.0048189439773559575, 0.004799488067626953, 0.004819968223571777, 0.004806655883789063, 0.004801536083221435, 0.00475648021697998, 0.004753407955169678, 0.004736000061035156, 0.004753407955169678, 0.004803584098815918, 0.004769792079925537, 0.004776959896087647, 0.004753407955169678, 0.004931583881378174, 0.005129216194152832, 0.005061632156372071, 0.0049500479698181155, 0.004860896110534668, 0.00487014389038086, 0.004846591949462891, 0.004978687763214112, 0.004882463932037354, 0.0048783040046691895, 0.004859903812408447, 0.005116960048675537, 0.004555776119232178, 0.0047923197746276855, 0.005064703941345215, 
0.004987904071807861, 0.004947968006134033, 0.0049090561866760255, 0.004908031940460205, 0.0050165758132934574, 0.004994048118591309, 0.00499507188796997, 0.004963327884674072, 0.005017600059509277, 0.00496230411529541, 0.0049725441932678225, 0.004974592208862305, 0.004932608127593994, 0.004958208084106445, 0.0050032639503479, 0.004937727928161621, 0.005005311965942383, 0.004974592208862305, 0.004921343803405762, 0.004834303855895996, 0.0048148479461669925, 0.004848639965057373, 0.004807680130004883, 0.004888576030731201, 0.004873216152191162, 0.004950079917907715, 0.0049714560508728025, 0.0049797120094299315, 0.0050165758132934574, 0.004964352130889893, 0.005101568222045898, 0.004911104202270508, 0.004838399887084961, 0.004807680130004883, 0.004791296005249023, 0.004772863864898682, 0.004775936126708984, 0.004754432201385498, 0.004747263908386231, 0.004747263908386231, 0.004828159809112549, 0.0047964158058166504, 0.004801536083221435, 0.004808703899383545, 0.0048189439773559575, 0.0048855037689208985, 0.004865024089813232, 0.004893695831298828, 0.0048455681800842285, 0.004899839878082276, 0.004880383968353271, 0.0048865280151367185, 0.004811776161193848, 0.0047820801734924315, 0.004742144107818603, 0.0047861762046813965, 0.0047923197746276855, 0.0047964158058166504, 0.004795392036437988, 0.0048865280151367185, 0.004767744064331054, 0.004933631896972656, 0.004794367790222168, 0.004884479999542236, 0.004958208084106445, 0.004863999843597412, 0.004851712226867676, 0.004858943939208984, 0.004862912178039551, 0.004931583881378174, 0.004851712226867676, 0.004821023941040039, 0.00475542402267456, 0.004739071846008301, 0.0047944002151489255, 0.00481993579864502, 0.004806655883789063, 0.0048220157623291016, 0.0048158721923828125, 0.004848639965057373, 0.0048230400085449215, 0.004957183837890625, 0.00506982421875, 0.0049530878067016604, 0.004881408214569092, 0.004897791862487793, 0.00491212797164917, 0.004992000102996826, 0.0052367358207702636, 0.0049725441932678225, 0.004977663993835449, 0.005064703941345215, 0.004899839878082276, 0.004907008171081543, 0.0047923197746276855, 0.0048148479461669925, 0.004858880043029785, 0.004798463821411133, 0.004765696048736572, 0.004742144107818603, 0.00480460786819458, 0.004905983924865722, 0.004896768093109131, 0.004744192123413086, 0.004747263908386231, 0.004808735847473144, 0.004877280235290527, 0.004810751914978028, 0.004857855796813965, 0.004899839878082276, 0.004933631896972656, 0.004980735778808594, 0.005029888153076172, 0.005022719860076904, 0.0050094079971313476, 0.004917247772216797, 0.004829184055328369, 0.00481382417678833, 0.0048189439773559575, 0.004907008171081543, 0.004951039791107178, 0.004930560111999512, 0.0048230400085449215, 0.004578303813934326, 0.004838399887084961, 0.004926464080810547, 0.0049797120094299315, 0.004981760025024414, 0.004935679912567138, 0.005014527797698975, 0.004966400146484375, 0.0048558077812194825, 0.0047626237869262695, 0.0047513599395751956, 0.004775936126708984, 0.004760575771331787, 0.004757503986358643, 0.004754432201385498, 0.004769792079925537, 0.004767744064331054, 0.004742144107818603, 0.004733952045440673, 0.004739071846008301, 0.004761600017547608, 0.004783103942871094, 0.004749311923980713, 0.004857855796813965, 0.004842559814453125, 0.004868031978607178, 0.004901887893676758, 0.004884479999542236, 0.004859903812408447, 0.004834303855895996, 0.004760575771331787, 0.004701183795928955, 0.004806655883789063, 0.004873216152191162, 0.004846591949462891, 0.004836351871490479, 0.004788224220275879, 
0.004806655883789063, 0.004746240139007568, 0.004733952045440673, 0.004747263908386231, 0.004775936126708984, 0.004749311923980713, 0.004741119861602783, 0.004769792079925537, 0.004750336170196533, 0.0048527359962463375, 0.004857855796813965, 0.004860928058624267, 0.004857855796813965, 0.004872191905975342, 0.004837376117706299, 0.0048455681800842285, 0.0047923197746276855, 0.004811776161193848, 0.0048230400085449215, 0.004846591949462891, 0.004795392036437988, 0.0048148479461669925, 0.004807680130004883, 0.004827136039733886, 0.004807680130004883, 0.004829184055328369, 0.004593664169311523, 0.004857855796813965, 0.0049827837944030765, 0.004836351871490479, 0.004881408214569092, 0.004893695831298828, 0.005306367874145508, 0.005251071929931641, 0.004992000102996826, 0.0048230400085449215, 0.004830207824707031, 0.004844543933868409, 0.004867072105407715, 0.004828159809112549, 0.004775936126708984, 0.004802559852600098, 0.004776959896087647, 0.004911104202270508, 0.004774911880493164, 0.00481279993057251, 0.0047861762046813965, 0.0048241281509399415, 0.004939712047576904, 0.004869120121002197, 0.004834303855895996, 0.004899839878082276, 0.004896768093109131, 0.004882431983947754, 0.00481279993057251, 0.005088255882263183, 0.004891647815704346, 0.004848639965057373, 0.004820032119750977, 0.004751296043395996, 0.004748288154602051, 0.004770815849304199, 0.004775936126708984, 0.00475648021697998, 0.0047626237869262695, 0.004803647994995117, 0.004952000141143799, 0.0050206718444824215, 0.00495411205291748, 0.004884479999542236, 0.00497049617767334, 0.0050360321998596195, 0.004881408214569092, 0.004921343803405762, 0.004757503986358643, 0.004773888111114502, 0.004768767833709717, 0.004760575771331787, 0.004744192123413086, 0.004760575771331787, 0.004835328102111816, 0.004800511837005615, 0.004745215892791748, 0.0047523841857910155, 0.0047636799812316895, 0.004723680019378662, 0.004766719818115234, 0.004742144107818603, 0.004766719818115234, 0.00464896011352539, 0.004877312183380127, 0.005795839786529541, 0.0051036162376403805, 0.0048752641677856446, 0.0047923197746276855, 0.004773888111114502, 0.004798463821411133, 0.004910079956054687, 0.004903935909271241, 0.004955135822296143, 0.00506982421875, 0.004971519947052002, 0.004884479999542236, 0.004807680130004883, 0.004841504096984864, 0.00480457592010498, 0.004809728145599365, 0.004827136039733886, 0.0049909758567810054, 0.00501964807510376, 0.004985856056213379, 0.005044223785400391, 0.004943871974945068, 0.004998144149780274, 0.004935679912567138, 0.00487116813659668, 0.0048056321144104, 0.004833280086517334, 0.0048189439773559575, 0.004795392036437988, 0.00481279993057251, 0.004816895961761475, 0.004839424133300781, 0.0047964158058166504, 0.004794367790222168, 0.0048158721923828125, 0.004811776161193848, 0.004825088024139404, 0.004947968006134033, 0.0050462718009948735, 0.005007359981536865, 0.004974592208862305, 0.004958208084106445, 0.0048220157623291016, 0.004850687980651855, 0.0048158721923828125, 0.004848639965057373, 0.0048189439773559575, 0.004832255840301514, 0.004882431983947754, 0.004863999843597412, 0.004839424133300781, 0.004874239921569825, 0.004834303855895996, 0.004841472148895264, 0.004832255840301514, 0.004743167877197266, 0.004795392036437988, 0.004820991992950439, 0.0047851519584655765, 0.0047820801734924315, 0.004772863864898682, 0.0047513599395751956, 0.004860928058624267, 0.004729856014251709, 0.004768767833709717, 0.004760575771331787, 0.004776959896087647, 0.004750336170196533, 0.004820991992950439, 0.005000192165374756, 
0.004773888111114502, 0.004748288154602051, 0.00486195182800293, 0.0048189439773559575, 0.004773888111114502, 0.00505241584777832, 0.004851712226867676, 0.004890624046325683, 0.005149695873260498, 0.004894720077514648, 0.004832255840301514, 0.004859903812408447, 0.004779007911682129, 0.004765696048736572, 0.004770815849304199, 0.004775936126708984, 0.0047636480331420894, 0.004743167877197266, 0.004758528232574463, 0.004829184055328369, 0.004842495918273926, 0.004819968223571777, 0.0048158721923828125, 0.004800511837005615, 0.004843520164489746, 0.004824063777923584, 0.004810751914978028, 0.004830207824707031, 0.004973567962646484, 0.0048568320274353025, 0.0048230400085449215, 0.004829184055328369, 0.00486195182800293, 0.004770815849304199, 0.004788224220275879, 0.004707327842712402, 0.0047226881980895995, 0.004803584098815918, 0.00480460786819458, 0.004837376117706299, 0.00481279993057251, 0.00481279993057251, 0.00480460786819458, 0.004824063777923584, 0.0048261117935180665, 0.00481382417678833, 0.004824063777923584, 0.004808703899383545, 0.00481279993057251, 0.004847616195678711, 0.004824063777923584, 0.004803584098815918, 0.00480460786819458, 0.00481382417678833, 0.004568064212799072, 0.004828159809112549, 0.0048056321144104, 0.004807680130004883, 0.00479744005203247, 0.004816895961761475, 0.004840447902679444, 0.004795392036437988, 0.00475648021697998, 0.0047513599395751956, 0.004747263908386231, 0.0048056321144104, 0.0047606081962585445, 0.004767712116241455, 0.004750336170196533, 0.004746240139007568, 0.0047861762046813965, 0.004761631965637207, 0.004765664100646973, 0.004746240139007568, 0.004743167877197266, 0.004831232070922851, 0.004847616195678711, 0.004816895961761475, 0.004824063777923584, 0.005468160152435303, 0.0053309440612792965, 0.0051701760292053225, 0.00538316822052002, 0.005431295871734619, 0.005585919857025146, 0.004996096134185791, 0.004941823959350586, 0.00495411205291748, 0.005093376159667969, 0.00496127986907959, 0.005018623828887939, 0.004975615978240967, 0.005108736038208008, 0.005078015804290771, 0.005044223785400391, 0.005029888153076172, 0.0049090561866760255, 0.0048220157623291016, 0.004840447902679444, 0.00481279993057251, 0.004806655883789063, 0.004810751914978028, 0.004795392036437988, 0.004843520164489746, 0.004824063777923584, 0.004806655883789063, 0.0048148479461669925, 0.005268479824066162, 0.0050206718444824215, 0.004803584098815918, 0.004819968223571777, 0.004810751914978028, 0.004850687980651855, 0.004811776161193848, 0.004799488067626953, 0.004799488067626953, 0.004816895961761475, 0.004555776119232178, 0.004841472148895264, 0.0048056321144104, 0.00480460786819458, 0.004811840057373047, 0.004805568218231201, 0.004836351871490479, 0.004794367790222168, 0.0047636480331420894, 0.004742144107818603, 0.004740096092224121, 0.004775936126708984, 0.004736000061035156, 0.004747263908386231, 0.004740096092224121, 0.004766719818115234, 0.004817920207977295, 0.004829184055328369, 0.0047636480331420894, 0.004739071846008301, 0.004817920207977295, 0.004834303855895996, 0.004833280086517334, 0.004835328102111816, 0.004847616195678711, 0.004829184055328369, 0.004840447902679444, 0.004831232070922851, 0.0048220157623291016, 0.004832255840301514, 0.0048261117935180665, 0.004891647815704346, 0.004808703899383545, 0.004840447902679444, 0.004802559852600098, 0.004811776161193848, 0.004895743846893311, 0.004899839878082276, 0.004833280086517334, 0.00481382417678833, 0.004863999843597412, 0.0048189439773559575, 0.004811776161193848, 0.004806687831878662, 
0.00480457592010498, 0.004824063777923584, 0.004840447902679444, 0.004808703899383545, 0.004816895961761475, 0.004817920207977295, 0.00486297607421875, 0.004829184055328369, 0.0048261117935180665, 0.00481382417678833, 0.0048220157623291016, 0.004848639965057373, 0.004834303855895996, 0.0048148479461669925, 0.005654528141021729, 0.004932608127593994, 0.00481279993057251, 0.004806655883789063, 0.00481382417678833, 0.004558847904205322, 0.0048220157623291016, 0.004851712226867676, 0.004802559852600098, 0.0048158721923828125, 0.004794367790222168, 0.0047329277992248535, 0.004833280086517334, 0.004787199974060059, 0.004819968223571777, 0.00481279993057251, 0.004799488067626953, 0.0048496642112731934, 0.004803584098815918, 0.004808735847473144, 0.00481993579864502, 0.004818975925445557, 0.004794335842132568, 0.004730879783630371, 0.004768767833709717, 0.004741119861602783, 0.0047554559707641605, 0.004777984142303467, 0.0047851519584655765, 0.004806655883789063, 0.0048261117935180665, 0.00481279993057251, 0.0048496642112731934, 0.005018623828887939, 0.004931583881378174, 0.004832255840301514, 0.004895743846893311, 0.004838399887084961, 0.004817920207977295, 0.004806655883789063, 0.004809728145599365, 0.004835328102111816, 0.004806655883789063, 0.004811776161193848, 0.0048752641677856446, 0.004941823959350586, 0.005190656185150146, 0.004832255840301514, 0.004744192123413086, 0.0047565121650695805, 0.004738016128540039, 0.004780032157897949, 0.004761600017547608, 0.0047523841857910155, 0.004736000061035156, 0.004750336170196533, 0.00479744005203247, 0.004810751914978028, 0.0047964158058166504, 0.004860928058624267, 0.004952064037322998, 0.005031936168670655, 0.004859903812408447, 0.004731904029846192, 0.004766719818115234, 0.0048220157623291016, 0.004816895961761475, 0.004806655883789063, 0.004559872150421143, 0.004816895961761475, 0.00481279993057251, 0.004820991992950439, 0.004783103942871094, 0.004749311923980713, 0.004768767833709717, 0.004745215892791748, 0.004748288154602051, 0.0048855037689208985, 0.004841472148895264, 0.004994048118591309, 0.004863999843597412, 0.0048537921905517575, 0.004745183944702148, 0.004761600017547608, 0.004753407955169678, 0.004784128189086914, 0.004802591800689697, 0.004856800079345703, 0.004825088024139404, 0.004807680130004883, 0.004802559852600098, 0.004835328102111816, 0.004835328102111816, 0.004820991992950439, 0.00481382417678833, 0.004811776161193848, 0.004834303855895996, 0.0048220157623291016, 0.004824063777923584, 0.004803584098815918, 0.0048189439773559575, 0.004840447902679444, 0.004807680130004883, 0.004809728145599365, 0.00481279993057251, 0.004809728145599365, 0.004832255840301514, 0.004819968223571777, 0.004828159809112549, 0.004958240032196045, 0.005047264099121093, 0.005083136081695557, 0.004880383968353271, 0.004883456230163574, 0.005066751956939697, 0.005107711791992187, 0.0049459199905395506, 0.0048558077812194825, 0.004836351871490479, 0.0048558077812194825, 0.004829184055328369, 0.0049725441932678225, 0.004878335952758789, 0.005185535907745361, 0.00497049617767334, 0.0050841598510742185, 0.004965375900268554, 0.005005311965942383, 0.004960256099700928, 0.004901887893676758, 0.0048056321144104]",tokens/s,206.4339982250587,,,1,64,1,,, 
-4bit-awq-exllama-v1-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,meta-llama/Llama-2-13b-hf,meta-llama/Llama-2-13b-hf,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - self.load_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3907, in from_pretrained - hf_quantizer.postprocess_model(model) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/base.py"", line 195, in postprocess_model - return self._process_model_after_weight_loading(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/quantizer_awq.py"", line 107, in _process_model_after_weight_loading - model = post_init_awq_exllama_modules(model, self.quantization_config.exllama_config) - File ""/usr/local/lib/python3.10/dist-packages/transformers/integrations/awq.py"", line 462, in post_init_awq_exllama_modules - model = exllama_post_init(model) - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllama.py"", line 144, in exllama_post_init - submodule.post_init() - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllama.py"", line 77, in post_init - self.q4 = exl_ext.make_q4( -NameError: name 'exl_ext' is not defined - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,1,64,1,,, 
-4bit-awq-exllama-v1-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,x,x,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 304, in hf_raise_for_status - response.raise_for_status() - File ""/usr/local/lib/python3.10/dist-packages/requests/models.py"", line 1024, in raise_for_status - raise HTTPError(http_error_msg, response=self) -requests.exceptions.HTTPError: 404 Client Error: Not Found for url: https://huggingface.co/x/resolve/main/config.json - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1221, in hf_hub_download - return _hf_hub_download_to_cache_dir( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1325, in _hf_hub_download_to_cache_dir - _raise_on_head_call_error(head_call_error, force_download, local_files_only) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1823, in _raise_on_head_call_error - raise head_call_error - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1722, in _get_metadata_or_catch_error - metadata = get_hf_file_metadata(url=url, proxies=proxies, timeout=etag_timeout, headers=headers) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1645, in get_hf_file_metadata - r = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 372, in _request_wrapper - response = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 396, in _request_wrapper - hf_raise_for_status(response) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status - raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. 
(Request ID: Root=1-669492ce-3d98ae233f66e68119bd3167;d20df17a-c04b-40ff-853f-c6accd1abacb) - -Repository Not Found for url: https://huggingface.co/x/resolve/main/config.json. -Please make sure you specified the correct `repo_id` and `repo_type`. -If you are trying to access a private or gated repo, make sure you are authenticated. - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 425, in cached_file - raise EnvironmentError( -OSError: x is not a local folder and is not a valid model identifier listed on 'https://huggingface.co/models' -If this is a private repository, make sure to pass a token having permission to this repo either by logging in with `huggingface-cli login` or by passing `token=` - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,1,64,1,,, -4bit-awq-exllama-v1-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,/,/,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = 
worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 373, in cached_file - raise EnvironmentError( -OSError: / does not appear to have a file named config.json. Checkout 'https://huggingface.co///tree/None' for available files. - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,1,64,1,,, -4bit-awq-exllama-v1-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,togethercomputer/RedPajama-INCITE-Base-7B-v0.1,togethercomputer/RedPajama-INCITE-Base-7B-v0.1,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - self.load_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3907, in from_pretrained - hf_quantizer.postprocess_model(model) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/base.py"", line 195, in postprocess_model - return self._process_model_after_weight_loading(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/quantizer_awq.py"", line 107, in _process_model_after_weight_loading - model = post_init_awq_exllama_modules(model, self.quantization_config.exllama_config) - File ""/usr/local/lib/python3.10/dist-packages/transformers/integrations/awq.py"", line 462, in post_init_awq_exllama_modules - model = exllama_post_init(model) - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllama.py"", line 144, in exllama_post_init - submodule.post_init() - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllama.py"", line 77, in post_init - self.q4 = exl_ext.make_q4( -NameError: name 'exl_ext' is not defined - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,1,64,1,,, -4bit-awq-exllama-v1-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,facebook/xglm-564M,facebook/xglm-564M,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - self.load_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3704, in from_pretrained - config = cls._autoset_attn_implementation( - File 
""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1490, in _autoset_attn_implementation - config = cls._check_and_enable_sdpa( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1656, in _check_and_enable_sdpa - raise ValueError( -ValueError: XGLMForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,1,64,1,,, -4bit-awq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,EleutherAI/gpt-neo-125m,EleutherAI/gpt-neo-125m,cuda,0,42,,,True,,,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run - report = scenario.run(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run - self.run_model_loading_tracking(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking - backend.load() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load - self.load_transformers_model() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model - self.load_transformers_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights - self.load_transformers_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained - self.pretrained_model = self.automodel_loader.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3826, in from_pretrained - config = 
cls._autoset_attn_implementation( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1565, in _autoset_attn_implementation - config = cls._check_and_enable_sdpa( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1731, in _check_and_enable_sdpa - raise ValueError( -ValueError: GPTNeoForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` - -",gpt_neo,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,1,64,1,,, -4bit-awq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,EleutherAI/gpt-j-6b,EleutherAI/gpt-j-6b,cuda,0,42,,,True,,,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run - report = scenario.run(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run - self.run_model_loading_tracking(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking - backend.load() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load - self.load_transformers_model() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model - self.load_transformers_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights - self.load_transformers_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained - self.pretrained_model = self.automodel_loader.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3826, in 
from_pretrained - config = cls._autoset_attn_implementation( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1565, in _autoset_attn_implementation - config = cls._check_and_enable_sdpa( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1731, in _check_and_enable_sdpa - raise ValueError( -ValueError: GPTJForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` - -",gptj,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,1,64,1,,, -4bit-awq-exllama-v1-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,stabilityai/stablelm-2-12b,stabilityai/stablelm-2-12b,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - self.load_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3907, in from_pretrained - hf_quantizer.postprocess_model(model) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/base.py"", line 195, in postprocess_model - return self._process_model_after_weight_loading(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/quantizer_awq.py"", line 107, in _process_model_after_weight_loading - model = 
post_init_awq_exllama_modules(model, self.quantization_config.exllama_config) - File ""/usr/local/lib/python3.10/dist-packages/transformers/integrations/awq.py"", line 462, in post_init_awq_exllama_modules - model = exllama_post_init(model) - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllama.py"", line 144, in exllama_post_init - submodule.post_init() - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllama.py"", line 77, in post_init - self.q4 = exl_ext.make_q4( -NameError: name 'exl_ext' is not defined - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,1,64,1,,, -4bit-awq-exllama-v1-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,google/gemma-2b,google/gemma-2b,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 304, in hf_raise_for_status - response.raise_for_status() - File ""/usr/local/lib/python3.10/dist-packages/requests/models.py"", line 1024, in raise_for_status - raise HTTPError(http_error_msg, response=self) -requests.exceptions.HTTPError: 403 Client Error: Forbidden for url: https://huggingface.co/google/gemma-2b/resolve/main/config.json - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1722, in _get_metadata_or_catch_error - metadata = get_hf_file_metadata(url=url, proxies=proxies, timeout=etag_timeout, headers=headers) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1645, in get_hf_file_metadata - r = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 372, in _request_wrapper - response = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 396, in _request_wrapper - hf_raise_for_status(response) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 367, in hf_raise_for_status - raise HfHubHTTPError(message, response=response) from e -huggingface_hub.utils._errors.HfHubHTTPError: (Request ID: Root=1-6694811b-0dd7ab7e6f4beb8f4a7871bb;0eee426c-63cc-4c8b-b34a-3af47f69d6f9) - -403 Forbidden: Please enable access to 
public gated repositories in your fine-grained token settings to view this repository.. -Cannot access content at: https://huggingface.co/google/gemma-2b/resolve/main/config.json. -If you are trying to create or update content,make sure you have a token with the `write` role. - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1221, in hf_hub_download - return _hf_hub_download_to_cache_dir( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1325, in _hf_hub_download_to_cache_dir - _raise_on_head_call_error(head_call_error, force_download, local_files_only) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1826, in _raise_on_head_call_error - raise LocalEntryNotFoundError( -huggingface_hub.utils._errors.LocalEntryNotFoundError: An error happened while trying to locate the file on the Hub and we cannot find the requested files in the local cache. Please check your connection and try again or make sure your Internet connection is on. - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 445, in cached_file - raise EnvironmentError( -OSError: We couldn't connect to 'https://huggingface.co' to load this file, couldn't find it in the cached files and it looks like google/gemma-2b is not the path to a directory containing a file named config.json. -Checkout your internet connection or see how to run the library in offline mode at 'https://huggingface.co/docs/transformers/installation#offline-mode'. 
- -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,1,64,1,,, -4bit-awq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,EleutherAI/polyglot-ko-12.8b,EleutherAI/polyglot-ko-12.8b,cuda,0,42,,,True,,,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,gpt_neox,MB,7421.558784,9691.46368,0.0,9061.793792,8463.626752,s,1,11.721361328125,11.721361328125,0.0,11.721361328125,11.721361328125,11.721361328125,11.721361328125,[11.721361328125],,kWh,5.7575126868760496e-05,3.1519129515471137e-05,0.00011668398223596554,0.00020577823862019716,,MB,1775.14496,9708.240896,0.0,9061.793792,7980.722176,s,10,23.6985517578125,2.36985517578125,0.00018037474855229414,2.3699097900390624,2.36998359375,2.3699943603515625,2.3700029736328125,"[2.369358642578125, 2.369888427734375, 2.36980615234375, 2.370005126953125, 2.369892333984375, 2.36977001953125, 2.369981201171875, 2.369978271484375, 2.36992724609375, 2.3699443359375]",tokens/s,108.0234786564992,kWh,2.7986642854653054e-05,1.5337484421028822e-05,0.00016057387845899718,0.00020389800573467905,tokens/kWh,1255529.6903350705,MB,1775.14496,9708.240896,0.0,9061.793792,8267.784704,s,10,17.562904418945312,1.756290441894531,0.013086613981274997,1.7539346923828125,1.7736221923828126,1.7741517333984376,1.7745753662109376,"[1.7533260498046874, 1.7543319091796874, 1.7415921630859375, 1.7746812744140625, 1.748948486328125, 1.73225927734375, 1.7735045166015626, 1.7535374755859374, 1.770148681640625, 1.7605745849609375]",tokens/s,35.87106010327151,kWh,2.0554245525069195e-05,1.12655224958193e-05,8.401470610060467e-05,0.0001158344741214932,tokens/kWh,543879.5356719308,,s,630,17.560303627014157,0.027873497820657397,0.0004841442811886305,0.02795980739593506,0.028259431076049803,0.028478156661987303,0.029246464252471932,"[0.02871500778198242, 0.02839347267150879, 0.028106752395629882, 0.028275711059570312, 0.02810163116455078, 0.028185600280761718, 0.028034048080444338, 0.02811392021179199, 0.02790399932861328, 0.028043264389038085, 0.027878400802612304, 0.027926528930664062, 0.027849727630615235, 0.028005376815795898, 0.028302335739135744, 0.028246015548706056, 0.027842592239379883, 0.02797871971130371, 0.028051456451416015, 0.02816204833984375, 0.027800575256347656, 0.027881471633911133, 0.027826175689697266, 0.02795315170288086, 0.027798528671264647, 0.027890687942504884, 0.02710527992248535, 0.026852352142333984, 0.026970111846923828, 0.02710220718383789, 0.027015167236328123, 0.026991615295410155, 0.02695475196838379, 0.02706329536437988, 0.027501567840576172, 0.027890687942504884, 0.027813888549804686, 0.0279736328125, 0.02796544075012207, 0.027848703384399414, 0.027897855758666993, 0.028073984146118162, 0.02792959976196289, 0.028047359466552735, 0.02789580726623535, 0.027888639450073242, 0.027069440841674806, 0.027042816162109375, 0.02708684730529785, 0.02713599967956543, 0.02720358467102051, 0.028221439361572266, 0.028189695358276368, 0.028108800888061523, 0.027992063522338868, 0.02812928009033203, 0.028030975341796875, 0.028111871719360353, 
0.02815488052368164, 0.028107776641845703, 0.028023807525634766, 0.028067840576171874, 0.028293119430541993, 0.02799001693725586, 0.027230207443237304, 0.027042816162109375, 0.02699673652648926, 0.026986495971679687, 0.027133951187133788, 0.02715238380432129, 0.02726911926269531, 0.0271329288482666, 0.027052032470703126, 0.02773708724975586, 0.02999603271484375, 0.028992511749267577, 0.028116992950439453, 0.02809343910217285, 0.02778009605407715, 0.02791935920715332, 0.02791116714477539, 0.028203008651733398, 0.02809753608703613, 0.028108800888061523, 0.028045312881469726, 0.02853887939453125, 0.028218368530273437, 0.027963392257690428, 0.0281343994140625, 0.028028928756713867, 0.027987968444824218, 0.028270591735839845, 0.028047359466552735, 0.0281343994140625, 0.02794598388671875, 0.028128255844116212, 0.02790915107727051, 0.02818556785583496, 0.028044288635253906, 0.028007423400878906, 0.027998207092285156, 0.0279736328125, 0.028003328323364256, 0.02798899269104004, 0.02811903953552246, 0.02789580726623535, 0.02813030433654785, 0.02797670364379883, 0.028009471893310548, 0.02730188751220703, 0.02716364860534668, 0.02714112091064453, 0.027077632904052733, 0.027076608657836915, 0.026998783111572267, 0.0270960636138916, 0.02712985610961914, 0.028085248947143555, 0.028031999588012696, 0.028110847473144532, 0.02791529655456543, 0.028076000213623047, 0.028080127716064454, 0.027858943939208985, 0.028161088943481447, 0.028157888412475587, 0.027820127487182617, 0.027297695159912108, 0.027009023666381835, 0.02698854446411133, 0.026926080703735353, 0.027189247131347655, 0.028238847732543947, 0.02819891166687012, 0.028053504943847656, 0.02795008087158203, 0.028047359466552735, 0.027838464736938476, 0.028041215896606447, 0.02793574333190918, 0.027971584320068358, 0.027907072067260744, 0.027905023574829102, 0.028366847991943358, 0.02809753608703613, 0.02796441650390625, 0.027732032775878906, 0.02803705596923828, 0.028007423400878906, 0.02795724868774414, 0.02792857551574707, 0.028015615463256836, 0.02794905662536621, 0.027914239883422853, 0.02794086456298828, 0.027876352310180662, 0.02715340805053711, 0.027201568603515625, 0.02723023986816406, 0.027001792907714844, 0.02714419174194336, 0.02708787155151367, 0.02698956871032715, 0.027020288467407227, 0.027037696838378908, 0.027073535919189453, 0.027032575607299804, 0.02716979217529297, 0.02712678337097168, 0.027212799072265623, 0.02712678337097168, 0.027042816162109375, 0.02712063980102539, 0.02711347198486328, 0.026945535659790038, 0.027015167236328123, 0.02708684730529785, 0.02702137565612793, 0.02706528091430664, 0.02710527992248535, 0.028431360244750976, 0.028259328842163086, 0.02793471908569336, 0.027893760681152343, 0.02795827293395996, 0.02819993591308594, 0.029305856704711915, 0.029322240829467775, 0.028793855667114256, 0.027798528671264647, 0.02719539260864258, 0.028437503814697264, 0.028256256103515624, 0.028064767837524415, 0.028050432205200194, 0.028084224700927734, 0.02816204833984375, 0.027828224182128908, 0.02799001693725586, 0.028226560592651367, 0.028207103729248048, 0.02812620735168457, 0.028275711059570312, 0.02814771270751953, 0.028056575775146485, 0.028878847122192384, 0.028299264907836914, 0.028255231857299806, 0.02810982322692871, 0.028116992950439453, 0.028269567489624024, 0.028255231857299806, 0.02816409683227539, 0.028083200454711913, 0.028080127716064454, 0.027894784927368164, 0.028080127716064454, 0.02814771270751953, 0.028065792083740236, 0.027996160507202147, 0.02816819190979004, 0.027900928497314452, 0.027894847869873046, 
0.028046272277832032, 0.02793267250061035, 0.0279736328125, 0.02832383918762207, 0.02832486343383789, 0.02815078353881836, 0.028233728408813476, 0.028050432205200194, 0.027974655151367187, 0.02789580726623535, 0.027836416244506838, 0.02796236801147461, 0.0279736328125, 0.028840959548950194, 0.02996735954284668, 0.028824575424194337, 0.02836275291442871, 0.028261375427246094, 0.028221439361572266, 0.027986944198608397, 0.028181503295898438, 0.028268543243408203, 0.02838425636291504, 0.028230655670166017, 0.028144704818725587, 0.02815999984741211, 0.028097471237182616, 0.028200960159301756, 0.028057600021362306, 0.02795008087158203, 0.027181055068969725, 0.027122688293457032, 0.028048383712768556, 0.028019712448120116, 0.02814668846130371, 0.02813132858276367, 0.028063743591308594, 0.028033023834228517, 0.028619775772094725, 0.029457408905029295, 0.02877132797241211, 0.028408832550048828, 0.028099584579467773, 0.028070911407470703, 0.02810163116455078, 0.027438079833984375, 0.028048383712768556, 0.02797875213623047, 0.02796134376525879, 0.02793574333190918, 0.02775040054321289, 0.027963392257690428, 0.02792959976196289, 0.02794905662536621, 0.027815935134887695, 0.027810815811157227, 0.027854848861694335, 0.02772684860229492, 0.02795110321044922, 0.026998783111572267, 0.027069440841674806, 0.02709199905395508, 0.027093984603881835, 0.027290624618530275, 0.027090944290161133, 0.027204608917236327, 0.027026432037353516, 0.027059200286865235, 0.026832895278930666, 0.027185152053833008, 0.027220991134643553, 0.027068416595458986, 0.027036672592163087, 0.027249664306640626, 0.027923456192016603, 0.02793984031677246, 0.027825151443481445, 0.027991039276123047, 0.02796953582763672, 0.028025856018066408, 0.027898880004882814, 0.027585535049438475, 0.02794803237915039, 0.02753331184387207, 0.02791219139099121, 0.027832319259643554, 0.027808767318725586, 0.028019712448120116, 0.027918336868286132, 0.027891712188720705, 0.027923456192016603, 0.027900928497314452, 0.027773952484130858, 0.027158527374267577, 0.0269117431640625, 0.026878976821899415, 0.02691379165649414, 0.027080703735351562, 0.027051008224487305, 0.026990591049194337, 0.02772275161743164, 0.027867136001586915, 0.02790297508239746, 0.027947008132934572, 0.027952159881591797, 0.027967456817626954, 0.027633663177490234, 0.027035648345947266, 0.026960895538330077, 0.027057151794433593, 0.02734182357788086, 0.027415552139282227, 0.027234304428100587, 0.02698854446411133, 0.027047935485839843, 0.027125759124755858, 0.027040767669677734, 0.026978303909301758, 0.0269752311706543, 0.02698956871032715, 0.027032575607299804, 0.027085823059082033, 0.027031551361083983, 0.027077632904052733, 0.027003904342651368, 0.026983423233032225, 0.027045919418334962, 0.02716771125793457, 0.026961984634399413, 0.026943424224853515, 0.027014144897460936, 0.026961919784545898, 0.027158527374267577, 0.02795315170288086, 0.02876313591003418, 0.028193792343139647, 0.028120063781738282, 0.02792550468444824, 0.02793574333190918, 0.02799308776855469, 0.02798591995239258, 0.028110847473144532, 0.02792959976196289, 0.02792959976196289, 0.027992063522338868, 0.02797056007385254, 0.027956384658813477, 0.02786083221435547, 0.028019712448120116, 0.02795724868774414, 0.027987007141113282, 0.02809235191345215, 0.02799411201477051, 0.02792959976196289, 0.028008447647094727, 0.027971584320068358, 0.027478015899658204, 0.027971584320068358, 0.028034048080444338, 0.028017663955688478, 0.028021760940551758, 0.027995136260986327, 0.028140544891357422, 0.028625919342041017, 
0.028411903381347657, 0.02818252754211426, 0.028073984146118162, 0.028169216156005858, 0.028219392776489258, 0.02838937568664551, 0.028006399154663086, 0.02815180778503418, 0.028057600021362306, 0.02819174385070801, 0.028044288635253906, 0.028035072326660155, 0.027955263137817384, 0.02807904052734375, 0.028024831771850587, 0.028325887680053712, 0.028443647384643556, 0.029101055145263673, 0.02809343910217285, 0.028132352828979492, 0.02818764877319336, 0.02874470329284668, 0.028184576034545897, 0.02797875213623047, 0.027884544372558592, 0.028249088287353515, 0.028229631423950196, 0.028092416763305664, 0.028260351181030274, 0.027990047454833984, 0.027900896072387694, 0.028026880264282225, 0.028011520385742186, 0.028012544631958007, 0.028406784057617186, 0.028217344284057616, 0.027992063522338868, 0.028090368270874022, 0.027992063522338868, 0.028027904510498046, 0.028016639709472657, 0.02798899269104004, 0.02809753608703613, 0.02797260856628418, 0.028545024871826172, 0.02813747215270996, 0.028053504943847656, 0.028095487594604493, 0.028057600021362306, 0.02816819190979004, 0.028446720123291015, 0.02858598327636719, 0.028210176467895507, 0.028064767837524415, 0.02799001693725586, 0.02713091278076172, 0.027107295989990236, 0.027011072158813477, 0.02696294403076172, 0.026945535659790038, 0.026992639541625976, 0.027926528930664062, 0.027835391998291017, 0.027876352310180662, 0.02795827293395996, 0.02753433609008789, 0.027044864654541017, 0.02693120002746582, 0.02697318458557129, 0.02775654411315918, 0.027889663696289063, 0.027923456192016603, 0.027890687942504884, 0.028689407348632814, 0.028041215896606447, 0.027914239883422853, 0.02796134376525879, 0.027817983627319336, 0.027874303817749024, 0.027891712188720705, 0.027922431945800782, 0.027805696487426756, 0.027802623748779298, 0.027777023315429687, 0.027999231338500977, 0.027892736434936522, 0.027910144805908203, 0.027810815811157227, 0.027881471633911133, 0.0279552001953125, 0.027407360076904298, 0.027656192779541015, 0.027686912536621092, 0.02819174385070801, 0.02790809631347656, 0.028470272064208983, 0.029073408126831055, 0.02852454376220703, 0.02815488052368164, 0.028071935653686524, 0.027926528930664062, 0.027853824615478515, 0.027898880004882814, 0.027910144805908203, 0.027992063522338868, 0.028060672760009765, 0.028004352569580077, 0.028012544631958007, 0.028023807525634766, 0.027974655151367187, 0.028053504943847656, 0.028103679656982423, 0.027979776382446288, 0.02795212745666504, 0.027926528930664062, 0.027874303817749024, 0.02797875213623047, 0.027878400802612304, 0.02715545654296875, 0.02711039924621582, 0.02792550468444824, 0.02799308776855469, 0.02796441650390625, 0.0279418888092041, 0.027789312362670897, 0.027947008132934572, 0.02792448043823242, 0.028037120819091797, 0.027889663696289063, 0.028038143157958984, 0.02796031951904297, 0.028027904510498046, 0.028662784576416016, 0.02817638397216797, 0.028437503814697264, 0.02797260856628418, 0.027922431945800782, 0.027790336608886718, 0.027886592864990234, 0.027986944198608397, 0.028088319778442384, 0.028222463607788087, 0.027834367752075196, 0.028075008392333983, 0.027792383193969726, 0.02779136085510254, 0.02779545593261719, 0.028218368530273437, 0.027979776382446288, 0.028028928756713867, 0.028013568878173828, 0.02818662452697754, 0.02796031951904297, 0.02795929527282715, 0.02798080062866211, 0.028610559463500978, 0.02815590476989746, 0.028060672760009765, 0.02806483268737793, 0.0280196475982666, 0.02795827293395996, 0.027890687942504884, 0.02838118362426758, 0.028039167404174805, 
0.02814668846130371, 0.028010496139526365, 0.02815897560119629, 0.029070335388183592, 0.030737407684326173, 0.028673023223876954, 0.028268543243408203, 0.028107776641845703, 0.02809753608703613, 0.028022783279418945, 0.027996160507202147, 0.028860416412353516, 0.028031999588012696, 0.02791628837585449, 0.027789375305175782, 0.028382144927978515, 0.02975129508972168, 0.028484607696533205, 0.028188703536987304, 0.027721696853637696, 0.027877376556396483, 0.02789276885986328, 0.027897823333740236, 0.027885568618774413, 0.027930624008178712, 0.02794495964050293, 0.028256256103515624, 0.02794393539428711, 0.027817983627319336, 0.027901023864746095, 0.027957151412963867, 0.027847679138183593, 0.028035072326660155, 0.028092416763305664, 0.028017663955688478, 0.02794393539428711, 0.027888639450073242, 0.02790297508239746, 0.028092416763305664, 0.028266496658325195, 0.02794495964050293, 0.027810815811157227, 0.02791935920715332, 0.027819007873535157, 0.027814912796020507, 0.02798899269104004, 0.027881471633911133, 0.027777023315429687, 0.027808767318725586, 0.026933248519897462, 0.02712883186340332, 0.027413503646850586, 0.027966463088989257, 0.02792959976196289, 0.027883520126342775, 0.027855871200561523, 0.027860992431640624, 0.02795417594909668, 0.02793779182434082, 0.027926528930664062, 0.02779955291748047, 0.027847679138183593, 0.028494848251342773, 0.028239871978759764, 0.02775040054321289, 0.028241920471191406, 0.02792959976196289, 0.027828224182128908, 0.02779648017883301, 0.02792857551574707, 0.02797875213623047, 0.02796134376525879, 0.028067840576171874, 0.027784191131591796, 0.027864063262939453, 0.028039167404174805, 0.027886592864990234, 0.027843584060668947, 0.02795417594909668]",tokens/s,35.87637283394291,,,1,64,1,,, -4bit-awq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,Qwen/Qwen-72B,Qwen/Qwen-72B,cuda,0,42,,,True,,,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run - report = scenario.run(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run - self.run_model_loading_tracking(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking - backend.load() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load - self.load_transformers_model() - File 
""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model - self.load_transformers_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights - self.load_transformers_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained - self.pretrained_model = self.automodel_loader.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 551, in from_pretrained - model_class = get_class_from_dynamic_module( - File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 502, in get_class_from_dynamic_module - final_module = get_cached_module_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 327, in get_cached_module_file - modules_needed = check_imports(resolved_module_file) - File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 182, in check_imports - raise ImportError( -ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. Run `pip install transformers_stream_generator` - -",qwen,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,1,64,1,,, -4bit-awq-exllama-v1-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,i,i,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 304, in hf_raise_for_status - response.raise_for_status() - File ""/usr/local/lib/python3.10/dist-packages/requests/models.py"", line 1024, in raise_for_status - raise HTTPError(http_error_msg, response=self) -requests.exceptions.HTTPError: 404 Client Error: Not Found for url: https://huggingface.co/i/resolve/main/config.json - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1221, in hf_hub_download - return 
_hf_hub_download_to_cache_dir( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1325, in _hf_hub_download_to_cache_dir - _raise_on_head_call_error(head_call_error, force_download, local_files_only) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1823, in _raise_on_head_call_error - raise head_call_error - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1722, in _get_metadata_or_catch_error - metadata = get_hf_file_metadata(url=url, proxies=proxies, timeout=etag_timeout, headers=headers) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1645, in get_hf_file_metadata - r = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 372, in _request_wrapper - response = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 396, in _request_wrapper - hf_raise_for_status(response) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status - raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-6694901f-2e15a68418ddccc2779299bf;2a2fa148-e713-4bf1-8808-c10e9fbc56ce) - -Repository Not Found for url: https://huggingface.co/i/resolve/main/config.json. -Please make sure you specified the correct `repo_id` and `repo_type`. -If you are trying to access a private or gated repo, make sure you are authenticated. - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 425, in cached_file - raise EnvironmentError( -OSError: i is not a local folder and is not a valid model identifier listed on 'https://huggingface.co/models' -If this is a private repository, make sure to pass a token having permission to this repo either by 
logging in with `huggingface-cli login` or by passing `token=` - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,1,64,1,,, -4bit-awq-exllama-v1-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,stabilityai/stablelm-3b-4e1t,stabilityai/stablelm-3b-4e1t,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - self.load_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3907, in from_pretrained - hf_quantizer.postprocess_model(model) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/base.py"", line 195, in postprocess_model - return self._process_model_after_weight_loading(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/quantizer_awq.py"", line 107, in _process_model_after_weight_loading - model = post_init_awq_exllama_modules(model, self.quantization_config.exllama_config) - File ""/usr/local/lib/python3.10/dist-packages/transformers/integrations/awq.py"", line 462, in post_init_awq_exllama_modules - model = exllama_post_init(model) - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllama.py"", line 144, in exllama_post_init - submodule.post_init() - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllama.py"", line 77, in post_init - self.q4 = exl_ext.make_q4( -NameError: name 'exl_ext' is not defined - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,1,64,1,,, 
-4bit-awq-exllama-v1-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,tiiuae/falcon-rw-1b,tiiuae/falcon-rw-1b,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - self.load_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 559, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3704, in from_pretrained - config = cls._autoset_attn_implementation( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1490, in _autoset_attn_implementation - config = cls._check_and_enable_sdpa( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1656, in _check_and_enable_sdpa - raise ValueError( -ValueError: FalconForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. 
Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,1,64,1,,, -4bit-awq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,Qwen/Qwen-14B,Qwen/Qwen-14B,cuda,0,42,,,True,,,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run - report = scenario.run(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run - self.run_model_loading_tracking(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking - backend.load() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load - self.load_transformers_model() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model - self.load_transformers_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights - self.load_transformers_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained - self.pretrained_model = self.automodel_loader.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 551, in from_pretrained - model_class = get_class_from_dynamic_module( - File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 502, in get_class_from_dynamic_module - final_module = get_cached_module_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 327, in get_cached_module_file - modules_needed = check_imports(resolved_module_file) - File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 182, in check_imports - raise ImportError( -ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. 
Run `pip install transformers_stream_generator` - -",qwen,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,1,64,1,,, -4bit-awq-exllama-v1-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,stabilityai/stablelm-2-1_6b,stabilityai/stablelm-2-1_6b,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - self.load_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3907, in from_pretrained - hf_quantizer.postprocess_model(model) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/base.py"", line 195, in postprocess_model - return self._process_model_after_weight_loading(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/quantizer_awq.py"", line 107, in _process_model_after_weight_loading - model = post_init_awq_exllama_modules(model, self.quantization_config.exllama_config) - File ""/usr/local/lib/python3.10/dist-packages/transformers/integrations/awq.py"", line 462, in post_init_awq_exllama_modules - model = exllama_post_init(model) - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllama.py"", line 144, in exllama_post_init - submodule.post_init() - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllama.py"", line 77, in post_init - self.q4 = exl_ext.make_q4( -NameError: name 'exl_ext' is not defined - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,1,64,1,,, 
-4bit-awq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,EleutherAI/pythia-6.7b,EleutherAI/pythia-6.7b,cuda,0,42,,,True,,,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,gpt_neox,MB,4188.987392,6019.350528,0.0,5389.68064,5000.446464,s,1,9.9382333984375,9.9382333984375,0.0,9.9382333984375,9.9382333984375,9.9382333984375,9.9382333984375,[9.9382333984375],,kWh,3.721064479168111e-05,2.0378892797260235e-05,7.039200075803498e-05,0.0001279815383469763,,MB,1521.963008,6040.322048,0.0,5393.874944,4700.829696,s,10,11.91125915527344,1.1911259155273437,0.0001144677247249635,1.1910863647460936,1.19128056640625,1.191297265625,1.191310625,"[1.191189453125, 1.1910892333984375, 1.19108349609375, 1.191006591796875, 1.19101416015625, 1.1909674072265626, 1.1910826416015625, 1.19131396484375, 1.19127685546875, 1.1912353515625]",tokens/s,214.92270184270305,kWh,1.4076526486458458e-05,7.713586520801259e-06,8.191931553540033e-05,0.00010370942854266005,tokens/kWh,2468435.1615600353,MB,1539.95264,6040.322048,0.0,5393.874944,4877.454848,s,10,14.017318847656249,1.401731884765625,0.0037190033490569095,1.4012568969726562,1.4075944458007812,1.408120526123047,1.4085413903808595,"[1.3983914794921875, 1.4086466064453125, 1.403867431640625, 1.4074775390625, 1.3969322509765625, 1.399386962890625, 1.40148583984375, 1.4010279541015624, 1.3981771240234375, 1.4019256591796876]",tokens/s,44.94440105465236,kWh,1.6570089101389338e-05,9.081739363339968e-06,5.4312904561398825e-05,7.99647330261281e-05,tokens/kWh,787847.3123822603,,s,630,14.0150149116516,0.02224605541532002,0.0003502961047814552,0.022178815841674804,0.02264575939178467,0.02292316160202026,0.023522641849517828,"[0.02310758399963379, 0.02224127960205078, 0.021999616622924805, 0.022014976501464844, 0.02290176010131836, 0.02231603240966797, 0.021893119812011717, 0.02208768081665039, 0.021918720245361328, 0.02249113655090332, 0.023045120239257814, 0.023302143096923827, 0.022940671920776368, 0.022838272094726563, 0.02227097511291504, 0.02227609634399414, 0.021985279083251954, 0.022145023345947267, 0.021982208251953125, 0.02185523223876953, 0.02202726364135742, 0.02185830307006836, 0.0218920955657959, 0.02185215950012207, 0.022412288665771486, 0.022245376586914063, 0.02221670341491699, 0.022054912567138672, 0.022782976150512696, 0.022738943099975584, 0.02227712059020996, 0.021902336120605468, 0.021839872360229492, 0.021770240783691407, 0.022124544143676757, 0.022208511352539064, 0.022008832931518556, 0.021792768478393554, 0.021910528182983398, 0.022181888580322266, 0.022160383224487306, 0.022156288146972656, 0.02191564750671387, 0.022173696517944336, 0.022999040603637694, 0.022278144836425783, 0.02204979133605957, 0.02185215950012207, 0.022125568389892578, 0.021839872360229492, 0.02208051109313965, 0.022038528442382813, 0.022114303588867186, 0.022042623519897463, 0.022098943710327147, 0.021827583312988282, 0.02190745544433594, 0.02229862403869629, 0.022226943969726562, 0.02186444854736328, 0.022162431716918944, 0.021926912307739257, 0.02231091117858887, 0.023069696426391603, 
0.021938175201416017, 0.023433216094970705, 0.023059455871582032, 0.023003135681152344, 0.02249728012084961, 0.02206924819946289, 0.02223308753967285, 0.022172672271728516, 0.022192127227783204, 0.02246246337890625, 0.0221214714050293, 0.021981184005737304, 0.02228326416015625, 0.021968896865844727, 0.022330368041992187, 0.022150144577026368, 0.02230169677734375, 0.02213478469848633, 0.02194534492492676, 0.02186240005493164, 0.021825536727905274, 0.021979135513305666, 0.022192127227783204, 0.021990400314331054, 0.022425600051879883, 0.022153215408325197, 0.022099967956542968, 0.022129663467407225, 0.022395904541015626, 0.022157312393188477, 0.022335487365722655, 0.022501375198364256, 0.022223871231079103, 0.02186444854736328, 0.022155263900756835, 0.022170623779296874, 0.022178815841674804, 0.022271999359130858, 0.022112255096435548, 0.02183782386779785, 0.021808128356933593, 0.022658048629760744, 0.02289356803894043, 0.022207487106323243, 0.022191104888916017, 0.0220897274017334, 0.022776832580566408, 0.022426624298095704, 0.022063104629516602, 0.02208153533935547, 0.022501375198364256, 0.022594560623168947, 0.025683967590332032, 0.023559167861938478, 0.023937023162841797, 0.022304767608642577, 0.02188800048828125, 0.02187468719482422, 0.02201190376281738, 0.022183935165405275, 0.022208511352539064, 0.02225971221923828, 0.02307788848876953, 0.024005632400512695, 0.023005184173583985, 0.022773759841918945, 0.0225167350769043, 0.022157312393188477, 0.022722560882568358, 0.022738943099975584, 0.022161407470703123, 0.022218751907348632, 0.02248806381225586, 0.022874111175537108, 0.02239897537231445, 0.022156288146972656, 0.022149120330810547, 0.0224532470703125, 0.022305791854858398, 0.022138879776000975, 0.022207487106323243, 0.02203647994995117, 0.0222423038482666, 0.022125568389892578, 0.022157312393188477, 0.022573055267333983, 0.02213580894470215, 0.022174720764160157, 0.022214656829833986, 0.02225868797302246, 0.022226943969726562, 0.022115327835083007, 0.022172672271728516, 0.021944320678710938, 0.021938175201416017, 0.02202009582519531, 0.02205286407470703, 0.021953535079956055, 0.021940223693847655, 0.02192793655395508, 0.02221670341491699, 0.02232524871826172, 0.022205440521240235, 0.022047744750976563, 0.021781503677368166, 0.022042623519897463, 0.022165504455566407, 0.022211584091186523, 0.022278144836425783, 0.022200319290161134, 0.022218751907348632, 0.02210099220275879, 0.022312959671020507, 0.022047744750976563, 0.02230784034729004, 0.022022144317626953, 0.02211942481994629, 0.02214297676086426, 0.02207846450805664, 0.022108160018920898, 0.022221824645996095, 0.022212608337402344, 0.022525951385498046, 0.022815744400024415, 0.022171648025512695, 0.02304204750061035, 0.02210201644897461, 0.022169599533081053, 0.022226943969726562, 0.02203647994995117, 0.02249932861328125, 0.02285055923461914, 0.02282598304748535, 0.022774784088134766, 0.022165504455566407, 0.02209280014038086, 0.02230886459350586, 0.022268928527832032, 0.02230169677734375, 0.02210508728027344, 0.021757951736450197, 0.021970943450927736, 0.022323200225830078, 0.022133760452270508, 0.022591487884521484, 0.02249932861328125, 0.022552576065063477, 0.022230016708374024, 0.0237076473236084, 0.022252544403076172, 0.02229452705383301, 0.022133760452270508, 0.02188697624206543, 0.02208051109313965, 0.02221670341491699, 0.022180864334106445, 0.022342655181884767, 0.02223308753967285, 0.02235699272155762, 0.02225868797302246, 0.022195199966430663, 0.022166528701782227, 0.022381568908691408, 0.02221772766113281, 
0.023145471572875977, 0.022879232406616212, 0.022879232406616212, 0.022206464767456056, 0.022280191421508787, 0.02224332809448242, 0.02224844741821289, 0.022181888580322266, 0.02209587287902832, 0.022194175720214843, 0.02246553611755371, 0.02231091117858887, 0.02227712059020996, 0.02224947166442871, 0.022183935165405275, 0.02224332809448242, 0.022421503067016603, 0.022365184783935548, 0.022403072357177735, 0.02229555130004883, 0.02242252731323242, 0.02208460807800293, 0.02226585578918457, 0.022167552947998048, 0.02306764793395996, 0.022226943969726562, 0.022108160018920898, 0.02222489547729492, 0.02211020851135254, 0.022180864334106445, 0.021986303329467775, 0.02248192024230957, 0.02230784034729004, 0.022369279861450195, 0.022160383224487306, 0.02184601593017578, 0.02222591972351074, 0.022149120330810547, 0.02220134353637695, 0.02223308753967285, 0.022000640869140626, 0.022198272705078126, 0.02226688003540039, 0.022205440521240235, 0.0223242244720459, 0.02207744026184082, 0.022179840087890625, 0.022055936813354493, 0.02224332809448242, 0.02206924819946289, 0.022131711959838866, 0.02208358383178711, 0.022177791595458983, 0.02214297676086426, 0.0221214714050293, 0.022123519897460937, 0.022106111526489256, 0.022161407470703123, 0.022004735946655272, 0.022172672271728516, 0.022099967956542968, 0.022168575286865236, 0.022147071838378905, 0.022131711959838866, 0.022226943969726562, 0.021967872619628907, 0.022161407470703123, 0.022143999099731446, 0.022370304107666016, 0.022133760452270508, 0.022236160278320313, 0.02206003189086914, 0.022178815841674804, 0.022090751647949217, 0.02226585578918457, 0.02208051109313965, 0.022163455963134765, 0.022141952514648438, 0.022162431716918944, 0.022160383224487306, 0.022140928268432617, 0.02187161636352539, 0.022179840087890625, 0.022150144577026368, 0.022210559844970702, 0.02215116882324219, 0.022180864334106445, 0.022964223861694336, 0.022161407470703123, 0.02267955207824707, 0.023414783477783203, 0.022218751907348632, 0.021754880905151368, 0.02183475112915039, 0.022156288146972656, 0.022133760452270508, 0.021917695999145507, 0.02189926338195801, 0.022171648025512695, 0.022271999359130858, 0.022174720764160157, 0.022167552947998048, 0.02211942481994629, 0.022383615493774413, 0.02215116882324219, 0.021975040435791016, 0.021745664596557617, 0.02231603240966797, 0.022222848892211915, 0.02227712059020996, 0.022153215408325197, 0.0222423038482666, 0.022016000747680665, 0.021947391510009767, 0.02193715286254883, 0.021857280731201172, 0.02202009582519531, 0.02206924819946289, 0.02213478469848633, 0.022215679168701173, 0.022238208770751954, 0.022174720764160157, 0.021841920852661133, 0.0218603515625, 0.021960704803466798, 0.02187775993347168, 0.021925888061523437, 0.022146047592163084, 0.022157312393188477, 0.022024192810058595, 0.022137855529785155, 0.022174720764160157, 0.022193151473999022, 0.022230016708374024, 0.021902336120605468, 0.022632448196411133, 0.022822912216186524, 0.022992895126342772, 0.02285158348083496, 0.022832128524780275, 0.02226585578918457, 0.02227507209777832, 0.02230681610107422, 0.02229350471496582, 0.022185983657836913, 0.02221670341491699, 0.022245376586914063, 0.0222873592376709, 0.022146047592163084, 0.02225049591064453, 0.023378944396972655, 0.02225049591064453, 0.02229964828491211, 0.0221265926361084, 0.02208153533935547, 0.022145023345947267, 0.022219776153564453, 0.02189004707336426, 0.022042623519897463, 0.0221214714050293, 0.022199296951293947, 0.022129663467407225, 0.022203392028808593, 0.022202367782592772, 
0.022172672271728516, 0.021999616622924805, 0.022206464767456056, 0.022397951126098634, 0.022212608337402344, 0.022181888580322266, 0.022162431716918944, 0.022313983917236328, 0.02249113655090332, 0.022239231109619142, 0.022197248458862305, 0.021981184005737304, 0.022206464767456056, 0.02187775993347168, 0.021935104370117187, 0.02223308753967285, 0.023051263809204102, 0.02264473533630371, 0.02225663948059082, 0.022352895736694335, 0.022206464767456056, 0.022189056396484375, 0.022189056396484375, 0.022372352600097657, 0.021901311874389647, 0.021980159759521483, 0.021969919204711915, 0.022211584091186523, 0.022026239395141603, 0.022163455963134765, 0.02228121566772461, 0.02248192024230957, 0.022418432235717774, 0.022367231369018553, 0.022076416015625, 0.02225049591064453, 0.02226585578918457, 0.02209791946411133, 0.02210201644897461, 0.022202367782592772, 0.022180864334106445, 0.021906431198120118, 0.02201190376281738, 0.02225766372680664, 0.022140928268432617, 0.02229555130004883, 0.022808576583862306, 0.023146495819091797, 0.02234982490539551, 0.02268671989440918, 0.022199296951293947, 0.022175743103027345, 0.022143999099731446, 0.022185983657836913, 0.022166528701782227, 0.022116352081298828, 0.02287513542175293, 0.022218751907348632, 0.02215116882324219, 0.022156288146972656, 0.0218787841796875, 0.022177791595458983, 0.022195199966430663, 0.022197248458862305, 0.02211840057373047, 0.022174720764160157, 0.022204416275024414, 0.022244352340698242, 0.02187775993347168, 0.021955583572387697, 0.022195199966430663, 0.022165504455566407, 0.02229350471496582, 0.024179712295532226, 0.023348224639892577, 0.022595584869384764, 0.02225049591064453, 0.022173696517944336, 0.022183935165405275, 0.022183935165405275, 0.022168575286865236, 0.022191104888916017, 0.022164480209350586, 0.022064128875732423, 0.0220446720123291, 0.02211840057373047, 0.02227916717529297, 0.022158336639404298, 0.022501375198364256, 0.021916671752929686, 0.02190028762817383, 0.02183782386779785, 0.022189056396484375, 0.022240255355834963, 0.02208460807800293, 0.022197248458862305, 0.022206464767456056, 0.022245376586914063, 0.022206464767456056, 0.021940223693847655, 0.022146047592163084, 0.02265497589111328, 0.022195199966430663, 0.02226790428161621, 0.02227097511291504, 0.022108160018920898, 0.02189107131958008, 0.021960704803466798, 0.022194175720214843, 0.02227609634399414, 0.022136831283569337, 0.022361087799072265, 0.022987775802612305, 0.022197248458862305, 0.022147071838378905, 0.022178815841674804, 0.022202367782592772, 0.022164480209350586, 0.022054912567138672, 0.021813247680664064, 0.021711872100830077, 0.022171648025512695, 0.02204876708984375, 0.021843967437744142, 0.021772287368774415, 0.02190745544433594, 0.021827583312988282, 0.02188287925720215, 0.022148096084594726, 0.022129663467407225, 0.022139904022216796, 0.0220897274017334, 0.022120447158813478, 0.02210918426513672, 0.02229043197631836, 0.022239231109619142, 0.022033407211303712, 0.022175743103027345, 0.022553600311279298, 0.02210099220275879, 0.02213580894470215, 0.022014976501464844, 0.02167193603515625, 0.021850112915039063, 0.02208051109313965, 0.022188032150268554, 0.022114303588867186, 0.022197248458862305, 0.022031360626220704, 0.021992448806762696, 0.022072320938110353, 0.022125568389892578, 0.022139904022216796, 0.022183935165405275, 0.021984256744384766, 0.02208665657043457, 0.022146047592163084, 0.022112255096435548, 0.022154239654541014, 0.022114303588867186, 0.022181888580322266, 0.02243071937561035, 0.023345151901245118, 
0.023888896942138672, 0.023027711868286133, 0.022526975631713866, 0.022203392028808593, 0.02268057632446289, 0.022232063293457033, 0.022244352340698242, 0.02226585578918457, 0.022128639221191407, 0.02208768081665039, 0.02210304069519043, 0.022166528701782227, 0.02249318313598633, 0.02220134353637695, 0.022154239654541014, 0.022137855529785155, 0.02190336036682129, 0.022199296951293947, 0.022046720504760742, 0.022174720764160157, 0.022152191162109376, 0.022163455963134765, 0.022190080642700196, 0.02185113525390625, 0.02205183982849121, 0.022207487106323243, 0.022164480209350586, 0.02210406494140625, 0.021921791076660157, 0.022166528701782227, 0.022213632583618165, 0.02186956787109375, 0.022145023345947267, 0.022158336639404298, 0.022189056396484375, 0.02222489547729492, 0.0218603515625, 0.021997568130493163, 0.02205695915222168, 0.022278144836425783, 0.022230016708374024, 0.021898239135742188, 0.02190540885925293, 0.022171648025512695, 0.022076416015625, 0.022289407730102538, 0.02251571273803711, 0.021949440002441405, 0.022091775894165038, 0.022344703674316405, 0.02205081558227539, 0.02242252731323242, 0.022846464157104493, 0.022445056915283205, 0.022334463119506837, 0.02267033576965332, 0.02229248046875, 0.0220579833984375, 0.02266111946105957, 0.022502399444580077, 0.02227712059020996, 0.02226483154296875, 0.022034431457519533, 0.02231091117858887, 0.02224844741821289, 0.02247372817993164, 0.022631423950195313, 0.023051263809204102, 0.022895616531372072, 0.02266111946105957, 0.02234880065917969, 0.022345727920532226, 0.022334463119506837, 0.022411264419555665, 0.022369279861450195]",tokens/s,44.9517894894453,,,1,64,1,,, -4bit-awq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,01-ai/Yi-34B,01-ai/Yi-34B,cuda,0,42,,,True,,,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,llama,MB,17655.844864,22207.266816,0.0,21577.596928,20724.970496,s,1,18.662205078125,18.662205078125,0.0,18.662205078125,18.662205078125,18.662205078125,18.662205078125,[18.662205078125],,kWh,0.00013564120059721935,7.432745758087195e-05,0.00029975190646801264,0.0005097205646461039,,MB,1739.517952,22286.958592,0.0,21640.511488,19428.81536,s,10,63.18457470703126,6.318457470703126,0.0005070138349320449,6.318324951171875,6.3191629394531255,6.319186450195312,6.319205258789062,"[6.31877880859375, 6.31862646484375, 6.31891064453125, 6.31915771484375, 6.3192099609375, 6.3180224609375, 6.317828125, 6.31800146484375, 6.3180234375, 6.318015625]",tokens/s,40.5162179514539,kWh,7.462592088659696e-05,4.089996506675735e-05,0.0004144884149237998,0.0005300143008771542,tokens/kWh,483005.83508092025,MB,1741.74208,22314.221568,0.0,21665.677312,19428.81792,s,10,33.116486328125,3.3116486328125,0.0007012592637373438,3.3116743164062497,3.3123858154296877,3.3125937866210937,3.312760163574219,"[3.3128017578125, 3.311986572265625, 3.311605712890625, 3.311389892578125, 3.312240966796875, 3.310526123046875, 3.311742919921875, 3.3112900390625, 3.312339599609375, 
3.310562744140625]",tokens/s,19.023757344237236,kWh,3.910318657916715e-05,2.1429171257292508e-05,0.0001864661213949987,0.00024699847923145835,tokens/kWh,255062.29915271545,,s,630,33.11393585586544,0.05256180294581822,0.00018374192333080349,0.05256038475036621,0.052775936126708986,0.05284201946258545,0.05314688911437988,"[0.053318656921386716, 0.052359169006347656, 0.05238681411743164, 0.05237247848510742, 0.05244825744628906, 0.05231923294067383, 0.05250867080688477, 0.05233868789672851, 0.05236019134521484, 0.05232844924926758, 0.052364288330078126, 0.05240217590332031, 0.05243084716796875, 0.052501502990722655, 0.052424705505371094, 0.05242675018310547, 0.05236940765380859, 0.052370433807373044, 0.05240422439575195, 0.05255782318115235, 0.052424705505371094, 0.05242777633666992, 0.05243084716796875, 0.05247385787963867, 0.05246361541748047, 0.052468734741210936, 0.05247488021850586, 0.052462593078613284, 0.05246156692504883, 0.05242572784423828, 0.05252403259277344, 0.0525404167175293, 0.05261209487915039, 0.05258342361450195, 0.052574207305908206, 0.05260287857055664, 0.05263872146606445, 0.05260287857055664, 0.05266739273071289, 0.05272371292114258, 0.052604927062988284, 0.052651008605957034, 0.05265817642211914, 0.05269913482666016, 0.052724735260009765, 0.05268479919433594, 0.05262847900390625, 0.05272883224487305, 0.05263564682006836, 0.0527421760559082, 0.05268067169189453, 0.052790271759033204, 0.05265919876098633, 0.05267967987060547, 0.05267660903930664, 0.05319475173950195, 0.052789249420166016, 0.05279334259033203, 0.052893695831298826, 0.052806655883789064, 0.052746238708496096, 0.05279948806762695, 0.05270528030395508, 0.053101566314697264, 0.052452350616455076, 0.05227724838256836, 0.052416545867919925, 0.05230486297607422, 0.05240217590332031, 0.05233561706542969, 0.052321281433105465, 0.05229260635375976, 0.052365310668945314, 0.05231513595581055, 0.05240524673461914, 0.0526376953125, 0.05254553604125976, 0.052357120513916014, 0.052490238189697266, 0.052324352264404295, 0.05244825744628906, 0.05247078323364258, 0.05246976089477539, 0.05234688186645508, 0.05243596649169922, 0.0523581428527832, 0.052478977203369144, 0.052408321380615235, 0.05246566390991211, 0.05235609436035156, 0.05258649444580078, 0.052375553131103515, 0.052479999542236325, 0.05238784027099609, 0.05260083389282227, 0.05268172836303711, 0.052670463562011716, 0.0525219841003418, 0.052675582885742187, 0.05255680084228516, 0.05267148971557617, 0.05254143905639649, 0.05272678375244141, 0.05251379013061523, 0.05267660903930664, 0.05259468841552734, 0.05270528030395508, 0.05260083389282227, 0.052721664428710936, 0.05260595321655273, 0.05275033569335937, 0.052572158813476565, 0.05272883224487305, 0.05264486312866211, 0.05291622543334961, 0.05271244812011719, 0.052738048553466796, 0.052736000061035154, 0.05289779281616211, 0.052789249420166016, 0.05283942413330078, 0.05266841506958008, 0.05284864044189453, 0.0526295051574707, 0.05281689453125, 0.052954113006591794, 0.053231616973876954, 0.05236940765380859, 0.052367359161376956, 0.052348926544189454, 0.052337665557861325, 0.052468734741210936, 0.05255372619628906, 0.05243494415283203, 0.0523612174987793, 0.05245951843261719, 0.052457473754882813, 0.05230899047851562, 0.05233152008056641, 0.05236940765380859, 0.05238579177856445, 0.052397056579589846, 0.052400127410888675, 0.052397056579589846, 0.05240323257446289, 0.052423648834228516, 0.05245542526245117, 0.052383743286132815, 0.052410369873046876, 0.05243801498413086, 0.052420639038085935, 0.0524984016418457, 
0.052552703857421876, 0.052490238189697266, 0.05249126434326172, 0.05246361541748047, 0.05245542526245117, 0.052523006439208986, 0.05260287857055664, 0.05263564682006836, 0.05262540817260742, 0.05255782318115235, 0.05258137512207031, 0.052550655364990234, 0.05257011032104492, 0.05265203094482422, 0.052647937774658204, 0.052654079437255856, 0.052596736907958984, 0.052757503509521485, 0.05271142578125, 0.05298483276367188, 0.05268070220947266, 0.052706302642822264, 0.052636672973632816, 0.0526561279296875, 0.05264486312866211, 0.05273395156860351, 0.05269401550292969, 0.05266534423828125, 0.05266739273071289, 0.052752384185791014, 0.05266022491455078, 0.05271551895141602, 0.052754432678222656, 0.05278003311157226, 0.05269606399536133, 0.05269708633422852, 0.05269708633422852, 0.0531486701965332, 0.05237356948852539, 0.052281280517578126, 0.05241548919677735, 0.05224857711791992, 0.052311038970947264, 0.052294654846191405, 0.05231513595581055, 0.05226291275024414, 0.052408321380615235, 0.05231923294067383, 0.05241856002807617, 0.05229363250732422, 0.05247488021850586, 0.052329471588134766, 0.05246156692504883, 0.05231923294067383, 0.05242265701293945, 0.05232844924926758, 0.052808704376220705, 0.05237145614624023, 0.05252505493164063, 0.05236326217651367, 0.052482048034667966, 0.05235200119018555, 0.052457473754882813, 0.05235200119018555, 0.052534271240234375, 0.052421630859375, 0.052479999542236325, 0.05242367935180664, 0.05258137512207031, 0.05256499099731445, 0.05264281463623047, 0.05256499099731445, 0.05264896011352539, 0.052528129577636716, 0.05264384078979492, 0.052572158813476565, 0.05279129409790039, 0.052550655364990234, 0.052670463562011716, 0.05258649444580078, 0.052808704376220705, 0.0526192626953125, 0.05267865753173828, 0.052657150268554685, 0.052836353302001954, 0.05259468841552734, 0.052760574340820314, 0.05261520004272461, 0.052841438293457034, 0.05263052749633789, 0.052768768310546874, 0.052651008605957034, 0.05285068893432617, 0.05263359832763672, 0.052760574340820314, 0.05304217529296875, 0.05294387054443359, 0.052674560546875, 0.052789249420166016, 0.05263974380493164, 0.05321113586425781, 0.052350975036621096, 0.05237452697753906, 0.052408321380615235, 0.05235302352905274, 0.0523059196472168, 0.05231820678710938, 0.05230080032348633, 0.052343807220458984, 0.052294654846191405, 0.05239603042602539, 0.05233049774169922, 0.052375553131103515, 0.052498432159423826, 0.05244313430786133, 0.05244825744628906, 0.05239910507202149, 0.05244416046142578, 0.05246361541748047, 0.05244723129272461, 0.05257113647460938, 0.05242777633666992, 0.052544513702392576, 0.052468734741210936, 0.05244825744628906, 0.05241446304321289, 0.05249228668212891, 0.05244825744628906, 0.0524769287109375, 0.05242483139038086, 0.05241228866577148, 0.052485118865966796, 0.05300428771972656, 0.05266022491455078, 0.05259161758422851, 0.052563968658447265, 0.05267967987060547, 0.052580352783203124, 0.05267967987060547, 0.052653057098388675, 0.05262438583374023, 0.052603904724121096, 0.052634624481201174, 0.05267967987060547, 0.05271039962768555, 0.05266124725341797, 0.05262540817260742, 0.052716545104980465, 0.052721664428710936, 0.052673534393310545, 0.05264179229736328, 0.052729854583740236, 0.05284249496459961, 0.05266534423828125, 0.05281075286865235, 0.05281382369995117, 0.05276160049438477, 0.052744190216064454, 0.052775936126708986, 0.052744190216064454, 0.052732929229736325, 0.05283430480957031, 0.052703231811523435, 0.053125118255615236, 0.05235609436035156, 0.05224755096435547, 0.05237964630126953, 
0.05229568099975586, 0.05231718444824219, 0.05279743957519531, 0.052393985748291017, 0.0522977294921875, 0.05237145614624023, 0.052307968139648435, 0.05241548919677735, 0.05229260635375976, 0.05242777633666992, 0.052284446716308594, 0.052454368591308594, 0.052332542419433595, 0.05243904113769531, 0.05237145614624023, 0.05250252914428711, 0.052357120513916014, 0.05242572784423828, 0.05232537460327148, 0.052501502990722655, 0.052348926544189454, 0.052421630859375, 0.05245644760131836, 0.052615169525146485, 0.05240115356445312, 0.05247078323364258, 0.05237964630126953, 0.05269708633422852, 0.05260902404785156, 0.052596736907958984, 0.05252608108520508, 0.0526827507019043, 0.05257523345947265, 0.05259161758422851, 0.05254348754882812, 0.052689918518066405, 0.05254655838012695, 0.05267251205444336, 0.05257523345947265, 0.05276671981811523, 0.052585472106933595, 0.05270732879638672, 0.052674560546875, 0.052760574340820314, 0.05257727813720703, 0.05271244812011719, 0.05261414337158203, 0.052789249420166016, 0.052613121032714844, 0.05270016098022461, 0.052636672973632816, 0.05279948806762695, 0.0526376953125, 0.0527534065246582, 0.05264179229736328, 0.05284044647216797, 0.05264384078979492, 0.0527564811706543, 0.052654079437255856, 0.05320908737182617, 0.05231820678710938, 0.05234790420532227, 0.05252505493164063, 0.052411392211914064, 0.052342784881591796, 0.05237247848510742, 0.05235507202148437, 0.052327423095703124, 0.052383743286132815, 0.0524400634765625, 0.052357120513916014, 0.05236633682250977, 0.052383743286132815, 0.052367359161376956, 0.05246771240234375, 0.052446208953857425, 0.052457473754882813, 0.05239295959472656, 0.052450302124023435, 0.05248409652709961, 0.05241856002807617, 0.05240422439575195, 0.05244211196899414, 0.05241958236694336, 0.05244927978515625, 0.05245644760131836, 0.052496383666992184, 0.05246054458618164, 0.052534271240234375, 0.05243699264526367, 0.05253222274780273, 0.05258342361450195, 0.0526110725402832, 0.05276364898681641, 0.05257523345947265, 0.05258342361450195, 0.052563968658447265, 0.052574207305908206, 0.05300223922729492, 0.052613121032714844, 0.05260287857055664, 0.052621311187744144, 0.05269504165649414, 0.05265919876098633, 0.05269504165649414, 0.052664321899414064, 0.052691967010498046, 0.0526295051574707, 0.05266739273071289, 0.05263257598876953, 0.052749313354492185, 0.05269606399536133, 0.052708351135253906, 0.052689918518066405, 0.052762622833251956, 0.052691967010498046, 0.052729854583740236, 0.052739070892333983, 0.05288243103027344, 0.052689918518066405, 0.052741119384765625, 0.05272780990600586, 0.05307699203491211, 0.05239910507202149, 0.05230284881591797, 0.05248102569580078, 0.052284481048583985, 0.05235193634033203, 0.05234790420532227, 0.05234175872802734, 0.0522608642578125, 0.05234483337402344, 0.052498432159423826, 0.05247180938720703, 0.05226803207397461, 0.05278105545043945, 0.052340736389160154, 0.052528129577636716, 0.05234688186645508, 0.05242675018310547, 0.05234688186645508, 0.05247078323364258, 0.052378623962402344, 0.05252096176147461, 0.05242777633666992, 0.05248614501953125, 0.05235302352905274, 0.052452350616455076, 0.05236326217651367, 0.05250559997558594, 0.052373504638671874, 0.05247180938720703, 0.05238579177856445, 0.05260595321655273, 0.052542465209960934, 0.052744190216064454, 0.05256192016601562, 0.052651008605957034, 0.052534271240234375, 0.052651008605957034, 0.05263257598876953, 0.05267763137817383, 0.052531200408935545, 0.05268787384033203, 0.05255987167358398, 0.052729854583740236, 0.05260902404785156, 
0.05268479919433594, 0.05257932662963867, 0.0527718391418457, 0.05258444976806641, 0.05271039962768555, 0.05263872146606445, 0.05278515243530273, 0.052613121032714844, 0.05285580825805664, 0.05262540817260742, 0.052789249420166016, 0.05266329574584961, 0.05278310394287109, 0.05266841506958008, 0.052847614288330076, 0.05269811248779297, 0.052853759765625, 0.052759552001953126, 0.05322137451171875, 0.05229056167602539, 0.052343807220458984, 0.05233152008056641, 0.05230489730834961, 0.05229056167602539, 0.052311038970947264, 0.05245542526245117, 0.052380672454833986, 0.05231923294067383, 0.05248614501953125, 0.052348926544189454, 0.05232844924926758, 0.05234483337402344, 0.052367359161376956, 0.05241548919677735, 0.052370433807373044, 0.05252403259277344, 0.05243596649169922, 0.052446208953857425, 0.05244927978515625, 0.05242879867553711, 0.05240524673461914, 0.05243904113769531, 0.05243392181396484, 0.0524031982421875, 0.052945919036865234, 0.05262335968017578, 0.05248716735839844, 0.05245849609375, 0.052485118865966796, 0.05249126434326172, 0.05259571075439453, 0.05261619186401367, 0.05260902404785156, 0.052806655883789064, 0.052636672973632816, 0.052634624481201174, 0.05258444976806641, 0.05266022491455078, 0.05257113647460938, 0.05262438583374023, 0.05264588928222656, 0.05269094467163086, 0.05259980773925781, 0.05288857650756836, 0.05278515243530273, 0.05274726486206055, 0.052738048553466796, 0.05269401550292969, 0.05263257598876953, 0.052751361846923826, 0.05268479919433594, 0.052959232330322265, 0.052751361846923826, 0.05274009704589844, 0.05266841506958008, 0.052736000061035154, 0.052721664428710936, 0.052775936126708986, 0.052719615936279295, 0.05274214553833008, 0.05269401550292969, 0.05314252853393555, 0.05240627288818359, 0.05226393508911133, 0.05241958236694336, 0.05227110290527344, 0.05232230377197265, 0.05226291275024414, 0.05230080032348633, 0.05225267028808594, 0.052350975036621096, 0.05234790420532227, 0.052397056579589846, 0.05229056167602539, 0.052416511535644535, 0.05235609436035156, 0.05247590255737305, 0.05232025527954102, 0.05248921585083008, 0.052350975036621096, 0.052483070373535154, 0.05245337677001953, 0.05244927978515625, 0.052354049682617185, 0.052501502990722655, 0.05261721420288086, 0.05250457763671875, 0.052378623962402344, 0.05253017425537109, 0.05240729522705078, 0.05244313430786133, 0.052354049682617185, 0.05253836822509766, 0.05254143905639649, 0.05263872146606445, 0.052560897827148435, 0.05264486312866211, 0.052550655364990234, 0.05259571075439453, 0.0525035514831543, 0.05272678375244141, 0.05265510559082031, 0.05266739273071289, 0.052569087982177735, 0.052754432678222656, 0.05259366226196289, 0.052739070892333983, 0.052569087982177735, 0.0527534065246582, 0.05255372619628906, 0.052708351135253906, 0.05256806564331055, 0.05280767822265625, 0.05261827087402344, 0.05273904037475586, 0.052596736907958984, 0.05295206451416016, 0.05267865753173828, 0.052768768310546874, 0.05266944122314453, 0.053013504028320314, 0.05265510559082031, 0.05278412628173828, 0.052673534393310545]",tokens/s,19.025222575238153,,,1,64,1,,, 
-4bit-awq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,EleutherAI/gpt-neo-1.3B,EleutherAI/gpt-neo-1.3B,cuda,0,42,,,True,,,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run - report = scenario.run(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run - self.run_model_loading_tracking(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking - backend.load() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load - self.load_transformers_model() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model - self.load_transformers_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights - self.load_transformers_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained - self.pretrained_model = self.automodel_loader.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3826, in from_pretrained - config = cls._autoset_attn_implementation( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1565, in _autoset_attn_implementation - config = cls._check_and_enable_sdpa( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1731, in _check_and_enable_sdpa - raise ValueError( -ValueError: GPTNeoForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. 
Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` - -",gpt_neo,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,1,64,1,,, -4bit-awq-exllama-v1-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,tiiuae/falcon-7b,tiiuae/falcon-7b,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - self.load_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 559, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3704, in from_pretrained - config = cls._autoset_attn_implementation( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1490, in _autoset_attn_implementation - config = cls._check_and_enable_sdpa( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1656, in _check_and_enable_sdpa - raise ValueError( -ValueError: FalconForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. 
Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,1,64,1,,, -4bit-awq-exllama-v1-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,huggyllama/llama-65b,huggyllama/llama-65b,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - self.load_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3904, in from_pretrained - dispatch_model(model, **device_map_kwargs) - File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 489, in dispatch_model - model.to(device) - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 2796, in to - return super().to(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1173, in to - return self._apply(convert) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 779, in _apply - module._apply(fn) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 779, in _apply - module._apply(fn) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 779, in _apply - module._apply(fn) - [Previous line repeated 2 more times] - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 853, in _apply - self._buffers[key] = fn(buf) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1159, in convert - return t.to( -torch.cuda.OutOfMemoryError: CUDA out of memory. Tried to allocate 86.00 MiB. 
GPU - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,1,64,1,,, -4bit-awq-exllama-v1-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,1,1,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 304, in hf_raise_for_status - response.raise_for_status() - File ""/usr/local/lib/python3.10/dist-packages/requests/models.py"", line 1024, in raise_for_status - raise HTTPError(http_error_msg, response=self) -requests.exceptions.HTTPError: 404 Client Error: Not Found for url: https://huggingface.co/1/resolve/main/config.json - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1221, in hf_hub_download - return _hf_hub_download_to_cache_dir( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1325, in _hf_hub_download_to_cache_dir - _raise_on_head_call_error(head_call_error, force_download, local_files_only) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1823, in _raise_on_head_call_error - raise head_call_error - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1722, in _get_metadata_or_catch_error - metadata = get_hf_file_metadata(url=url, proxies=proxies, timeout=etag_timeout, headers=headers) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1645, in get_hf_file_metadata - r = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 372, in _request_wrapper - response = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 396, in _request_wrapper - hf_raise_for_status(response) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status - raise RepositoryNotFoundError(message, 
response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-669495c2-2020169116cf5c8a72628056;0d20cce6-94d9-4d7a-b675-b3e90e52a5bc) - -Repository Not Found for url: https://huggingface.co/1/resolve/main/config.json. -Please make sure you specified the correct `repo_id` and `repo_type`. -If you are trying to access a private or gated repo, make sure you are authenticated. - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 425, in cached_file - raise EnvironmentError( -OSError: 1 is not a local folder and is not a valid model identifier listed on 'https://huggingface.co/models' -If this is a private repository, make sure to pass a token having permission to this repo either by logging in with `huggingface-cli login` or by passing `token=` - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,1,64,1,,, -4bit-awq-exllama-v1-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,internlm/internlm-20b,internlm/internlm-20b,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most 
recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - self.load_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 559, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3710, in from_pretrained - model = cls(config, *model_args, **model_kwargs) - File ""/root/.cache/huggingface/modules/transformers_modules/internlm/internlm-20b/80729bcf52fbc4553d965926b27304ac5e156d98/modeling_internlm.py"", line 906, in __init__ - self.model = InternLMModel(config) - File ""/root/.cache/huggingface/modules/transformers_modules/internlm/internlm-20b/80729bcf52fbc4553d965926b27304ac5e156d98/modeling_internlm.py"", line 729, in __init__ - self.layers = nn.ModuleList([InternLMDecoderLayer(config) for _ in range(config.num_hidden_layers)]) - File ""/root/.cache/huggingface/modules/transformers_modules/internlm/internlm-20b/80729bcf52fbc4553d965926b27304ac5e156d98/modeling_internlm.py"", line 729, in - self.layers = nn.ModuleList([InternLMDecoderLayer(config) for _ in range(config.num_hidden_layers)]) - File ""/root/.cache/huggingface/modules/transformers_modules/internlm/internlm-20b/80729bcf52fbc4553d965926b27304ac5e156d98/modeling_internlm.py"", line 545, in __init__ - self.self_attn = INTERNLM_ATTENTION_CLASSES[config.attn_implementation](config=config) -KeyError: 'sdpa' - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,1,64,1,,, -4bit-awq-exllama-v1-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,internlm/internlm2-20b,internlm/internlm2-20b,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File 
""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - self.load_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 559, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3907, in from_pretrained - hf_quantizer.postprocess_model(model) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/base.py"", line 195, in postprocess_model - return self._process_model_after_weight_loading(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/quantizer_awq.py"", line 107, in _process_model_after_weight_loading - model = post_init_awq_exllama_modules(model, self.quantization_config.exllama_config) - File ""/usr/local/lib/python3.10/dist-packages/transformers/integrations/awq.py"", line 462, in post_init_awq_exllama_modules - model = exllama_post_init(model) - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllama.py"", line 144, in exllama_post_init - submodule.post_init() - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllama.py"", line 77, in post_init - self.q4 = exl_ext.make_q4( -NameError: name 'exl_ext' is not defined - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,1,64,1,,, -4bit-awq-exllama-v1-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,facebook/opt-30b,facebook/opt-30b,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - 
self.load_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3704, in from_pretrained - config = cls._autoset_attn_implementation( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1490, in _autoset_attn_implementation - config = cls._check_and_enable_sdpa( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1656, in _check_and_enable_sdpa - raise ValueError( -ValueError: OPTForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,1,64,1,,, -4bit-awq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,Qwen/Qwen1.5-MoE-A2.7B,Qwen/Qwen1.5-MoE-A2.7B,cuda,0,42,,,True,,,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run - report = scenario.run(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run - self.run_model_loading_tracking(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking - backend.load() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load - self.load_transformers_model() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model - self.load_transformers_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights - 
self.load_transformers_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained - self.pretrained_model = self.automodel_loader.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4037, in from_pretrained - hf_quantizer.postprocess_model(model) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/base.py"", line 195, in postprocess_model - return self._process_model_after_weight_loading(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/quantizer_awq.py"", line 107, in _process_model_after_weight_loading - model = post_init_awq_exllama_modules(model, self.quantization_config.exllama_config) - File ""/usr/local/lib/python3.10/dist-packages/transformers/integrations/awq.py"", line 462, in post_init_awq_exllama_modules - model = exllama_post_init(model) - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllama.py"", line 146, in exllama_post_init - submodule.post_init() - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllama.py"", line 79, in post_init - self.q4 = exl_ext.make_q4( -RuntimeError: scales and qweight have incompatible shapes - -",qwen2_moe,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,1,64,1,,, -4bit-awq-exllama-v1-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,mistralai/Mixtral-8x7B-v0.1,mistralai/Mixtral-8x7B-v0.1,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - self.load_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3904, in from_pretrained - dispatch_model(model, **device_map_kwargs) - File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 489, in dispatch_model - model.to(device) - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 2796, in to - return super().to(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1173, in to - return self._apply(convert) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 779, in _apply - module._apply(fn) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 779, in _apply - module._apply(fn) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 779, in _apply - module._apply(fn) - [Previous line repeated 2 more times] - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 853, in _apply - self._buffers[key] = fn(buf) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1159, in convert - return t.to( -torch.cuda.OutOfMemoryError: CUDA out of memory. Tried to allocate 20.00 MiB. GPU - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,1,64,1,,, -4bit-awq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,EleutherAI/pythia-410m,EleutherAI/pythia-410m,cuda,0,42,,,True,,,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,gpt_neox,MB,1050.963968,1200.095232,0.0,570.425344,525.840896,s,1,7.8153359375,7.8153359375,0.0,7.8153359375,7.8153359375,7.8153359375,7.8153359375,[7.8153359375],,kWh,9.59897124513797e-06,5.216216186088791e-06,1.3173343871963894e-05,2.7988531303190656e-05,,MB,1465.233408,1244.135424,0.0,597.68832,584.940544,s,10,0.6404292144775391,0.0640429214477539,4.699651253976224e-05,0.06404201507568359,0.06408109283447266,0.06411827468872071,0.06414802017211914,"[0.06397356796264649, 0.06401286315917969, 0.06403488159179688, 0.06407283020019532, 0.0639936637878418, 0.06404914855957031, 0.06403273773193359, 0.06415545654296875, 0.06404927825927734, 0.06405478668212891]",tokens/s,3997.3192073825726,kWh,7.56385234810665e-07,4.1446098326870817e-07,4.255594345732645e-06,5.4264405638120176e-06,tokens/kWh,47176412.7865362,MB,1489.895424,1258.815488,0.0,612.368384,597.290496,s,10,10.441157958984375,1.0441157958984375,0.007854056301673776,1.0425192260742189,1.0562167114257812,1.057537432861328,1.0585940100097655,"[1.0495250244140626, 1.0559232177734375, 1.058858154296875, 1.0444755859375, 1.0345133056640625, 1.0413861083984375, 1.04365234375, 1.0377904052734375, 1.0388853759765626, 
1.0361484375]",tokens/s,60.338135145048696,kWh,1.2189291030398622e-05,6.679291741476001e-06,2.159389855606452e-05,4.046248132793914e-05,tokens/kWh,1556997.9381491567,,s,630,10.436632549285887,0.016566083411564902,0.00030891862572666155,0.01644441604614258,0.017052876281738284,0.01711897611618042,0.01738478675842285,"[0.01621401596069336, 0.01637478446960449, 0.01637478446960449, 0.01641267204284668, 0.01619660758972168, 0.016286720275878908, 0.01640140724182129, 0.016368640899658202, 0.016285696029663087, 0.016296960830688476, 0.01636966323852539, 0.01641062355041504, 0.01640345573425293, 0.016309247970581055, 0.016290815353393554, 0.016343040466308592, 0.016541696548461913, 0.016940031051635742, 0.01700454330444336, 0.01698406410217285, 0.01701375961303711, 0.0170250244140625, 0.01700147247314453, 0.016897024154663084, 0.016837631225585938, 0.017044479370117188, 0.017068031311035157, 0.017052671432495118, 0.017076223373413087, 0.01697996711730957, 0.0170700798034668, 0.017115135192871094, 0.017055744171142577, 0.016931840896606445, 0.016390144348144533, 0.01639219284057617, 0.016515071868896485, 0.01639116859436035, 0.0163768310546875, 0.016511999130249023, 0.016448511123657226, 0.0164454402923584, 0.01640550422668457, 0.016433151245117187, 0.016385055541992186, 0.01646998405456543, 0.016356351852416993, 0.016390144348144533, 0.016454656600952147, 0.01640755271911621, 0.016389120101928712, 0.016467967987060548, 0.016387104034423828, 0.016807903289794923, 0.017164287567138673, 0.0170383358001709, 0.016965631484985352, 0.01700454330444336, 0.017099775314331055, 0.017054719924926756, 0.01716633605957031, 0.017105920791625977, 0.0166748161315918, 0.01784524726867676, 0.017128448486328125, 0.01699430465698242, 0.01704652786254883, 0.01702707290649414, 0.0170383358001709, 0.01699225616455078, 0.01696460723876953, 0.016982015609741212, 0.01703628730773926, 0.01700864028930664, 0.017100799560546876, 0.017051647186279297, 0.017076223373413087, 0.017125375747680666, 0.017391616821289063, 0.017116159439086915, 0.016973823547363282, 0.017040384292602538, 0.01707318305969238, 0.017191904067993164, 0.01706188774108887, 0.017129472732543945, 0.017096704483032226, 0.017099775314331055, 0.01703731155395508, 0.0170383358001709, 0.017135616302490234, 0.017142784118652343, 0.01681510353088379, 0.01640755271911621, 0.016314367294311523, 0.016416767120361327, 0.01658060836791992, 0.016524288177490236, 0.01641164779663086, 0.0164270076751709, 0.01641472053527832, 0.016430080413818358, 0.016516096115112306, 0.016467967987060548, 0.01644339179992676, 0.016451583862304688, 0.016467967987060548, 0.016454656600952147, 0.01661747169494629, 0.016475135803222657, 0.01641164779663086, 0.016458751678466797, 0.01642905616760254, 0.016456703186035156, 0.016462848663330077, 0.01643724822998047, 0.01642905616760254, 0.01640652847290039, 0.01640243148803711, 0.01638502311706543, 0.016458751678466797, 0.016449535369873047, 0.016472063064575194, 0.016484352111816408, 0.016476160049438478, 0.016276479721069336, 0.016134143829345703, 0.016380928039550782, 0.016358432769775392, 0.016388063430786134, 0.01643212890625, 0.01643622398376465, 0.01638400077819824, 0.016449535369873047, 0.016348159790039063, 0.01638604736328125, 0.016288768768310546, 0.01639116859436035, 0.016351232528686522, 0.01638297653198242, 0.016373760223388673, 0.01639423942565918, 0.016449535369873047, 0.01639219284057617, 0.016352256774902343, 0.016339967727661133, 0.01640959930419922, 0.01639116859436035, 0.016450559616088867, 0.01638297653198242, 
0.01643110466003418, 0.016288768768310546, 0.016563199996948243, 0.0164454402923584, 0.018765823364257812, 0.01723904037475586, 0.017092607498168946, 0.017051647186279297, 0.01720319938659668, 0.017124351501464845, 0.017081344604492187, 0.01701785659790039, 0.017142784118652343, 0.01706188774108887, 0.01703628730773926, 0.0172042236328125, 0.01704550361633301, 0.017121280670166016, 0.01701171112060547, 0.017051647186279297, 0.01703628730773926, 0.017111040115356444, 0.017075199127197266, 0.016966655731201173, 0.017144832611083984, 0.01704960060119629, 0.017148927688598634, 0.017097728729248047, 0.017067007064819336, 0.01703731155395508, 0.01709984016418457, 0.017090496063232423, 0.017092607498168946, 0.017127424240112304, 0.017039360046386717, 0.017071104049682616, 0.01699737548828125, 0.01701478385925293, 0.017087488174438475, 0.016130048751831053, 0.016389120101928712, 0.016360448837280273, 0.016321535110473632, 0.016359424591064452, 0.016348159790039063, 0.016455680847167968, 0.01640140724182129, 0.01635327911376953, 0.016479232788085937, 0.016433151245117187, 0.016453632354736326, 0.016471040725708007, 0.016854015350341797, 0.016950271606445313, 0.016555007934570314, 0.016486400604248046, 0.017052671432495118, 0.016698368072509767, 0.01643110466003418, 0.01660108757019043, 0.01881907272338867, 0.017543167114257813, 0.017260543823242186, 0.016763904571533202, 0.016365568161010743, 0.016434175491333008, 0.016434175491333008, 0.016480255126953124, 0.01639628791809082, 0.016475135803222657, 0.016470016479492186, 0.016449535369873047, 0.016236543655395508, 0.016424959182739257, 0.01638707160949707, 0.016458751678466797, 0.016380928039550782, 0.01642393684387207, 0.016389120101928712, 0.01641472053527832, 0.01641881561279297, 0.01643622398376465, 0.01646080017089844, 0.016360448837280273, 0.0164454402923584, 0.016334848403930666, 0.01642291259765625, 0.016266239166259765, 0.016412704467773438, 0.016747488021850585, 0.016882688522338866, 0.017133567810058595, 0.017466367721557616, 0.016861183166503906, 0.016533504486083983, 0.016564224243164064, 0.016453632354736326, 0.0164270076751709, 0.01640550422668457, 0.01643929672241211, 0.016442367553710938, 0.016514047622680664, 0.016172063827514647, 0.016376800537109375, 0.01638604736328125, 0.01638400077819824, 0.016425983428955078, 0.01643212890625, 0.01639833641052246, 0.01644441604614258, 0.01641062355041504, 0.0164454402923584, 0.016747520446777343, 0.01708140754699707, 0.01634911918640137, 0.016389120101928712, 0.016397312164306642, 0.0164454402923584, 0.01640140724182129, 0.01640447998046875, 0.01641574478149414, 0.016282623291015624, 0.016474111557006836, 0.016390144348144533, 0.01638707160949707, 0.016338943481445312, 0.01635327911376953, 0.016372735977172852, 0.0164454402923584, 0.016309247970581055, 0.01642803192138672, 0.016434175491333008, 0.016453632354736326, 0.01642905616760254, 0.01639833641052246, 0.01643929672241211, 0.016470016479492186, 0.016476160049438478, 0.01638604736328125, 0.016466943740844727, 0.016381952285766603, 0.01640755271911621, 0.01641472053527832, 0.01636761665344238, 0.016291839599609375, 0.016360448837280273, 0.016496639251708984, 0.016446464538574217, 0.01641983985900879, 0.01633791923522949, 0.01639833641052246, 0.016359424591064452, 0.01639219284057617, 0.01655193519592285, 0.016520191192626953, 0.016455680847167968, 0.016424959182739257, 0.0164136962890625, 0.016316415786743164, 0.01639628791809082, 0.016249856948852538, 0.01634611129760742, 0.016330751419067382, 0.01638707160949707, 0.01636147117614746, 
0.01607372856140137, 0.01641472053527832, 0.016351232528686522, 0.01643519973754883, 0.016365631103515625, 0.01672697639465332, 0.016333824157714845, 0.01677414321899414, 0.0163768310546875, 0.016455680847167968, 0.01639833641052246, 0.01639628791809082, 0.016494592666625976, 0.01641574478149414, 0.016521215438842773, 0.016425983428955078, 0.01638096046447754, 0.016368608474731445, 0.01641062355041504, 0.01720425605773926, 0.017111007690429687, 0.016487424850463867, 0.016475135803222657, 0.016451583862304688, 0.016538623809814454, 0.016395296096801758, 0.016514015197753907, 0.016492544174194337, 0.01638604736328125, 0.0163768310546875, 0.01643212890625, 0.016527360916137695, 0.016504831314086914, 0.016499711990356446, 0.016528383255004882, 0.01641574478149414, 0.01639936065673828, 0.016372735977172852, 0.016480255126953124, 0.01656524848937988, 0.016473087310791015, 0.01640140724182129, 0.016492544174194337, 0.01642086410522461, 0.016417791366577148, 0.0170700798034668, 0.017368064880371094, 0.01701273536682129, 0.01683148765563965, 0.016730112075805666, 0.01658572769165039, 0.016540672302246092, 0.016487424850463867, 0.016476160049438478, 0.016531455993652345, 0.016441343307495117, 0.01643110466003418, 0.01652128028869629, 0.016437183380126952, 0.01641881561279297, 0.016553983688354493, 0.0164771842956543, 0.016537599563598633, 0.016722944259643553, 0.01644339179992676, 0.016542720794677734, 0.01644339179992676, 0.01637171173095703, 0.016744447708129884, 0.016373760223388673, 0.016445472717285158, 0.016960479736328124, 0.016934911727905275, 0.016561151504516602, 0.016465919494628906, 0.01641472053527832, 0.016755712509155272, 0.017357824325561523, 0.01820569610595703, 0.016924671173095703, 0.016611328125, 0.016442367553710938, 0.016365568161010743, 0.01641164779663086, 0.016453632354736326, 0.016434175491333008, 0.01641574478149414, 0.016759807586669923, 0.016484352111816408, 0.01637171173095703, 0.0164771842956543, 0.0164003849029541, 0.01639628791809082, 0.016561151504516602, 0.016373760223388673, 0.01661030387878418, 0.01722675132751465, 0.01657548713684082, 0.01636966323852539, 0.01646899223327637, 0.01639628791809082, 0.016451583862304688, 0.01641881561279297, 0.01645260810852051, 0.016434175491333008, 0.01642905616760254, 0.01656012725830078, 0.01637785530090332, 0.01660006332397461, 0.01640243148803711, 0.0163492488861084, 0.016755647659301758, 0.016518144607543944, 0.01637887954711914, 0.01637990379333496, 0.01637990379333496, 0.01636966323852539, 0.016494623184204103, 0.016522207260131837, 0.01641062355041504, 0.01646080017089844, 0.01640447998046875, 0.01658367919921875, 0.01683967971801758, 0.01641574478149414, 0.016522239685058594, 0.016966655731201173, 0.016545791625976563, 0.01625497627258301, 0.01640652847290039, 0.016344064712524413, 0.017336320877075196, 0.016785408020019533, 0.0166430721282959, 0.016502784729003905, 0.016323583602905273, 0.016430080413818358, 0.016448511123657226, 0.016461824417114256, 0.016390144348144533, 0.016395263671875, 0.016385055541992186, 0.01644745635986328, 0.016351232528686522, 0.016578559875488282, 0.016451583862304688, 0.016500736236572267, 0.016373760223388673, 0.01638502311706543, 0.01639423942565918, 0.01640345573425293, 0.016252927780151367, 0.016510976791381835, 0.016395263671875, 0.01646899223327637, 0.01640140724182129, 0.016417791366577148, 0.01639833641052246, 0.016381952285766603, 0.016397312164306642, 0.01640755271911621, 0.016479232788085937, 0.016425983428955078, 0.016652288436889647, 0.01646080017089844, 0.016265216827392577, 
0.016101375579833984, 0.016471040725708007, 0.01641574478149414, 0.01639321517944336, 0.01640755271911621, 0.01640652847290039, 0.0164454402923584, 0.016702463150024414, 0.01642803192138672, 0.016455680847167968, 0.01641164779663086, 0.01637887954711914, 0.01641574478149414, 0.017096704483032226, 0.016562175750732423, 0.016333824157714845, 0.016472063064575194, 0.01643929672241211, 0.016433151245117187, 0.016380928039550782, 0.016441343307495117, 0.0164771842956543, 0.016450559616088867, 0.01639936065673828, 0.016441343307495117, 0.01644339179992676, 0.016283647537231445, 0.016348159790039063, 0.01640140724182129, 0.016458751678466797, 0.01638707160949707, 0.016364543914794923, 0.01637171173095703, 0.01639116859436035, 0.016381952285766603, 0.01644339179992676, 0.016373760223388673, 0.016420896530151368, 0.016515039443969728, 0.01641472053527832, 0.01637785530090332, 0.01638809585571289, 0.01620070457458496, 0.016528383255004882, 0.016473087310791015, 0.016926719665527345, 0.017327104568481445, 0.016678911209106445, 0.016492544174194337, 0.016496639251708984, 0.016508928298950197, 0.016474111557006836, 0.016465919494628906, 0.01640959930419922, 0.016494592666625976, 0.016536575317382812, 0.01642393684387207, 0.016258047103881835, 0.016357376098632814, 0.01637171173095703, 0.016455680847167968, 0.016351232528686522, 0.016381952285766603, 0.016408575057983397, 0.0164003849029541, 0.016475135803222657, 0.016538623809814454, 0.016515071868896485, 0.01675775909423828, 0.0166246395111084, 0.01679155158996582, 0.01700556755065918, 0.016380928039550782, 0.016570367813110352, 0.01657651138305664, 0.01658060836791992, 0.01641983985900879, 0.016481279373168945, 0.016395263671875, 0.016350208282470705, 0.01643622398376465, 0.01636761665344238, 0.01641267204284668, 0.01638096046447754, 0.0168703670501709, 0.016475135803222657, 0.01597644805908203, 0.016389120101928712, 0.016347135543823242, 0.01640345573425293, 0.01640755271911621, 0.016338943481445312, 0.0164454402923584, 0.01639219284057617, 0.016555007934570314, 0.01641164779663086, 0.016552959442138672, 0.01644441604614258, 0.016161792755126952, 0.016165887832641602, 0.016540672302246092, 0.016380928039550782, 0.01640652847290039, 0.01642393684387207, 0.01658470344543457, 0.01641574478149414, 0.016448511123657226, 0.016430080413818358, 0.0164003849029541, 0.01638915252685547, 0.016400352478027343, 0.016492544174194337, 0.016522239685058594, 0.016508928298950197, 0.016277503967285157, 0.01637478446960449, 0.016467967987060548, 0.016359424591064452, 0.01635327911376953, 0.016351232528686522, 0.016450559616088867, 0.016456703186035156, 0.016438272476196288, 0.016478208541870116, 0.01643519973754883, 0.016322559356689453, 0.016202751159667968, 0.01655705642700195, 0.01704652786254883, 0.016321535110473632, 0.016523263931274415, 0.016803871154785155, 0.01640649604797363, 0.016484352111816408, 0.016338943481445312, 0.01641574478149414, 0.01639628791809082, 0.0163768310546875, 0.01644441604614258, 0.01642188835144043, 0.016479232788085937, 0.01701171112060547, 0.016693248748779296, 0.016292863845825196, 0.0164003849029541, 0.01641164779663086, 0.016644096374511717, 0.016492544174194337, 0.016425983428955078]",tokens/s,60.364298256635166,,,1,64,1,,, 
-4bit-awq-exllama-v1-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,databricks/dbrx-base,databricks/dbrx-base,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 304, in hf_raise_for_status - response.raise_for_status() - File ""/usr/local/lib/python3.10/dist-packages/requests/models.py"", line 1024, in raise_for_status - raise HTTPError(http_error_msg, response=self) -requests.exceptions.HTTPError: 403 Client Error: Forbidden for url: https://huggingface.co/databricks/dbrx-base/resolve/main/config.json - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1722, in _get_metadata_or_catch_error - metadata = get_hf_file_metadata(url=url, proxies=proxies, timeout=etag_timeout, headers=headers) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1645, in get_hf_file_metadata - r = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 372, in _request_wrapper - response = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 396, in _request_wrapper - hf_raise_for_status(response) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 367, in hf_raise_for_status - raise HfHubHTTPError(message, response=response) from e -huggingface_hub.utils._errors.HfHubHTTPError: (Request ID: Root=1-66947b45-3ab3dc9f5668660522884602;84cef30c-c870-4aa7-80ff-9cad99d8710b) - -403 Forbidden: Please enable access to public gated repositories in your fine-grained token settings to view this repository.. -Cannot access content at: https://huggingface.co/databricks/dbrx-base/resolve/main/config.json. -If you are trying to create or update content,make sure you have a token with the `write` role. 
- -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1221, in hf_hub_download - return _hf_hub_download_to_cache_dir( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1325, in _hf_hub_download_to_cache_dir - _raise_on_head_call_error(head_call_error, force_download, local_files_only) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1826, in _raise_on_head_call_error - raise LocalEntryNotFoundError( -huggingface_hub.utils._errors.LocalEntryNotFoundError: An error happened while trying to locate the file on the Hub and we cannot find the requested files in the local cache. Please check your connection and try again or make sure your Internet connection is on. - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 445, in cached_file - raise EnvironmentError( -OSError: We couldn't connect to 'https://huggingface.co' to load this file, couldn't find it in the cached files and it looks like databricks/dbrx-base is not the path to a directory containing a file named config.json. -Checkout your internet connection or see how to run the library in offline mode at 'https://huggingface.co/docs/transformers/installation#offline-mode'. 
- -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,1,64,1,,, -4bit-awq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,Qwen/Qwen1.5-7B,Qwen/Qwen1.5-7B,cuda,0,42,,,True,,,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,qwen2,MB,4926.967808,8141.668352,0.0,7511.998464,6895.682048,s,1,10.867046875,10.867046875,0.0,10.867046875,10.867046875,10.867046875,10.867046875,[10.867046875],,kWh,4.8247963629847216e-05,2.642785997846372e-05,8.761145897795419e-05,0.00016228728258626512,,MB,2819.289088,8160.54272,0.0,7514.095616,6822.141952,s,10,12.318525634765624,1.2318525634765625,7.76981671989919e-05,1.2318380737304688,1.2319517944335936,1.2319716247558594,1.231987489013672,"[1.2317601318359375, 1.23178515625, 1.2318070068359375, 1.2319227294921875, 1.2317501220703124, 1.2318392333984376, 1.2318369140625, 1.2319473876953124, 1.231991455078125, 1.231885498046875]",tokens/s,207.81707778202852,kWh,1.4559177271318934e-05,7.977024661211319e-06,8.493170683420148e-05,0.00010746790876673173,tokens/kWh,2382106.4626433724,MB,2823.553024,8162.639872,0.0,7516.192768,6822.144512,s,10,17.474487915039063,1.7474487915039063,0.030906740065202627,1.757196044921875,1.7854700195312498,1.7869577880859375,1.7881480029296875,"[1.746919921875, 1.719139892578125, 1.71448486328125, 1.7055657958984376, 1.76747216796875, 1.785139404296875, 1.7678134765625, 1.788445556640625, 1.770366943359375, 1.709139892578125]",tokens/s,36.05255862506868,kWh,2.0367218195205824e-05,1.1164040399424446e-05,6.278635578459424e-05,9.43176143792245e-05,tokens/kWh,667955.8257982946,,s,630,17.472511980056776,0.027734146000090103,0.0009498411272494875,0.027973631858825683,0.028710707664489746,0.02889441270828247,0.02974271503448487,"[0.02773196792602539, 0.027825151443481445, 0.028519424438476562, 0.028457984924316407, 0.028459007263183594, 0.028460031509399415, 0.026728448867797853, 0.02855731201171875, 0.028440576553344726, 0.028820480346679687, 0.028445695877075194, 0.028235776901245117, 0.026583040237426758, 0.02669977569580078, 0.026746879577636717, 0.027033599853515625, 0.026863616943359377, 0.028613632202148437, 0.028626943588256838, 0.027918336868286132, 0.028598272323608398, 0.02753945541381836, 0.028516351699829103, 0.026826751708984374, 0.026784767150878908, 0.02654310417175293, 0.02977894401550293, 0.029030399322509767, 0.028622848510742187, 0.028425216674804688, 0.026454015731811522, 0.026334239959716798, 0.026725343704223633, 0.028493824005126952, 0.02691276741027832, 0.026735616683959962, 0.026592256546020508, 0.02754764747619629, 0.02850201606750488, 0.026596351623535155, 0.026719232559204102, 0.026727424621582032, 0.026686464309692383, 0.027662336349487306, 0.028299264907836914, 0.026615808486938477, 0.028089344024658205, 0.028018688201904295, 0.028285951614379884, 0.028067840576171874, 0.028331008911132813, 0.028081151962280275, 0.027926528930664062, 0.029570047378540038, 0.030993408203125, 0.02877337646484375, 0.028520448684692383, 0.026795007705688476, 0.02676633644104004, 0.026670080184936523, 
0.026412031173706055, 0.02668339157104492, 0.026702848434448243, 0.027620351791381836, 0.026953727722167968, 0.02735103988647461, 0.028556287765502928, 0.028396543502807618, 0.028531711578369142, 0.02673459243774414, 0.026670080184936523, 0.02840985679626465, 0.028492799758911135, 0.028709888458251953, 0.028406784057617186, 0.028124160766601562, 0.026650623321533205, 0.026570751190185548, 0.026624000549316407, 0.026392576217651367, 0.02656870460510254, 0.02678169631958008, 0.02693734359741211, 0.02652876853942871, 0.026693632125854492, 0.02630860710144043, 0.02692915153503418, 0.02671615982055664, 0.026417152404785156, 0.026432512283325195, 0.02634854316711426, 0.026672128677368165, 0.026798080444335938, 0.02649087905883789, 0.02889727973937988, 0.029467647552490234, 0.028593151092529297, 0.02874470329284668, 0.028222463607788087, 0.028387327194213868, 0.028520448684692383, 0.02837299156188965, 0.027386880874633788, 0.0285849609375, 0.027012096405029298, 0.028433408737182617, 0.028454912185668944, 0.026850303649902343, 0.026877952575683595, 0.026693632125854492, 0.02687385559082031, 0.02652876853942871, 0.026694656372070313, 0.026655744552612305, 0.026629119873046874, 0.026655744552612305, 0.026764287948608398, 0.027629568099975587, 0.026992639541625976, 0.026730495452880858, 0.02651955223083496, 0.026452991485595705, 0.026686464309692383, 0.02680012893676758, 0.026695680618286134, 0.026302463531494142, 0.027475967407226562, 0.02720358467102051, 0.02834022331237793, 0.028624895095825196, 0.028128255844116212, 0.02835968017578125, 0.028419071197509766, 0.028620800018310546, 0.026855424880981447, 0.028572671890258788, 0.028494848251342773, 0.026858495712280273, 0.02779545593261719, 0.026738687515258788, 0.02673766326904297, 0.026755071640014647, 0.026672128677368165, 0.026682367324829103, 0.026746879577636717, 0.028653568267822265, 0.02696601676940918, 0.027665407180786132, 0.028387327194213868, 0.02691276741027832, 0.02673356819152832, 0.026864639282226564, 0.026677248001098632, 0.026739744186401366, 0.026867679595947266, 0.026866687774658202, 0.02666803169250488, 0.026680320739746095, 0.026594303131103517, 0.026738687515258788, 0.026394624710083008, 0.026472448348999023, 0.026541055679321288, 0.02631270408630371, 0.026454015731811522, 0.026776575088500978, 0.026796031951904296, 0.026629119873046874, 0.026712064743041993, 0.026705919265747072, 0.02675916862487793, 0.026621952056884765, 0.026456064224243164, 0.026703872680664063, 0.026634239196777345, 0.027037696838378908, 0.028673023223876954, 0.02802694320678711, 0.026734527587890626, 0.026664960861206056, 0.026864639282226564, 0.026735616683959962, 0.026556415557861326, 0.026660863876342773, 0.027593727111816405, 0.02852556800842285, 0.02817638397216797, 0.02838937568664551, 0.02857676887512207, 0.027744255065917968, 0.026796031951904296, 0.02674892807006836, 0.02674995231628418, 0.02646428871154785, 0.026702816009521485, 0.026714111328125, 0.02658406448364258, 0.026703872680664063, 0.026833919525146483, 0.02692403221130371, 0.026702848434448243, 0.026657791137695314, 0.026728448867797853, 0.026599424362182617, 0.026604543685913085, 0.02653388786315918, 0.026673152923583986, 0.026702848434448243, 0.026764320373535155, 0.026679264068603516, 0.026632192611694337, 0.02668441581726074, 0.02678169631958008, 0.02675712013244629, 0.026688512802124024, 0.026619903564453123, 0.026604543685913085, 0.02674995231628418, 0.026678272247314453, 0.026649599075317384, 0.026588159561157225, 0.026857471466064452, 0.02671820831298828, 0.026640384674072266, 
0.027827199935913087, 0.02835968017578125, 0.028035072326660155, 0.028396543502807618, 0.028504064559936523, 0.028718080520629883, 0.02950655937194824, 0.02891366386413574, 0.02889625549316406, 0.02831155204772949, 0.02859519958496094, 0.02859110450744629, 0.026864639282226564, 0.026822656631469727, 0.02676633644104004, 0.026441728591918946, 0.026830848693847657, 0.026875904083251953, 0.026663936614990235, 0.02633318328857422, 0.026358783721923826, 0.026705919265747072, 0.02693120002746582, 0.02672537612915039, 0.02693222427368164, 0.026711040496826172, 0.026821632385253907, 0.026673152923583986, 0.02876006317138672, 0.0285614070892334, 0.027792383193969726, 0.028422143936157225, 0.027876352310180662, 0.02652672004699707, 0.027183103561401366, 0.028267520904541016, 0.028609535217285157, 0.026583040237426758, 0.026785791397094725, 0.02855014419555664, 0.0275599365234375, 0.026364927291870118, 0.02696499252319336, 0.02874060821533203, 0.02878873634338379, 0.028620800018310546, 0.028473344802856446, 0.02796031951904297, 0.026838016510009766, 0.027676671981811524, 0.028658687591552736, 0.029048831939697265, 0.028652544021606444, 0.028527616500854492, 0.02855731201171875, 0.028390399932861327, 0.028408832550048828, 0.028481536865234375, 0.028450815200805665, 0.029199359893798828, 0.028696575164794923, 0.028803071975708007, 0.02857164764404297, 0.02693222427368164, 0.028597248077392577, 0.028411903381347657, 0.027584512710571288, 0.028421119689941408, 0.02820403289794922, 0.026693632125854492, 0.028313600540161132, 0.028610559463500978, 0.02852454376220703, 0.02652876853942871, 0.027250688552856447, 0.02839449691772461, 0.0283504638671875, 0.028684288024902343, 0.028618751525878908, 0.02840985679626465, 0.027624448776245116, 0.027218944549560548, 0.028444671630859376, 0.028718080520629883, 0.027652095794677735, 0.02837196731567383, 0.027744255065917968, 0.026763263702392577, 0.026634239196777345, 0.028447744369506835, 0.02876825523376465, 0.02754150390625, 0.028624895095825196, 0.02859519958496094, 0.028506111145019532, 0.028448768615722656, 0.02856857681274414, 0.028434431076049805, 0.028475391387939454, 0.02857472038269043, 0.028395519256591797, 0.028478464126586913, 0.02857164764404297, 0.02853785514831543, 0.02855014419555664, 0.028438528060913085, 0.02860032081604004, 0.028297216415405273, 0.028696575164794923, 0.028648448944091798, 0.02740838432312012, 0.02855526351928711, 0.028527616500854492, 0.028613632202148437, 0.028298240661621094, 0.028506111145019532, 0.02870681571960449, 0.02879283142089844, 0.028417024612426758, 0.026866687774658202, 0.028641279220581056, 0.028480512619018555, 0.02770227241516113, 0.03075379180908203, 0.02915839958190918, 0.02897715187072754, 0.02857676887512207, 0.028645376205444335, 0.028503040313720703, 0.028082176208496092, 0.028705791473388673, 0.028309503555297853, 0.028506111145019532, 0.026859519958496093, 0.02830438423156738, 0.028052480697631835, 0.02837196731567383, 0.028438528060913085, 0.02758143997192383, 0.028422143936157225, 0.0267775993347168, 0.02857164764404297, 0.02850918388366699, 0.02892799949645996, 0.027571199417114257, 0.02654719924926758, 0.02856038475036621, 0.026400768280029296, 0.027652095794677735, 0.028633087158203126, 0.028539903640747072, 0.028466175079345703, 0.028656639099121094, 0.02838528060913086, 0.028079103469848633, 0.026847232818603517, 0.02676019287109375, 0.02627276802062988, 0.02768998336791992, 0.02856857681274414, 0.02675302314758301, 0.0267325439453125, 0.028539903640747072, 0.028535808563232422, 
0.028451839447021485, 0.028642303466796876, 0.027820032119750978, 0.02674278450012207, 0.026582015991210937, 0.028421119689941408, 0.02852454376220703, 0.028688383102416993, 0.02870681571960449, 0.028457984924316407, 0.028497919082641602, 0.026656768798828126, 0.028099584579467773, 0.028860416412353516, 0.02679091262817383, 0.026802175521850585, 0.028094463348388672, 0.02853068733215332, 0.028436479568481447, 0.028528640747070313, 0.028602367401123048, 0.026712064743041993, 0.02667519950866699, 0.027877376556396483, 0.029421567916870117, 0.029654016494750978, 0.028967935562133788, 0.0287457275390625, 0.02853068733215332, 0.027881471633911133, 0.02814259147644043, 0.028625919342041017, 0.028539903640747072, 0.028483583450317384, 0.02852249526977539, 0.0265850887298584, 0.026704896926879884, 0.0281343994140625, 0.028666879653930662, 0.028639232635498047, 0.029083648681640626, 0.02881331253051758, 0.028656639099121094, 0.028791807174682618, 0.028642303466796876, 0.029216768264770508, 0.026719232559204102, 0.027599872589111327, 0.02895564842224121, 0.028694528579711914, 0.026859519958496093, 0.02775449562072754, 0.02860339164733887, 0.027975679397583008, 0.026864639282226564, 0.027380735397338866, 0.0283371524810791, 0.028645376205444335, 0.028688383102416993, 0.028618751525878908, 0.028794879913330077, 0.028421119689941408, 0.028443679809570313, 0.02782204818725586, 0.028423168182373046, 0.02870783996582031, 0.028564479827880858, 0.028461055755615236, 0.028417024612426758, 0.028180479049682617, 0.028701696395874023, 0.02815692710876465, 0.02836275291442871, 0.02856550407409668, 0.028618751525878908, 0.028310527801513673, 0.02996633529663086, 0.02911129570007324, 0.03039948844909668, 0.03153919982910156, 0.02876927947998047, 0.026900480270385742, 0.028052480697631835, 0.028258304595947265, 0.026754047393798826, 0.027971584320068358, 0.02855731201171875, 0.02897203254699707, 0.028048383712768556, 0.028572671890258788, 0.028392448425292968, 0.028625919342041017, 0.028545024871826172, 0.028485631942749022, 0.02855526351928711, 0.026685440063476562, 0.027420671463012695, 0.028625919342041017, 0.02854092788696289, 0.028564479827880858, 0.028482559204101563, 0.02842624092102051, 0.026746879577636717, 0.02794393539428711, 0.02878156852722168, 0.02857062339782715, 0.02834739112854004, 0.02838118362426758, 0.02857164764404297, 0.02840575981140137, 0.02861568069458008, 0.028669952392578125, 0.02853273582458496, 0.027003904342651368, 0.02859110450744629, 0.028395519256591797, 0.03017728042602539, 0.028648448944091798, 0.029041664123535156, 0.026797056198120117, 0.027894784927368164, 0.028630016326904296, 0.028439552307128906, 0.02795212745666504, 0.028421119689941408, 0.028478464126586913, 0.026674175262451173, 0.02797772789001465, 0.028459039688110352, 0.02873852729797363, 0.026662912368774414, 0.02831974411010742, 0.028601343154907227, 0.028892160415649414, 0.027660287857055665, 0.028708864212036132, 0.028627967834472655, 0.029315071105957033, 0.02874880027770996, 0.02853068733215332, 0.026856447219848634, 0.027716608047485353, 0.028677120208740234, 0.02878361511230469, 0.02675814437866211, 0.028474367141723633, 0.0277708797454834, 0.026793983459472655, 0.026442752838134766, 0.026874879837036132, 0.027422719955444336, 0.028651519775390624, 0.027396127700805663, 0.028701663970947266, 0.028366847991943358, 0.026776575088500978, 0.0289300479888916, 0.02850201606750488, 0.026844160079956055, 0.028450815200805665, 0.02855731201171875, 0.027979776382446288, 0.0285665283203125, 0.028643327713012694, 
0.028619775772094725, 0.028700672149658202, 0.028725248336791992, 0.028468223571777345, 0.028826623916625976, 0.02877235221862793, 0.028456960678100586, 0.02877132797241211, 0.028483583450317384, 0.028831743240356447, 0.02672230339050293, 0.026802175521850585, 0.027064319610595702, 0.026858495712280273, 0.02673459243774414, 0.027296768188476563, 0.02656768035888672, 0.026626047134399415, 0.026605567932128905, 0.026630144119262695, 0.026792959213256837, 0.026659839630126952, 0.026712064743041993, 0.026451967239379884, 0.02666700744628906, 0.026607616424560547, 0.026689535140991212, 0.026827775955200195, 0.026588159561157225, 0.026600448608398438, 0.026681343078613282, 0.02872217559814453, 0.028786687850952147, 0.028617727279663087, 0.02692403221130371, 0.026557439804077147, 0.026674175262451173, 0.026669055938720702, 0.0267509765625, 0.026697727203369142, 0.026404863357543946, 0.026663936614990235, 0.026735616683959962, 0.026617855072021485, 0.02652364730834961, 0.026677248001098632, 0.02676531219482422, 0.026556447982788087, 0.026713056564331053, 0.026693632125854492, 0.026608640670776368, 0.027594751358032226, 0.026864639282226564, 0.026555423736572267, 0.026674144744873046, 0.027077632904052733, 0.026861568450927735, 0.026925056457519532, 0.027205631256103514, 0.026688512802124024, 0.02672332763671875, 0.02668441581726074, 0.02751692771911621, 0.026754047393798826, 0.026847232818603517, 0.02894745635986328, 0.028925952911376954, 0.02850918388366699, 0.026471424102783202, 0.028115968704223632, 0.028735488891601563, 0.028407808303833007, 0.026645503997802734, 0.026798080444335938, 0.028087295532226563, 0.028839935302734376, 0.02860339164733887, 0.028508159637451173]",tokens/s,36.05663574413833,,,1,64,1,,, -4bit-awq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,Qwen/Qwen2-beta-14B,Qwen/Qwen2-beta-14B,cuda,0,42,,,True,,,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,,qwen2,MB,8212.754432,12374.769664,0.0,11737.759744,11171.24352,s,1,12.854716796875,12.854716796875,0.0,12.854716796875,12.854716796875,12.854716796875,12.854716796875,[12.854716796875],,kWh,7.136749228124398e-05,3.9096094558727574e-05,0.00014317122564799778,0.00025363481248796935,,MB,4061.7984,12393.644032,0.0,11739.856896,10924.361728,s,10,24.12828442382812,2.4128284423828124,9.441193230607837e-05,2.412851318359375,2.4129219970703124,2.4129588012695313,2.412988244628906,"[2.4126953125, 2.412732666015625, 2.412913818359375, 2.412851806640625, 2.4128935546875, 2.41299560546875, 2.412864501953125, 2.41279736328125, 2.412850830078125, 2.41268896484375]",tokens/s,106.0995450414969,kWh,2.850474168416649e-05,1.5618635792949086e-05,0.0001724854990994007,0.00021660887657651626,tokens/kWh,1181853.689682791,MB,4066.062336,12395.741184,0.0,11741.954048,10924.364288,s,10,21.51089111328125,2.151089111328125,0.02032630585045066,2.1580616455078125,2.1751906982421874,2.1756214721679688,2.175966091308594,"[2.142735595703125, 2.1276767578125, 2.106494873046875, 2.16002392578125, 2.159419677734375, 2.161550048828125, 2.17605224609375, 2.175094970703125, 
2.15670361328125, 2.145139404296875]",tokens/s,29.287489610833717,kWh,2.499222211250015e-05,1.3700453690858022e-05,9.853918994239956e-05,0.00013723186574575772,tokens/kWh,459077.049325532,,s,630,21.508833229064923,0.034141005125499904,0.0008285471012598355,0.033855998992919925,0.03509698638916016,0.03530465202331543,0.03601582187652588,"[0.03457843017578125, 0.03347353744506836, 0.033465343475341795, 0.034391040802001956, 0.03484979248046875, 0.03502489471435547, 0.035386367797851564, 0.03340697479248047, 0.03339059066772461, 0.0337367057800293, 0.03468492889404297, 0.03473715209960938, 0.03483852767944336, 0.034680831909179685, 0.03466239929199219, 0.03488051223754883, 0.034678783416748044, 0.03464908981323242, 0.035250175476074216, 0.034797569274902344, 0.03344793701171875, 0.03448012924194336, 0.034326526641845705, 0.034825214385986326, 0.034751487731933595, 0.034677761077880856, 0.03468492889404297, 0.0348671989440918, 0.034900993347167966, 0.03477401733398437, 0.03499827194213867, 0.034977790832519534, 0.03447808074951172, 0.035037185668945314, 0.033732608795166014, 0.0334284782409668, 0.03346944046020508, 0.033380352020263675, 0.033377281188964845, 0.03337830352783203, 0.03339263916015625, 0.03349094390869141, 0.033554431915283206, 0.033337345123291014, 0.03331071853637695, 0.033344512939453126, 0.03333222579956055, 0.03376537704467773, 0.03340697479248047, 0.03330047988891602, 0.03332710266113281, 0.03337011337280273, 0.03332505416870117, 0.033380352020263675, 0.03327078247070313, 0.033331199645996096, 0.033188865661621096, 0.03327283096313476, 0.03322163009643555, 0.03335987091064453, 0.03326464080810547, 0.03321446228027344, 0.03322060775756836, 0.03421388626098633, 0.033306625366210936, 0.03326259231567383, 0.03360870361328125, 0.03322265625, 0.03323801422119141, 0.033259521484375, 0.033230846405029296, 0.033188865661621096, 0.033142784118652346, 0.033355777740478515, 0.03321241760253906, 0.03322777557373047, 0.033276927947998046, 0.03322470474243164, 0.033983486175537106, 0.03334143829345703, 0.03336601638793945, 0.03323392105102539, 0.03331071853637695, 0.03327385711669922, 0.033242111206054685, 0.033258495330810545, 0.03328204727172852, 0.03338649749755859, 0.03337318420410156, 0.03340902328491211, 0.033372161865234375, 0.03344998550415039, 0.03315609741210938, 0.03322880172729492, 0.03330047988891602, 0.03335372924804687, 0.033276927947998046, 0.0337520637512207, 0.03334860610961914, 0.033301502227783206, 0.03370700836181641, 0.03552358245849609, 0.03655680084228516, 0.03522867202758789, 0.034667518615722655, 0.0334837760925293, 0.03344076919555664, 0.03340800094604492, 0.03351551818847656, 0.03388927841186523, 0.034825214385986326, 0.036506622314453126, 0.03477913665771484, 0.034551807403564457, 0.03456409454345703, 0.0334284782409668, 0.03342131042480469, 0.03326668930053711, 0.033334270477294925, 0.033933311462402346, 0.03467059326171875, 0.03460300827026367, 0.034618366241455076, 0.0345241584777832, 0.03481702423095703, 0.0342210578918457, 0.034476032257080076, 0.035855358123779296, 0.03446681594848633, 0.03343155288696289, 0.03336806488037109, 0.03338240051269531, 0.033342464447021485, 0.03339059066772461, 0.033699840545654294, 0.03336806488037109, 0.03332505416870117, 0.0332410888671875, 0.03340083312988281, 0.03328204727172852, 0.03334143829345703, 0.03347455978393555, 0.03339468765258789, 0.033549312591552735, 0.033465343475341795, 0.03321139144897461, 0.03338547134399414, 0.03325337600708008, 0.03333222579956055, 0.033524734497070316, 0.03330867385864258, 
0.0332410888671875, 0.033452030181884765, 0.033339393615722655, 0.03328716659545899, 0.03324415969848633, 0.03334860610961914, 0.03321855926513672, 0.03344076919555664, 0.03334041595458984, 0.033258495330810545, 0.03322470474243164, 0.033337345123291014, 0.033225727081298825, 0.03325235366821289, 0.0333199348449707, 0.03320729446411133, 0.03322675323486328, 0.033307647705078124, 0.03325337600708008, 0.03350527954101563, 0.03355136108398438, 0.03337011337280273, 0.03333324813842774, 0.0332492790222168, 0.033413120269775394, 0.03335168075561523, 0.03329228973388672, 0.0337786865234375, 0.0332492790222168, 0.03326259231567383, 0.033205249786376956, 0.03341721725463867, 0.03352371215820313, 0.03331071853637695, 0.03327590560913086, 0.03333017730712891, 0.03349401473999023, 0.033587200164794925, 0.03596492767333984, 0.03355852890014648, 0.03487641525268555, 0.03495731353759766, 0.0349194221496582, 0.034885631561279294, 0.03491020965576172, 0.03483135986328125, 0.034941951751708986, 0.033426433563232424, 0.03342540740966797, 0.03391078567504883, 0.034969600677490234, 0.03486515045166016, 0.034928638458251955, 0.035133438110351564, 0.035454975128173825, 0.034993152618408206, 0.03488870239257812, 0.03342438507080078, 0.03344076919555664, 0.03426816177368164, 0.0355491828918457, 0.03511705780029297, 0.03477811050415039, 0.03339980697631836, 0.03348787307739258, 0.033465343475341795, 0.03342438507080078, 0.03471462249755859, 0.03478937530517578, 0.03489894485473633, 0.034855934143066404, 0.03478732681274414, 0.034887680053710936, 0.034890750885009765, 0.03485388946533203, 0.03487846374511719, 0.03339980697631836, 0.033277950286865234, 0.033879039764404296, 0.03490816116333008, 0.034738174438476564, 0.03480166244506836, 0.03498495864868164, 0.03348582458496094, 0.03331174468994141, 0.033334270477294925, 0.033364990234375, 0.03329740905761719, 0.033276927947998046, 0.03333017730712891, 0.03382988739013672, 0.033604606628417966, 0.03321446228027344, 0.03347763061523437, 0.03336601638793945, 0.033288192749023435, 0.03404800033569336, 0.034933761596679686, 0.034895870208740236, 0.03477811050415039, 0.033242111206054685, 0.034431968688964844, 0.03326668930053711, 0.033285118103027346, 0.03325747299194336, 0.03336703872680664, 0.03326464080810547, 0.03332403182983398, 0.033242111206054685, 0.033279998779296875, 0.033732608795166014, 0.03383295822143555, 0.033296382904052735, 0.03329433441162109, 0.03335372924804687, 0.03327590560913086, 0.0332677116394043, 0.03326259231567383, 0.03321446228027344, 0.03330355072021484, 0.033219585418701174, 0.03329433441162109, 0.03322367858886719, 0.03323392105102539, 0.033274879455566404, 0.03334041595458984, 0.03336294555664063, 0.03329433441162109, 0.033261566162109374, 0.033452030181884765, 0.03335270309448242, 0.03612876892089844, 0.0353966064453125, 0.03496243286132812, 0.03479654312133789, 0.035266559600830076, 0.03497983932495117, 0.03492659378051758, 0.0350750732421875, 0.03502489471435547, 0.035092479705810545, 0.03574476623535156, 0.03522969436645508, 0.035337215423583986, 0.0350750732421875, 0.035302398681640625, 0.03488256072998047, 0.03500646209716797, 0.03514265441894531, 0.034917377471923826, 0.03490816116333008, 0.034933761596679686, 0.0348590087890625, 0.03482316970825195, 0.034955265045166016, 0.0349224967956543, 0.034925567626953126, 0.034953216552734374, 0.034939903259277344, 0.03499622344970703, 0.03483955383300781, 0.03573452758789063, 0.03513958358764648, 0.03513139343261719, 0.03445248031616211, 0.033347583770751955, 0.033350654602050785, 
0.03337113571166992, 0.03328204727172852, 0.03330252838134766, 0.03327283096313476, 0.033329151153564454, 0.034065406799316404, 0.03455078506469727, 0.03339980697631836, 0.03471155166625976, 0.03540991973876953, 0.034852863311767575, 0.03336703872680664, 0.03420467376708984, 0.03476172637939453, 0.033326080322265625, 0.034721790313720705, 0.034802688598632815, 0.034900993347167966, 0.033293312072753906, 0.033339393615722655, 0.03396198272705078, 0.034920448303222655, 0.034902015686035154, 0.03473100662231445, 0.03566592025756836, 0.03355033493041992, 0.03335475158691406, 0.033380352020263675, 0.033347583770751955, 0.03343155288696289, 0.03405926513671875, 0.03480678558349609, 0.03476889419555664, 0.03490611267089844, 0.0350300178527832, 0.03487027359008789, 0.034651134490966795, 0.033775615692138675, 0.0348037109375, 0.034857982635498046, 0.034797569274902344, 0.03328716659545899, 0.033293312072753906, 0.033298431396484376, 0.03478015899658203, 0.03469311904907227, 0.034756607055664065, 0.034732032775878906, 0.03498700714111328, 0.034857982635498046, 0.0347883529663086, 0.0349378547668457, 0.03491020965576172, 0.034331649780273435, 0.03558195114135742, 0.03487539291381836, 0.034770942687988284, 0.034928638458251955, 0.03478220748901367, 0.03475046539306641, 0.03443404769897461, 0.03347251129150391, 0.033314815521240236, 0.033345535278320314, 0.033337345123291014, 0.0334202880859375, 0.0333496322631836, 0.033350654602050785, 0.034097152709960936, 0.03510067367553711, 0.03516416168212891, 0.03493580627441406, 0.03488460922241211, 0.03489996719360351, 0.035585025787353515, 0.03488153457641602, 0.03493273544311523, 0.034907135009765625, 0.034991104125976565, 0.034895870208740236, 0.03505152130126953, 0.035124225616455076, 0.034945022583007815, 0.0347883529663086, 0.035419136047363284, 0.03536896133422852, 0.036311038970947264, 0.03517030334472656, 0.03482828903198242, 0.03474943923950195, 0.03498700714111328, 0.03490611267089844, 0.03486822509765625, 0.03488870239257812, 0.034948097229003904, 0.034950145721435545, 0.035074047088623043, 0.03498188781738281, 0.03484262466430664, 0.03394867324829102, 0.034904064178466795, 0.03485388946533203, 0.034907135009765625, 0.034098175048828124, 0.03377971267700195, 0.033519615173339845, 0.033503231048583985, 0.03353702545166016, 0.033588222503662106, 0.033567745208740236, 0.03343667221069336, 0.03343769454956055, 0.03353497695922852, 0.03347251129150391, 0.03413708877563477, 0.03640627288818359, 0.03487948989868164, 0.03466035079956055, 0.03522457504272461, 0.034797569274902344, 0.03470848083496094, 0.03468697738647461, 0.03474227142333984, 0.036956161499023435, 0.03550207901000976, 0.03501670455932617, 0.03482419204711914, 0.03477503967285156, 0.034912254333496096, 0.0349409294128418, 0.034931713104248044, 0.034802688598632815, 0.03474739074707031, 0.034710529327392575, 0.03491430282592774, 0.03340697479248047, 0.0333383674621582, 0.03331174468994141, 0.033261566162109374, 0.033342464447021485, 0.03467366409301758, 0.03476684951782227, 0.034784255981445314, 0.034939903259277344, 0.03479654312133789, 0.03481497573852539, 0.03481702423095703, 0.034167808532714845, 0.033274879455566404, 0.033290241241455076, 0.0349194221496582, 0.0348487663269043, 0.03491020965576172, 0.03551641464233399, 0.035181568145751956, 0.03489894485473633, 0.034754558563232424, 0.0346429443359375, 0.03520000076293945, 0.03515084838867188, 0.03486515045166016, 0.03333631896972656, 0.03332505416870117, 0.03337625503540039, 0.03330355072021484, 0.03333631896972656, 0.03335782241821289, 
0.03338649749755859, 0.033372161865234375, 0.03460505676269531, 0.035019775390625, 0.03502796936035156, 0.03508428955078125, 0.03498700714111328, 0.03496448135375976, 0.0349409294128418, 0.03482009506225586, 0.03484569549560547, 0.03488665771484375, 0.034947071075439456, 0.034802688598632815, 0.03480883026123047, 0.03484467315673828, 0.03566899108886719, 0.03357798385620117, 0.0333568000793457, 0.03447500610351562, 0.03349708938598633, 0.033350654602050785, 0.033459201812744144, 0.03341107177734375, 0.033334270477294925, 0.033070079803466795, 0.03341209411621094, 0.03332710266113281, 0.033375232696533204, 0.03346944046020508, 0.03336191940307617, 0.03338854217529297, 0.033363967895507815, 0.03336908721923828, 0.033342464447021485, 0.03357900619506836, 0.03423027038574219, 0.033898494720458985, 0.03503104019165039, 0.03517030334472656, 0.03570278549194336, 0.03578572845458984, 0.03503308868408203, 0.03508838272094727, 0.03509145736694336, 0.035020801544189455, 0.03503923034667969, 0.03511808013916016, 0.034939903259277344, 0.0350382080078125, 0.034971649169921876, 0.0350115852355957, 0.03499008178710938, 0.03498086547851562, 0.035023872375488284, 0.03506380844116211, 0.035037185668945314, 0.03505152130126953, 0.03501670455932617, 0.03506380844116211, 0.03509657669067383, 0.035122177124023435, 0.03499520111083984, 0.03340697479248047, 0.033304576873779294, 0.03331584167480469, 0.033980415344238284, 0.034767871856689454, 0.03504127883911133, 0.03504742431640625, 0.034991104125976565, 0.035009536743164066, 0.03337318420410156, 0.033337345123291014, 0.03333017730712891, 0.03338751983642578, 0.033323009490966796, 0.033339393615722655, 0.033274879455566404, 0.033377281188964845, 0.03335475158691406, 0.033342464447021485, 0.034506752014160154, 0.03327897644042969, 0.033430526733398434, 0.033546241760253906, 0.03393843078613281, 0.03376128005981445, 0.03391078567504883, 0.033530879974365234, 0.03335987091064453, 0.03330559921264648, 0.033326080322265625, 0.03338444900512695, 0.033454078674316406, 0.033375232696533204, 0.03340288162231445, 0.03337011337280273, 0.03347353744506836, 0.03343564987182617, 0.033454078674316406, 0.03341107177734375, 0.03347148895263672, 0.0329246711730957, 0.03322880172729492, 0.03341926574707031, 0.03342438507080078, 0.03339059066772461, 0.03351347351074219, 0.033442817687988284, 0.033430526733398434, 0.0333496322631836, 0.03336703872680664, 0.033375232696533204, 0.034282497406005856, 0.03533926391601563, 0.035122177124023435, 0.036036609649658206, 0.035388416290283206, 0.035133438110351564, 0.034854911804199216, 0.03481087875366211, 0.035171329498291014, 0.035248126983642575, 0.035211265563964846, 0.03530649566650391, 0.03518975830078125, 0.03368960189819336, 0.0345354232788086, 0.035932159423828124, 0.03525939178466797, 0.03478220748901367, 0.03510067367553711, 0.03503923034667969, 0.035151870727539065, 0.03340288162231445, 0.03333017730712891, 0.033339393615722655, 0.0334202880859375, 0.03342335891723633, 0.033377281188964845, 0.03337625503540039, 0.03337113571166992, 0.03527782440185547, 0.03503206253051758]",tokens/s,29.29029172761819,,,1,64,1,,, 
-4bit-awq-exllama-v1-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,a,a,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 304, in hf_raise_for_status - response.raise_for_status() - File ""/usr/local/lib/python3.10/dist-packages/requests/models.py"", line 1024, in raise_for_status - raise HTTPError(http_error_msg, response=self) -requests.exceptions.HTTPError: 404 Client Error: Not Found for url: https://huggingface.co/a/resolve/main/config.json - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1221, in hf_hub_download - return _hf_hub_download_to_cache_dir( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1325, in _hf_hub_download_to_cache_dir - _raise_on_head_call_error(head_call_error, force_download, local_files_only) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1823, in _raise_on_head_call_error - raise head_call_error - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1722, in _get_metadata_or_catch_error - metadata = get_hf_file_metadata(url=url, proxies=proxies, timeout=etag_timeout, headers=headers) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1645, in get_hf_file_metadata - r = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 372, in _request_wrapper - response = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 396, in _request_wrapper - hf_raise_for_status(response) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status - raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. 
(Request ID: Root=1-66949171-3125b6236ec631226f52bbaa;0f758913-8bcf-45b1-9fb1-baf7fa54916e) - -Repository Not Found for url: https://huggingface.co/a/resolve/main/config.json. -Please make sure you specified the correct `repo_id` and `repo_type`. -If you are trying to access a private or gated repo, make sure you are authenticated. - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 425, in cached_file - raise EnvironmentError( -OSError: a is not a local folder and is not a valid model identifier listed on 'https://huggingface.co/models' -If this is a private repository, make sure to pass a token having permission to this repo either by logging in with `huggingface-cli login` or by passing `token=` - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,1,64,1,,, -4bit-awq-exllama-v1-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,-,-,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - 
resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 106, in _inner_fn - validate_repo_id(arg_value) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 160, in validate_repo_id - raise HFValidationError( -huggingface_hub.errors.HFValidationError: Repo id must use alphanumeric chars or '-', '_', '.', '--' and '..' are forbidden, '-' and '.' cannot start or end the name, max length is 96: '-'. - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 466, in cached_file - raise EnvironmentError( -OSError: Incorrect path_or_model_id: '-'. Please provide either the path to a local folder or the repo_id of a model on the Hub. 
- -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,1,64,1,,, -4bit-awq-exllama-v1-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,facebook/opt-350m,facebook/opt-350m,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - self.load_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3704, in from_pretrained - config = cls._autoset_attn_implementation( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1490, in _autoset_attn_implementation - config = cls._check_and_enable_sdpa( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1656, in _check_and_enable_sdpa - raise ValueError( -ValueError: OPTForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. 
Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,1,64,1,,, -4bit-awq-exllama-v1-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,m,m,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 304, in hf_raise_for_status - response.raise_for_status() - File ""/usr/local/lib/python3.10/dist-packages/requests/models.py"", line 1024, in raise_for_status - raise HTTPError(http_error_msg, response=self) -requests.exceptions.HTTPError: 404 Client Error: Not Found for url: https://huggingface.co/m/resolve/main/config.json - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1221, in hf_hub_download - return _hf_hub_download_to_cache_dir( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1325, in _hf_hub_download_to_cache_dir - _raise_on_head_call_error(head_call_error, force_download, local_files_only) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1823, in _raise_on_head_call_error - raise head_call_error - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1722, in _get_metadata_or_catch_error - metadata = get_hf_file_metadata(url=url, proxies=proxies, timeout=etag_timeout, headers=headers) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1645, in get_hf_file_metadata - r = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 372, in _request_wrapper - response = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 396, in _request_wrapper - hf_raise_for_status(response) - File 
""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status - raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-66948c52-7c4106786b4cbcc30e45aab2;cf14755c-9cfa-4a50-9669-5f6c6d283e1a) - -Repository Not Found for url: https://huggingface.co/m/resolve/main/config.json. -Please make sure you specified the correct `repo_id` and `repo_type`. -If you are trying to access a private or gated repo, make sure you are authenticated. - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 425, in cached_file - raise EnvironmentError( -OSError: m is not a local folder and is not a valid model identifier listed on 'https://huggingface.co/models' -If this is a private repository, make sure to pass a token having permission to this repo either by logging in with `huggingface-cli login` or by passing `token=` - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,1,64,1,,, -4bit-awq-exllama-v1-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,stabilityai/stablelm-base-alpha-7b,stabilityai/stablelm-base-alpha-7b,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - 
File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - self.load_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3907, in from_pretrained - hf_quantizer.postprocess_model(model) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/base.py"", line 195, in postprocess_model - return self._process_model_after_weight_loading(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/quantizer_awq.py"", line 107, in _process_model_after_weight_loading - model = post_init_awq_exllama_modules(model, self.quantization_config.exllama_config) - File ""/usr/local/lib/python3.10/dist-packages/transformers/integrations/awq.py"", line 462, in post_init_awq_exllama_modules - model = exllama_post_init(model) - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllama.py"", line 144, in exllama_post_init - submodule.post_init() - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllama.py"", line 77, in post_init - self.q4 = exl_ext.make_q4( -NameError: name 'exl_ext' is not defined - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,1,64,1,,, -4bit-awq-exllama-v1-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,M,M,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 304, in hf_raise_for_status - response.raise_for_status() - File 
""/usr/local/lib/python3.10/dist-packages/requests/models.py"", line 1024, in raise_for_status - raise HTTPError(http_error_msg, response=self) -requests.exceptions.HTTPError: 404 Client Error: Not Found for url: https://huggingface.co/M/resolve/main/config.json - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1221, in hf_hub_download - return _hf_hub_download_to_cache_dir( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1325, in _hf_hub_download_to_cache_dir - _raise_on_head_call_error(head_call_error, force_download, local_files_only) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1823, in _raise_on_head_call_error - raise head_call_error - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1722, in _get_metadata_or_catch_error - metadata = get_hf_file_metadata(url=url, proxies=proxies, timeout=etag_timeout, headers=headers) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1645, in get_hf_file_metadata - r = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 372, in _request_wrapper - response = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 396, in _request_wrapper - hf_raise_for_status(response) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status - raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-66948fca-43cc037e15d3884f71859cb8;2a60cb70-b6a3-4ec8-8e6f-187c49c6c685) - -Repository Not Found for url: https://huggingface.co/M/resolve/main/config.json. -Please make sure you specified the correct `repo_id` and `repo_type`. -If you are trying to access a private or gated repo, make sure you are authenticated. 
- -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 425, in cached_file - raise EnvironmentError( -OSError: M is not a local folder and is not a valid model identifier listed on 'https://huggingface.co/models' -If this is a private repository, make sure to pass a token having permission to this repo either by logging in with `huggingface-cli login` or by passing `token=` - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,1,64,1,,, -4bit-awq-exllama-v1-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,8,8,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 304, in hf_raise_for_status - response.raise_for_status() - File ""/usr/local/lib/python3.10/dist-packages/requests/models.py"", line 1024, in raise_for_status - raise HTTPError(http_error_msg, response=self) -requests.exceptions.HTTPError: 404 Client Error: Not Found for url: https://huggingface.co/8/resolve/main/config.json - -The above exception was the 
direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1221, in hf_hub_download - return _hf_hub_download_to_cache_dir( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1325, in _hf_hub_download_to_cache_dir - _raise_on_head_call_error(head_call_error, force_download, local_files_only) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1823, in _raise_on_head_call_error - raise head_call_error - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1722, in _get_metadata_or_catch_error - metadata = get_hf_file_metadata(url=url, proxies=proxies, timeout=etag_timeout, headers=headers) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1645, in get_hf_file_metadata - r = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 372, in _request_wrapper - response = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 396, in _request_wrapper - hf_raise_for_status(response) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status - raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-66949279-179e332e37ddb64d1199134d;65b6f76d-0b6a-441d-8027-7987328985dc) - -Repository Not Found for url: https://huggingface.co/8/resolve/main/config.json. -Please make sure you specified the correct `repo_id` and `repo_type`. -If you are trying to access a private or gated repo, make sure you are authenticated. 
- -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 425, in cached_file - raise EnvironmentError( -OSError: 8 is not a local folder and is not a valid model identifier listed on 'https://huggingface.co/models' -If this is a private repository, make sure to pass a token having permission to this repo either by logging in with `huggingface-cli login` or by passing `token=` - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,1,64,1,,, -4bit-awq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,EleutherAI/gpt-neox-20b,EleutherAI/gpt-neox-20b,cuda,0,42,,,True,,,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,gpt_neox,MB,11150.512128,14821.097472,0.0,14191.427584,13325.783552,s,1,14.0223095703125,14.0223095703125,0.0,14.0223095703125,14.0223095703125,14.0223095703125,14.0223095703125,[14.0223095703125],,kWh,8.319086966874984e-05,4.5579630975701875e-05,0.00018276181287602,0.00031153231352047174,,MB,2122.77248,14837.874688,0.0,14191.427584,12582.029312,s,10,36.29163623046875,3.6291636230468747,0.00022777061108684947,3.6290941162109376,3.6293370849609374,3.629556481933594,3.6297319995117188,"[3.62895703125, 3.62977587890625, 3.629288330078125, 3.629176025390625, 3.62921142578125, 3.62898388671875, 3.628999267578125, 3.62905615234375, 3.62905712890625, 
3.629131103515625]",tokens/s,70.53966880255304,kWh,4.2856285743819486e-05,2.348739708534439e-05,0.0002665474354599989,0.0003328911182891628,tokens/kWh,769020.2169275901,MB,2127.245312,14837.874688,0.0,14191.427584,12935.92832,s,10,22.315797119140626,2.231579711914063,0.0022995401224601268,2.23152392578125,2.2334161376953126,2.235371154785156,2.2369351684570313,"[2.231531005859375, 2.22956494140625, 2.231516845703125, 2.231714111328125, 2.230606689453125, 2.237326171875, 2.22879443359375, 2.232981689453125, 2.23231689453125, 2.2294443359375]",tokens/s,28.231122403404473,kWh,2.6669649647083522e-05,1.4617202969694104e-05,0.00012779624112580217,0.00016908309374257994,tokens/kWh,372597.8665608885,,s,630,22.301841415405267,0.03539974827842107,0.00033063927536764603,0.035373054504394534,0.035635507583618165,0.03567411041259766,0.036580853424072274,"[0.0355676155090332, 0.03501567840576172, 0.03498905563354492, 0.03505766296386719, 0.035095550537109374, 0.03501363372802734, 0.03506585693359375, 0.035119102478027346, 0.03503206253051758, 0.03510374450683594, 0.035156993865966796, 0.0350904312133789, 0.03520716857910156, 0.03518463897705078, 0.03530547332763672, 0.03521023941040039, 0.03521843338012695, 0.035154945373535154, 0.03522969436645508, 0.03525632095336914, 0.03532799911499023, 0.035282943725585936, 0.035340286254882815, 0.03523788833618164, 0.03527884674072266, 0.03533004760742187, 0.035798015594482424, 0.035350528717041016, 0.03590655899047852, 0.03529011154174805, 0.03535871887207031, 0.03538431930541992, 0.03527884674072266, 0.035364864349365234, 0.035399681091308595, 0.0353177604675293, 0.03548262405395508, 0.03546931076049804, 0.03536281585693359, 0.03542835235595703, 0.03552870559692383, 0.03538431930541992, 0.03561881637573242, 0.035555328369140625, 0.03563520050048828, 0.035522560119628906, 0.03551232147216797, 0.03544166564941406, 0.035492862701416016, 0.03556966400146484, 0.03573452758789063, 0.03562496185302735, 0.03731148910522461, 0.03564646530151367, 0.03561574554443359, 0.03563827133178711, 0.035536895751953124, 0.03563417434692383, 0.03568025588989258, 0.035579902648925785, 0.03563008117675781, 0.03570278549194336, 0.03559526443481445, 0.035519489288330076, 0.03512115097045899, 0.03504742431640625, 0.03505868911743164, 0.03512319946289062, 0.03505868911743164, 0.03506892776489258, 0.035102718353271486, 0.03504537582397461, 0.03517747116088867, 0.03521023941040039, 0.035547134399414065, 0.035198974609375, 0.03523583984375, 0.035138561248779294, 0.035266559600830076, 0.03535257720947266, 0.03518771362304687, 0.035388416290283206, 0.03526553726196289, 0.035168254852294925, 0.035244033813476565, 0.035280895233154294, 0.03520000076293945, 0.03526348876953125, 0.03533824157714844, 0.035280895233154294, 0.03533107376098633, 0.0353546257019043, 0.03527065658569336, 0.035323902130126955, 0.03539353561401367, 0.03531673431396484, 0.03538227081298828, 0.0354150390625, 0.03530137634277344, 0.035366912841796876, 0.0354252815246582, 0.03536076736450195, 0.03540582275390625, 0.03561779022216797, 0.03539148712158203, 0.03544268798828125, 0.03551334381103516, 0.03539763259887695, 0.035517440795898435, 0.03661619186401367, 0.035471359252929685, 0.03552972793579102, 0.03555737686157227, 0.03544575881958008, 0.03556249618530274, 0.035576831817626955, 0.035509246826171875, 0.03561471939086914, 0.035639297485351565, 0.03558092880249023, 0.035659774780273434, 0.035643390655517575, 0.035604480743408204, 0.035620864868164064, 0.03567718505859375, 0.03556454467773437, 0.0356577262878418, 
0.035064830780029296, 0.03499008178710938, 0.03503411102294922, 0.035127296447753906, 0.0350115852355957, 0.035043327331542966, 0.0350904312133789, 0.03503923034667969, 0.03512115097045899, 0.035141632080078124, 0.03508736038208008, 0.035138561248779294, 0.035266559600830076, 0.03514470291137695, 0.03517440032958984, 0.037577728271484374, 0.03528704071044922, 0.035230720520019534, 0.035366912841796876, 0.035178497314453126, 0.03530547332763672, 0.03557785415649414, 0.035372032165527346, 0.03528192138671875, 0.035323902130126955, 0.03522969436645508, 0.035350528717041016, 0.035335166931152344, 0.03529011154174805, 0.035348480224609374, 0.035373054504394534, 0.035299327850341795, 0.035381248474121094, 0.035399681091308595, 0.035345409393310545, 0.03536896133422852, 0.0354334716796875, 0.03533004760742187, 0.0354334716796875, 0.03547443389892578, 0.03537919998168945, 0.035451904296875, 0.035517440795898435, 0.03540889739990234, 0.03549388885498047, 0.03649433517456055, 0.03546726226806641, 0.03553484725952148, 0.035622913360595705, 0.035563518524169925, 0.03556454467773437, 0.03560140609741211, 0.035506175994873046, 0.03558092880249023, 0.03561164855957031, 0.0355491828918457, 0.03561779022216797, 0.03568742370605469, 0.035591167449951173, 0.035659774780273434, 0.03569356918334961, 0.03561369705200195, 0.0355491828918457, 0.035046398162841795, 0.034985984802246094, 0.03504435348510742, 0.03508838272094727, 0.03505152130126953, 0.035059711456298825, 0.035181568145751956, 0.03503923034667969, 0.03513651275634765, 0.035160064697265625, 0.035124225616455076, 0.03518975830078125, 0.03522150421142578, 0.03545395278930664, 0.03523379135131836, 0.03523481750488281, 0.03515084838867188, 0.03522048187255859, 0.035492862701416016, 0.03519385528564453, 0.03525734329223633, 0.03531161499023437, 0.03527679824829102, 0.03531980895996094, 0.035372032165527346, 0.03549491119384766, 0.035350528717041016, 0.03544063949584961, 0.03528908920288086, 0.03534745788574219, 0.03540582275390625, 0.03540582275390625, 0.035454975128173825, 0.03544166564941406, 0.03533107376098633, 0.03540070343017578, 0.03543142318725586, 0.03542015838623047, 0.03545292663574219, 0.03549593734741211, 0.035416065216064455, 0.03549388885498047, 0.03553484725952148, 0.03544678497314453, 0.035530750274658206, 0.035560447692871096, 0.03547238540649414, 0.03590758514404297, 0.035568641662597655, 0.035492862701416016, 0.03555635070800781, 0.03561471939086914, 0.03557785415649414, 0.03565363311767578, 0.03564851379394531, 0.03559526443481445, 0.035645439147949216, 0.03565465545654297, 0.03558092880249023, 0.03563417434692383, 0.03601408004760742, 0.035639297485351565, 0.03553484725952148, 0.035373054504394534, 0.03506175994873047, 0.03502592086791992, 0.035105792999267575, 0.03507097625732422, 0.035166206359863283, 0.03520204925537109, 0.03509657669067383, 0.035171329498291014, 0.035179519653320314, 0.035141632080078124, 0.03520000076293945, 0.03526860809326172, 0.03510988616943359, 0.03517747116088867, 0.03524915313720703, 0.03517747116088867, 0.035195903778076174, 0.03524505615234375, 0.03517337417602539, 0.035266559600830076, 0.03530035018920898, 0.035179519653320314, 0.03525222396850586, 0.035310592651367184, 0.035310592651367184, 0.03531366348266601, 0.03535257720947266, 0.03527475357055664, 0.03537100982666016, 0.0354068489074707, 0.035364864349365234, 0.035361793518066405, 0.035389438629150394, 0.03531468963623047, 0.035348480224609374, 0.03566080093383789, 0.03537408065795898, 0.03544985580444336, 0.03590553665161133, 
0.03552153778076172, 0.035454975128173825, 0.03551027297973633, 0.03543142318725586, 0.03547340774536133, 0.03551846313476562, 0.03547340774536133, 0.035544063568115236, 0.03553177642822265, 0.03549593734741211, 0.03554611206054688, 0.03561062240600586, 0.03551334381103516, 0.035591167449951173, 0.035639297485351565, 0.035606529235839846, 0.03564031982421875, 0.03570687866210937, 0.03559936141967773, 0.03565670394897461, 0.035676158905029294, 0.03559423828125, 0.03550515365600586, 0.0350750732421875, 0.03496755218505859, 0.03501875305175781, 0.03510374450683594, 0.03525632095336914, 0.03511705780029297, 0.035127296447753906, 0.035026943206787106, 0.035130367279052735, 0.03523788833618164, 0.03507302474975586, 0.03513446426391602, 0.03528396987915039, 0.040460289001464846, 0.036765697479248044, 0.035378177642822264, 0.035141632080078124, 0.03521843338012695, 0.03524505615234375, 0.03518463897705078, 0.03523174285888672, 0.03526758575439453, 0.035209217071533204, 0.03536281585693359, 0.0353259506225586, 0.03524198532104492, 0.03530547332763672, 0.03535974502563476, 0.035280895233154294, 0.03536383819580078, 0.03561881637573242, 0.03527475357055664, 0.03537612915039062, 0.03545804977416992, 0.03530956649780274, 0.035404800415039066, 0.03544268798828125, 0.035326976776123044, 0.035410945892333984, 0.03545395278930664, 0.03567103958129883, 0.035484672546386715, 0.035536895751953124, 0.035432449340820314, 0.035555328369140625, 0.035544063568115236, 0.03543552017211914, 0.03562393569946289, 0.03558297729492187, 0.035451904296875, 0.03557068634033203, 0.03563520050048828, 0.03554304122924805, 0.03557785415649414, 0.03560140609741211, 0.03587583923339844, 0.03564031982421875, 0.03566796875, 0.0355676155090332, 0.03562905502319336, 0.03567411041259766, 0.03567411041259766, 0.035538944244384765, 0.03503104019165039, 0.034953216552734374, 0.03529420852661133, 0.035094528198242186, 0.0350013427734375, 0.03507814407348633, 0.03513958358764648, 0.035062782287597655, 0.03515801620483398, 0.03517747116088867, 0.03508224105834961, 0.03513651275634765, 0.035214336395263675, 0.035113983154296875, 0.035166206359863283, 0.03522662353515625, 0.035154945373535154, 0.035227649688720705, 0.03523993682861328, 0.03518054580688477, 0.03521535873413086, 0.03530342483520508, 0.03522252655029297, 0.03525836944580078, 0.0353331184387207, 0.03524710464477539, 0.035312641143798826, 0.035389438629150394, 0.03528396987915039, 0.0353259506225586, 0.03535974502563476, 0.03529011154174805, 0.03535564804077149, 0.035410945892333984, 0.03532492828369141, 0.035372032165527346, 0.03542118453979492, 0.03536281585693359, 0.03543756866455078, 0.03546316909790039, 0.035416065216064455, 0.03544985580444336, 0.03549798583984375, 0.03540889739990234, 0.035507198333740234, 0.03551334381103516, 0.03544985580444336, 0.035542015075683595, 0.035560447692871096, 0.03544473648071289, 0.03553177642822265, 0.03558911895751953, 0.035525630950927735, 0.035560447692871096, 0.03562496185302735, 0.03555430221557617, 0.03570073699951172, 0.035683326721191407, 0.03556556701660156, 0.03564031982421875, 0.03564748764038086, 0.03560345458984375, 0.0355153923034668, 0.035056640625, 0.034947071075439456, 0.03503923034667969, 0.03514572906494141, 0.03504127883911133, 0.03508838272094727, 0.03511808013916016, 0.03504435348510742, 0.03511603164672852, 0.035163135528564454, 0.03508531188964844, 0.03513753509521484, 0.03536793518066406, 0.03514265441894531, 0.03523276901245117, 0.03522662353515625, 0.03512115097045899, 0.03520614242553711, 0.03524505615234375, 
0.03518463897705078, 0.03522662353515625, 0.035297279357910154, 0.03520819091796875, 0.03526041412353516, 0.03536793518066406, 0.03524915313720703, 0.035345409393310545, 0.035351551055908204, 0.03526553726196289, 0.035318782806396484, 0.03541196823120117, 0.03530649566650391, 0.03537715148925781, 0.03540377426147461, 0.03531468963623047, 0.03606220626831055, 0.03551232147216797, 0.035378177642822264, 0.03551129531860352, 0.037013504028320314, 0.03548364639282227, 0.03687116622924805, 0.03559731292724609, 0.03547955322265625, 0.03549491119384766, 0.03551641464233399, 0.03542630386352539, 0.035504127502441404, 0.03556454467773437, 0.035465217590332034, 0.03555635070800781, 0.03563520050048828, 0.035550209045410154, 0.03555123138427734, 0.03562905502319336, 0.0355676155090332, 0.03561471939086914, 0.0356495361328125, 0.03555327987670898, 0.03564748764038086, 0.03568947219848633, 0.03564748764038086, 0.03566694259643555, 0.035095550537109374, 0.03511808013916016, 0.03505868911743164, 0.035138561248779294, 0.03499622344970703, 0.035108863830566404, 0.035194881439208986, 0.03510476684570313, 0.035151870727539065, 0.03517030334472656, 0.03556249618530274, 0.035156993865966796, 0.03520204925537109, 0.035253246307373046, 0.0351907844543457, 0.03528704071044922, 0.036034561157226565, 0.03538431930541992, 0.03528908920288086, 0.03552870559692383, 0.03526553726196289, 0.035264511108398434, 0.035192832946777344, 0.03525529479980469, 0.03534643173217773, 0.03532492828369141, 0.03535974502563476, 0.035348480224609374, 0.03548057556152344, 0.03536076736450195, 0.03538332748413086, 0.03539555358886719, 0.03538739013671875, 0.03541299057006836, 0.035318782806396484, 0.0354150390625, 0.03546931076049804, 0.03536076736450195, 0.03570278549194336, 0.03547955322265625, 0.035388416290283206, 0.03547750473022461, 0.0354969596862793, 0.03542835235595703, 0.03550003051757813, 0.0355860481262207, 0.03548876953125, 0.035573760986328126, 0.035639297485351565, 0.035507198333740234, 0.03552358245849609, 0.03559936141967773, 0.035517440795898435, 0.03558707046508789, 0.03561881637573242, 0.035576831817626955, 0.03561471939086914, 0.03565875244140625, 0.035568641662597655, 0.035625984191894534, 0.035659774780273434, 0.03560140609741211, 0.035519489288330076, 0.035064830780029296, 0.03520409774780273, 0.03509862518310547, 0.03510067367553711, 0.035004417419433595, 0.03509964752197266, 0.035113983154296875, 0.035023872375488284, 0.03513958358764648, 0.03517337417602539, 0.035076095581054685, 0.03512934494018555, 0.035197952270507815, 0.035125247955322264, 0.0351723518371582, 0.03523481750488281, 0.035130367279052735, 0.03523481750488281, 0.03526758575439453, 0.035171329498291014, 0.03521023941040039, 0.035340286254882815, 0.03517747116088867, 0.03526041412353516, 0.035345409393310545, 0.03523174285888672, 0.03532492828369141, 0.03535257720947266, 0.035282943725585936, 0.03535564804077149, 0.03537612915039062, 0.03529011154174805, 0.035342334747314456, 0.03538022232055664, 0.035323902130126955, 0.03541196823120117, 0.03544268798828125, 0.035340286254882815, 0.035418113708496096, 0.035476478576660156, 0.03537612915039062, 0.03544473648071289, 0.03551846313476562, 0.03542323303222656, 0.03545907211303711, 0.03551334381103516, 0.03542732620239258, 0.03548057556152344, 0.03553996658325195, 0.03546316909790039, 0.035571712493896485, 0.03560038375854492, 0.035504127502441404, 0.035573760986328126, 0.03562803268432617, 0.035538944244384765, 0.03562496185302735, 0.035664894104003905, 0.036037631988525394, 0.03574784088134766, 
0.03572531127929687, 0.03565875244140625]",tokens/s,28.24878844151495,,,1,64,1,,, -4bit-awq-exllama-v1-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,s,s,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 304, in hf_raise_for_status - response.raise_for_status() - File ""/usr/local/lib/python3.10/dist-packages/requests/models.py"", line 1024, in raise_for_status - raise HTTPError(http_error_msg, response=self) -requests.exceptions.HTTPError: 404 Client Error: Not Found for url: https://huggingface.co/s/resolve/main/config.json - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1221, in hf_hub_download - return _hf_hub_download_to_cache_dir( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1325, in _hf_hub_download_to_cache_dir - _raise_on_head_call_error(head_call_error, force_download, local_files_only) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1823, in _raise_on_head_call_error - raise head_call_error - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1722, in _get_metadata_or_catch_error - metadata = get_hf_file_metadata(url=url, proxies=proxies, timeout=etag_timeout, headers=headers) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1645, in get_hf_file_metadata - r = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 372, in _request_wrapper - response = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 396, in _request_wrapper - hf_raise_for_status(response) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status - raise RepositoryNotFoundError(message, response) from e 
-huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-66948cf5-14674d57480a8ec364baf34f;f2fe8072-c785-4e38-b61e-b7213914da04) - -Repository Not Found for url: https://huggingface.co/s/resolve/main/config.json. -Please make sure you specified the correct `repo_id` and `repo_type`. -If you are trying to access a private or gated repo, make sure you are authenticated. - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 425, in cached_file - raise EnvironmentError( -OSError: s is not a local folder and is not a valid model identifier listed on 'https://huggingface.co/models' -If this is a private repository, make sure to pass a token having permission to this repo either by logging in with `huggingface-cli login` or by passing `token=` - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,1,64,1,,, -4bit-awq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,EleutherAI/pythia-12b,EleutherAI/pythia-12b,cuda,0,42,,,True,,,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,gpt_neox,MB,6755.04128,9406.251008,0.0,8776.58112,8188.314112,s,1,11.3781962890625,11.3781962890625,0.0,11.3781962890625,11.3781962890625,11.3781962890625,11.3781962890625,[11.3781962890625],,kWh,5.2769926914567754e-05,2.8905592406496574e-05,0.00010339313826995822,0.00018506865759102255,,MB,1708.032,9423.028224,0.0,8776.58112,7654.832128,s,10,21.341498046874996,2.1341498046874996,0.00019409447472600784,2.13417236328125,2.1343157470703127,2.1344193481445313,2.134502229003906,"[2.133729736328125, 2.134009033203125, 2.134167724609375, 
2.134038330078125, 2.1341435546875, 2.134177001953125, 2.13422216796875, 2.134292724609375, 2.13452294921875, 2.13419482421875]",tokens/s,119.95409105664244,kWh,2.520633971680556e-05,1.3813678799101578e-05,0.00014393950404040234,0.00018295952255630948,tokens/kWh,1399216.5940486144,MB,1716.559872,9423.028224,0.0,8776.58112,7908.807168,s,10,15.9230078125,1.59230078125,0.013345955373199282,1.5917216796875,1.6044472290039062,1.6136519592285155,1.621015743408203,"[1.58943505859375, 1.5823006591796875, 1.622856689453125, 1.59400830078125, 1.587776123046875, 1.5979637451171875, 1.6024017333984375, 1.57622314453125, 1.59544482421875, 1.5745975341796874]",tokens/s,39.565389116083495,kWh,1.9106029095554497e-05,1.047178175753225e-05,7.814206251359755e-05,0.0001077198733666843,tokens/kWh,584850.2976376938,,s,630,15.920432117462157,0.025270527170574854,0.0005259845335797371,0.024986112594604495,0.025872179985046387,0.026040627574920653,0.026575135135650643,"[0.025967615127563477, 0.0251013126373291, 0.025792512893676758, 0.0257392635345459, 0.025785343170166015, 0.025649152755737304, 0.02571161651611328, 0.02573516845703125, 0.025673728942871094, 0.025788415908813478, 0.02553343963623047, 0.024972288131713868, 0.024854528427124024, 0.02485759925842285, 0.02486783981323242, 0.024902656555175783, 0.02485043144226074, 0.02485862350463867, 0.024808448791503908, 0.024852479934692383, 0.02531839942932129, 0.025815040588378906, 0.024887296676635744, 0.02488422393798828, 0.024861696243286133, 0.02485862350463867, 0.024813568115234375, 0.024847360610961915, 0.025742336273193358, 0.025887744903564453, 0.02575155258178711, 0.025828351974487306, 0.02574745559692383, 0.02575564765930176, 0.024814592361450196, 0.024818687438964843, 0.02489139175415039, 0.024929279327392577, 0.02488115119934082, 0.024829952239990235, 0.02574028778076172, 0.026424320220947265, 0.025981952667236328, 0.02573721694946289, 0.025665536880493164, 0.02569830322265625, 0.024755199432373046, 0.024801279067993166, 0.024856576919555663, 0.024878080368041993, 0.024919040679931642, 0.02553753662109375, 0.02513203239440918, 0.02494156837463379, 0.024848384857177733, 0.024827903747558593, 0.02482585525512695, 0.02488115119934082, 0.024772607803344726, 0.024870912551879884, 0.02496512031555176, 0.0248985595703125, 0.024770559310913084, 0.025636863708496094, 0.024877056121826172, 0.024801279067993166, 0.024964096069335938, 0.024802303314208983, 0.024819711685180663, 0.024757247924804687, 0.024770559310913084, 0.024748031616210937, 0.024819711685180663, 0.024805376052856445, 0.02498252868652344, 0.025043968200683595, 0.02476851272583008, 0.024840192794799806, 0.02476032066345215, 0.02475929641723633, 0.024790016174316407, 0.024802303314208983, 0.024765439987182617, 0.025790464401245116, 0.025672704696655273, 0.025616384506225585, 0.025618431091308593, 0.025588735580444336, 0.024859647750854492, 0.02517094421386719, 0.025214975357055663, 0.02494259262084961, 0.02494976043701172, 0.024807424545288087, 0.024896511077880858, 0.024770559310913084, 0.024845312118530274, 0.024845312118530274, 0.024855552673339845, 0.0247459831237793, 0.024828927993774414, 0.024845312118530274, 0.024812543869018554, 0.024954879760742187, 0.025644031524658203, 0.025815040588378906, 0.025818111419677735, 0.02570751953125, 0.025709568023681642, 0.025669631958007814, 0.025640960693359374, 0.025616384506225585, 0.024782848358154298, 0.024764415740966796, 0.025178112030029298, 0.02595123291015625, 0.025757696151733397, 0.025746431350708008, 0.024847360610961915, 
0.024795135498046874, 0.024937471389770507, 0.02487500762939453, 0.024817663192749022, 0.024360960006713867, 0.02449203109741211, 0.02712678337097168, 0.026254335403442384, 0.02612735939025879, 0.026315776824951172, 0.02574847984313965, 0.025656320571899413, 0.025820159912109376, 0.02637107276916504, 0.027232255935668945, 0.026200063705444337, 0.025825279235839844, 0.025758720397949218, 0.025797632217407225, 0.024766464233398438, 0.025144319534301757, 0.0259420166015625, 0.025705472946166992, 0.025675775527954102, 0.025671680450439452, 0.025634815216064453, 0.025425920486450194, 0.02526310348510742, 0.025644031524658203, 0.026137599945068358, 0.025833471298217774, 0.025790464401245116, 0.02489036750793457, 0.025205759048461913, 0.02574336051940918, 0.02570342445373535, 0.025651199340820312, 0.025619455337524414, 0.025622528076171876, 0.02567065620422363, 0.025686016082763673, 0.026835968017578125, 0.026198015213012696, 0.026257408142089843, 0.02589388847351074, 0.026149887084960938, 0.02567065620422363, 0.02570342445373535, 0.02609766387939453, 0.02615193557739258, 0.02575564765930176, 0.025643007278442383, 0.02573209571838379, 0.02571673583984375, 0.025819135665893556, 0.02573516845703125, 0.025779199600219727, 0.025769983291625977, 0.025807872772216797, 0.025797632217407225, 0.025871360778808594, 0.026015743255615235, 0.025783296585083007, 0.025778175354003906, 0.02577305603027344, 0.02488319969177246, 0.02489753532409668, 0.024848384857177733, 0.02570649528503418, 0.025006080627441408, 0.025639936447143553, 0.02488217544555664, 0.0247511043548584, 0.024837120056152344, 0.024761344909667967, 0.024824832916259764, 0.02471833610534668, 0.024793088912963866, 0.0247459831237793, 0.02478387260437012, 0.02470604705810547, 0.02483404731750488, 0.02536960029602051, 0.02454630470275879, 0.024592384338378907, 0.02492006492614746, 0.025124864578247072, 0.02531532859802246, 0.02484223937988281, 0.024854528427124024, 0.024790016174316407, 0.024989696502685548, 0.02590105628967285, 0.025964544296264647, 0.025664512634277343, 0.025886720657348632, 0.025793535232543945, 0.02533683204650879, 0.025615360260009764, 0.025684991836547853, 0.025669631958007814, 0.025793535232543945, 0.025694208145141603, 0.02573516845703125, 0.02597478485107422, 0.02632089614868164, 0.026037248611450195, 0.025861120223999022, 0.025790464401245116, 0.02572185516357422, 0.025651199340820312, 0.025746431350708008, 0.02567884826660156, 0.02593280029296875, 0.025805824279785155, 0.025881599426269532, 0.02565836715698242, 0.024781824111938477, 0.02490777587890625, 0.024855552673339845, 0.025035776138305665, 0.024856576919555663, 0.02484121513366699, 0.024801279067993166, 0.02487603187561035, 0.024816640853881834, 0.024896511077880858, 0.0252938232421875, 0.0251013126373291, 0.024830976486206056, 0.02588569641113281, 0.025842687606811524, 0.02571468734741211, 0.02555801582336426, 0.02492416000366211, 0.024748031616210937, 0.024773632049560547, 0.025448448181152345, 0.024816640853881834, 0.02548940849304199, 0.02572800064086914, 0.02574950408935547, 0.025798656463623046, 0.025879552841186523, 0.02572287940979004, 0.025652223587036133, 0.02572390365600586, 0.02471014404296875, 0.02472857666015625, 0.024851455688476562, 0.024809471130371095, 0.024748031616210937, 0.02485759925842285, 0.024750080108642578, 0.02477568054199219, 0.02482585525512695, 0.024845312118530274, 0.02476032066345215, 0.024779775619506835, 0.024785919189453123, 0.024782848358154298, 0.024782848358154298, 0.024781824111938477, 0.024748031616210937, 
0.025159679412841796, 0.02614374351501465, 0.02609459114074707, 0.02570751953125, 0.02572697639465332, 0.025857023239135742, 0.025677824020385744, 0.025622528076171876, 0.025660415649414063, 0.025819135665893556, 0.025672704696655273, 0.025677824020385744, 0.02572390365600586, 0.02574745559692383, 0.02573619270324707, 0.02573721694946289, 0.02570649528503418, 0.02567065620422363, 0.024770559310913084, 0.024797183990478516, 0.024786943435668944, 0.024804351806640625, 0.024815616607666017, 0.02472038459777832, 0.024798208236694336, 0.02478489685058594, 0.024785919189453123, 0.024790016174316407, 0.024802303314208983, 0.024772607803344726, 0.0247459831237793, 0.02488012886047363, 0.025630720138549806, 0.024809471130371095, 0.024757247924804687, 0.02486783981323242, 0.02486783981323242, 0.024804351806640625, 0.025069568634033205, 0.025145343780517578, 0.025276416778564452, 0.0249169921875, 0.02477670478820801, 0.024762367248535155, 0.024836095809936523, 0.02477670478820801, 0.025596927642822266, 0.025868288040161135, 0.02588979148864746, 0.025822208404541015, 0.02571878433227539, 0.024797183990478516, 0.024846336364746095, 0.02484121513366699, 0.024808448791503908, 0.024844287872314453, 0.024758272171020508, 0.02484121513366699, 0.024814592361450196, 0.024797183990478516, 0.024902656555175783, 0.024815616607666017, 0.024828927993774414, 0.024902656555175783, 0.027273216247558595, 0.025865215301513672, 0.024796159744262695, 0.025006080627441408, 0.02571571159362793, 0.025734144210815428, 0.02571161651611328, 0.02569113540649414, 0.02570342445373535, 0.025826303482055665, 0.025536512374877928, 0.025856000900268555, 0.025651199340820312, 0.02576383972167969, 0.025990144729614258, 0.025753599166870117, 0.026283008575439453, 0.02614169692993164, 0.025801727294921875, 0.02575564765930176, 0.02586419105529785, 0.02572697639465332, 0.026448896408081055, 0.026045440673828125, 0.025811967849731447, 0.025734144210815428, 0.02570649528503418, 0.024844287872314453, 0.02495692825317383, 0.024886272430419923, 0.02483404731750488, 0.025582592010498048, 0.024877056121826172, 0.024791040420532227, 0.02469068717956543, 0.024640512466430665, 0.024689664840698244, 0.02549452781677246, 0.025686016082763673, 0.025609216690063476, 0.024724479675292968, 0.024853504180908204, 0.02478387260437012, 0.024797183990478516, 0.025392127990722657, 0.02588876724243164, 0.02569625663757324, 0.02565836715698242, 0.025636863708496094, 0.025631744384765624, 0.025629695892333985, 0.025633792877197265, 0.02572492790222168, 0.025684991836547853, 0.025222143173217772, 0.025108480453491212, 0.025818111419677735, 0.026015743255615235, 0.02575257682800293, 0.02576383972167969, 0.025821184158325194, 0.025803775787353517, 0.025806848526000976, 0.025744384765625, 0.02712985610961914, 0.024859647750854492, 0.024818687438964843, 0.024786943435668944, 0.024848384857177733, 0.02475315284729004, 0.024945663452148437, 0.026452991485595705, 0.025975807189941406, 0.025709568023681642, 0.025680896759033203, 0.02568191909790039, 0.02566655921936035, 0.024770559310913084, 0.024807424545288087, 0.0247511043548584, 0.024840192794799806, 0.024778751373291014, 0.024802303314208983, 0.025614336013793947, 0.025992191314697266, 0.02573721694946289, 0.02571673583984375, 0.02575155258178711, 0.025857023239135742, 0.025785343170166015, 0.02578124809265137, 0.0257392635345459, 0.0257126407623291, 0.025753599166870117, 0.025589759826660157, 0.024781824111938477, 0.02470809555053711, 0.024845312118530274, 0.024846336364746095, 0.02478387260437012, 
0.02467635154724121, 0.024747007369995116, 0.024729600906372072, 0.024819711685180663, 0.0247142391204834, 0.024779775619506835, 0.0247511043548584, 0.024788991928100586, 0.024715263366699217, 0.024810495376586913, 0.024381439208984376, 0.02593894386291504, 0.025633792877197265, 0.02571059226989746, 0.025641984939575195, 0.02571673583984375, 0.025610240936279297, 0.02572287940979004, 0.025903104782104492, 0.02574950408935547, 0.025622528076171876, 0.025684991836547853, 0.025629695892333985, 0.025775104522705077, 0.025584640502929686, 0.024770559310913084, 0.02476748847961426, 0.024772607803344726, 0.02475315284729004, 0.024804351806640625, 0.024945663452148437, 0.02493337631225586, 0.024813568115234375, 0.02528358459472656, 0.024996864318847657, 0.024848384857177733, 0.024748031616210937, 0.024864767074584963, 0.024794111251831053, 0.02484121513366699, 0.024769535064697267, 0.02486579132080078, 0.024819711685180663, 0.024838144302368165, 0.024772607803344726, 0.024854528427124024, 0.024762367248535155, 0.024851455688476562, 0.024800256729125978, 0.02474393653869629, 0.024803327560424804, 0.02478387260437012, 0.024764415740966796, 0.024804351806640625, 0.024778751373291014, 0.024790016174316407, 0.024765439987182617, 0.026625024795532228, 0.025923583984375, 0.025686016082763673, 0.02470604705810547, 0.02467020797729492, 0.02476032066345215, 0.02468659210205078, 0.025255935668945313, 0.026009599685668947, 0.02574847984313965, 0.025652223587036133, 0.025659391403198242, 0.025606143951416017, 0.02568806457519531, 0.02591231918334961, 0.025349119186401366, 0.024829952239990235, 0.024799232482910157, 0.024827903747558593, 0.024805376052856445, 0.02477568054199219, 0.024762367248535155, 0.024782848358154298, 0.024814592361450196, 0.02473779106140137, 0.024774656295776368, 0.02475315284729004, 0.024770559310913084, 0.024823808670043947, 0.024773632049560547, 0.02477670478820801, 0.024814592361450196, 0.024802303314208983, 0.024879104614257814, 0.02487603187561035, 0.025035776138305665, 0.025848831176757812, 0.025794559478759766, 0.02572083282470703, 0.025631744384765624, 0.025798656463623046, 0.025757696151733397, 0.02474496078491211, 0.024738815307617186, 0.024748031616210937, 0.024754175186157225, 0.024798208236694336, 0.025100288391113282, 0.026043392181396483, 0.02618880081176758, 0.025831424713134765, 0.025805824279785155, 0.025840639114379883, 0.025818111419677735, 0.025823232650756835, 0.026063871383666993, 0.02631372833251953, 0.025805824279785155, 0.02575564765930176, 0.025994239807128908, 0.025842687606811524, 0.02568806457519531, 0.024793088912963866, 0.027089920043945313, 0.025886720657348632, 0.02488217544555664, 0.02592153549194336, 0.025839616775512695, 0.02570444869995117, 0.02594508743286133, 0.025804800033569338, 0.025744384765625, 0.024738815307617186, 0.025084928512573244, 0.02609868812561035, 0.02470604705810547, 0.02476748847961426, 0.02470809555053711, 0.024798208236694336, 0.02469171142578125, 0.024738815307617186, 0.025007104873657225, 0.024808448791503908, 0.02472652816772461, 0.02473779106140137, 0.024698879241943358, 0.02473779106140137, 0.02470809555053711, 0.024765439987182617, 0.02469375991821289, 0.024771583557128905, 0.02486579132080078, 0.024844287872314453, 0.024649728775024415, 0.025371648788452147, 0.024781824111938477, 0.024771583557128905, 0.024814592361450196, 0.024803327560424804, 0.02476748847961426, 0.024754175186157225, 0.02489753532409668, 0.0248668155670166, 0.024790016174316407, 0.024801279067993166, 0.024770559310913084, 0.02478387260437012, 
0.024763391494750975, 0.024820735931396484, 0.024993791580200195, 0.02484121513366699, 0.024762367248535155, 0.024833023071289064, 0.02476851272583008, 0.024770559310913084, 0.024838144302368165, 0.024815616607666017, 0.024814592361450196, 0.024790016174316407, 0.024809471130371095, 0.024797183990478516, 0.025032703399658202, 0.024995840072631836, 0.024781824111938477, 0.02507980728149414, 0.02490675163269043]",tokens/s,39.57179022226357,,,1,64,1,,, -4bit-awq-exllama-v1-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,2,2,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 304, in hf_raise_for_status - response.raise_for_status() - File ""/usr/local/lib/python3.10/dist-packages/requests/models.py"", line 1024, in raise_for_status - raise HTTPError(http_error_msg, response=self) -requests.exceptions.HTTPError: 404 Client Error: Not Found for url: https://huggingface.co/2/resolve/main/config.json - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1221, in hf_hub_download - return _hf_hub_download_to_cache_dir( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1325, in _hf_hub_download_to_cache_dir - _raise_on_head_call_error(head_call_error, force_download, local_files_only) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1823, in _raise_on_head_call_error - raise head_call_error - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1722, in _get_metadata_or_catch_error - metadata = get_hf_file_metadata(url=url, proxies=proxies, timeout=etag_timeout, headers=headers) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1645, in get_hf_file_metadata - r = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 372, in 
_request_wrapper - response = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 396, in _request_wrapper - hf_raise_for_status(response) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status - raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-66949384-64791c6830106fee54d20bde;b6eb150d-e320-45d0-b121-1e789f263508) - -Repository Not Found for url: https://huggingface.co/2/resolve/main/config.json. -Please make sure you specified the correct `repo_id` and `repo_type`. -If you are trying to access a private or gated repo, make sure you are authenticated. - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 425, in cached_file - raise EnvironmentError( -OSError: 2 is not a local folder and is not a valid model identifier listed on 'https://huggingface.co/models' -If this is a private repository, make sure to pass a token having permission to this repo either by logging in with `huggingface-cli login` or by passing `token=` - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,1,64,1,,, -4bit-awq-exllama-v1-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,tiiuae/falcon-40b,tiiuae/falcon-40b,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = 
Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - self.load_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 559, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3704, in from_pretrained - config = cls._autoset_attn_implementation( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1490, in _autoset_attn_implementation - config = cls._check_and_enable_sdpa( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1656, in _check_and_enable_sdpa - raise ValueError( -ValueError: FalconForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. 
Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,1,64,1,,, -4bit-awq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,EleutherAI/pythia-160m,EleutherAI/pythia-160m,cuda,0,42,,,True,,,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,gpt_neox,MB,914.538496,925.36832,0.0,295.698432,277.263872,s,1,7.24520654296875,7.24520654296875,0.0,7.24520654296875,7.24520654296875,7.24520654296875,7.24520654296875,[7.24520654296875],,kWh,5.917552040969237e-06,3.200360236746317e-06,8.15861763792114e-06,1.7276529915636693e-05,,MB,1428.856832,981.991424,0.0,335.54432,312.39168,s,18,0.36528450775146487,0.020293583763970266,3.761165881612667e-05,0.020300031661987305,0.02033110065460205,0.020345390605926515,0.02035807653427124,"[0.0202653751373291, 0.020317695617675782, 0.020262304306030272, 0.0202992000579834, 0.02030086326599121, 0.020342592239379884, 0.020263391494750975, 0.020213760375976563, 0.02028940773010254, 0.020221343994140627, 0.02036124801635742, 0.020304927825927733, 0.02032147216796875, 0.02032169532775879, 0.02029155158996582, 0.020271455764770508, 0.020310047149658204, 0.020326175689697266]",tokens/s,12614.824615379604,kWh,2.395869197317932e-07,1.312805881991722e-07,1.264630265703869e-06,1.6354977736348344e-06,tokens/kWh,156527269.0228427,MB,1453.162496,1007.157248,0.0,360.710144,312.39424,s,18,10.007863647460939,0.5559924248589408,0.006999274559303566,0.5534296569824219,0.5651827941894532,0.5659269866943359,0.5691935516357423,"[0.5639237670898437, 0.5528592529296875, 0.5648345947265625, 0.5536002197265625, 0.5500508422851562, 0.5515244750976562, 0.5523441162109375, 0.5505833129882812, 0.5478919067382813, 0.558171875, 0.5652064208984375, 0.5651726684570313, 0.5600555419921875, 0.5700101928710938, 0.5532590942382812, 0.5454507446289063, 0.5542156982421875, 0.5487089233398438]",tokens/s,113.31089630579686,kWh,6.520235405539136e-06,3.5727891078171593e-06,1.1069160465517545e-05,2.1162184978873847e-05,tokens/kWh,2977008.2844891837,,s,1134,10.000340095520032,0.008818642059541464,0.0002037141522075951,0.008710127830505371,0.009034742450714112,0.009077811574935913,0.009337190208435058,"[0.008833024024963379, 0.00902348804473877, 0.008971263885498047, 0.008972288131713867, 0.009041919708251953, 0.008991744041442871, 0.008919039726257324, 0.009110527992248535, 0.008972288131713867, 0.008961024284362793, 0.008943615913391113, 0.009037823677062988, 0.00894976043701172, 0.009048064231872559, 0.008950783729553222, 0.00897433567047119, 0.008904704093933105, 0.008831007957458496, 0.008994784355163574, 0.009005056381225587, 0.008955904006958008, 0.008973312377929688, 0.008991744041442871, 0.008995871543884277, 0.008922080039978027, 0.00894156837463379, 0.008969216346740723, 0.008773632049560547, 0.008675328254699707, 0.008664064407348633, 0.008707072257995606, 0.008804351806640624, 0.008668160438537598, 0.008697855949401855, 0.008698880195617676, 0.008951807975769043, 
0.009037823677062988, 0.009004032135009766, 0.008926207542419434, 0.008995840072631836, 0.008975359916687011, 0.009041983604431153, 0.009084863662719727, 0.009034751892089844, 0.009047039985656738, 0.00898252773284912, 0.009088000297546387, 0.009057279586791991, 0.009074687957763672, 0.008967167854309082, 0.00900812816619873, 0.009004032135009766, 0.009141247749328613, 0.008985600471496581, 0.00872447967529297, 0.008567808151245117, 0.008854559898376465, 0.009034720420837403, 0.00898252773284912, 0.008985600471496581, 0.009055232048034668, 0.009011199951171875, 0.009005120277404785, 0.008469504356384277, 0.008722432136535644, 0.008691712379455567, 0.00868556785583496, 0.008670207977294921, 0.008655872344970703, 0.008660991668701172, 0.00868556785583496, 0.008684543609619141, 0.008679424285888672, 0.008692735671997071, 0.008705023765563966, 0.008655872344970703, 0.008740863800048827, 0.008666111946105956, 0.0087193603515625, 0.008675328254699707, 0.008632320404052735, 0.008540160179138183, 0.008663040161132812, 0.008688639640808106, 0.008691712379455567, 0.008706048011779785, 0.008675359725952148, 0.008673248291015625, 0.00872447967529297, 0.00872652816772461, 0.008706048011779785, 0.008689663887023925, 0.008686592102050781, 0.008699904441833496, 0.00869478416442871, 0.008712191581726075, 0.008744959831237792, 0.008747008323669434, 0.00872447967529297, 0.008748031616210938, 0.008713215827941894, 0.00869478416442871, 0.008700927734375, 0.008679424285888672, 0.008698880195617676, 0.008768511772155761, 0.008666111946105956, 0.00870297622680664, 0.008698880195617676, 0.008993791580200196, 0.008920063972473144, 0.008967167854309082, 0.009025535583496093, 0.008967167854309082, 0.0090316801071167, 0.008972288131713867, 0.009014271736145019, 0.008979455947875976, 0.009042943954467773, 0.008979455947875976, 0.008980480194091797, 0.00897433567047119, 0.008993791580200196, 0.00899891185760498, 0.008966143608093263, 0.008979455947875976, 0.008433664321899414, 0.008859647750854491, 0.009019392013549805, 0.008968192100524902, 0.008975359916687011, 0.009027584075927735, 0.008942591667175292, 0.008879103660583497, 0.008658944129943847, 0.00872652816772461, 0.00852070426940918, 0.009143296241760255, 0.008978431701660156, 0.009076736450195312, 0.00894976043701172, 0.008999936103820801, 0.009044992446899413, 0.008978431701660156, 0.008961024284362793, 0.0091146240234375, 0.008953856468200684, 0.008970239639282226, 0.008971263885498047, 0.008987648010253906, 0.00911359977722168, 0.009270272254943847, 0.009318400382995605, 0.010053631782531738, 0.009273344039916993, 0.009040896415710448, 0.00900607967376709, 0.008969216346740723, 0.008948736190795899, 0.008942591667175292, 0.00892416000366211, 0.009082880020141602, 0.008945664405822755, 0.00894976043701172, 0.008824831962585449, 0.008960000038146973, 0.00898252773284912, 0.009025535583496093, 0.008963071823120117, 0.009019392013549805, 0.009009152412414552, 0.008919039726257324, 0.008670207977294921, 0.00889958381652832, 0.008667136192321777, 0.009002016067504883, 0.008991711616516114, 0.009020447731018066, 0.00898863983154297, 0.00902451229095459, 0.008968192100524902, 0.008989695549011231, 0.008943615913391113, 0.00900710391998291, 0.008863743782043456, 0.008677375793457032, 0.008654848098754882, 0.008679424285888672, 0.008696831703186036, 0.008995840072631836, 0.00897433567047119, 0.009069567680358886, 0.009012224197387696, 0.008983551979064941, 0.009009152412414552, 0.00903270435333252, 0.009019424438476562, 0.009021408081054687, 0.008993791580200196, 
0.0090316801071167, 0.008993791580200196, 0.00900812816619873, 0.008989695549011231, 0.009030655860900879, 0.009018367767333984, 0.009011199951171875, 0.008994815826416015, 0.008993791580200196, 0.008985600471496581, 0.008754176139831543, 0.008674304008483886, 0.008671232223510742, 0.008689663887023925, 0.008695808410644532, 0.008668224334716796, 0.008685503959655762, 0.008733695983886718, 0.008641535758972169, 0.008608768463134766, 0.008682496070861816, 0.008689663887023925, 0.008681535720825195, 0.008673215866088867, 0.008679424285888672, 0.008683520317077637, 0.008669183731079102, 0.008632320404052735, 0.008653823852539062, 0.008677375793457032, 0.008651776313781738, 0.008696831703186036, 0.008668160438537598, 0.00871014404296875, 0.008655872344970703, 0.008669183731079102, 0.008660991668701172, 0.008690688133239746, 0.008700927734375, 0.00870195198059082, 0.008695808410644532, 0.008740863800048827, 0.008641535758972169, 0.008733695983886718, 0.008656895637512207, 0.008650752067565918, 0.008673279762268067, 0.0086364164352417, 0.008573951721191407, 0.00871014404296875, 0.008671232223510742, 0.008637439727783204, 0.008648703575134278, 0.008421376228332519, 0.008654848098754882, 0.008712191581726075, 0.008671232223510742, 0.008678432464599609, 0.008664031982421876, 0.008662015914916991, 0.008698880195617676, 0.008662015914916991, 0.008652799606323243, 0.008687616348266602, 0.008666111946105956, 0.008753151893615722, 0.008751104354858399, 0.008673279762268067, 0.008690688133239746, 0.008678400039672851, 0.0086364164352417, 0.008644607543945313, 0.008739839553833008, 0.00871014404296875, 0.008590335845947266, 0.008633407592773437, 0.008693696022033692, 0.008667136192321777, 0.008680447578430176, 0.008683520317077637, 0.008692735671997071, 0.008676351547241211, 0.008643584251403809, 0.008665087699890137, 0.008691712379455567, 0.008670207977294921, 0.008655872344970703, 0.008669183731079102, 0.008670207977294921, 0.008684576034545899, 0.008721376419067383, 0.008655872344970703, 0.008664064407348633, 0.008679424285888672, 0.00869375991821289, 0.008665087699890137, 0.008688639640808106, 0.0086364164352417, 0.008687616348266602, 0.008680447578430176, 0.008682496070861816, 0.008679424285888672, 0.00871014404296875, 0.008541184425354004, 0.008648703575134278, 0.008676351547241211, 0.008649727821350098, 0.008654848098754882, 0.00869375991821289, 0.008886272430419923, 0.008700927734375, 0.010440704345703124, 0.00905628776550293, 0.009015263557434082, 0.00902451229095459, 0.00901632022857666, 0.008665087699890137, 0.009009152412414552, 0.008991744041442871, 0.008985600471496581, 0.008951807975769043, 0.008953856468200684, 0.008957951545715333, 0.008950783729553222, 0.008988672256469727, 0.008977408409118653, 0.008948736190795899, 0.009049087524414063, 0.008999936103820801, 0.008884223937988281, 0.00900710391998291, 0.009065471649169921, 0.008994879722595215, 0.008658880233764649, 0.008671232223510742, 0.008679424285888672, 0.008657919883728027, 0.008640512466430664, 0.008640512466430664, 0.008649727821350098, 0.008650752067565918, 0.008667136192321777, 0.008669183731079102, 0.008634367942810058, 0.008643584251403809, 0.008663040161132812, 0.008644607543945313, 0.008638463973999023, 0.008651776313781738, 0.008649727821350098, 0.00862003231048584, 0.008653823852539062, 0.008670207977294921, 0.008671232223510742, 0.008666111946105956, 0.008660991668701172, 0.008673279762268067, 0.00864668846130371, 0.008557536125183106, 0.008665087699890137, 0.008704000473022461, 0.00868556785583496, 
0.008645695686340332, 0.008695743560791015, 0.008679424285888672, 0.008639488220214844, 0.008686592102050781, 0.008656895637512207, 0.008659968376159668, 0.008644607543945313, 0.00871833610534668, 0.008653823852539062, 0.008666111946105956, 0.008668160438537598, 0.008655872344970703, 0.008670207977294921, 0.008625151634216309, 0.009052160263061524, 0.008733695983886718, 0.008421376228332519, 0.008670207977294921, 0.008686623573303222, 0.008710111618041992, 0.008678400039672851, 0.008578047752380372, 0.008761343955993652, 0.008978431701660156, 0.008989695549011231, 0.009178112030029297, 0.009020416259765626, 0.008991744041442871, 0.009022463798522949, 0.009019392013549805, 0.009003007888793945, 0.009019424438476562, 0.009030624389648438, 0.009001983642578124, 0.009005056381225587, 0.009014271736145019, 0.008999936103820801, 0.00902348804473877, 0.009053183555603026, 0.008962047576904298, 0.008876031875610351, 0.008669183731079102, 0.008671232223510742, 0.008660991668701172, 0.008660991668701172, 0.008731648445129395, 0.008658944129943847, 0.008670207977294921, 0.008657919883728027, 0.008634464263916015, 0.008572832107543945, 0.008640512466430664, 0.008665087699890137, 0.008684543609619141, 0.008641535758972169, 0.00872652816772461, 0.008642560005187988, 0.008676351547241211, 0.008631296157836914, 0.008674304008483886, 0.008678400039672851, 0.008680447578430176, 0.008664064407348633, 0.008662015914916991, 0.008648703575134278, 0.008688639640808106, 0.008672256469726563, 0.00869375991821289, 0.008652799606323243, 0.008715264320373535, 0.008662015914916991, 0.008696831703186036, 0.008651776313781738, 0.008650752067565918, 0.00870195198059082, 0.008656895637512207, 0.008669183731079102, 0.008644607543945313, 0.008594431877136231, 0.008398847579956055, 0.008635392189025879, 0.008675328254699707, 0.00871116828918457, 0.008652799606323243, 0.008637439727783204, 0.008645631790161134, 0.008671232223510742, 0.008652799606323243, 0.00869478416442871, 0.008659968376159668, 0.008676351547241211, 0.008680447578430176, 0.008695808410644532, 0.008675328254699707, 0.0087193603515625, 0.008764415740966798, 0.008695808410644532, 0.00872755241394043, 0.00869478416442871, 0.008658944129943847, 0.008683520317077637, 0.00873574447631836, 0.008550399780273438, 0.008531968116760253, 0.008535200119018555, 0.008428383827209472, 0.008590335845947266, 0.008699904441833496, 0.008663040161132812, 0.008658944129943847, 0.008648703575134278, 0.008663040161132812, 0.008679424285888672, 0.008664064407348633, 0.008648703575134278, 0.008675328254699707, 0.008650752067565918, 0.008643584251403809, 0.008655872344970703, 0.008699904441833496, 0.008697855949401855, 0.008652799606323243, 0.008660991668701172, 0.0086364164352417, 0.00870809555053711, 0.008691712379455567, 0.008667136192321777, 0.008652799606323243, 0.008651776313781738, 0.008647680282592773, 0.008676351547241211, 0.009198592185974122, 0.009076736450195312, 0.009005056381225587, 0.009257984161376954, 0.009596927642822266, 0.009051199913024902, 0.009109439849853516, 0.00900710391998291, 0.008983551979064941, 0.008970239639282226, 0.008828927993774414, 0.00839782428741455, 0.008644607543945313, 0.008644607543945313, 0.008648768424987793, 0.008638400077819824, 0.008684543609619141, 0.008627200126647949, 0.008715264320373535, 0.008660991668701172, 0.008659968376159668, 0.008648703575134278, 0.008683520317077637, 0.008638463973999023, 0.008660991668701172, 0.008662015914916991, 0.008671232223510742, 0.008679424285888672, 0.008660991668701172, 0.008581119537353516, 
0.008644607543945313, 0.008607744216918945, 0.008648703575134278, 0.0086364164352417, 0.008679424285888672, 0.008654848098754882, 0.008728575706481934, 0.00899071979522705, 0.008912896156311035, 0.008975359916687011, 0.008939519882202148, 0.008934399604797364, 0.008947711944580078, 0.008956928253173829, 0.008939519882202148, 0.008758272171020508, 0.008650752067565918, 0.008637439727783204, 0.008651776313781738, 0.008658944129943847, 0.008664064407348633, 0.008680447578430176, 0.008671232223510742, 0.008658944129943847, 0.008654848098754882, 0.008673279762268067, 0.008574975967407226, 0.008596480369567871, 0.008545280456542969, 0.008660991668701172, 0.008653823852539062, 0.008683520317077637, 0.008677375793457032, 0.008657919883728027, 0.008668160438537598, 0.00870911979675293, 0.008643584251403809, 0.008648703575134278, 0.008647680282592773, 0.008699904441833496, 0.008660991668701172, 0.008626175880432128, 0.008671232223510742, 0.008699904441833496, 0.008455167770385743, 0.008687616348266602, 0.008684543609619141, 0.008646656036376953, 0.008676351547241211, 0.008670207977294921, 0.008679424285888672, 0.008672256469726563, 0.008682496070861816, 0.008660991668701172, 0.00861695957183838, 0.008630271911621093, 0.008681471824645997, 0.008662015914916991, 0.008645631790161134, 0.008669183731079102, 0.008737792015075683, 0.008664064407348633, 0.008653823852539062, 0.008654848098754882, 0.008646656036376953, 0.008680447578430176, 0.008708160400390625, 0.008661952018737793, 0.008669183731079102, 0.008670207977294921, 0.008668160438537598, 0.008679424285888672, 0.008705023765563966, 0.009099264144897461, 0.00902143955230713, 0.009059359550476074, 0.009039839744567872, 0.008947711944580078, 0.008952863693237305, 0.008958944320678711, 0.008963071823120117, 0.009154560089111329, 0.008966143608093263, 0.008909824371337891, 0.009137151718139648, 0.008985600471496581, 0.009102335929870605, 0.008977408409118653, 0.008996864318847657, 0.008911871910095214, 0.008651776313781738, 0.008626175880432128, 0.008896512031555176, 0.009070591926574707, 0.009012224197387696, 0.009363455772399902, 0.009111552238464356, 0.008977439880371094, 0.008973279953002929, 0.008942591667175292, 0.00899891185760498, 0.009224191665649414, 0.009074687957763672, 0.009424896240234374, 0.009046015739440917, 0.008966143608093263, 0.008980480194091797, 0.008401920318603515, 0.008652799606323243, 0.008588288307189941, 0.008646656036376953, 0.008649727821350098, 0.008774656295776367, 0.009005056381225587, 0.008990752220153808, 0.008994784355163574, 0.009055232048034668, 0.009028608322143555, 0.008981504440307618, 0.008988672256469727, 0.00900812816619873, 0.0090316801071167, 0.008972288131713867, 0.008954879760742187, 0.009033727645874023, 0.009005056381225587, 0.008963071823120117, 0.009075712203979493, 0.008954879760742187, 0.009028608322143555, 0.008993791580200196, 0.009048064231872559, 0.009088000297546387, 0.00901632022857666, 0.008970239639282226, 0.009054207801818847, 0.008970239639282226, 0.00888319969177246, 0.008995840072631836, 0.009003007888793945, 0.008943615913391113, 0.008966143608093263, 0.00901734447479248, 0.008943615913391113, 0.008939519882202148, 0.008958975791931152, 0.0090316801071167, 0.008966143608093263, 0.00899071979522705, 0.00901632022857666, 0.008996864318847657, 0.008963071823120117, 0.00901734447479248, 0.009026592254638671, 0.00926204776763916, 0.0090316801071167, 0.009030655860900879, 0.009018367767333984, 0.009038847923278808, 0.008965120315551758, 0.008983551979064941, 0.008996864318847657, 
0.008988672256469727, 0.008950783729553222, 0.008902655601501466, 0.009012224197387696, 0.008991744041442871, 0.008985600471496581, 0.00901529598236084, 0.009020416259765626, 0.00861695957183838, 0.009000960350036622, 0.008956928253173829, 0.009001983642578124, 0.008960000038146973, 0.008976384162902832, 0.00897433567047119, 0.009019392013549805, 0.00900710391998291, 0.00899788761138916, 0.009307135581970214, 0.009073663711547851, 0.008773632049560547, 0.00869375991821289, 0.008682496070861816, 0.008674304008483886, 0.008699904441833496, 0.008700927734375, 0.008643584251403809, 0.008928256034851074, 0.008864768028259277, 0.00899891185760498, 0.008958975791931152, 0.009091072082519532, 0.008985600471496581, 0.00901734447479248, 0.008977408409118653, 0.008952832221984864, 0.008961024284362793, 0.00908083152770996, 0.00900710391998291, 0.008973312377929688, 0.009001983642578124, 0.00901529598236084, 0.008991744041442871, 0.008999936103820801, 0.008947711944580078, 0.009029631614685058, 0.008945664405822755, 0.008993791580200196, 0.008981504440307618, 0.009104384422302245, 0.008984576225280762, 0.008985600471496581, 0.00902143955230713, 0.009052160263061524, 0.008971263885498047, 0.009012224197387696, 0.008866815567016602, 0.009005056381225587, 0.008978431701660156, 0.008999936103820801, 0.009333760261535644, 0.009014271736145019, 0.008972288131713867, 0.009052160263061524, 0.008940544128417969, 0.009032735824584961, 0.00894153594970703, 0.008999936103820801, 0.008953856468200684, 0.009043968200683594, 0.009011232376098632, 0.008409088134765624, 0.008678400039672851, 0.00869375991821289, 0.00880947208404541, 0.008626175880432128, 0.008644607543945313, 0.008663040161132812, 0.008640512466430664, 0.008700927734375, 0.008691712379455567, 0.008658944129943847, 0.008615936279296875, 0.008637439727783204, 0.008681471824645997, 0.008744959831237792, 0.008679424285888672, 0.008667136192321777, 0.008691712379455567, 0.008698880195617676, 0.008662015914916991, 0.008720383644104004, 0.008681471824645997, 0.008713215827941894, 0.008665087699890137, 0.00870195198059082, 0.00878489589691162, 0.008706048011779785, 0.008723456382751465, 0.00869478416442871, 0.009242624282836913, 0.009333760261535644, 0.009074687957763672, 0.009235456466674804, 0.009395199775695801, 0.00901632022857666, 0.009019392013549805, 0.009065471649169921, 0.009046015739440917, 0.009010175704956054, 0.008956928253173829, 0.008911871910095214, 0.009062399864196777, 0.00897433567047119, 0.008969216346740723, 0.008989695549011231, 0.009050111770629882, 0.008948736190795899, 0.008995840072631836, 0.008972288131713867, 0.00901734447479248, 0.00911359977722168, 0.00903270435333252, 0.008987648010253906, 0.009050111770629882, 0.008979455947875976, 0.009019392013549805, 0.008957951545715333, 0.00939417552947998, 0.009137151718139648, 0.008999936103820801, 0.009018367767333984, 0.008944640159606934, 0.00902451229095459, 0.008729599952697753, 0.009163776397705077, 0.008756223678588868, 0.010134528160095215, 0.009612288475036621, 0.00987548828125, 0.009208831787109375, 0.009130975723266602, 0.009565183639526367, 0.009085951805114746, 0.008960000038146973, 0.008964096069335938, 0.008962047576904298, 0.008989695549011231, 0.009009152412414552, 0.008994815826416015, 0.00899788761138916, 0.008938495635986327, 0.008971263885498047, 0.008954879760742187, 0.008975359916687011, 0.00893337631225586, 0.00899891185760498, 0.008972288131713867, 0.008958975791931152, 0.008947711944580078, 0.009083904266357423, 0.008993791580200196, 0.008950783729553222, 
0.008934399604797364, 0.008885248184204102, 0.008946687698364257, 0.00897433567047119, 0.00898252773284912, 0.008995840072631836, 0.008936448097229004, 0.00898252773284912, 0.008961024284362793, 0.008968192100524902, 0.008966143608093263, 0.008996864318847657, 0.009020416259765626, 0.009000960350036622, 0.008938495635986327, 0.008954879760742187, 0.009040896415710448, 0.00912384033203125, 0.009047039985656738, 0.008992768287658692, 0.008967167854309082, 0.008996864318847657, 0.00897433567047119, 0.00899071979522705, 0.009091072082519532, 0.009117695808410644, 0.008994815826416015, 0.00900607967376709, 0.008946751594543457, 0.008945599555969238, 0.009019392013549805, 0.008976384162902832, 0.009011199951171875, 0.009079808235168458, 0.008392704010009766, 0.008690688133239746, 0.008630271911621093, 0.008655872344970703, 0.008658944129943847, 0.008644607543945313, 0.008647680282592773, 0.008662015914916991, 0.008677375793457032, 0.00881049633026123, 0.0087326717376709, 0.00870809555053711, 0.008670207977294921, 0.00868556785583496, 0.008677375793457032, 0.008658944129943847, 0.008707072257995606, 0.008642560005187988, 0.008699904441833496, 0.008651776313781738, 0.008646656036376953, 0.008540224075317383, 0.008670144081115723, 0.008643584251403809, 0.008653823852539062, 0.008679455757141113, 0.008649696350097656, 0.008633343696594239, 0.008675328254699707, 0.008643584251403809, 0.0086364164352417, 0.008641535758972169, 0.008632320404052735, 0.008634367942810058, 0.008688639640808106, 0.008649727821350098, 0.008637439727783204, 0.008688639640808106, 0.008879103660583497, 0.009273344039916993, 0.009034751892089844, 0.008996864318847657, 0.009044992446899413, 0.009044992446899413, 0.00899071979522705, 0.008995840072631836, 0.008985600471496581, 0.009034751892089844, 0.009026559829711914, 0.008928256034851074, 0.008928256034851074, 0.00909823989868164, 0.008968192100524902, 0.008972288131713867, 0.008942591667175292, 0.009038847923278808, 0.008955904006958008, 0.009020416259765626, 0.009014271736145019, 0.00873574447631836, 0.008683520317077637, 0.008650752067565918, 0.008662015914916991, 0.008380415916442872, 0.008665087699890137, 0.008672287940979004, 0.008637408256530761, 0.008612863540649414, 0.008623104095458984, 0.008648703575134278, 0.008643584251403809, 0.008681471824645997, 0.008637439727783204, 0.008614912033081054, 0.008673279762268067, 0.008642560005187988, 0.008566783905029298, 0.008700927734375, 0.008675359725952148, 0.00867734432220459, 0.008627200126647949, 0.008662015914916991, 0.008683520317077637, 0.008682496070861816, 0.008686592102050781, 0.008652799606323243, 0.008648703575134278, 0.008679424285888672, 0.008663040161132812, 0.008662015914916991, 0.008652799606323243, 0.008650752067565918, 0.008676351547241211, 0.00867740821838379, 0.008673248291015625, 0.008688639640808106, 0.00857091236114502, 0.008579039573669433, 0.008621055603027344, 0.008536064147949218, 0.008651776313781738, 0.008641535758972169, 0.00869375991821289, 0.008632320404052735, 0.008648703575134278, 0.008568832397460938, 0.008651776313781738, 0.008677375793457032, 0.008712191581726075, 0.008649727821350098, 0.008645631790161134, 0.008680447578430176, 0.008664064407348633, 0.008638463973999023, 0.008683520317077637, 0.008652799606323243, 0.008653823852539062, 0.008648703575134278, 0.008631296157836914, 0.008660991668701172, 0.008690688133239746, 0.008647680282592773, 0.008779775619506837, 0.008738816261291504, 0.008662015914916991, 0.008667167663574218, 0.008377344131469726, 0.008627200126647949, 
0.008642560005187988, 0.008667167663574218, 0.008654815673828125, 0.008573951721191407, 0.008606719970703124, 0.008677375793457032, 0.008659968376159668, 0.008648703575134278, 0.008656895637512207, 0.00872652816772461, 0.008532992362976074, 0.008560640335083008, 0.008543231964111327, 0.008530943870544434, 0.008541184425354004, 0.008509440422058106, 0.008641535758972169, 0.008677375793457032, 0.008639488220214844, 0.008654848098754882, 0.008660991668701172, 0.00891596794128418, 0.008747008323669434, 0.008690688133239746, 0.008558591842651368, 0.00862822437286377, 0.00852070426940918, 0.008524800300598144, 0.00861184024810791, 0.008677375793457032, 0.008646656036376953, 0.008665087699890137, 0.008635392189025879, 0.008627231597900391, 0.008728544235229492, 0.009338879585266113, 0.009043968200683594, 0.009026559829711914, 0.00910848045349121, 0.008989695549011231, 0.009060352325439454, 0.008983551979064941, 0.009073663711547851, 0.008970239639282226, 0.00899071979522705, 0.008979455947875976, 0.009038847923278808, 0.008980480194091797, 0.009050111770629882, 0.009060352325439454, 0.009074687957763672, 0.008992768287658692, 0.008994815826416015, 0.008976384162902832, 0.00899071979522705, 0.008963071823120117, 0.009019392013549805, 0.008979455947875976, 0.009014271736145019, 0.00900710391998291, 0.008904704093933105, 0.008424448013305665, 0.00870809555053711, 0.008650752067565918, 0.008649727821350098, 0.00862003231048584, 0.008630271911621093, 0.008656895637512207, 0.008687616348266602, 0.008680447578430176, 0.008672256469726563, 0.008664064407348633, 0.008674304008483886, 0.008662015914916991, 0.00872755241394043, 0.008668160438537598, 0.008682496070861816, 0.008680447578430176, 0.008625151634216309, 0.008649727821350098, 0.00872652816772461, 0.00871116828918457, 0.008692735671997071, 0.008655872344970703, 0.008665087699890137, 0.008656895637512207, 0.008707072257995606, 0.008532992362976074, 0.008649727821350098, 0.00872447967529297, 0.008679424285888672, 0.008640512466430664, 0.00868556785583496, 0.008622079849243165, 0.008648703575134278, 0.008605695724487305, 0.008644607543945313, 0.008650752067565918, 0.008755200386047364, 0.008614912033081054, 0.008652799606323243, 0.008621055603027344, 0.008696831703186036, 0.00890880012512207, 0.008705023765563966, 0.008656895637512207, 0.008660991668701172, 0.008649727821350098, 0.008667136192321777, 0.008922112464904786, 0.009056256294250489, 0.008971263885498047, 0.008918016433715821, 0.008698880195617676, 0.008672256469726563, 0.008659968376159668, 0.008521727561950684, 0.008663040161132812, 0.008683520317077637, 0.008687616348266602, 0.008964096069335938, 0.00898252773284912, 0.008965120315551758, 0.00899071979522705]",tokens/s,113.39614344796259,,,1,64,1,,, -4bit-awq-exllama-v1-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,meta-llama/Meta-Llama-3-70B,meta-llama/Meta-Llama-3-70B,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in 
benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - self.load_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3904, in from_pretrained - dispatch_model(model, **device_map_kwargs) - File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 489, in dispatch_model - model.to(device) - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 2796, in to - return super().to(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1173, in to - return self._apply(convert) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 779, in _apply - module._apply(fn) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 779, in _apply - module._apply(fn) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 779, in _apply - module._apply(fn) - [Previous line repeated 2 more times] - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 853, in _apply - self._buffers[key] = fn(buf) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1159, in convert - return t.to( -torch.cuda.OutOfMemoryError: CUDA out of memory. Tried to allocate 20.00 MiB. 
GPU - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,1,64,1,,, -4bit-awq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,Qwen/Qwen1.5-14B,Qwen/Qwen1.5-14B,cuda,0,42,,,True,,,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,qwen2,MB,8197.521408,12367.429632,0.0,11737.759744,11171.24352,s,1,12.95174609375,12.95174609375,0.0,12.95174609375,12.95174609375,12.95174609375,12.95174609375,[12.95174609375],,kWh,7.131160066109917e-05,3.906776427373088e-05,0.00013582233088005324,0.0002462016958148833,,MB,3954.97472,12386.304,0.0,11739.856896,10924.361728,s,10,24.147666748046873,2.414766674804688,7.864542617856288e-05,2.41477099609375,2.414829345703125,2.4148787841796873,2.4149183349609378,"[2.414775634765625, 2.414818359375, 2.41492822265625, 2.414805908203125, 2.4146259765625, 2.414766357421875, 2.41479443359375, 2.414763916015625, 2.414681396484375, 2.41470654296875]",tokens/s,106.01438336509506,kWh,2.8521788685623303e-05,1.5630829017329426e-05,0.00016475457624800515,0.00020890719395095787,tokens/kWh,1225424.5301868226,MB,3959.238656,12388.401152,0.0,11741.954048,10924.364288,s,10,20.7992626953125,2.07992626953125,0.019680958915852337,2.072152099609375,2.0923243408203125,2.113587829589844,2.130598620605469,"[2.134851318359375, 2.081269775390625, 2.070705078125, 2.06419921875, 2.081203369140625, 2.07214306640625, 2.08759912109375, 2.0721611328125, 2.0717783203125, 2.063352294921875]",tokens/s,30.289535221937562,kWh,2.4469192742290033e-05,1.341127573937986e-05,9.881010682578627e-05,0.00013669057530745616,tokens/kWh,460894.980201049,,s,630,20.797196310043322,0.0330114227143545,0.000758491528424199,0.03273779296875,0.03398799285888672,0.03415367774963379,0.0351625233078003,"[0.03471260833740234, 0.033951713562011716, 0.033726463317871096, 0.03381452941894531, 0.03426201629638672, 0.03386777496337891, 0.034769920349121096, 0.03361996841430664, 0.034249729156494144, 0.03395993423461914, 0.03389235305786133, 0.033454078674316406, 0.0340582389831543, 0.03399168014526367, 0.03455487823486328, 0.03366604614257813, 0.033983486175537106, 0.034181121826171876, 0.033974273681640625, 0.034339839935302735, 0.03383910369873047, 0.03405721664428711, 0.03414425659179687, 0.033942527770996093, 0.03392409515380859, 0.033500160217285156, 0.033334270477294925, 0.03316223907470703, 0.03300556945800781, 0.03542732620239258, 0.03465727996826172, 0.03399884796142578, 0.033562625885009766, 0.03392409515380859, 0.03415552139282227, 0.03386777496337891, 0.034116607666015625, 0.034111488342285154, 0.03397324752807617, 0.03423436737060547, 0.03402444839477539, 0.034151424407958986, 0.03402342224121094, 0.03401830291748047, 0.03396710586547851, 0.03406131362915039, 0.03408486557006836, 0.034088958740234376, 0.03399679946899414, 0.03371724700927734, 0.033729534149169925, 0.03350630569458008, 0.03389952087402344, 0.03302604675292969, 0.033552383422851564, 0.033288192749023435, 0.033233951568603516, 0.032837600708007814, 0.03343155288696289, 0.03314995193481445, 0.03345612716674805, 0.03397324752807617, 
0.033463294982910154, 0.03441356658935547, 0.03373158264160156, 0.033911808013916016, 0.03369472122192383, 0.03363532638549805, 0.033658878326416015, 0.03322470474243164, 0.033926143646240234, 0.0339159049987793, 0.03390771102905273, 0.03363123321533203, 0.033808383941650394, 0.033939456939697264, 0.034195457458496094, 0.032655361175537106, 0.032732158660888674, 0.03285606384277344, 0.03220172882080078, 0.032020481109619144, 0.032764926910400394, 0.03265740966796875, 0.03262771224975586, 0.03265331268310547, 0.032720897674560545, 0.03265024185180664, 0.032723968505859374, 0.03252633666992188, 0.03275980758666992, 0.03274137496948242, 0.03263180923461914, 0.032263168334960936, 0.032282623291015625, 0.032643104553222654, 0.03286729431152344, 0.03271680068969727, 0.03294515228271484, 0.03273523330688476, 0.032849918365478514, 0.03274956893920898, 0.03271680068969727, 0.03420467376708984, 0.034148353576660156, 0.03386777496337891, 0.033942527770996093, 0.032791553497314455, 0.032717823028564456, 0.03267583847045898, 0.03370598220825195, 0.032761856079101564, 0.032639999389648434, 0.032718849182128903, 0.03297587203979492, 0.03275161743164062, 0.032761856079101564, 0.032543743133544925, 0.032756736755371094, 0.03263590240478516, 0.03271475219726563, 0.03304550552368164, 0.03280284881591797, 0.03262255859375, 0.03249151992797852, 0.03248537445068359, 0.033821697235107424, 0.03276595306396484, 0.033298431396484376, 0.03318476867675781, 0.032140289306640625, 0.03258777618408203, 0.03234304046630859, 0.032323585510253904, 0.03265126419067383, 0.03278335952758789, 0.03285811233520508, 0.03285708618164063, 0.03256115341186523, 0.03265024185180664, 0.033035263061523434, 0.033535999298095705, 0.03229388809204101, 0.0334919662475586, 0.03263692855834961, 0.033426433563232424, 0.033031169891357424, 0.03260825729370117, 0.03260927963256836, 0.03237580871582031, 0.03213721466064453, 0.03264921569824219, 0.03265740966796875, 0.0321976318359375, 0.03366400146484375, 0.032538623809814454, 0.0324136962890625, 0.03275263977050781, 0.03329945755004883, 0.03266048049926758, 0.03308544158935547, 0.03506073760986328, 0.032865280151367186, 0.033111038208007815, 0.032712703704833986, 0.032742401123046876, 0.032778240203857424, 0.03297280120849609, 0.03265126419067383, 0.032732158660888674, 0.03245363235473633, 0.03223859024047852, 0.03224883270263672, 0.03300352096557617, 0.03271680068969727, 0.03234406280517578, 0.03192934417724609, 0.032840705871582034, 0.03265740966796875, 0.032656383514404294, 0.03258265686035156, 0.032648193359375, 0.032571392059326174, 0.03260416030883789, 0.032454654693603514, 0.034955265045166016, 0.03444121551513672, 0.03380940628051758, 0.03379302215576172, 0.033710079193115236, 0.03288678359985352, 0.03291648101806641, 0.03230310440063477, 0.032540672302246096, 0.032748542785644534, 0.033857536315917966, 0.033982463836669925, 0.03276697540283203, 0.03282227325439453, 0.032814079284667966, 0.032753662109375, 0.03237580871582031, 0.03352166366577149, 0.03321753692626953, 0.03268096160888672, 0.032557056427001956, 0.03250175857543945, 0.03321753692626953, 0.03407155227661133, 0.03221401596069336, 0.032702465057373044, 0.03231027221679687, 0.032314369201660156, 0.032525310516357424, 0.03375001525878906, 0.03256012725830078, 0.03224371337890625, 0.032254974365234376, 0.03232460784912109, 0.03235737609863281, 0.032282623291015625, 0.03230003356933594, 0.0323870735168457, 0.03266969680786133, 0.032514049530029294, 0.03273318481445313, 0.03261439895629883, 0.032661502838134765, 
0.03223654556274414, 0.03228364944458008, 0.03215052795410156, 0.032231422424316404, 0.03204403305053711, 0.03342438507080078, 0.03239424133300781, 0.033827838897705076, 0.03407155227661133, 0.03390259170532227, 0.0329246711730957, 0.032655361175537106, 0.03253145599365234, 0.03253350448608398, 0.03251609420776367, 0.03262054443359375, 0.03267891311645508, 0.03384524917602539, 0.032688129425048826, 0.0322529296875, 0.03261542510986328, 0.03264409637451172, 0.03234099197387695, 0.032611328125, 0.03460710525512695, 0.03406643295288086, 0.03393843078613281, 0.03343360137939453, 0.0338974723815918, 0.03359743881225586, 0.033188865661621096, 0.03307417678833008, 0.034375679016113284, 0.03520409774780273, 0.03428659057617187, 0.034002975463867186, 0.03303932952880859, 0.03241164779663086, 0.032707584381103515, 0.032718849182128903, 0.032671745300292966, 0.03338956832885742, 0.033825790405273434, 0.03344998550415039, 0.032833534240722655, 0.03266559982299805, 0.032709632873535156, 0.03258777618408203, 0.03268710327148437, 0.0339046401977539, 0.032740352630615234, 0.03256524658203125, 0.03262259292602539, 0.033797119140625, 0.032712703704833986, 0.03258572769165039, 0.03262054443359375, 0.03372851181030274, 0.0321638412475586, 0.032115711212158206, 0.03206041717529297, 0.03215871810913086, 0.032285694122314454, 0.032761856079101564, 0.0324136962890625, 0.0321976318359375, 0.032745471954345705, 0.03287039947509766, 0.032586753845214846, 0.03269529724121094, 0.03277414321899414, 0.03261337661743164, 0.03247513580322266, 0.03270041656494141, 0.03222732925415039, 0.03239116668701172, 0.03263385772705078, 0.03256217575073242, 0.03219148635864258, 0.03349913787841797, 0.03398758316040039, 0.03273830413818359, 0.032696319580078126, 0.03407257461547852, 0.034103294372558594, 0.03283865737915039, 0.03278643035888672, 0.033637374877929685, 0.03275571060180664, 0.03265228652954102, 0.032982017517089846, 0.03249868774414062, 0.03221299362182617, 0.03234201431274414, 0.03370393753051758, 0.03343462371826172, 0.03347558212280274, 0.03386777496337891, 0.033983486175537106, 0.032732158660888674, 0.03265024185180664, 0.03371212768554688, 0.032747520446777346, 0.03279872131347656, 0.03236249542236328, 0.032830463409423825, 0.032309249877929686, 0.032230400085449216, 0.03257548904418945, 0.032535552978515625, 0.033426433563232424, 0.03264921569824219, 0.03598643112182617, 0.034237438201904294, 0.032747520446777346, 0.03283660888671875, 0.032415744781494144, 0.03265433502197266, 0.03295846557617187, 0.03264921569824219, 0.03370086288452148, 0.033963008880615236, 0.0326932487487793, 0.03272294235229492, 0.03345305633544922, 0.03388927841186523, 0.03275980758666992, 0.03262771224975586, 0.03263180923461914, 0.033035263061523434, 0.03227033615112305, 0.03277312088012695, 0.03259084701538086, 0.032696319580078126, 0.033587200164794925, 0.03391795349121094, 0.03220684814453125, 0.03221196746826172, 0.03222732925415039, 0.03257548904418945, 0.032200702667236326, 0.03218739318847656, 0.03215359878540039, 0.03222528076171875, 0.03308031845092774, 0.032791553497314455, 0.032004096984863284, 0.03197542381286621, 0.03237068939208984, 0.032830463409423825, 0.03366912078857422, 0.033375232696533204, 0.032707584381103515, 0.03292160034179688, 0.0323133430480957, 0.032396289825439455, 0.032302078247070314, 0.03268096160888672, 0.03206553649902344, 0.03220479965209961, 0.032317440032958986, 0.032661502838134765, 0.03288063812255859, 0.033426433563232424, 0.03222220611572266, 0.03285094451904297, 0.03353395080566406, 
0.0325662727355957, 0.033993728637695314, 0.03380428695678711, 0.0340766716003418, 0.03259084701538086, 0.032851966857910156, 0.033983486175537106, 0.03271372985839844, 0.03402444839477539, 0.03349401473999023, 0.03255807876586914, 0.032249855041503905, 0.032606208801269534, 0.03330355072021484, 0.03380940628051758, 0.03278335952758789, 0.03243622589111328, 0.03361587142944336, 0.03224063873291016, 0.03270553588867187, 0.03268403244018555, 0.03257855987548828, 0.035958782196044925, 0.0384983024597168, 0.03370598220825195, 0.034320384979248046, 0.03279564666748047, 0.033942527770996093, 0.03383500671386719, 0.03238195037841797, 0.03271475219726563, 0.03292160034179688, 0.033982463836669925, 0.03281817626953125, 0.032679935455322266, 0.03275775909423828, 0.03385139083862305, 0.032639999389648434, 0.03261542510986328, 0.032712703704833986, 0.03264409637451172, 0.03370598220825195, 0.034100223541259765, 0.03286220932006836, 0.0325662727355957, 0.03218739318847656, 0.03370086288452148, 0.032661502838134765, 0.03210342407226562, 0.03230822372436523, 0.032489471435546875, 0.032279552459716795, 0.03247923278808594, 0.032198654174804685, 0.03202969741821289, 0.03220172882080078, 0.03206246566772461, 0.032105472564697264, 0.03205734252929687, 0.03210649490356445, 0.03214950561523437, 0.033821697235107424, 0.032679935455322266, 0.03265433502197266, 0.03226419067382812, 0.03233792114257812, 0.032244735717773435, 0.03233587265014649, 0.032471038818359374, 0.032487422943115234, 0.03206246566772461, 0.033873920440673826, 0.03235123062133789, 0.032118785858154295, 0.03306598281860352, 0.03207372665405273, 0.03201536178588867, 0.03241164779663086, 0.03245568084716797, 0.03215359878540039, 0.03227033615112305, 0.032230400085449216, 0.03222118377685547, 0.032210945129394535, 0.03209318542480469, 0.033658878326416015, 0.03266764831542969, 0.03348889541625977, 0.03228672027587891, 0.03260723114013672, 0.03225804901123047, 0.03324313735961914, 0.033977344512939454, 0.03397017669677734, 0.03407257461547852, 0.034116607666015625, 0.03411251068115234, 0.03408486557006836, 0.0339681282043457, 0.03370905685424805, 0.03388108825683594, 0.0341739501953125, 0.03549491119384766, 0.03448627090454102, 0.033982463836669925, 0.03349708938598633, 0.0333383674621582, 0.033247230529785156, 0.03378585433959961, 0.03817062377929688, 0.0344637451171875, 0.03305779266357422, 0.03396505737304688, 0.0328908805847168, 0.03369574356079102, 0.03385139083862305, 0.03393638229370117, 0.03278540802001953, 0.03268096160888672, 0.03284377670288086, 0.032740352630615234, 0.032737281799316405, 0.032699390411376955, 0.03275980758666992, 0.03285094451904297, 0.03273011016845703, 0.032729087829589845, 0.03290419387817383, 0.03390156936645508, 0.03282636642456055, 0.03245772933959961, 0.032661502838134765, 0.0331253776550293, 0.033993728637695314, 0.032758785247802735, 0.032231422424316404, 0.03266252899169922, 0.03268505477905274, 0.03262771224975586, 0.033753089904785157, 0.033350654602050785, 0.03226009750366211, 0.03263283157348633, 0.03380223846435547, 0.0325928955078125, 0.03263590240478516, 0.03218636703491211, 0.03227545547485351, 0.032535552978515625, 0.03266457748413086, 0.03259801483154297, 0.032418815612792966, 0.03220991897583008, 0.03226726531982422, 0.03263385772705078, 0.03273113632202149, 0.03263590240478516, 0.03226828765869141, 0.03307212829589844, 0.03213619232177734, 0.03223756790161133, 0.03217407989501953, 0.032121856689453124, 0.03273113632202149, 0.032435199737548825, 0.032331775665283204, 0.03240959930419922, 
0.03265331268310547, 0.032655361175537106, 0.03273932647705078, 0.03279359817504883, 0.03220479965209961, 0.03380121612548828, 0.032824321746826174, 0.032418815612792966, 0.03287449645996094, 0.03265126419067383, 0.032626686096191404, 0.03241164779663086, 0.032702465057373044, 0.032679935455322266, 0.03306393432617188, 0.03244646453857422, 0.03256115341186523, 0.032745471954345705, 0.03240140914916992, 0.0321638412475586, 0.03299123382568359, 0.03445862579345703, 0.033157119750976564, 0.032606208801269534, 0.032696319580078126, 0.03267071914672852, 0.03276595306396484, 0.032551937103271485, 0.03290521621704102, 0.032824321746826174, 0.032595966339111326, 0.032628734588623046, 0.03267583847045898, 0.03310079956054687, 0.03256524658203125, 0.03218431854248047, 0.03223244857788086, 0.032249855041503905, 0.032903167724609376, 0.032778240203857424, 0.03267686462402344, 0.032290817260742184, 0.032492542266845705, 0.033185791015625, 0.033339393615722655, 0.0336814079284668, 0.03257958221435547, 0.032933887481689454, 0.032546817779541014, 0.03226931381225586, 0.03221913528442383, 0.03385651016235352, 0.032922622680664065, 0.03259904098510742, 0.032688129425048826, 0.03250790405273438, 0.03232153701782227, 0.03264409637451172, 0.032331775665283204, 0.032216064453125, 0.03270041656494141, 0.033844223022460936, 0.033018878936767575, 0.03241984176635742, 0.03278950500488281, 0.032807937622070314, 0.0326379508972168, 0.03269836807250977]",tokens/s,30.292544754975548,,,1,64,1,,, -4bit-awq-exllama-v1-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,mistralai/Mistral-7B-v0.1,mistralai/Mistral-7B-v0.1,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - self.load_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return 
model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3907, in from_pretrained - hf_quantizer.postprocess_model(model) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/base.py"", line 195, in postprocess_model - return self._process_model_after_weight_loading(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/quantizer_awq.py"", line 107, in _process_model_after_weight_loading - model = post_init_awq_exllama_modules(model, self.quantization_config.exllama_config) - File ""/usr/local/lib/python3.10/dist-packages/transformers/integrations/awq.py"", line 462, in post_init_awq_exllama_modules - model = exllama_post_init(model) - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllama.py"", line 144, in exllama_post_init - submodule.post_init() - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllama.py"", line 77, in post_init - self.q4 = exl_ext.make_q4( -NameError: name 'exl_ext' is not defined - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,1,64,1,,, -4bit-awq-exllama-v1-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,google/gemma-7b,google/gemma-7b,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 304, in hf_raise_for_status - response.raise_for_status() - File ""/usr/local/lib/python3.10/dist-packages/requests/models.py"", line 1024, in raise_for_status - raise HTTPError(http_error_msg, response=self) -requests.exceptions.HTTPError: 403 Client Error: Forbidden for url: https://huggingface.co/google/gemma-7b/resolve/main/config.json - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1722, in _get_metadata_or_catch_error - metadata = get_hf_file_metadata(url=url, proxies=proxies, timeout=etag_timeout, headers=headers) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1645, in get_hf_file_metadata - r = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 372, in _request_wrapper - response = 
_request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 396, in _request_wrapper - hf_raise_for_status(response) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 367, in hf_raise_for_status - raise HfHubHTTPError(message, response=response) from e -huggingface_hub.utils._errors.HfHubHTTPError: (Request ID: Root=1-66948173-0fc09be31629c0fa1e00a691;78481430-3770-441c-b657-eca5ac09d6ac) - -403 Forbidden: Please enable access to public gated repositories in your fine-grained token settings to view this repository.. -Cannot access content at: https://huggingface.co/google/gemma-7b/resolve/main/config.json. -If you are trying to create or update content,make sure you have a token with the `write` role. - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1221, in hf_hub_download - return _hf_hub_download_to_cache_dir( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1325, in _hf_hub_download_to_cache_dir - _raise_on_head_call_error(head_call_error, force_download, local_files_only) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1826, in _raise_on_head_call_error - raise LocalEntryNotFoundError( -huggingface_hub.utils._errors.LocalEntryNotFoundError: An error happened while trying to locate the file on the Hub and we cannot find the requested files in the local cache. Please check your connection and try again or make sure your Internet connection is on. 
- -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 445, in cached_file - raise EnvironmentError( -OSError: We couldn't connect to 'https://huggingface.co' to load this file, couldn't find it in the cached files and it looks like google/gemma-7b is not the path to a directory containing a file named config.json. -Checkout your internet connection or see how to run the library in offline mode at 'https://huggingface.co/docs/transformers/installation#offline-mode'. 
- -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,1,64,1,,, -4bit-awq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,Qwen/Qwen1.5-1.8B,Qwen/Qwen1.5-1.8B,cuda,0,42,,,True,,,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,qwen2,MB,1862.094848,3095.92064,0.0,2466.250752,2401.696256,s,1,8.8702890625,8.8702890625,0.0,8.8702890625,8.8702890625,8.8702890625,8.8702890625,[8.8702890625],,kWh,2.2931746308993047e-05,1.2552504523680596e-05,3.562363960996073e-05,7.110789044263438e-05,,MB,1895.165952,3326.60736,0.0,2680.160256,2582.175744,s,10,2.3321211395263672,0.23321211395263672,4.852936585903628e-05,0.23321716308593748,0.2332451889038086,0.23327531204223634,0.23329941055297854,"[0.23323849487304688, 0.23323394775390624, 0.23312208557128905, 0.23313938903808593, 0.23321090698242186, 0.23330543518066407, 0.23321391296386718, 0.23322041320800782, 0.23322621154785156, 0.23321034240722657]",tokens/s,1097.7131318829831,kWh,2.7584710172961957e-06,1.5115099296232824e-06,1.6808702077441342e-05,2.107868302436082e-05,tokens/kWh,12144971.282320559,MB,1898.999808,3326.60736,0.0,2680.160256,2582.178304,s,10,12.408852416992186,1.2408852416992187,0.010283887800798553,1.2389793090820311,1.2553709716796875,1.2562504272460937,1.2569539916992187,"[1.237742919921875, 1.2300057373046875, 1.2377471923828125, 1.2256512451171875, 1.24021142578125, 1.2433402099609374, 1.230608154296875, 1.2512401123046875, 1.255175537109375, 1.2571298828125]",tokens/s,50.77020652911491,kWh,1.4567728892700435e-05,7.983157242845399e-06,3.1605225542564395e-05,5.415611167811023e-05,tokens/kWh,1163303.6059615123,,s,630,12.407087099075305,0.019693789046151294,0.00040836963443242137,0.01948876762390137,0.020199526977539062,0.020353381824493408,0.020855224742889405,"[0.02026188850402832, 0.020544511795043945, 0.020141056060791016, 0.020788223266601562, 0.02010419273376465, 0.019709951400756837, 0.019751935958862304, 0.01942937660217285, 0.019292160034179686, 0.019494911193847657, 0.01942425537109375, 0.019804159164428712, 0.02001919937133789, 0.019151872634887695, 0.019991552352905274, 0.020033536911010744, 0.019375104904174805, 0.019353599548339845, 0.01941094398498535, 0.019336191177368164, 0.01928294372558594, 0.019170303344726563, 0.019385343551635743, 0.019396608352661132, 0.019401727676391603, 0.019307519912719725, 0.019281919479370118, 0.019349504470825195, 0.019354623794555666, 0.019319807052612305, 0.01944063949584961, 0.019380224227905272, 0.019307519912719725, 0.019325952529907226, 0.019208192825317383, 0.019340288162231444, 0.019375104904174805, 0.019358720779418945, 0.01930342483520508, 0.019311616897583008, 0.019321855545043946, 0.019371007919311522, 0.019389440536499023, 0.019355648040771483, 0.019341312408447265, 0.020134912490844727, 0.020410367965698242, 0.020115455627441405, 0.02018611145019531, 0.019589120864868165, 0.02006630325317383, 0.02011238479614258, 0.02005504035949707, 0.02001817512512207, 0.020048896789550782, 0.02007347106933594, 0.019510271072387696, 0.019481599807739256, 
0.01979801559448242, 0.01961676788330078, 0.01947340774536133, 0.01965977668762207, 0.020402175903320312, 0.01922969627380371, 0.019333120346069335, 0.019392511367797852, 0.01943142318725586, 0.019467264175415038, 0.01946828842163086, 0.01945395278930664, 0.019224576950073242, 0.019397632598876953, 0.019351551055908203, 0.019369983673095705, 0.019775487899780272, 0.01946316719055176, 0.019381248474121093, 0.019314687728881837, 0.019354623794555666, 0.01945497512817383, 0.019362815856933592, 0.01942527961730957, 0.019380224227905272, 0.01939558410644531, 0.019300352096557616, 0.01944473648071289, 0.019281919479370118, 0.01940787124633789, 0.019365888595581054, 0.01942937660217285, 0.019332096099853514, 0.019785728454589844, 0.019720191955566405, 0.019388416290283202, 0.019362815856933592, 0.019360767364501954, 0.019318784713745117, 0.019317760467529296, 0.019357696533203125, 0.01942937660217285, 0.019931135177612306, 0.01941606330871582, 0.019354623794555666, 0.019371007919311522, 0.019345407485961915, 0.02009600067138672, 0.020075519561767577, 0.019979263305664064, 0.019962879180908204, 0.02002841567993164, 0.019989503860473632, 0.020102144241333008, 0.01986764717102051, 0.020116479873657226, 0.02000588798522949, 0.020025344848632814, 0.01981439971923828, 0.019409919738769533, 0.01923788833618164, 0.019385343551635743, 0.01948569679260254, 0.01946009635925293, 0.019376127243041993, 0.019355648040771483, 0.019381248474121093, 0.01922662353515625, 0.019770368576049805, 0.019562496185302734, 0.019312639236450196, 0.01939455986022949, 0.019406848907470704, 0.019458047866821288, 0.019500032424926757, 0.01944371223449707, 0.019372032165527343, 0.019405824661254883, 0.01944063949584961, 0.019414016723632813, 0.019252223968505858, 0.019358720779418945, 0.019734527587890623, 0.019760128021240234, 0.019479551315307618, 0.01943654441833496, 0.020273151397705077, 0.02022809600830078, 0.02003763198852539, 0.0200130558013916, 0.020159488677978517, 0.020297727584838866, 0.020170751571655272, 0.01927168083190918, 0.019511295318603517, 0.019549184799194336, 0.019475456237792968, 0.019451904296875, 0.019742719650268553, 0.019586048126220702, 0.019389440536499023, 0.019537919998168944, 0.019553279876708983, 0.0194467830657959, 0.0194201602935791, 0.019422208786010742, 0.019753984451293945, 0.021102592468261717, 0.02022604751586914, 0.02012876892089844, 0.02008780860900879, 0.02006937599182129, 0.02002739143371582, 0.020074495315551756, 0.02007961654663086, 0.01968332862854004, 0.019371007919311522, 0.019397632598876953, 0.019294208526611328, 0.019284992218017577, 0.019309568405151366, 0.019361791610717775, 0.019367935180664063, 0.019520511627197267, 0.019341312408447265, 0.019352575302124024, 0.019334144592285156, 0.02018611145019531, 0.01944780731201172, 0.019355648040771483, 0.01937919998168945, 0.02089779281616211, 0.01989836883544922, 0.020001792907714845, 0.02004172706604004, 0.01942835235595703, 0.019357696533203125, 0.019348480224609374, 0.01943142318725586, 0.019522560119628905, 0.019366912841796875, 0.01937919998168945, 0.019349504470825195, 0.019349504470825195, 0.019380224227905272, 0.019360767364501954, 0.019334144592285156, 0.019422208786010742, 0.019312639236450196, 0.019376127243041993, 0.01946419143676758, 0.01945702362060547, 0.019346431732177736, 0.0194334716796875, 0.01927884864807129, 0.019337215423583985, 0.019475456237792968, 0.01941196823120117, 0.019219455718994142, 0.019335168838500977, 0.019380224227905272, 0.01939967918395996, 0.01942425537109375, 0.019397632598876953, 
0.019384319305419923, 0.019346431732177736, 0.019325952529907226, 0.019324928283691405, 0.019133440017700197, 0.019149824142456053, 0.019174400329589843, 0.019152896881103516, 0.019392511367797852, 0.01939455986022949, 0.019404800415039062, 0.019569664001464843, 0.019954687118530275, 0.019513343811035155, 0.019316736221313476, 0.019306495666503908, 0.019344383239746094, 0.0195020809173584, 0.0200263671875, 0.01968639945983887, 0.019511295318603517, 0.01943756866455078, 0.019360767364501954, 0.019331071853637697, 0.019330047607421876, 0.019418111801147463, 0.019369983673095705, 0.01944063949584961, 0.019543039321899415, 0.0194150390625, 0.019588096618652344, 0.019449855804443358, 0.019375104904174805, 0.019144704818725586, 0.01904025650024414, 0.019347455978393553, 0.019378175735473634, 0.01942118453979492, 0.019418111801147463, 0.019290111541748048, 0.019475456237792968, 0.019393535614013673, 0.01945497512817383, 0.019347455978393553, 0.019347455978393553, 0.019322879791259767, 0.019367935180664063, 0.019349504470825195, 0.019506175994873046, 0.019366912841796875, 0.019405824661254883, 0.019361791610717775, 0.01923481559753418, 0.01985843276977539, 0.01971609687805176, 0.019397632598876953, 0.01947238349914551, 0.020124671936035156, 0.020185087203979494, 0.02004275131225586, 0.020121599197387697, 0.020016128540039063, 0.01999564743041992, 0.02003455924987793, 0.020082687377929686, 0.020139007568359374, 0.02007142448425293, 0.020322303771972656, 0.02005606460571289, 0.020105215072631837, 0.02004787254333496, 0.019359743118286133, 0.019308544158935546, 0.019489791870117186, 0.019317760467529296, 0.019327999114990235, 0.019326976776123047, 0.019332096099853514, 0.019385343551635743, 0.019384319305419923, 0.020067327499389647, 0.020102144241333008, 0.0200949764251709, 0.020000768661499024, 0.020067327499389647, 0.020135936737060548, 0.020031488418579102, 0.019985408782958985, 0.020116479873657226, 0.02003046417236328, 0.019983360290527344, 0.019991552352905274, 0.02002227210998535, 0.019740671157836915, 0.01985638427734375, 0.019405824661254883, 0.020164608001708984, 0.019575807571411134, 0.019169279098510742, 0.019292160034179686, 0.019321855545043946, 0.019375104904174805, 0.019331071853637697, 0.019335168838500977, 0.019347455978393553, 0.019339263916015623, 0.019377151489257814, 0.019374080657958984, 0.019347455978393553, 0.019308544158935546, 0.019371007919311522, 0.01948467254638672, 0.019438592910766602, 0.019386367797851564, 0.019337215423583985, 0.019376127243041993, 0.019361791610717775, 0.01943449592590332, 0.019317760467529296, 0.019368959426879884, 0.019283967971801756, 0.019360767364501954, 0.019414016723632813, 0.019412992477416992, 0.01920204734802246, 0.019422208786010742, 0.01944780731201172, 0.019770368576049805, 0.02004787254333496, 0.02001919937133789, 0.020148223876953125, 0.01942425537109375, 0.020831232070922853, 0.02045132827758789, 0.02008166313171387, 0.020023296356201172, 0.020342784881591795, 0.020920320510864256, 0.020389888763427736, 0.020248575210571287, 0.020136959075927736, 0.020151296615600587, 0.020320255279541014, 0.020205568313598633, 0.020215808868408205, 0.020256767272949217, 0.020124671936035156, 0.02049843215942383, 0.020131839752197265, 0.019903488159179687, 0.020174848556518556, 0.020016128540039063, 0.019615743637084963, 0.019384319305419923, 0.01941196823120117, 0.019833856582641602, 0.01966080093383789, 0.019388416290283202, 0.01943142318725586, 0.019478527069091797, 0.019542015075683594, 0.019322879791259767, 0.019329023361206055, 
0.019203071594238282, 0.019655679702758787, 0.01948057556152344, 0.019487743377685548, 0.019366912841796875, 0.019376127243041993, 0.01944063949584961, 0.019355648040771483, 0.01945907211303711, 0.01944473648071289, 0.019451904296875, 0.019371007919311522, 0.019510271072387696, 0.019296255111694336, 0.019412992477416992, 0.01939967918395996, 0.01942527961730957, 0.019523584365844726, 0.019533824920654298, 0.019430400848388672, 0.01947340774536133, 0.019519487380981446, 0.019548160552978516, 0.019414016723632813, 0.019922943115234376, 0.0196997127532959, 0.019619840621948242, 0.020141056060791016, 0.020100095748901366, 0.020048896789550782, 0.019981311798095702, 0.020199424743652345, 0.020130815505981444, 0.020050943374633787, 0.020023296356201172, 0.01970278358459473, 0.01945497512817383, 0.01945088005065918, 0.01925836753845215, 0.019338239669799806, 0.019313663482666017, 0.019373056411743163, 0.019400703430175782, 0.019400703430175782, 0.019327999114990235, 0.019358720779418945, 0.019400703430175782, 0.019527679443359376, 0.019519487380981446, 0.01948876762390137, 0.01965977668762207, 0.01924812889099121, 0.01948057556152344, 0.019359743118286133, 0.019360767364501954, 0.019353599548339845, 0.020361215591430663, 0.019372032165527343, 0.019365888595581054, 0.019569664001464843, 0.019409919738769533, 0.019491840362548828, 0.019520511627197267, 0.019490816116333007, 0.019548160552978516, 0.01970278358459473, 0.019575807571411134, 0.01942118453979492, 0.019586048126220702, 0.019386367797851564, 0.01948467254638672, 0.019474431991577147, 0.01946931266784668, 0.0194652156829834, 0.020341760635375978, 0.020102144241333008, 0.02008678436279297, 0.020125696182250977, 0.020616191864013672, 0.020312063217163084, 0.020207616806030275, 0.020147199630737304, 0.019487743377685548, 0.019365888595581054, 0.019168256759643554, 0.020057088851928712, 0.020677631378173827, 0.02042163276672363, 0.02044313621520996, 0.02024345588684082, 0.01987481689453125, 0.020146175384521483, 0.020170751571655272, 0.02002227210998535, 0.020727807998657227, 0.02036735916137695, 0.019739648818969727, 0.01963827133178711, 0.019284992218017577, 0.019106815338134766, 0.01920921516418457, 0.019362815856933592, 0.02003660774230957, 0.01968230438232422, 0.0200581111907959, 0.02001203155517578, 0.02007142448425293, 0.020142080307006836, 0.02006425666809082, 0.020168703079223634, 0.020145151138305666, 0.019489791870117186, 0.020123647689819335, 0.020114431381225584, 0.020149248123168945, 0.019953664779663087, 0.020343807220458983, 0.01983897590637207, 0.019515392303466796, 0.02046156883239746, 0.02030182456970215, 0.020011007308959963, 0.020001792907714845, 0.019596288681030274, 0.02023321533203125, 0.02142310333251953, 0.020171775817871093, 0.01998847961425781, 0.019098623275756836, 0.01923891258239746, 0.019422208786010742, 0.019478527069091797, 0.020200447082519533, 0.020092927932739257, 0.020057088851928712, 0.020136959075927736, 0.019558399200439454, 0.019156991958618166, 0.019360767364501954, 0.019418111801147463, 0.01963724708557129, 0.019952640533447266, 0.02004377555847168, 0.02040729522705078, 0.01906175994873047, 0.019180543899536134, 0.019514368057250975, 0.01921433639526367, 0.019182592391967773, 0.01947750473022461, 0.019786752700805665, 0.02021887969970703, 0.020113407135009767, 0.02008780860900879, 0.01999667167663574, 0.02006937599182129, 0.020143104553222657, 0.020526079177856444, 0.020296703338623046, 0.02007244873046875, 0.02010419273376465, 0.02002227210998535, 0.020152320861816408, 0.0202106876373291, 
0.020172800064086914, 0.020768768310546876, 0.019611648559570313, 0.01924812889099121, 0.02031718444824219, 0.020131839752197265, 0.019547136306762695, 0.01965363121032715, 0.01943654441833496, 0.01963315200805664, 0.020192256927490236, 0.02044825553894043, 0.02026700782775879, 0.020180992126464844, 0.02040115165710449, 0.020436992645263673, 0.020116479873657226, 0.019546112060546874, 0.02086502456665039, 0.02029363250732422, 0.02018611145019531, 0.020174848556518556, 0.020107263565063475, 0.020090879440307616, 0.02005606460571289, 0.020040704727172853, 0.020134912490844727, 0.02008064079284668, 0.019998720169067383, 0.020230144500732423, 0.019971071243286134, 0.019400703430175782, 0.019337215423583985, 0.019833856582641602, 0.019771392822265626, 0.01947238349914551, 0.019366912841796875, 0.01946419143676758, 0.019380224227905272, 0.019366912841796875, 0.019438592910766602, 0.01923788833618164, 0.02088755226135254, 0.021790719985961913, 0.020341760635375978, 0.02018611145019531, 0.0200949764251709, 0.02002841567993164, 0.020802560806274413, 0.020479999542236327, 0.02021990394592285, 0.0200898551940918, 0.020303871154785155, 0.019532800674438477, 0.019409919738769533, 0.019505151748657225, 0.0193832950592041, 0.019533824920654298, 0.02001817512512207, 0.019360767364501954, 0.020048896789550782, 0.020379648208618165, 0.020126720428466797, 0.01938739204406738, 0.01944268798828125, 0.019408895492553712, 0.020025344848632814, 0.02018611145019531, 0.019931135177612306, 0.01948876762390137, 0.019957759857177734, 0.02007859230041504, 0.020083711624145507, 0.02005504035949707, 0.020102144241333008, 0.02003558349609375, 0.020153343200683595, 0.02006425666809082, 0.02005401611328125, 0.020339712142944336, 0.019335168838500977]",tokens/s,50.77743026781469,,,1,64,1,,, -4bit-awq-exllama-v1-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,meta-llama/Meta-Llama-3-8B,meta-llama/Meta-Llama-3-8B,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - self.load_model_from_pretrained() - File 
""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3907, in from_pretrained - hf_quantizer.postprocess_model(model) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/base.py"", line 195, in postprocess_model - return self._process_model_after_weight_loading(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/quantizer_awq.py"", line 107, in _process_model_after_weight_loading - model = post_init_awq_exllama_modules(model, self.quantization_config.exllama_config) - File ""/usr/local/lib/python3.10/dist-packages/transformers/integrations/awq.py"", line 462, in post_init_awq_exllama_modules - model = exllama_post_init(model) - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllama.py"", line 144, in exllama_post_init - submodule.post_init() - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllama.py"", line 77, in post_init - self.q4 = exl_ext.make_q4( -NameError: name 'exl_ext' is not defined - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,1,64,1,,, -4bit-awq-exllama-v1-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,microsoft/phi-1_5,microsoft/phi-1_5,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - self.load_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File 
""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3907, in from_pretrained - hf_quantizer.postprocess_model(model) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/base.py"", line 195, in postprocess_model - return self._process_model_after_weight_loading(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/quantizer_awq.py"", line 107, in _process_model_after_weight_loading - model = post_init_awq_exllama_modules(model, self.quantization_config.exllama_config) - File ""/usr/local/lib/python3.10/dist-packages/transformers/integrations/awq.py"", line 462, in post_init_awq_exllama_modules - model = exllama_post_init(model) - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllama.py"", line 144, in exllama_post_init - submodule.post_init() - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllama.py"", line 77, in post_init - self.q4 = exl_ext.make_q4( -NameError: name 'exl_ext' is not defined - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,1,64,1,,, -4bit-awq-exllama-v1-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,togethercomputer/RedPajama-INCITE-Base-3B-v1,togethercomputer/RedPajama-INCITE-Base-3B-v1,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - self.load_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3907, in from_pretrained - hf_quantizer.postprocess_model(model) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/base.py"", line 195, in postprocess_model - return 
self._process_model_after_weight_loading(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/quantizer_awq.py"", line 107, in _process_model_after_weight_loading - model = post_init_awq_exllama_modules(model, self.quantization_config.exllama_config) - File ""/usr/local/lib/python3.10/dist-packages/transformers/integrations/awq.py"", line 462, in post_init_awq_exllama_modules - model = exllama_post_init(model) - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllama.py"", line 144, in exllama_post_init - submodule.post_init() - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllama.py"", line 77, in post_init - self.q4 = exl_ext.make_q4( -NameError: name 'exl_ext' is not defined - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,1,64,1,,, -4bit-awq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,Qwen/Qwen2-beta-72B,Qwen/Qwen2-beta-72B,cuda,0,42,,,True,,,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run - report = scenario.run(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run - self.run_model_loading_tracking(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking - backend.load() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load - self.load_transformers_model() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model - self.load_transformers_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights - self.load_transformers_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained - self.pretrained_model = self.automodel_loader.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4034, in from_pretrained - dispatch_model(model, **device_map_kwargs) - File 
""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 494, in dispatch_model - model.to(device) - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 2905, in to - return super().to(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1174, in to - return self._apply(convert) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply - module._apply(fn) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply - module._apply(fn) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply - module._apply(fn) - [Previous line repeated 2 more times] - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 854, in _apply - self._buffers[key] = fn(buf) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1160, in convert - return t.to( -torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 96.00 MiB. GPU 0 has a total capacity of 22.18 GiB of which 68.50 MiB is free. Process 126226 has 22.11 GiB memory in use. Of the allocated memory 21.86 GiB is allocated by PyTorch, and 10.74 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) - -",qwen2,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,1,64,1,,, -4bit-awq-exllama-v1-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,meta-llama/Llama-2-70b-hf,meta-llama/Llama-2-70b-hf,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - self.load_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = 
self.automodel_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3904, in from_pretrained - dispatch_model(model, **device_map_kwargs) - File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 489, in dispatch_model - model.to(device) - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 2796, in to - return super().to(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1173, in to - return self._apply(convert) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 779, in _apply - module._apply(fn) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 779, in _apply - module._apply(fn) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 779, in _apply - module._apply(fn) - [Previous line repeated 2 more times] - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 853, in _apply - self._buffers[key] = fn(buf) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1159, in convert - return t.to( -torch.cuda.OutOfMemoryError: CUDA out of memory. Tried to allocate 112.00 MiB. GPU - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,1,64,1,,, -4bit-awq-exllama-v1-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,meta-llama/Llama-2-7b-hf,meta-llama/Llama-2-7b-hf,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - self.load_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3907, in from_pretrained - hf_quantizer.postprocess_model(model) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/base.py"", line 195, in postprocess_model - return self._process_model_after_weight_loading(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/quantizer_awq.py"", line 107, in _process_model_after_weight_loading - model = post_init_awq_exllama_modules(model, self.quantization_config.exllama_config) - File ""/usr/local/lib/python3.10/dist-packages/transformers/integrations/awq.py"", line 462, in post_init_awq_exllama_modules - model = exllama_post_init(model) - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllama.py"", line 144, in exllama_post_init - submodule.post_init() - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllama.py"", line 77, in post_init - self.q4 = exl_ext.make_q4( -NameError: name 'exl_ext' is not defined - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,1,64,1,,, -4bit-awq-exllama-v1-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,r,r,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 304, in hf_raise_for_status - response.raise_for_status() - File ""/usr/local/lib/python3.10/dist-packages/requests/models.py"", line 1024, in raise_for_status - raise HTTPError(http_error_msg, response=self) -requests.exceptions.HTTPError: 404 Client Error: Not Found for url: https://huggingface.co/r/resolve/main/config.json - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1221, in hf_hub_download - return _hf_hub_download_to_cache_dir( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1325, in _hf_hub_download_to_cache_dir - 
_raise_on_head_call_error(head_call_error, force_download, local_files_only) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1823, in _raise_on_head_call_error - raise head_call_error - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1722, in _get_metadata_or_catch_error - metadata = get_hf_file_metadata(url=url, proxies=proxies, timeout=etag_timeout, headers=headers) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1645, in get_hf_file_metadata - r = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 372, in _request_wrapper - response = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 396, in _request_wrapper - hf_raise_for_status(response) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status - raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-6694911f-054c52293cb3ecb65d01522b;bd28ec7e-e9ad-4bd5-9d84-b99e74b5434c) - -Repository Not Found for url: https://huggingface.co/r/resolve/main/config.json. -Please make sure you specified the correct `repo_id` and `repo_type`. -If you are trying to access a private or gated repo, make sure you are authenticated. - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 425, in cached_file - raise EnvironmentError( -OSError: r is not a local folder and is not a valid model identifier listed on 'https://huggingface.co/models' -If this is a private repository, make sure to pass a token having permission to this repo either by logging in with `huggingface-cli login` or by passing `token=` - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,1,64,1,,, 
-4bit-awq-exllama-v1-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,google/recurrentgemma-7b,google/recurrentgemma-7b,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 304, in hf_raise_for_status - response.raise_for_status() - File ""/usr/local/lib/python3.10/dist-packages/requests/models.py"", line 1024, in raise_for_status - raise HTTPError(http_error_msg, response=self) -requests.exceptions.HTTPError: 404 Client Error: Not Found for url: https://huggingface.co/google/recurrentgemma-7b/resolve/main/config.json - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1221, in hf_hub_download - return _hf_hub_download_to_cache_dir( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1325, in _hf_hub_download_to_cache_dir - _raise_on_head_call_error(head_call_error, force_download, local_files_only) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1823, in _raise_on_head_call_error - raise head_call_error - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1722, in _get_metadata_or_catch_error - metadata = get_hf_file_metadata(url=url, proxies=proxies, timeout=etag_timeout, headers=headers) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1645, in get_hf_file_metadata - r = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 372, in _request_wrapper - response = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 396, in _request_wrapper - hf_raise_for_status(response) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status - raise RepositoryNotFoundError(message, response) from e 
-huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-66948251-4fd60f9855ebf2762d8a5603;0013c02f-4384-491c-98d7-d611f6809796) - -Repository Not Found for url: https://huggingface.co/google/recurrentgemma-7b/resolve/main/config.json. -Please make sure you specified the correct `repo_id` and `repo_type`. -If you are trying to access a private or gated repo, make sure you are authenticated. - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 425, in cached_file - raise EnvironmentError( -OSError: google/recurrentgemma-7b is not a local folder and is not a valid model identifier listed on 'https://huggingface.co/models' -If this is a private repository, make sure to pass a token having permission to this repo either by logging in with `huggingface-cli login` or by passing `token=` - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,1,64,1,,, -4bit-awq-exllama-v1-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,huggyllama/llama-13b,huggyllama/llama-13b,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) 
-ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - self.load_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3907, in from_pretrained - hf_quantizer.postprocess_model(model) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/base.py"", line 195, in postprocess_model - return self._process_model_after_weight_loading(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/quantizer_awq.py"", line 107, in _process_model_after_weight_loading - model = post_init_awq_exllama_modules(model, self.quantization_config.exllama_config) - File ""/usr/local/lib/python3.10/dist-packages/transformers/integrations/awq.py"", line 462, in post_init_awq_exllama_modules - model = exllama_post_init(model) - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllama.py"", line 144, in exllama_post_init - submodule.post_init() - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllama.py"", line 77, in post_init - self.q4 = exl_ext.make_q4( -NameError: name 'exl_ext' is not defined - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,1,64,1,,, -4bit-awq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,Deci/DeciCoder-1b,Deci/DeciCoder-1b,cuda,0,42,,,True,,,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run - report = scenario.run(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, 
in run - self.run_model_loading_tracking(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking - backend.load() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load - self.load_transformers_model() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model - self.load_transformers_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights - self.load_transformers_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained - self.pretrained_model = self.automodel_loader.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 559, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained - model = cls(config, *model_args, **model_kwargs) - File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 248, in __init__ - self.model = DeciCoderModel(config) - File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 215, in __init__ - self.layers = nn.ModuleList([DeciCoderDecoderLayer(config) for _ in range(config.num_hidden_layers)]) - File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 215, in - self.layers = nn.ModuleList([DeciCoderDecoderLayer(config) for _ in range(config.num_hidden_layers)]) - File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 181, in __init__ - self.self_attn = DeciCoderAttention(config=config) - File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 54, in __init__ - self._init_rope() - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1729, in __getattr__ - raise AttributeError(f""'{type(self).__name__}' object has no attribute '{name}'"") -AttributeError: 'DeciCoderAttention' object has no attribute '_init_rope' - -",llama,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,1,64,1,,, -4bit-awq-exllama-v1-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,google/recurrentgemma-2b,google/recurrentgemma-2b,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File 
""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 304, in hf_raise_for_status - response.raise_for_status() - File ""/usr/local/lib/python3.10/dist-packages/requests/models.py"", line 1024, in raise_for_status - raise HTTPError(http_error_msg, response=self) -requests.exceptions.HTTPError: 403 Client Error: Forbidden for url: https://huggingface.co/google/recurrentgemma-2b/resolve/main/config.json - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1722, in _get_metadata_or_catch_error - metadata = get_hf_file_metadata(url=url, proxies=proxies, timeout=etag_timeout, headers=headers) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1645, in get_hf_file_metadata - r = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 372, in _request_wrapper - response = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 396, in _request_wrapper - hf_raise_for_status(response) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 367, in hf_raise_for_status - raise HfHubHTTPError(message, response=response) from e -huggingface_hub.utils._errors.HfHubHTTPError: (Request ID: Root=1-669481de-41f6776a37cf8d5c484750e8;67d51f55-5550-4111-9616-e7ea9fe9dc15) - -403 Forbidden: Please enable access to public gated repositories in your fine-grained token settings to view this repository.. -Cannot access content at: https://huggingface.co/google/recurrentgemma-2b/resolve/main/config.json. -If you are trying to create or update content,make sure you have a token with the `write` role. - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1221, in hf_hub_download - return _hf_hub_download_to_cache_dir( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1325, in _hf_hub_download_to_cache_dir - _raise_on_head_call_error(head_call_error, force_download, local_files_only) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1826, in _raise_on_head_call_error - raise LocalEntryNotFoundError( -huggingface_hub.utils._errors.LocalEntryNotFoundError: An error happened while trying to locate the file on the Hub and we cannot find the requested files in the local cache. Please check your connection and try again or make sure your Internet connection is on. 
- -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 445, in cached_file - raise EnvironmentError( -OSError: We couldn't connect to 'https://huggingface.co' to load this file, couldn't find it in the cached files and it looks like google/recurrentgemma-2b is not the path to a directory containing a file named config.json. -Checkout your internet connection or see how to run the library in offline mode at 'https://huggingface.co/docs/transformers/installation#offline-mode'. 
- -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,1,64,1,,, -4bit-awq-exllama-v1-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,v,v,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 304, in hf_raise_for_status - response.raise_for_status() - File ""/usr/local/lib/python3.10/dist-packages/requests/models.py"", line 1024, in raise_for_status - raise HTTPError(http_error_msg, response=self) -requests.exceptions.HTTPError: 404 Client Error: Not Found for url: https://huggingface.co/v/resolve/main/config.json - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1221, in hf_hub_download - return _hf_hub_download_to_cache_dir( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1325, in _hf_hub_download_to_cache_dir - _raise_on_head_call_error(head_call_error, force_download, local_files_only) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1823, in _raise_on_head_call_error - raise head_call_error - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1722, in _get_metadata_or_catch_error - metadata = get_hf_file_metadata(url=url, proxies=proxies, timeout=etag_timeout, headers=headers) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1645, in get_hf_file_metadata - r = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 372, in _request_wrapper - response = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 396, in _request_wrapper - hf_raise_for_status(response) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status - raise RepositoryNotFoundError(message, 
response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-66949493-47ff0fe300b4d32926f9c0ec;18b0c9e6-9652-4091-acd4-53bc4eb7f644) - -Repository Not Found for url: https://huggingface.co/v/resolve/main/config.json. -Please make sure you specified the correct `repo_id` and `repo_type`. -If you are trying to access a private or gated repo, make sure you are authenticated. - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 425, in cached_file - raise EnvironmentError( -OSError: v is not a local folder and is not a valid model identifier listed on 'https://huggingface.co/models' -If this is a private repository, make sure to pass a token having permission to this repo either by logging in with `huggingface-cli login` or by passing `token=` - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,1,64,1,,, -4bit-awq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,EleutherAI/pythia-2.7b,EleutherAI/pythia-2.7b,cuda,0,42,,,True,,,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,gpt_neox,MB,2198.818816,3142.057984,0.0,2512.388096,2240.694784,s,1,8.712916015625,8.712916015625,0.0,8.712916015625,8.712916015625,8.712916015625,8.712916015625,[8.712916015625],,kWh,2.2695793959026866e-05,1.2423149791378321e-05,3.6162528930017146e-05,7.128147268042233e-05,,MB,2275.250176,3160.932352,0.0,2514.485248,2226.413568,s,10,4.889119171142578,0.48891191711425785,0.0001372612517188773,0.48894471740722656,0.48904681091308594,0.4890638259887695,0.48907743804931636,"[0.488607177734375, 0.4887143249511719, 
0.4889255676269531, 0.4889137878417969, 0.4889414367675781, 0.48904302978515624, 0.48894940185546876, 0.4890808410644531, 0.488947998046875, 0.48899560546875]",tokens/s,523.611699855893,kWh,5.777872770436706e-06,3.1659832176461784e-06,3.294354487333384e-05,4.188740086141673e-05,tokens/kWh,6111622.939961558,MB,2282.340352,3160.932352,0.0,2514.485248,2337.110528,s,10,13.757603027343752,1.3757603027343748,0.00781680646406521,1.3742268066406251,1.38461318359375,1.3886947143554687,1.3919599389648436,"[1.36609716796875, 1.3747666015625, 1.365140869140625, 1.3721820068359376, 1.372041015625, 1.380036865234375, 1.37368701171875, 1.3771690673828125, 1.3837061767578125, 1.3927762451171875]",tokens/s,45.79286077290147,kWh,1.643698866456317e-05,9.008088375609625e-06,3.8350456606259906e-05,6.379553364643269e-05,tokens/kWh,987529.9476160558,,s,630,13.755112438201909,0.02183351180666969,0.0004582090154506488,0.021634560585021972,0.022432665252685547,0.022605568313598633,0.023284684925079346,"[0.022237184524536133, 0.021766143798828123, 0.021510143280029297, 0.02150912094116211, 0.02146406364440918, 0.021574655532836915, 0.021533695220947266, 0.021646335601806642, 0.021598207473754884, 0.02166169548034668, 0.021638143539428712, 0.021651456832885742, 0.021612543106079102, 0.021567487716674806, 0.02166579246520996, 0.021599231719970705, 0.021531648635864258, 0.021567487716674806, 0.021585920333862304, 0.021570560455322265, 0.021594112396240234, 0.021578752517700195, 0.021595136642456055, 0.02165862464904785, 0.022136831283569337, 0.02205081558227539, 0.021696512222290038, 0.021518335342407227, 0.021615615844726564, 0.021564416885375977, 0.021562368392944335, 0.021600255966186522, 0.021544960021972655, 0.021578752517700195, 0.021565439224243164, 0.021639167785644533, 0.021719039916992186, 0.021763071060180664, 0.02166067123413086, 0.02145075225830078, 0.021599231719970705, 0.021549055099487305, 0.021989376068115234, 0.02206515121459961, 0.022337535858154296, 0.022352895736694335, 0.022185983657836913, 0.021613567352294923, 0.021586944580078125, 0.021603328704833984, 0.021551103591918946, 0.021603328704833984, 0.022074367523193358, 0.02166579246520996, 0.021702655792236326, 0.02162483215332031, 0.021651456832885742, 0.021581823348999024, 0.02162892723083496, 0.021634048461914062, 0.021575679779052736, 0.02161664009094238, 0.021501951217651367, 0.02230988883972168, 0.02187161636352539, 0.021586944580078125, 0.021541887283325196, 0.0214968318939209, 0.021534719467163087, 0.02149990463256836, 0.02165247917175293, 0.021544960021972655, 0.02150809669494629, 0.021598207473754884, 0.021545984268188476, 0.021530624389648437, 0.021517311096191406, 0.021521408081054686, 0.021534719467163087, 0.021518335342407227, 0.021605375289916993, 0.02150092887878418, 0.02169036865234375, 0.02165350341796875, 0.021841920852661133, 0.02329804801940918, 0.02282803153991699, 0.02190745544433594, 0.02239897537231445, 0.02243071937561035, 0.02226483154296875, 0.021542911529541017, 0.021565439224243164, 0.021708799362182618, 0.021797887802124022, 0.02166988754272461, 0.022792192459106447, 0.022569984436035157, 0.02227916717529297, 0.02472857666015625, 0.02287718391418457, 0.021729280471801758, 0.021565439224243164, 0.021573631286621094, 0.021571584701538086, 0.02166886329650879, 0.021749759674072267, 0.021564416885375977, 0.021545984268188476, 0.02144256019592285, 0.02160335922241211, 0.0215633602142334, 0.021563392639160156, 0.021612543106079102, 0.021601280212402343, 0.021659648895263672, 0.021607423782348634, 0.021646335601806642, 
0.021659648895263672, 0.021700607299804688, 0.021552127838134767, 0.021573631286621094, 0.02168627166748047, 0.021592063903808592, 0.021679103851318358, 0.021561344146728514, 0.02243174362182617, 0.02189004707336426, 0.02170163154602051, 0.021580799102783203, 0.021552127838134767, 0.021614591598510743, 0.021619712829589844, 0.021569536209106444, 0.021634048461914062, 0.021598207473754884, 0.021565439224243164, 0.02162483215332031, 0.021596160888671875, 0.021574655532836915, 0.02162483215332031, 0.021600255966186522, 0.022692863464355468, 0.022424575805664062, 0.02204876708984375, 0.021634048461914062, 0.021601280212402343, 0.021571584701538086, 0.021590015411376954, 0.02166783905029297, 0.021559295654296876, 0.02163199996948242, 0.021611520767211914, 0.021600255966186522, 0.02163609504699707, 0.021568511962890623, 0.021501951217651367, 0.02163609504699707, 0.021581823348999024, 0.02166169548034668, 0.021571584701538086, 0.021557247161865235, 0.021543935775756837, 0.021550079345703126, 0.021568511962890623, 0.021609472274780273, 0.021574655532836915, 0.021489664077758788, 0.02172313690185547, 0.02245529556274414, 0.0219289608001709, 0.02166988754272461, 0.02150297546386719, 0.021627904891967774, 0.021558271408081055, 0.021572608947753907, 0.021585920333862304, 0.021565439224243164, 0.021597183227539063, 0.02143129539489746, 0.021562368392944335, 0.021629951477050782, 0.021601280212402343, 0.021613567352294923, 0.021581823348999024, 0.021548032760620117, 0.021573631286621094, 0.021563392639160156, 0.021543935775756837, 0.021651456832885742, 0.021614591598510743, 0.021436416625976562, 0.021574655532836915, 0.021571584701538086, 0.02220953559875488, 0.02201907157897949, 0.02165452766418457, 0.022367231369018553, 0.02168320083618164, 0.021597183227539063, 0.021558271408081055, 0.021897216796875, 0.022444032669067384, 0.02230886459350586, 0.022354944229125977, 0.021768192291259765, 0.02230169677734375, 0.022076416015625, 0.021586944580078125, 0.021629951477050782, 0.021622783660888673, 0.021639167785644533, 0.021702655792236326, 0.021611520767211914, 0.02151628875732422, 0.02162073516845703, 0.021635072708129883, 0.021601280212402343, 0.021593088150024413, 0.021591039657592775, 0.021626880645751953, 0.02163711929321289, 0.021635072708129883, 0.021562368392944335, 0.021597183227539063, 0.021552127838134767, 0.021542911529541017, 0.021721088409423828, 0.02183475112915039, 0.021773311614990236, 0.021604352951049805, 0.02225766372680664, 0.022452224731445314, 0.021910528182983398, 0.021603328704833984, 0.02166579246520996, 0.021576704025268553, 0.021325824737548828, 0.021630975723266603, 0.021602304458618164, 0.021710847854614256, 0.02191974449157715, 0.02245631980895996, 0.021778432846069336, 0.021719039916992186, 0.021615615844726564, 0.021613567352294923, 0.021959680557250977, 0.022377471923828125, 0.02208358383178711, 0.021576704025268553, 0.021598207473754884, 0.022181888580322266, 0.021768192291259765, 0.022806528091430665, 0.021617664337158202, 0.021585920333862304, 0.021604352951049805, 0.021556224822998047, 0.0216627197265625, 0.021577728271484374, 0.02143129539489746, 0.021533695220947266, 0.021525503158569336, 0.021525503158569336, 0.021561344146728514, 0.02186956787109375, 0.022331392288208008, 0.02162073516845703, 0.021570560455322265, 0.021564416885375977, 0.02167091178894043, 0.02147327995300293, 0.021576704025268553, 0.02161664009094238, 0.021570560455322265, 0.02169343948364258, 0.021585920333862304, 0.021551103591918946, 0.021544960021972655, 0.02148863983154297, 
0.021572608947753907, 0.02180607986450195, 0.02165350341796875, 0.021411840438842773, 0.022352895736694335, 0.02208358383178711, 0.021537792205810546, 0.021557247161865235, 0.021574655532836915, 0.021568511962890623, 0.02189004707336426, 0.022782976150512696, 0.02325196838378906, 0.023524351119995117, 0.022746112823486327, 0.02167398452758789, 0.021577728271484374, 0.021586944580078125, 0.021574655532836915, 0.02150399971008301, 0.021578752517700195, 0.021609472274780273, 0.02168627166748047, 0.02166783905029297, 0.02165350341796875, 0.021602304458618164, 0.021601280212402343, 0.02165760040283203, 0.021604352951049805, 0.021603328704833984, 0.021597183227539063, 0.021625856399536132, 0.02171392059326172, 0.02251263999938965, 0.022509567260742186, 0.021739519119262696, 0.021581823348999024, 0.021573631286621094, 0.02144256019592285, 0.021593088150024413, 0.021592063903808592, 0.021615615844726564, 0.021615615844726564, 0.021587968826293946, 0.021569536209106444, 0.021581823348999024, 0.022537216186523438, 0.02349567985534668, 0.022590463638305663, 0.021832704544067383, 0.021762048721313477, 0.02167193603515625, 0.021604352951049805, 0.02166579246520996, 0.02165452766418457, 0.021621759414672852, 0.02170675277709961, 0.02234982490539551, 0.022458368301391602, 0.022609920501708985, 0.02267852783203125, 0.02231500816345215, 0.02193715286254883, 0.021734399795532225, 0.022007808685302735, 0.022198272705078126, 0.021565439224243164, 0.021629951477050782, 0.02150092887878418, 0.021567487716674806, 0.022766592025756836, 0.022427648544311524, 0.021575679779052736, 0.021617664337158202, 0.021587968826293946, 0.021609472274780273, 0.021572608947753907, 0.022468608856201173, 0.02211942481994629, 0.02167091178894043, 0.021535743713378908, 0.021558271408081055, 0.021511167526245118, 0.021506048202514647, 0.021407743453979493, 0.021596160888671875, 0.022386688232421875, 0.02231705665588379, 0.021772287368774415, 0.021994495391845705, 0.02227097511291504, 0.022361087799072265, 0.021961727142333985, 0.02269388771057129, 0.02167500877380371, 0.021568511962890623, 0.021575679779052736, 0.022441984176635742, 0.021812223434448243, 0.02170982360839844, 0.021599231719970705, 0.021552127838134767, 0.021639167785644533, 0.021607423782348634, 0.021587968826293946, 0.021593088150024413, 0.022123519897460937, 0.022245376586914063, 0.02223308753967285, 0.022590463638305663, 0.023003135681152344, 0.02289356803894043, 0.021850112915039063, 0.022392831802368163, 0.022193151473999022, 0.021787647247314454, 0.02207027244567871, 0.02164019203186035, 0.021617664337158202, 0.02148249626159668, 0.021587968826293946, 0.021593088150024413, 0.021629951477050782, 0.021612543106079102, 0.02162073516845703, 0.021555200576782226, 0.021639167785644533, 0.021614591598510743, 0.021766143798828123, 0.021967872619628907, 0.022388736724853517, 0.021530624389648437, 0.021606399536132814, 0.02224332809448242, 0.02208460807800293, 0.02162380790710449, 0.02163609504699707, 0.02163199996948242, 0.02164735984802246, 0.02191360092163086, 0.0216760311126709, 0.02166476821899414, 0.02151219177246094, 0.021570560455322265, 0.02169343948364258, 0.021585920333862304, 0.02166067123413086, 0.021801984786987305, 0.021756927490234376, 0.0216760311126709, 0.021639167785644533, 0.021613567352294923, 0.021630975723266603, 0.021634048461914062, 0.021564416885375977, 0.021604352951049805, 0.021687295913696288, 0.021604352951049805, 0.02166681671142578, 0.02165452766418457, 0.022540288925170897, 0.02173030471801758, 0.021963775634765623, 0.022401023864746093, 
0.022323200225830078, 0.022427648544311524, 0.022140928268432617, 0.02246451187133789, 0.022141952514648438, 0.022416383743286132, 0.02221772766113281, 0.02207539176940918, 0.021542911529541017, 0.021644287109375, 0.022779903411865234, 0.02172211265563965, 0.021739519119262696, 0.02187264060974121, 0.02152038383483887, 0.02205286407470703, 0.021570560455322265, 0.02231705665588379, 0.02221670341491699, 0.021755903244018555, 0.021558271408081055, 0.021614591598510743, 0.02169856071472168, 0.02167500877380371, 0.021577728271484374, 0.021533727645874023, 0.021631967544555663, 0.021625856399536132, 0.021596160888671875, 0.02166476821899414, 0.021588991165161133, 0.02173030471801758, 0.021588991165161133, 0.021618688583374023, 0.021634048461914062, 0.021931007385253908, 0.02223308753967285, 0.02182246398925781, 0.022386688232421875, 0.021932031631469725, 0.021798912048339843, 0.021738496780395508, 0.02162892723083496, 0.022362112045288086, 0.02244095993041992, 0.021995519638061522, 0.021584896087646483, 0.02152448081970215, 0.02150809669494629, 0.021580799102783203, 0.021577728271484374, 0.02167398452758789, 0.021582847595214845, 0.021610496520996093, 0.02162380790710449, 0.021618688583374023, 0.02165657615661621, 0.02161664009094238, 0.021582847595214845, 0.022012928009033202, 0.021727231979370116, 0.021630975723266603, 0.02184499168395996, 0.022336511611938475, 0.02201398468017578, 0.0215817928314209, 0.021618688583374023, 0.021606399536132814, 0.02169139289855957, 0.021590015411376954, 0.021627904891967774, 0.021384191513061524, 0.021603328704833984, 0.021544960021972655, 0.021700607299804688, 0.021513216018676756, 0.021592063903808592, 0.021584896087646483, 0.02165452766418457, 0.021611520767211914, 0.021629951477050782, 0.021563392639160156, 0.021588991165161133, 0.021572608947753907, 0.021695487976074217, 0.021594112396240234, 0.021608448028564452, 0.02161664009094238, 0.021580799102783203, 0.021587968826293946, 0.021607423782348634, 0.022025215148925782, 0.022733823776245117, 0.022602752685546876, 0.02183065605163574, 0.02187571144104004, 0.02254643249511719, 0.022386688232421875, 0.022556671142578123, 0.022401023864746093, 0.022610944747924806, 0.02250547218322754, 0.022487039566040038, 0.02251571273803711, 0.022560768127441407, 0.022208511352539064, 0.021617664337158202, 0.022098943710327147, 0.022565887451171874, 0.02264678382873535, 0.023402496337890624, 0.022726655960083008, 0.022312959671020507, 0.022773759841918945, 0.021948415756225585, 0.021621759414672852, 0.021781503677368166, 0.021608448028564452, 0.021758975982666014, 0.02206617546081543, 0.021611520767211914, 0.021971967697143553, 0.02252390480041504, 0.02187980842590332, 0.02185523223876953, 0.021789695739746092, 0.02163711929321289, 0.021577728271484374, 0.021608448028564452, 0.021748735427856446, 0.021942272186279296, 0.02165760040283203, 0.022024192810058595, 0.02251571273803711, 0.022641664505004884, 0.022607872009277344, 0.021793792724609375, 0.022189056396484375, 0.021585920333862304, 0.022487039566040038, 0.022441984176635742, 0.021909503936767577, 0.021994495391845705, 0.021622783660888673, 0.021642240524291992, 0.021591039657592775, 0.02186956787109375, 0.022393856048583984, 0.022364160537719727, 0.02220953559875488, 0.02162073516845703, 0.022344703674316405, 0.021574655532836915, 0.022383615493774413, 0.021819391250610352, 0.021748735427856446, 0.027460607528686523, 0.022426624298095704, 0.021710847854614256, 0.022839296340942384, 0.023544832229614256, 0.022565887451171874, 0.021581823348999024, 
0.02164019203186035, 0.021560319900512694, 0.02166476821899414, 0.021576704025268553, 0.021972991943359374, 0.021812223434448243, 0.021625856399536132, 0.021615615844726564, 0.02164121627807617, 0.02164019203186035, 0.021429311752319335, 0.021230527877807617, 0.022354944229125977, 0.021994495391845705, 0.02172211265563965, 0.02241535949707031, 0.02232729530334473, 0.021612543106079102, 0.02254643249511719, 0.023222272872924804, 0.022617088317871094, 0.022593536376953126]",tokens/s,45.80115232284898,,,1,64,1,,, -4bit-awq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,Qwen/Qwen-7B,Qwen/Qwen-7B,cuda,0,42,,,True,,,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run - report = scenario.run(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run - self.run_model_loading_tracking(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking - backend.load() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load - self.load_transformers_model() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model - self.load_transformers_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights - self.load_transformers_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained - self.pretrained_model = self.automodel_loader.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 551, in from_pretrained - model_class = get_class_from_dynamic_module( - File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 502, in get_class_from_dynamic_module - final_module = get_cached_module_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 327, in get_cached_module_file - modules_needed = check_imports(resolved_module_file) - File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 182, in check_imports - raise ImportError( -ImportError: This modeling file requires the following packages that were 
not found in your environment: transformers_stream_generator. Run `pip install transformers_stream_generator` - -",qwen,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,1,64,1,,, -4bit-awq-exllama-v1-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,microsoft/rho-math-1b-v0.1,microsoft/rho-math-1b-v0.1,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - self.load_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3907, in from_pretrained - hf_quantizer.postprocess_model(model) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/base.py"", line 195, in postprocess_model - return self._process_model_after_weight_loading(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/quantizer_awq.py"", line 107, in _process_model_after_weight_loading - model = post_init_awq_exllama_modules(model, self.quantization_config.exllama_config) - File ""/usr/local/lib/python3.10/dist-packages/transformers/integrations/awq.py"", line 462, in post_init_awq_exllama_modules - model = exllama_post_init(model) - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllama.py"", line 144, in exllama_post_init - submodule.post_init() - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllama.py"", line 77, in post_init - self.q4 = exl_ext.make_q4( -NameError: name 'exl_ext' is not defined - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,1,64,1,,, 
-4bit-awq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,01-ai/Yi-6B,01-ai/Yi-6B,cuda,0,42,,,True,,,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,llama,MB,3572.322304,5404.884992,0.0,4775.215104,4427.072512,s,1,10.25833984375,10.25833984375,0.0,10.25833984375,10.25833984375,10.25833984375,10.25833984375,[10.25833984375],,kWh,4.129029513472331e-05,2.2614521725725424e-05,7.169616846799953e-05,0.00013560098532844827,,MB,1644.285952,5440.536576,0.0,4794.089472,4101.022208,s,10,10.48104736328125,1.048104736328125,8.549855579026253e-05,1.0481150512695314,1.0481667846679688,1.0481999084472657,1.048226407470703,"[1.0481571044921876, 1.047904052734375, 1.0481146240234376, 1.0481124267578126, 1.048156494140625, 1.048159423828125, 1.048115478515625, 1.0482330322265625, 1.04807080078125, 1.04802392578125]",tokens/s,244.25039895999032,kWh,1.2388184687430585e-05,6.788148267763499e-06,7.060916759840004e-05,8.978550055359412e-05,tokens/kWh,2851239.881958338,MB,1663.000576,5451.022336,0.0,4802.47808,4101.024768,s,10,15.5081689453125,1.5508168945312502,0.01664278097603234,1.5484098510742186,1.5727916381835936,1.5793049377441406,1.584515577392578,"[1.5394857177734376, 1.5429859619140625, 1.553833740234375, 1.5398343505859375, 1.52632275390625, 1.5380025634765624, 1.5550751953125, 1.5554661865234376, 1.5858182373046874, 1.57134423828125]",tokens/s,40.62375140621767,kWh,1.824026280562489e-05,9.99730465195714e-06,5.2655542124399954e-05,8.089310958198201e-05,tokens/kWh,778805.516632439,,s,630,15.506178987503038,0.024612982519846113,0.000585136653566077,0.024371711730957032,0.025230437660217285,0.025454540157318115,0.026265804805755615,"[0.02548428726196289, 0.025058303833007813, 0.024221696853637696, 0.026621952056884765, 0.025389055252075195, 0.024704000473022462, 0.024817663192749022, 0.024390655517578123, 0.02429849624633789, 0.02430259132385254, 0.02431283187866211, 0.0243240966796875, 0.024227840423583984, 0.024164352416992187, 0.02472755241394043, 0.02472038459777832, 0.02428620719909668, 0.024293376922607423, 0.0243189754486084, 0.02424115180969238, 0.023957504272460937, 0.024163328170776367, 0.024252416610717774, 0.02428108787536621, 0.02428108787536621, 0.024182783126831055, 0.024220672607421875, 0.024240127563476564, 0.024457216262817383, 0.02448793601989746, 0.024284160614013672, 0.024327167510986326, 0.024240127563476564, 0.024225791931152343, 0.024407039642333983, 0.024399871826171874, 0.024238079071044923, 0.024267776489257813, 0.024218624114990234, 0.024216575622558592, 0.024178688049316405, 0.024190975189208985, 0.024654848098754883, 0.02526924705505371, 0.024617984771728517, 0.024395776748657227, 0.024328191757202147, 0.024555519104003908, 0.02472243118286133, 0.024246271133422852, 0.024250368118286132, 0.024308736801147462, 0.024381439208984376, 0.024349695205688478, 0.02427903938293457, 0.024366079330444337, 0.024358911514282225, 0.02433024024963379, 0.024251392364501953, 0.024251392364501953, 0.024230911254882814, 0.024242176055908202, 0.02428108787536621, 0.025057279586791992, 0.02429644775390625, 
0.024211456298828125, 0.024319999694824217, 0.02433843231201172, 0.02433024024963379, 0.024203264236450195, 0.024230911254882814, 0.02426470375061035, 0.02428825569152832, 0.024290336608886718, 0.024220640182495118, 0.024195072174072265, 0.024230911254882814, 0.024181760787963868, 0.024246271133422852, 0.02427494430541992, 0.024246271133422852, 0.024201215744018553, 0.024239103317260743, 0.024645631790161132, 0.024425472259521484, 0.024182783126831055, 0.02413670349121094, 0.023805952072143553, 0.024159231185913087, 0.024378368377685547, 0.02455449676513672, 0.024244224548339844, 0.024228864669799805, 0.024204288482666016, 0.024154111862182616, 0.024936447143554686, 0.031164415359497072, 0.027024383544921874, 0.025373695373535156, 0.025144319534301757, 0.024191999435424806, 0.024193023681640623, 0.024154111862182616, 0.023837696075439452, 0.024030208587646484, 0.024094720840454102, 0.024179712295532226, 0.02409062385559082, 0.024998912811279295, 0.025037824630737306, 0.02457088088989258, 0.025051136016845704, 0.024927232742309572, 0.025074687957763672, 0.023994367599487306, 0.0238786563873291, 0.024195072174072265, 0.024178688049316405, 0.02395955276489258, 0.02385305595397949, 0.02407935905456543, 0.023913503646850586, 0.024612831115722655, 0.02469375991821289, 0.024208383560180666, 0.024164352416992187, 0.02613145637512207, 0.025060352325439454, 0.024382463455200197, 0.02472652816772461, 0.024592384338378907, 0.024646656036376953, 0.024370176315307617, 0.025560064315795897, 0.025256959915161133, 0.025027584075927735, 0.025024511337280272, 0.025211904525756838, 0.027042816162109375, 0.02551296043395996, 0.025255935668945313, 0.024404991149902345, 0.02515456008911133, 0.024963071823120117, 0.02512384033203125, 0.02516275215148926, 0.025231359481811523, 0.02428006362915039, 0.024764415740966796, 0.02438041687011719, 0.024392704010009765, 0.024263679504394533, 0.024255487442016603, 0.02431385612487793, 0.024195072174072265, 0.02428006362915039, 0.02428620719909668, 0.024259584426879883, 0.02431692886352539, 0.024178688049316405, 0.024328191757202147, 0.02430771255493164, 0.02556620788574219, 0.02627174377441406, 0.025358335494995117, 0.02494259262084961, 0.02504806327819824, 0.02502348709106445, 0.024517631530761717, 0.024246271133422852, 0.024261632919311524, 0.024352767944335937, 0.024653823852539062, 0.02473779106140137, 0.024229888916015626, 0.024171520233154296, 0.024236032485961914, 0.024187904357910156, 0.024160255432128908, 0.024218624114990234, 0.02390937614440918, 0.023875583648681642, 0.023989248275756835, 0.024196096420288086, 0.024147968292236328, 0.024205312728881836, 0.02416640090942383, 0.02416640090942383, 0.024152063369750978, 0.0250644474029541, 0.02453094482421875, 0.025009151458740234, 0.02508188819885254, 0.025035743713378907, 0.025036800384521486, 0.02413670349121094, 0.024071168899536134, 0.02409164810180664, 0.02411724853515625, 0.02411520004272461, 0.02409881591796875, 0.0241080322265625, 0.024225791931152343, 0.02588467216491699, 0.02673151969909668, 0.025289728164672853, 0.024433664321899414, 0.024208383560180666, 0.024190975189208985, 0.024114175796508788, 0.024666112899780275, 0.024122367858886717, 0.024394752502441407, 0.02434867286682129, 0.02467532730102539, 0.026015743255615235, 0.02597887992858887, 0.02451251220703125, 0.024183807373046876, 0.024207359313964845, 0.024131584167480468, 0.024199167251586915, 0.024236032485961914, 0.024206335067749024, 0.024223743438720705, 0.024062976837158204, 0.024139776229858398, 0.024202239990234374, 0.024165376663208008, 
0.024246271133422852, 0.024244224548339844, 0.025034751892089844, 0.025127935409545898, 0.024266752243041992, 0.024261632919311524, 0.024011775970458983, 0.023933952331542968, 0.023990272521972656, 0.02390732765197754, 0.023967744827270508, 0.023806976318359374, 0.024373247146606446, 0.02432512092590332, 0.024034303665161134, 0.024145919799804686, 0.02430668830871582, 0.024250368118286132, 0.02427289581298828, 0.024187904357910156, 0.024263679504394533, 0.024213504791259766, 0.024223743438720705, 0.02510032081604004, 0.02418992042541504, 0.024070144653320313, 0.024459264755249024, 0.02456166458129883, 0.024008703231811524, 0.02410905647277832, 0.023973888397216796, 0.023969791412353517, 0.024160255432128908, 0.024253440856933595, 0.024245248794555665, 0.024170495986938476, 0.02391961669921875, 0.024263679504394533, 0.024163328170776367, 0.024175615310668946, 0.024215551376342775, 0.024224767684936522, 0.02432307243347168, 0.024240127563476564, 0.0241213436126709, 0.024159231185913087, 0.024199167251586915, 0.023981056213378905, 0.023964672088623046, 0.024167423248291017, 0.024300544738769532, 0.024630271911621093, 0.024555519104003908, 0.02428108787536621, 0.024232959747314452, 0.0241582088470459, 0.024163328170776367, 0.024215551376342775, 0.02427187156677246, 0.024184831619262694, 0.024159231185913087, 0.024217599868774413, 0.024201215744018553, 0.024213504791259766, 0.024218624114990234, 0.024240127563476564, 0.024173568725585938, 0.024216575622558592, 0.02411520004272461, 0.024524831771850587, 0.024643552780151366, 0.02432307243347168, 0.024175615310668946, 0.02409062385559082, 0.024061952590942383, 0.0240762882232666, 0.024174591064453126, 0.024169471740722655, 0.024187904357910156, 0.024233983993530273, 0.024224767684936522, 0.024147968292236328, 0.02455449676513672, 0.024217599868774413, 0.02412748718261719, 0.024049663543701173, 0.025047040939331053, 0.024160255432128908, 0.024086528778076172, 0.024180736541748047, 0.024212480545043946, 0.024382463455200197, 0.02428211212158203, 0.02408857536315918, 0.02412441635131836, 0.02413363265991211, 0.023994367599487306, 0.024211456298828125, 0.024139776229858398, 0.024156160354614258, 0.02412748718261719, 0.024179712295532226, 0.023916543960571288, 0.024411136627197266, 0.024563711166381837, 0.025620479583740235, 0.024498176574707032, 0.02428825569152832, 0.02412748718261719, 0.024817663192749022, 0.024802303314208983, 0.02474188804626465, 0.02510643196105957, 0.024607744216918945, 0.02431795120239258, 0.025012224197387696, 0.024246271133422852, 0.024385536193847656, 0.0242739200592041, 0.0243189754486084, 0.024466432571411133, 0.024896511077880858, 0.024163328170776367, 0.024225791931152343, 0.024205312728881836, 0.025169919967651368, 0.024689664840698244, 0.02431590461730957, 0.024366079330444337, 0.02449407958984375, 0.024447999954223632, 0.024398847579956053, 0.024627199172973634, 0.024412160873413087, 0.024178688049316405, 0.024217599868774413, 0.024250368118286132, 0.024155136108398437, 0.0242872314453125, 0.024217599868774413, 0.024227840423583984, 0.02405171203613281, 0.024175615310668946, 0.024194047927856444, 0.0247459831237793, 0.025218048095703126, 0.024893440246582032, 0.024379392623901368, 0.024167423248291017, 0.025105344772338868, 0.024142847061157227, 0.024123392105102538, 0.024517631530761717, 0.02459753608703613, 0.024688608169555665, 0.024812543869018554, 0.024210432052612304, 0.02411110305786133, 0.024070144653320313, 0.024037376403808593, 0.024341503143310548, 0.02413670349121094, 0.02393600082397461, 
0.024214527130126954, 0.024159231185913087, 0.023900159835815428, 0.023980031967163085, 0.02411724853515625, 0.024186880111694335, 0.024163328170776367, 0.02407935905456543, 0.02509926414489746, 0.026251264572143555, 0.025242624282836915, 0.024650751113891603, 0.024802303314208983, 0.024774656295776368, 0.025084928512573244, 0.024774656295776368, 0.02507263946533203, 0.02498150444030762, 0.025076736450195314, 0.025017343521118163, 0.02553241539001465, 0.02512588882446289, 0.024983552932739257, 0.024985599517822265, 0.025078784942626952, 0.0250644474029541, 0.02510540771484375, 0.024896511077880858, 0.025016319274902343, 0.02512179183959961, 0.024978431701660156, 0.025479167938232423, 0.025396223068237304, 0.025203712463378908, 0.02503987121582031, 0.025267200469970705, 0.024871936798095705, 0.02525388717651367, 0.02561846351623535, 0.024283103942871094, 0.024070144653320313, 0.024321023941040038, 0.02428006362915039, 0.023937023162841797, 0.024164352416992187, 0.02428108787536621, 0.024365055084228517, 0.024284160614013672, 0.024411136627197266, 0.025195520401000978, 0.025808895111083984, 0.025999359130859375, 0.025418752670288085, 0.02508083152770996, 0.02428620719909668, 0.02429952049255371, 0.02427289581298828, 0.024252416610717774, 0.02430259132385254, 0.024997888565063478, 0.02488832092285156, 0.026960895538330077, 0.025571327209472656, 0.02524569511413574, 0.025029632568359376, 0.02524569511413574, 0.025135103225708007, 0.02512281608581543, 0.025071647644042967, 0.025048032760620117, 0.025001983642578125, 0.02516275215148926, 0.025663488388061522, 0.02527027130126953, 0.025195520401000978, 0.0243507194519043, 0.024178688049316405, 0.024204288482666016, 0.024170495986938476, 0.02411827278137207, 0.02428927993774414, 0.024567808151245117, 0.02524569511413574, 0.025177087783813477, 0.0252620792388916, 0.025178112030029298, 0.026005504608154296, 0.025424896240234376, 0.024425472259521484, 0.024182783126831055, 0.023777280807495117, 0.024222719192504884, 0.02493337631225586, 0.02414899253845215, 0.024147968292236328, 0.024169471740722655, 0.024165376663208008, 0.024172544479370117, 0.024122367858886717, 0.02428108787536621, 0.0241080322265625, 0.024165376663208008, 0.024161279678344725, 0.024174591064453126, 0.02388991928100586, 0.02394726371765137, 0.024130559921264647, 0.023855104446411132, 0.02390630340576172, 0.024210432052612304, 0.024173568725585938, 0.02408550453186035, 0.025814016342163085, 0.0255098876953125, 0.02550067138671875, 0.025177087783813477, 0.02517913627624512, 0.025230335235595702, 0.02513100814819336, 0.02508799934387207, 0.025075712203979493, 0.025058303833007813, 0.025146432876586914, 0.025182144165039062, 0.02512384033203125, 0.025259008407592775, 0.025116672515869142, 0.02509926414489746, 0.0251013126373291, 0.0251013126373291, 0.02516275215148926, 0.02517913627624512, 0.02527027130126953, 0.0252938232421875, 0.025195520401000978, 0.025209856033325196, 0.02526310348510742, 0.02531328010559082, 0.025203712463378908, 0.025149440765380858, 0.02469171142578125, 0.02511564826965332, 0.025011199951171875, 0.02534604835510254, 0.02515660858154297, 0.025225215911865235, 0.025338880538940428, 0.025297920227050782, 0.025223167419433593, 0.02515456008911133, 0.025195520401000978, 0.025203712463378908, 0.02517196846008301, 0.02517196846008301, 0.02536857604980469, 0.025841663360595703, 0.025438207626342774, 0.025181184768676756, 0.02498150444030762, 0.025190399169921874, 0.024954879760742187, 0.02490777587890625, 0.024944639205932616, 0.024977407455444335, 
0.02488934326171875, 0.025196544647216795, 0.02490060806274414, 0.024996864318847657, 0.02513715171813965, 0.02512076759338379, 0.025164800643920897, 0.024806400299072266, 0.02498150444030762, 0.025068544387817384, 0.02510950469970703, 0.025819135665893556, 0.02567475128173828, 0.02535321617126465, 0.024862720489501954, 0.024589311599731444, 0.024833023071289064, 0.024564735412597655, 0.02434662437438965, 0.024846336364746095, 0.024844287872314453, 0.02488115119934082, 0.024803327560424804, 0.02477670478820801, 0.024846336364746095, 0.024763391494750975, 0.024861696243286133, 0.024845312118530274, 0.024806400299072266, 0.024778751373291014, 0.024958976745605467, 0.025172992706298827, 0.025289728164672853, 0.024870912551879884, 0.025016319274902343, 0.024921087265014647, 0.024890399932861327, 0.02479817581176758, 0.024910848617553712, 0.02488934326171875, 0.024731647491455077, 0.02486579132080078, 0.0249354248046875, 0.024993791580200195, 0.02503167915344238, 0.024983552932739257, 0.02494156837463379, 0.024885248184204102, 0.02485043144226074, 0.024896511077880858, 0.024922111511230468, 0.024763391494750975, 0.024954879760742187, 0.024777727127075197, 0.024930303573608398, 0.02493440055847168, 0.024992767333984374, 0.02503987121582031, 0.024943616867065428, 0.02494259262084961, 0.02492620849609375, 0.024879104614257814, 0.024868864059448242, 0.02490060806274414, 0.024952831268310546, 0.024970239639282226, 0.024851455688476562, 0.025051136016845704, 0.02488832092285156, 0.025025535583496093, 0.025084928512573244, 0.02502348709106445, 0.025116672515869142, 0.02546790313720703]",tokens/s,40.628964782860955,,,1,64,1,,, -4bit-awq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,EleutherAI/gpt-neo-2.7B,EleutherAI/gpt-neo-2.7B,cuda,0,42,,,True,,,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run - report = scenario.run(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run - self.run_model_loading_tracking(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking - backend.load() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load - self.load_transformers_model() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in 
load_transformers_model - self.load_transformers_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights - self.load_transformers_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained - self.pretrained_model = self.automodel_loader.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3826, in from_pretrained - config = cls._autoset_attn_implementation( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1565, in _autoset_attn_implementation - config = cls._check_and_enable_sdpa( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1731, in _check_and_enable_sdpa - raise ValueError( -ValueError: GPTNeoForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` - -",gpt_neo,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,1,64,1,,, -4bit-awq-exllama-v1-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,facebook/opt-66b,facebook/opt-66b,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - self.load_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = 
self.automodel_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3704, in from_pretrained - config = cls._autoset_attn_implementation( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1490, in _autoset_attn_implementation - config = cls._check_and_enable_sdpa( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1656, in _check_and_enable_sdpa - raise ValueError( -ValueError: OPTForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,1,64,1,,, -4bit-awq-exllama-v1-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,0,0,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 304, in hf_raise_for_status - response.raise_for_status() - File ""/usr/local/lib/python3.10/dist-packages/requests/models.py"", line 1024, in raise_for_status - raise HTTPError(http_error_msg, response=self) -requests.exceptions.HTTPError: 404 Client Error: Not Found for url: https://huggingface.co/0/resolve/main/config.json - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1221, in hf_hub_download - return _hf_hub_download_to_cache_dir( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1325, in _hf_hub_download_to_cache_dir - 
_raise_on_head_call_error(head_call_error, force_download, local_files_only) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1823, in _raise_on_head_call_error - raise head_call_error - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1722, in _get_metadata_or_catch_error - metadata = get_hf_file_metadata(url=url, proxies=proxies, timeout=etag_timeout, headers=headers) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1645, in get_hf_file_metadata - r = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 372, in _request_wrapper - response = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 396, in _request_wrapper - hf_raise_for_status(response) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status - raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-669494ef-46683afd36305f4903f44e62;7a692a93-b6f4-44ee-80ba-4e03cfde403c) - -Repository Not Found for url: https://huggingface.co/0/resolve/main/config.json. -Please make sure you specified the correct `repo_id` and `repo_type`. -If you are trying to access a private or gated repo, make sure you are authenticated. - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 425, in cached_file - raise EnvironmentError( -OSError: 0 is not a local folder and is not a valid model identifier listed on 'https://huggingface.co/models' -If this is a private repository, make sure to pass a token having permission to this repo either by logging in with `huggingface-cli login` or by passing `token=` - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,1,64,1,,, 
-4bit-awq-exllama-v1-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,facebook/xglm-4.5B,facebook/xglm-4.5B,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - self.load_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3704, in from_pretrained - config = cls._autoset_attn_implementation( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1490, in _autoset_attn_implementation - config = cls._check_and_enable_sdpa( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1656, in _check_and_enable_sdpa - raise ValueError( -ValueError: XGLMForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. 
Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,1,64,1,,, -4bit-awq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,TencentARC/Mistral_Pro_8B_v0.1,TencentARC/Mistral_Pro_8B_v0.1,cuda,0,42,,,True,,,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,,mistral,MB,5167.22688,6469.189632,0.0,5832.179712,5645.103616,s,1,12.206376953125,12.206376953125,0.0,12.206376953125,12.206376953125,12.206376953125,12.206376953125,[12.206376953125],,kWh,6.35285586493031e-05,3.479997887660139e-05,0.00011674231561600856,0.00021507085314191306,,MB,1788.575744,6527.909888,0.0,5874.122752,5159.561216,s,10,16.51814111328125,1.6518141113281248,9.873542810919853e-05,1.6518197631835938,1.6519470336914062,1.6519641174316406,1.6519777844238281,"[1.65170263671875, 1.65183349609375, 1.6518382568359375, 1.651981201171875, 1.6516925048828126, 1.6517708740234376, 1.6518060302734374, 1.651683349609375, 1.6519432373046874, 1.6518895263671876]",tokens/s,154.98111939131317,kWh,1.951330738770783e-05,1.0693061082604504e-05,0.00011504036980999888,0.0001452467382803112,tokens/kWh,1762518.064302046,MB,1794.961408,6548.881408,0.0,5892.99712,5159.563776,s,10,22.994319335937497,2.29943193359375,0.010221212599448972,2.3010397949218753,2.311905590820312,2.3136890014648435,2.3151157299804686,"[2.31150927734375, 2.315472412109375, 2.292669189453125, 2.304275390625, 2.28448974609375, 2.3017119140625, 2.30738134765625, 2.30036767578125, 2.284447509765625, 2.291994873046875]",tokens/s,27.39807127125445,kWh,2.8236872716042416e-05,1.547629983717476e-05,8.075556460440114e-05,0.00012446873715761834,tokens/kWh,506151.19457845297,,s,630,22.99186073684696,0.03649501704261417,0.0005944324340987428,0.03626137542724609,0.03727052688598633,0.037720371437072756,0.03881237518310547,"[0.037716991424560545, 0.037362686157226564, 0.03640627288818359, 0.038886398315429685, 0.038796287536621094, 0.03682304000854492, 0.03682611083984375, 0.03670732879638672, 0.036511745452880856, 0.036370433807373044, 0.036752384185791014, 0.03695513534545898, 0.03701657485961914, 0.036391937255859375, 0.03654655838012695, 0.036550655364990234, 0.03625471878051758, 0.036377601623535157, 0.03705855941772461, 0.03703807830810547, 0.03701862335205078, 0.03588198471069336, 0.036907009124755856, 0.03622195053100586, 0.036275199890136715, 0.03633663940429688, 0.036296703338623046, 0.0361973762512207, 0.03668787384033203, 0.03634483337402344, 0.03642367935180664, 0.036468734741210936, 0.03626598358154297, 0.03659571075439453, 0.03689574432373047, 0.03628134536743164, 0.03645132827758789, 0.036523006439208985, 0.03619635009765625, 0.03633049774169922, 0.03655168151855469, 0.03633049774169922, 0.03740467071533203, 0.03628543853759766, 0.0365926399230957, 0.03657625579833984, 0.03619942474365234, 0.03706163024902344, 0.036686847686767575, 0.036239360809326174, 0.03592601776123047, 0.03711180877685547, 0.037098495483398435, 0.0373043212890625, 
0.03610214233398437, 0.03710566329956055, 0.03732582473754883, 0.03679334259033203, 0.03617996978759765, 0.03645951843261719, 0.03645747375488281, 0.036569087982177735, 0.036985855102539066, 0.03713945770263672, 0.037220352172851565, 0.03741388702392578, 0.03664486312866211, 0.03772313690185547, 0.0370513916015625, 0.03617996978759765, 0.03671244812011719, 0.03615948867797852, 0.03624959945678711, 0.03622092819213867, 0.03623628616333008, 0.03598745727539063, 0.0358656005859375, 0.03625574493408203, 0.036280319213867186, 0.03618611145019531, 0.03600076675415039, 0.03610214233398437, 0.037294078826904296, 0.03808358383178711, 0.038491134643554685, 0.03765248107910156, 0.03667865753173828, 0.03701964950561523, 0.03618611145019531, 0.03724697494506836, 0.03700428771972656, 0.03633152008056641, 0.0370513916015625, 0.036792320251464845, 0.03604991912841797, 0.03686195373535156, 0.03686809539794922, 0.036841472625732424, 0.03650559997558594, 0.036275199890136715, 0.036915199279785156, 0.03682099151611328, 0.0368721923828125, 0.03672063827514648, 0.03699609756469727, 0.036139007568359374, 0.036765697479248044, 0.036383743286132815, 0.03651891326904297, 0.03705036926269531, 0.03648102569580078, 0.03685068893432617, 0.03689471817016601, 0.03618815994262695, 0.036659198760986327, 0.03662847900390625, 0.03687014389038086, 0.038830078125, 0.03778559875488281, 0.03834163284301758, 0.03668172836303711, 0.036208641052246096, 0.03614310455322266, 0.036219905853271485, 0.035963905334472655, 0.03646464157104492, 0.03718656158447266, 0.036395008087158204, 0.03671756744384766, 0.036762622833251955, 0.03620761489868164, 0.03619635009765625, 0.036348926544189454, 0.03659366226196289, 0.036831230163574216, 0.036241409301757815, 0.036490238189697266, 0.03656499099731445, 0.03609907150268555, 0.03604172897338867, 0.03618201446533203, 0.03614003372192383, 0.03632230377197266, 0.036517887115478515, 0.03620761489868164, 0.036327423095703124, 0.036201473236083984, 0.03649740982055664, 0.03664179229736328, 0.03614003372192383, 0.03612364959716797, 0.03639910507202149, 0.03745075225830078, 0.03618406295776367, 0.03602329635620117, 0.03591987228393555, 0.03662745666503906, 0.036431873321533206, 0.03606220626831055, 0.036890625, 0.036139007568359374, 0.036397056579589845, 0.036055038452148434, 0.03605299377441406, 0.036173824310302735, 0.036089855194091795, 0.036034561157226565, 0.036086784362792966, 0.03601408004760742, 0.03603046417236328, 0.036397056579589845, 0.036550655364990234, 0.036514816284179685, 0.03770470428466797, 0.03609292984008789, 0.036067329406738284, 0.036119552612304685, 0.035697662353515625, 0.03633561706542969, 0.03629056167602539, 0.036141056060791016, 0.03609088134765625, 0.03612160110473633, 0.0364400634765625, 0.037126174926757814, 0.03635811233520508, 0.036162559509277346, 0.03680665588378906, 0.03818700790405274, 0.036429824829101565, 0.03635507202148437, 0.03619942474365234, 0.036435966491699216, 0.036318206787109376, 0.03625062561035156, 0.036229118347167966, 0.03632230377197266, 0.03635609436035156, 0.03633663940429688, 0.03638784027099609, 0.03625062561035156, 0.03626803207397461, 0.036261886596679685, 0.03621376037597656, 0.03634483337402344, 0.0362874870300293, 0.036222976684570314, 0.03638681411743164, 0.036195327758789066, 0.036222976684570314, 0.03616153717041016, 0.03623628616333008, 0.036168704986572264, 0.03615948867797852, 0.03617792129516602, 0.03622707366943359, 0.036139007568359374, 0.03621376037597656, 0.03614720153808594, 0.03619839859008789, 0.03589427185058594, 
0.03610726547241211, 0.03618611145019531, 0.03618099212646484, 0.03623526382446289, 0.036241409301757815, 0.03618201446533203, 0.036171775817871094, 0.03623526382446289, 0.03623219299316406, 0.03618099212646484, 0.03604172897338867, 0.036296703338623046, 0.03630899047851562, 0.03632128143310547, 0.03628339385986328, 0.03629568099975586, 0.03632128143310547, 0.0372490234375, 0.03799039840698242, 0.03751731109619141, 0.037776382446289065, 0.03769343948364258, 0.037804031372070314, 0.03760435104370117, 0.03769343948364258, 0.03784806442260742, 0.03797196960449219, 0.038296577453613284, 0.037804031372070314, 0.03774156951904297, 0.03726028823852539, 0.03704115295410156, 0.0362608642578125, 0.03629260635375976, 0.038184959411621096, 0.03706265640258789, 0.03624448013305664, 0.03605299377441406, 0.035958782196044925, 0.03595468902587891, 0.03610214233398437, 0.036596736907958984, 0.03618201446533203, 0.03599769592285156, 0.03597926330566406, 0.03608575820922852, 0.03599769592285156, 0.03597824096679687, 0.035963905334472655, 0.03614310455322266, 0.036067329406738284, 0.036021247863769534, 0.035999744415283204, 0.036013057708740234, 0.03610419082641601, 0.036087806701660154, 0.036057086944580076, 0.03687833786010742, 0.036239360809326174, 0.03597312164306641, 0.035953662872314454, 0.03623628616333008, 0.036934654235839845, 0.036083713531494144, 0.03629056167602539, 0.03628543853759766, 0.03609292984008789, 0.03607551956176758, 0.036055038452148434, 0.036087806701660154, 0.03602227020263672, 0.036001792907714845, 0.03608473587036133, 0.03596495819091797, 0.03605193710327149, 0.03609702301025391, 0.036170753479003906, 0.03626291275024414, 0.03622195053100586, 0.03628646469116211, 0.036209663391113284, 0.03615027236938476, 0.03607449722290039, 0.03614822387695313, 0.036154369354248046, 0.03648102569580078, 0.03614822387695313, 0.03665100860595703, 0.03728486251831055, 0.03629875183105469, 0.03630080032348633, 0.03680460739135742, 0.036631553649902344, 0.036162559509277346, 0.037988353729248046, 0.03646156692504883, 0.036354049682617184, 0.036203521728515625, 0.0362342414855957, 0.03640934371948242, 0.036395008087158204, 0.03662745666503906, 0.03642060852050781, 0.036274177551269535, 0.037108734130859376, 0.037566463470458986, 0.04027084732055664, 0.037465087890625, 0.036307968139648435, 0.03626393508911133, 0.03626291275024414, 0.03609702301025391, 0.03642265701293945, 0.037359615325927735, 0.03640729522705078, 0.03608473587036133, 0.035817470550537106, 0.03627110290527344, 0.03621683120727539, 0.03605196762084961, 0.03618918228149414, 0.036503551483154296, 0.03695718383789062, 0.0362239990234375, 0.03615027236938476, 0.03599769592285156, 0.036187137603759766, 0.036034561157226565, 0.036117504119873044, 0.036283424377441406, 0.03617379379272461, 0.03609292984008789, 0.036106239318847655, 0.03633152008056641, 0.03614617538452149, 0.036122623443603515, 0.036034561157226565, 0.036141056060791016, 0.0361082878112793, 0.03612057495117187, 0.03749273681640625, 0.04001279830932617, 0.03799039840698242, 0.0379791374206543, 0.0371701774597168, 0.03614822387695313, 0.03612160110473633, 0.03609804916381836, 0.03609292984008789, 0.03616153717041016, 0.036086784362792966, 0.03607756805419922, 0.036086784362792966, 0.03612979125976563, 0.03609088134765625, 0.03632025527954102, 0.03607654571533203, 0.03687628936767578, 0.036549633026123046, 0.036242431640625, 0.03611545562744141, 0.03607961654663086, 0.03617792129516602, 0.03609804916381836, 0.03621887969970703, 0.03649433517456055, 0.036429824829101565, 
0.03620761489868164, 0.03608063888549805, 0.03607961654663086, 0.03615846252441406, 0.03602841567993164, 0.036206592559814454, 0.03613798522949219, 0.03614310455322266, 0.03631206512451172, 0.03597721481323242, 0.03638579177856445, 0.03681382369995117, 0.03624959945678711, 0.03626496124267578, 0.036296703338623046, 0.036139007568359374, 0.03714252853393555, 0.037901313781738284, 0.03881267166137695, 0.03758492660522461, 0.03682096099853516, 0.03721420669555664, 0.0378152961730957, 0.03718348693847656, 0.036245502471923825, 0.03670016098022461, 0.03727052688598633, 0.03643801498413086, 0.03671244812011719, 0.036327423095703124, 0.03620249557495117, 0.03631718444824219, 0.036327423095703124, 0.0362977294921875, 0.03657113647460938, 0.03672678375244141, 0.03727052688598633, 0.03653529739379883, 0.03651071929931641, 0.03616665649414062, 0.03615641784667969, 0.03654143905639649, 0.035983360290527344, 0.03611238479614258, 0.03749990463256836, 0.03701145553588867, 0.03701964950561523, 0.03660083389282227, 0.03612979125976563, 0.03616972732543945, 0.03768832015991211, 0.038811649322509766, 0.03748863983154297, 0.03709235382080078, 0.03623116683959961, 0.036160511016845705, 0.0364031982421875, 0.03621683120727539, 0.03621068954467774, 0.03616665649414062, 0.036566017150878906, 0.036157440185546875, 0.038583297729492184, 0.03654659271240234, 0.036236255645751954, 0.03606630325317383, 0.036195327758789066, 0.03610419082641601, 0.03611238479614258, 0.03615027236938476, 0.03604787063598633, 0.03608063888549805, 0.03600588989257812, 0.03600691223144531, 0.036141056060791016, 0.03621683120727539, 0.036162559509277346, 0.03625574493408203, 0.03619430541992188, 0.03617484664916992, 0.0360447998046875, 0.035710975646972655, 0.03611340713500977, 0.03659161758422851, 0.037356544494628906, 0.036822017669677735, 0.0390010871887207, 0.0374917106628418, 0.03632844924926758, 0.036923393249511716, 0.03654143905639649, 0.03648921585083008, 0.03650969696044922, 0.03647180938720703, 0.0360447998046875, 0.03662233734130859, 0.03706982421875, 0.036752384185791014, 0.03652505493164063, 0.03624038314819336, 0.03645439910888672, 0.03625881576538086, 0.03687116622924805, 0.036318206787109376, 0.0361082878112793, 0.03587071990966797, 0.03646464157104492, 0.03918643188476562, 0.03640422439575195, 0.036193279266357424, 0.03709542465209961, 0.03760025787353516, 0.03660595321655274, 0.03624959945678711, 0.036187137603759766, 0.036132865905761716, 0.036980735778808595, 0.03632128143310547, 0.03566592025756836, 0.036127742767333985, 0.036211711883544925, 0.03622604751586914, 0.03616563034057617, 0.03697049713134765, 0.03680460739135742, 0.03629875183105469, 0.036245502471923825, 0.03630899047851562, 0.036038654327392575, 0.03610521697998047, 0.0367646713256836, 0.03746099090576172, 0.03625267028808594, 0.03625062561035156, 0.036144126892089845, 0.03618304061889648, 0.03607244873046875, 0.03616460800170898, 0.03608575820922852, 0.03695820617675781, 0.036278270721435545, 0.036122623443603515, 0.03617279815673828, 0.03609600067138672, 0.036795391082763675, 0.036116481781005856, 0.03605196762084961, 0.036132865905761716, 0.03621683120727539, 0.036241409301757815, 0.036160511016845705, 0.036212734222412106, 0.036171775817871094, 0.03611852645874023, 0.03614310455322266, 0.03636019134521484, 0.036206592559814454, 0.036094974517822266, 0.035958782196044925, 0.03604991912841797, 0.03606118392944336, 0.03605606460571289, 0.03618201446533203, 0.036340736389160154, 0.036370433807373044, 0.036274177551269535, 0.03637145614624023, 
0.036209663391113284, 0.036162559509277346, 0.03608575820922852, 0.03615846252441406, 0.03613491058349609, 0.03613695907592773, 0.036257793426513675, 0.036070400238037106, 0.036139007568359374, 0.03632537460327148, 0.03628646469116211, 0.03609088134765625, 0.03724492645263672, 0.03649638366699219, 0.036094974517822266, 0.03609395217895508, 0.0361082878112793, 0.036176895141601564, 0.036211711883544925, 0.03627008056640625, 0.0358021125793457, 0.0361338882446289, 0.038059009552001956, 0.03768524932861328, 0.03729715347290039, 0.036395008087158204, 0.03616767883300781, 0.03620044708251953, 0.036057086944580076, 0.036144126892089845, 0.036229118347167966, 0.036122623443603515, 0.03609190368652344, 0.03610726547241211, 0.0360816650390625, 0.036446208953857424, 0.036275199890136715, 0.03626291275024414, 0.036170753479003906, 0.03618406295776367, 0.036135936737060545, 0.03622092819213867, 0.03618201446533203, 0.03612979125976563, 0.03604787063598633, 0.036209663391113284, 0.03620556640625, 0.03621478271484375, 0.03620556640625, 0.03615641784667969, 0.03612979125976563, 0.03625471878051758, 0.036326400756835936, 0.03634175872802734, 0.03696025466918945, 0.03639910507202149, 0.03821977615356445, 0.037222400665283206, 0.03686195373535156, 0.03651379013061523, 0.03624857711791992, 0.03626496124267578, 0.036141056060791016, 0.036563968658447264, 0.03731763076782227, 0.03646156692504883, 0.036125694274902344, 0.0361082878112793, 0.03608063888549805, 0.036162559509277346, 0.03614310455322266, 0.03621683120727539, 0.03616358566284179, 0.0361267204284668, 0.03609088134765625]",tokens/s,27.401001041658077,,,1,64,1,,, -4bit-awq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,Salesforce/codegen-6B-nl,Salesforce/codegen-6B-nl,cuda,0,42,,,True,,,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run - report = scenario.run(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run - self.run_model_loading_tracking(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking - backend.load() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load - self.load_transformers_model() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model - 
self.load_transformers_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights - self.load_transformers_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained - self.pretrained_model = self.automodel_loader.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3826, in from_pretrained - config = cls._autoset_attn_implementation( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1565, in _autoset_attn_implementation - config = cls._check_and_enable_sdpa( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1731, in _check_and_enable_sdpa - raise ValueError( -ValueError: CodeGenForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` - -",codegen,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,1,64,1,,, -4bit-awq-exllama-v1-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,facebook/opt-125m,facebook/opt-125m,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - self.load_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - 
File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3704, in from_pretrained - config = cls._autoset_attn_implementation( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1490, in _autoset_attn_implementation - config = cls._check_and_enable_sdpa( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1656, in _check_and_enable_sdpa - raise ValueError( -ValueError: OPTForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,1,64,1,,, -4bit-awq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,Qwen/Qwen1.5-110B,Qwen/Qwen1.5-110B,cuda,0,42,,,True,,,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run - report = scenario.run(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run - self.run_model_loading_tracking(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking - backend.load() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load - self.load_transformers_model() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model - self.load_transformers_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights - self.load_transformers_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained - self.pretrained_model = self.automodel_loader.from_pretrained( - 
File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4034, in from_pretrained - dispatch_model(model, **device_map_kwargs) - File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 494, in dispatch_model - model.to(device) - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 2905, in to - return super().to(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1174, in to - return self._apply(convert) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply - module._apply(fn) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply - module._apply(fn) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply - module._apply(fn) - [Previous line repeated 2 more times] - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 854, in _apply - self._buffers[key] = fn(buf) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1160, in convert - return t.to( -torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 32.00 MiB. GPU 0 has a total capacity of 22.18 GiB of which 8.50 MiB is free. Process 88518 has 22.17 GiB memory in use. Of the allocated memory 21.91 GiB is allocated by PyTorch, and 17.87 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) - -",qwen2,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,1,64,1,,, -4bit-awq-exllama-v1-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,B,B,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 304, in hf_raise_for_status - response.raise_for_status() - File ""/usr/local/lib/python3.10/dist-packages/requests/models.py"", line 1024, in raise_for_status - raise HTTPError(http_error_msg, response=self) -requests.exceptions.HTTPError: 404 Client Error: Not Found for url: 
https://huggingface.co/B/resolve/main/config.json - -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1221, in hf_hub_download - return _hf_hub_download_to_cache_dir( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1325, in _hf_hub_download_to_cache_dir - _raise_on_head_call_error(head_call_error, force_download, local_files_only) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1823, in _raise_on_head_call_error - raise head_call_error - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1722, in _get_metadata_or_catch_error - metadata = get_hf_file_metadata(url=url, proxies=proxies, timeout=etag_timeout, headers=headers) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1645, in get_hf_file_metadata - r = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 372, in _request_wrapper - response = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 396, in _request_wrapper - hf_raise_for_status(response) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status - raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-669493e1-057546d81e5e67da3c9b6320;98b57586-874e-4539-8e6a-aa20ea0103aa) - -Repository Not Found for url: https://huggingface.co/B/resolve/main/config.json. -Please make sure you specified the correct `repo_id` and `repo_type`. -If you are trying to access a private or gated repo, make sure you are authenticated. 
- -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 425, in cached_file - raise EnvironmentError( -OSError: B is not a local folder and is not a valid model identifier listed on 'https://huggingface.co/models' -If this is a private repository, make sure to pass a token having permission to this repo either by logging in with `huggingface-cli login` or by passing `token=` - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,1,64,1,,, -4bit-awq-exllama-v1-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,t,t,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 304, in hf_raise_for_status - response.raise_for_status() - File ""/usr/local/lib/python3.10/dist-packages/requests/models.py"", line 1024, in raise_for_status - raise HTTPError(http_error_msg, response=self) -requests.exceptions.HTTPError: 404 Client Error: Not Found for url: https://huggingface.co/t/resolve/main/config.json - -The above exception was the 
direct cause of the following exception: - -Traceback (most recent call last): - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 402, in cached_file - resolved_file = hf_hub_download( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1221, in hf_hub_download - return _hf_hub_download_to_cache_dir( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1325, in _hf_hub_download_to_cache_dir - _raise_on_head_call_error(head_call_error, force_download, local_files_only) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1823, in _raise_on_head_call_error - raise head_call_error - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1722, in _get_metadata_or_catch_error - metadata = get_hf_file_metadata(url=url, proxies=proxies, timeout=etag_timeout, headers=headers) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_validators.py"", line 114, in _inner_fn - return fn(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 1645, in get_hf_file_metadata - r = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 372, in _request_wrapper - response = _request_wrapper( - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/file_download.py"", line 396, in _request_wrapper - hf_raise_for_status(response) - File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status - raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-669490cd-5670af7d50823e5925c31534;4524844e-3120-4598-a881-3652146e5428) - -Repository Not Found for url: https://huggingface.co/t/resolve/main/config.json. -Please make sure you specified the correct `repo_id` and `repo_type`. -If you are trying to access a private or gated repo, make sure you are authenticated. 
- -The above exception was the direct cause of the following exception: - -Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 40, in __init__ - super().__init__(config) - File ""/workspace/optimum_benchmark/backends/base.py"", line 65, in __init__ - self.pretrained_config = get_transformers_pretrained_config(self.config.model, **self.config.model_kwargs) - File ""/workspace/optimum_benchmark/backends/transformers_utils.py"", line 22, in get_transformers_pretrained_config - return AutoConfig.from_pretrained(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/configuration_auto.py"", line 965, in from_pretrained - config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 632, in get_config_dict - config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/configuration_utils.py"", line 689, in _get_config_dict - resolved_config_file = cached_file( - File ""/usr/local/lib/python3.10/dist-packages/transformers/utils/hub.py"", line 425, in cached_file - raise EnvironmentError( -OSError: t is not a local folder and is not a valid model identifier listed on 'https://huggingface.co/models' -If this is a private repository, make sure to pass a token having permission to this repo either by logging in with `huggingface-cli login` or by passing `token=` - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,1,64,1,,, -4bit-awq-exllama-v1-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,stabilityai/stablelm-base-alpha-3b,stabilityai/stablelm-base-alpha-3b,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File 
""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - self.load_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3907, in from_pretrained - hf_quantizer.postprocess_model(model) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/base.py"", line 195, in postprocess_model - return self._process_model_after_weight_loading(model, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/quantizer_awq.py"", line 107, in _process_model_after_weight_loading - model = post_init_awq_exllama_modules(model, self.quantization_config.exllama_config) - File ""/usr/local/lib/python3.10/dist-packages/transformers/integrations/awq.py"", line 462, in post_init_awq_exllama_modules - model = exllama_post_init(model) - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllama.py"", line 144, in exllama_post_init - submodule.post_init() - File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllama.py"", line 77, in post_init - self.q4 = exl_ext.make_q4( -NameError: name 'exl_ext' is not defined - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,1,64,1,,, -4bit-awq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,Salesforce/codegen-16B-nl,Salesforce/codegen-16B-nl,cuda,0,42,,,True,,,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.261056,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run - report = scenario.run(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run - self.run_model_loading_tracking(backend) - File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking - backend.load() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load - self.load_transformers_model() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model - 
self.load_transformers_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights - self.load_transformers_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained - self.pretrained_model = self.automodel_loader.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3826, in from_pretrained - config = cls._autoset_attn_implementation( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1565, in _autoset_attn_implementation - config = cls._check_and_enable_sdpa( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1731, in _check_and_enable_sdpa - raise ValueError( -ValueError: CodeGenForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` - -",codegen,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,1,64,1,,, -4bit-awq-exllama-v1-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,facebook/opt-6.7b,facebook/opt-6.7b,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.293824,Linux,x86_64,Linux-5.10.219-208.866.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.3.1,,4.42.4,,0.32.1,,,,1.21.2,,,,0.11.1,,"Traceback (most recent call last): - File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch - benchmark_report = Benchmark.launch(benchmark_config) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch - report = launcher.launch(worker=cls.run, worker_args=[config]) - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch - raise ChildProcessError(response[""traceback""]) -ChildProcessError: Traceback (most recent call last): - File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 102, in target - report = worker(*worker_args) - File ""/workspace/optimum_benchmark/benchmark/base.py"", line 60, in run - backend: Backend = backend_factory(backend_config) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 82, in __init__ - self.load_model_with_no_weights() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 258, in load_model_with_no_weights - self.load_model_from_pretrained() - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 172, in load_model_from_pretrained - self.pretrained_model = self.automodel_class.from_pretrained( - 
File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained - return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3704, in from_pretrained - config = cls._autoset_attn_implementation( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1490, in _autoset_attn_implementation - config = cls._check_and_enable_sdpa( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1656, in _check_and_enable_sdpa - raise ValueError( -ValueError: OPTForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` - -",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,1,64,1,,, +version https://git-lfs.github.com/spec/v1 +oid sha256:985b467c7a02114eb0ce9ba23a1f0c197aa9e07cf082a7c17abfa027d2ce0816 +size 10685439